Commit 78b70c80 authored by Mele's avatar Mele
Browse files

Merge branch 'main' into 21meaa

parents 04bb5e71 e3020dfd
Showing with 570 additions and 9 deletions
+570 -9
<<<<<<< HEAD
=======
<<<<<<< HEAD
<<<<<<< HEAD
>>>>>>> origin/mqtt
############# #############
# python ignores: # python ignores:
############# #############
...@@ -203,3 +208,222 @@ pnpm-debug.log* ...@@ -203,3 +208,222 @@ pnpm-debug.log*
*.njsproj *.njsproj
*.sln *.sln
*.sw? *.sw?
<<<<<<< HEAD
=======
=======
# Ignoriere die virtuelle Umgebung
.venv/
venv/
=======
#############
# python ignores:
#############
>>>>>>> 5c73cc5 (Es tut endlich mit JSON, aber muss noch weiter angepasst werden)
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
<<<<<<< HEAD
*.bak
>>>>>>> d319744 (Initial Commit)
=======
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
#############
# vue ignores:
#############
.DS_Store
node_modules
/dist
# local env files
.env.local
.env.*.local
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
>>>>>>> 5c73cc5 (Es tut endlich mit JSON, aber muss noch weiter angepasst werden)
>>>>>>> origin/mqtt
.venv
__pycache__
*.pyc
\ No newline at end of file
# Use the latest Python image
FROM python:latest
# Set the working directory inside the container
WORKDIR /app
# Copy the pyproject.toml and uv.lock files
COPY pyproject.toml .
COPY uv.lock .
# Also copy requirements.txt, if present
COPY requirements.txt .
# Install uv and the dependencies
RUN pip install --upgrade pip && \
pip install uv && \
pip install --no-cache-dir -r requirements.txt
# Copy the rest of the project into the container
COPY . .
# Set environment variables, if needed
ENV PYTHONUNBUFFERED 1
# Expose port 8000 for the server
EXPOSE 8000
# Start command: run the server with uv
CMD ["uv", "run", "python", "manage.py", "runserver", "0.0.0.0:8000"]
...@@ -32,7 +32,7 @@ def login_view(request): ...@@ -32,7 +32,7 @@ def login_view(request):
user = authenticate(request, username=email, password=password) user = authenticate(request, username=email, password=password)
if user: if user:
login(request, user) login(request, user) # also creates a session in the browser
return JsonResponse({"success": True}) return JsonResponse({"success": True})
return JsonResponse( return JsonResponse(
{"success": False, "message": "Invalid credentials"}, status=401 {"success": False, "message": "Invalid credentials"}, status=401
...@@ -40,7 +40,7 @@ def login_view(request): ...@@ -40,7 +40,7 @@ def login_view(request):
def logout_view(request): def logout_view(request):
logout(request) logout(request)
return JsonResponse({"message": "Logged out"}) return JsonResponse({"message": "Logged out"})
......
...@@ -20,5 +20,5 @@ from django.urls import path, include ...@@ -20,5 +20,5 @@ from django.urls import path, include
urlpatterns = [ urlpatterns = [
path("admin/", admin.site.urls), path("admin/", admin.site.urls),
path("", include("sim.urls")), path("", include("app.urls")),
] ]
services:
mqtt-backend:
image: mqtt-influx-backend
container_name: mqtt-backend
build: ./mqtt
command: uv run -m mqtt_influx_backend.main
env_file:
- ../mqtt_to_influxdb/.env
restart: unless-stopped
#depends_on:
# - influxdb
# - mosquitto
backend:
build: ./backend
ports:
- "8000:8000"
volumes:
- ./backend:/app
frontend:
build: ./frontend
ports:
- "5173:5173"
volumes:
- ./frontend:/app
- /app/node_modules
stdin_open: true
tty: true
node_modules
npm-debug.log
\ No newline at end of file
# frontend/Dockerfile
FROM node:20
WORKDIR /app
# Only local dependencies, no global Vite install!
COPY package*.json ./
RUN npm install
COPY . .
EXPOSE 5173
CMD ["npm", "run", "dev"]
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
}, },
"devDependencies": { "devDependencies": {
"@vitejs/plugin-vue": "^5.2.1", "@vitejs/plugin-vue": "^5.2.1",
"vite": "^6.2.0" "vite": "^6.3.2"
} }
}, },
"node_modules/@babel/helper-string-parser": { "node_modules/@babel/helper-string-parser": {
...@@ -958,6 +958,21 @@ ...@@ -958,6 +958,21 @@
"integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/fsevents": { "node_modules/fsevents": {
"version": "2.3.3", "version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
...@@ -1006,6 +1021,19 @@ ...@@ -1006,6 +1021,19 @@
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"license": "ISC" "license": "ISC"
}, },
"node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/postcss": { "node_modules/postcss": {
"version": "8.5.3", "version": "8.5.3",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
...@@ -1083,16 +1111,36 @@ ...@@ -1083,16 +1111,36 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/tinyglobby": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
"integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.4.4",
"picomatch": "^4.0.2"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/vite": { "node_modules/vite": {
"version": "6.2.6", "version": "6.3.2",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.2.6.tgz", "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.2.tgz",
"integrity": "sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==", "integrity": "sha512-ZSvGOXKGceizRQIZSz7TGJ0pS3QLlVY/9hwxVh17W3re67je1RKYzFHivZ/t0tubU78Vkyb9WnHPENSBCzbckg==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"esbuild": "^0.25.0", "esbuild": "^0.25.0",
"fdir": "^6.4.3",
"picomatch": "^4.0.2",
"postcss": "^8.5.3", "postcss": "^8.5.3",
"rollup": "^4.30.1" "rollup": "^4.34.9",
"tinyglobby": "^0.2.12"
}, },
"bin": { "bin": {
"vite": "bin/vite.js" "vite": "bin/vite.js"
......
...@@ -13,6 +13,6 @@ ...@@ -13,6 +13,6 @@
}, },
"devDependencies": { "devDependencies": {
"@vitejs/plugin-vue": "^5.2.1", "@vitejs/plugin-vue": "^5.2.1",
"vite": "^6.2.0" "vite": "^6.3.2"
} }
} }
FROM python:3.12-slim
# Working directory inside the container
WORKDIR /app
# Copy project files
COPY . .
# Install uv
RUN pip install uv
# Install dependencies from pyproject.toml
RUN uv pip install . --no-cache-dir --system
# Start command
CMD ["uv", "run", "-m", "mqtt_influx_backend.main"]
mosquitto_pub -h 172.20.10.12 -t co2/esp32 -m '{
"metadata": {
"timestamp": "2025-04-12T14:22:35Z",
"mac": "AA:BB:CC:DD:EE:FF",
"room": "A123"
},
"co2": 615.3,
"temperature": 21.8,
"humidity": 45.2
}'
[project]
name = "mqtt-influx-backend"
version = "0.1.0"
description = "Backend to write MQTT sensor data to InfluxDB"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"paho-mqtt",
"influxdb-client",
"python-dotenv",
]
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
package-dir = {"" = "src"}
[tool.setuptools.packages.find]
where = ["src"]
from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS
class InfluxDBWriter:
    """Thin wrapper around influxdb_client for writing single data points.

    Uses the synchronous write API, so every ``write_point`` call blocks
    until InfluxDB has accepted the point.
    """

    def __init__(self, url: str, token: str, org: str, bucket: str):
        """Open a client connection and prepare a synchronous write API."""
        self.bucket = bucket
        self.org = org
        self.client = InfluxDBClient(url=url, token=token, org=org)
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)

    def write_point(self, measurement: str, tags: dict, fields: dict, timestamp=None):
        """Build a Point from *tags* and *fields* and write it to the bucket.

        When *timestamp* is given it is attached with nanosecond precision;
        otherwise InfluxDB assigns the server-side receive time.
        """
        point = Point(measurement)
        for tag_key, tag_value in tags.items():
            point = point.tag(tag_key, tag_value)
        for field_key, field_value in fields.items():
            point = point.field(field_key, field_value)
        if timestamp:
            point = point.time(timestamp, WritePrecision.NS)
        self.write_api.write(bucket=self.bucket, org=self.org, record=point)
import json
import os
def load_json(file_name: str) -> dict:
    """Load a JSON file and return its contents as a dict (key: value).

    Returns an empty dict when the file does not exist.  Uses EAFP
    (try/except FileNotFoundError) instead of an exists()-then-open
    check, which avoids the race between the check and the open.
    """
    try:
        with open(file_name) as f:
            return json.load(f)
    except FileNotFoundError:
        return {}
def write_json(mac_room_mapping: dict, file_name: str):
    """Write the contents of a dict to a JSON file, pretty-printed.

    Opening with mode "w" already truncates the file, so the previous
    explicit seek(0)/truncate() calls were redundant and are removed.
    """
    with open(file_name, "w") as f:
        json.dump(mac_room_mapping, f, indent=4)
import logging
import os
from logging.handlers import RotatingFileHandler
# Rotating-file log destination shared by every logger from this factory.
LOG_DIR = "logs"
LOG_FILE = "app.log"
LOG_PATH = os.path.join(LOG_DIR, LOG_FILE)


class LoggerFactory:
    """Creates loggers that write to a shared rotating log file.

    Usage examples:
        logger.info("Connected with result code %s", str(rc))
        logger.warning("New MAC address found: %s", mac)
        logger.error("Failed writing to InfluxDb: %s", e)
    """

    @staticmethod
    def get_logger(name: str, level=logging.DEBUG) -> logging.Logger:
        """Return a logger named *name* with a rotating file handler.

        If the logger (or one of its ancestors) already has handlers it is
        returned unchanged, which prevents attaching duplicate handlers on
        repeated calls for the same name.
        """
        # exist_ok=True avoids the race of a separate exists() check.
        os.makedirs(LOG_DIR, exist_ok=True)
        logger = logging.getLogger(name)
        if logger.hasHandlers():
            return logger  # avoid duplicate handlers
        logger.setLevel(level)
        formatter = logging.Formatter(
            '[%(asctime)s] %(levelname)s in %(name)s: %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        # Rotate at ~5 MB, keeping up to 5 old log files.
        file_handler = RotatingFileHandler(LOG_PATH, maxBytes=5_000_000, backupCount=5)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
        return logger
import json
from src.mqtt_influx_backend.loggingFactory import LoggerFactory
from datetime import datetime
import paho.mqtt.client as mqtt
from src.mqtt_influx_backend import jsonhandler
from src.mqtt_influx_backend import influxDBWriter
class MQTTClientHandler:
    """Subscribes to an MQTT topic and forwards sensor readings to InfluxDB.

    Maintains a persistent MAC-address -> room mapping in a JSON file.
    Readings from unknown MACs are added to the mapping with an empty room
    placeholder and are NOT written to InfluxDB until a room is assigned.
    """

    MAPPING_FILE_NAME = "src/mqtt_influx_backend/mac_to_room.json"
    MEASUREMENT_NAME = "sensor_data"
    TAG_ROOM = "room"
    TAG_MAC = "mac"
    FIELD_CO2 = "co2"
    FIELD_TEMP = "temperature"
    FIELD_HUMIDITY = "humidity"

    def __init__(self, broker_url: str, topic: str, influx_writer: influxDBWriter):
        """Set up the MQTT client and load the MAC -> room mapping.

        broker_url:    hostname/IP of the MQTT broker.
        topic:         topic to subscribe to on connect.
        influx_writer: writer used to persist readings to InfluxDB.
        """
        self.logger = LoggerFactory.get_logger(__name__)
        # mapping: MAC address (key) -> room name (value)
        self.mac_to_room = jsonhandler.load_json(self.MAPPING_FILE_NAME)
        self.broker_url = broker_url
        self.topic = topic
        self.influx_writer = influx_writer
        self.client = mqtt.Client()
        # wire up the MQTT event callbacks
        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message

    def on_connect(self, client, userdata, flags, rc):
        """Subscribe to the configured topic once the broker connection is up."""
        self.logger.info("Connected with result code %s", rc)
        client.subscribe(self.topic)
        self.logger.info("Subscribed to %s", self.topic)

    def on_message(self, client, userdata, msg):
        """Handle one incoming MQTT message.

        Parses the JSON payload, ensures the sender's MAC address is in the
        room mapping, then writes the reading to InfluxDB.  Unknown MACs are
        recorded with an empty room placeholder and the reading is skipped.
        """
        payload = json.loads(msg.payload)
        metadata = payload["metadata"]
        # NOTE(review): the documented example payload uses the key "mac",
        # not "mac-address" — confirm against the actual sensor firmware.
        mac = metadata["mac-address"]
        if mac not in self.mac_to_room:
            self.logger.warning(
                "Neue MAC-Adresse gefunden: %s. Mapping wird ergänzt.", mac
            )
            self.mac_to_room[mac] = ""  # empty placeholder until a room is set
            jsonhandler.write_json(self.mac_to_room, self.MAPPING_FILE_NAME)
            # Skip the InfluxDB write until a room has been assigned manually.
            return
        self.write_to_influxDB(payload, metadata)

    def write_to_influxDB(self, msg: dict, metadate: dict):
        """Write one sensor reading to InfluxDB; failures are logged, not raised."""
        try:
            self.influx_writer.write_point(
                measurement=self.MEASUREMENT_NAME,
                tags={
                    self.TAG_ROOM: self.mac_to_room[metadate["mac-address"]],
                    self.TAG_MAC: metadate["mac-address"],
                },
                fields={
                    # NOTE(review): payload keys "temp"/"rh" differ from the
                    # example payload ("temperature"/"humidity") — confirm.
                    self.FIELD_CO2: msg["co2"],
                    self.FIELD_TEMP: msg["temp"],
                    self.FIELD_HUMIDITY: msg["rh"],
                },
                # NOTE(review): key "time" vs example key "timestamp" — confirm.
                # The value looks like an ISO-8601 string while the writer uses
                # nanosecond precision; verify the client accepts that.
                timestamp=metadate["time"],
            )
            self.logger.debug("Wrote to InfluxDB: %s", msg)
        except Exception as e:
            self.logger.error("Failed writing to InfluxDb: %s", e)

    def start(self):
        """Connect to the broker and block forever processing messages."""
        self.client.connect(self.broker_url)
        self.client.loop_forever()
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment