Compare commits
8 Commits
2308aa56a3
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| d7343c2e3c | |||
| f903b5508a | |||
| 4b86536e7e | |||
| 105cdc74c2 | |||
| 6d549ed69b | |||
| 1776e2962f | |||
| bd44740649 | |||
| 6d9d94f2fa |
@@ -7,3 +7,9 @@ mocks
|
|||||||
test
|
test
|
||||||
node_modules
|
node_modules
|
||||||
doc
|
doc
|
||||||
|
.env*
|
||||||
|
*.md
|
||||||
|
deploy.sh
|
||||||
|
generate-apikey.js
|
||||||
|
test_influx_sql.js
|
||||||
|
|
||||||
|
|||||||
629
API_DOCUMENTATION.md
Normal file
629
API_DOCUMENTATION.md
Normal file
@@ -0,0 +1,629 @@
|
|||||||
|
# SensorAPI - API Dokumentation
|
||||||
|
|
||||||
|
## Übersicht
|
||||||
|
|
||||||
|
Die SensorAPI ist eine REST-API zur Abfrage von Umweltsensordaten (Lärm, Feinstaub, Temperatur, Radioaktivität). Sie unterstützt sowohl InfluxDB 1.8 (InfluxQL) als auch InfluxDB 2.0 (Flux) sowie MongoDB für Metadaten.
|
||||||
|
|
||||||
|
**Version:** 1.4.1
|
||||||
|
**Basis-URL:** `http://<host>:<port>/api/`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Technologie-Stack
|
||||||
|
|
||||||
|
- **Backend:** Node.js mit Express.js
|
||||||
|
- **Datenbanken:**
|
||||||
|
- InfluxDB 1.8/2.0 (Zeitreihendaten)
|
||||||
|
- MongoDB (Sensor-Metadaten und -Eigenschaften)
|
||||||
|
- **Sprachen:** JavaScript (ES6 Module)
|
||||||
|
- **Key Dependencies:** axios, luxon, i18next, dotenv
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Unterstützte Sensortypen
|
||||||
|
|
||||||
|
| Typ | Beschreibung | Messgrößen |
|
||||||
|
|-----|--------------|------------|
|
||||||
|
| `noise` | Lärmsensoren | LA_max, LA_min, LA_eq, E10tel_eq |
|
||||||
|
| `pm` | Feinstaubsensoren | P1, P2, P0 |
|
||||||
|
| `thp` | Temperatur/Luftfeuchte/Druck | temperature, humidity, pressure |
|
||||||
|
| `radioactivity` | Radioaktivitätssensoren | counts_per_minute |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API Endpunkte
|
||||||
|
|
||||||
|
Alle Endpunkte folgen dem Schema: `GET /api/<command>?<parameter>`
|
||||||
|
|
||||||
|
### 1. **getsensordata**
|
||||||
|
|
||||||
|
Hauptendpunkt zum Abrufen von Sensordaten. Leitet je nach Sensortyp an spezialisierte Handler weiter.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getsensordata`
|
||||||
|
|
||||||
|
**Pflichtparameter:**
|
||||||
|
- `sensorid` (int) - Eindeutige Sensor-ID
|
||||||
|
|
||||||
|
**Optionale Parameter (abhängig vom Sensortyp):**
|
||||||
|
|
||||||
|
#### Für Lärmsensoren (`noise`):
|
||||||
|
- `data` (string, default: 'live') - Art der Daten
|
||||||
|
- `span` (int, default: 1) - Zeitspanne in Tagen
|
||||||
|
- `daystart` (bool, default: null) - Start um 00:00:00 Uhr
|
||||||
|
- `peak` (int, default: 70) - Schwellenwert für Lärmspitzen in dB
|
||||||
|
- `since` (date, default: '1900-01-01T00:00:00Z') - Startdatum
|
||||||
|
- `datetime` (date, default: null) - Spezifisches Datum/Zeit
|
||||||
|
- `long` (bool, default: false) - Erweiterte Ausgabe
|
||||||
|
- `sort` (int, default: 1) - Sortierung (1=aufsteigend, -1=absteigend)
|
||||||
|
- `csv` (bool, default: false) - CSV-Ausgabe
|
||||||
|
- `out` (string, default: '') - Ausgabeformat
|
||||||
|
|
||||||
|
#### Für Radioaktivitätssensoren (`radioactivity`):
|
||||||
|
- `what` (string, default: 'day') - Art der Aggregation
|
||||||
|
- `span` (int, default: 1) - Zeitspanne in Tagen
|
||||||
|
- `avg` (int, default: 1) - Mittelwert-Intervall
|
||||||
|
- `moving` (bool, default: false) - Gleitender Mittelwert
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getsensordata?sensorid=12345&span=7&peak=75
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": null,
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"_time": "2025-11-04T10:00:00.000Z",
|
||||||
|
"DNMS_noise_LA_max": 78.5,
|
||||||
|
"DNMS_noise_LA_min": 45.2,
|
||||||
|
"DNMS_noise_LA_eq": 65.3,
|
||||||
|
"E10tel_eq": 72777980.45
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2. **getactdata**
|
||||||
|
|
||||||
|
Abrufen aktueller/historischer Rohdaten eines Sensors.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getactdata`
|
||||||
|
|
||||||
|
**Parameter:**
|
||||||
|
- `sensorid` (int) - Sensor-ID
|
||||||
|
- `span` (int, optional) - Zeitspanne in Tagen
|
||||||
|
- `datetime` (date, optional) - Startdatum
|
||||||
|
- `sort` (int, optional) - Sortierung
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getactdata?sensorid=12345&span=1
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": null,
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"_time": "2025-11-04T10:00:00.000Z",
|
||||||
|
"DNMS_noise_LA_max": 78.62,
|
||||||
|
"DNMS_noise_LA_min": 47.36,
|
||||||
|
"DNMS_noise_LA_eq": null,
|
||||||
|
"E10tel_eq": 72777980.45
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3. **getavgdata**
|
||||||
|
|
||||||
|
Abrufen von Durchschnittswerten mit konfigurierbarem Zeitfenster.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getavgdata`
|
||||||
|
|
||||||
|
**Pflichtparameter:**
|
||||||
|
- `sensorid` (int) - Sensor-ID
|
||||||
|
|
||||||
|
**Optionale Parameter:**
|
||||||
|
- `span` (int, default: 1) - Zeitspanne in Tagen
|
||||||
|
- `datetime` (date, default: null) - Startdatum
|
||||||
|
- `avg` (int, default: 10) - Mittelwert-Intervall in Minuten
|
||||||
|
- `moving` (bool, default: true) - Gleitender Mittelwert
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getavgdata?sensorid=12345&span=7&avg=60&moving=true
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"start": "2025-10-28T10:00:00Z",
|
||||||
|
"span": 7,
|
||||||
|
"avg": 60,
|
||||||
|
"moving": true,
|
||||||
|
"count": 168,
|
||||||
|
"values": [...]
|
||||||
|
},
|
||||||
|
"err": null
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4. **getlongavg**
|
||||||
|
|
||||||
|
Langzeit-Durchschnittswerte über einen längeren Zeitraum.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getlongavg`
|
||||||
|
|
||||||
|
**Pflichtparameter:**
|
||||||
|
- `sensorid` (int) - Sensor-ID
|
||||||
|
|
||||||
|
**Optionale Parameter:**
|
||||||
|
- `span` (int, default: 2) - Zeitspanne in Tagen
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getlongavg?sensorid=12345&span=30
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"span": 30,
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"_stop": "2025-11-04T00:00:00.000Z",
|
||||||
|
"LA_max": 75.3,
|
||||||
|
"LA_min": 42.1,
|
||||||
|
"LA_eq": 63.8
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"err": null
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5. **getoneproperty**
|
||||||
|
|
||||||
|
Abrufen der Eigenschaften eines Sensors (Metadaten, Standort, etc.).
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getoneproperty`
|
||||||
|
|
||||||
|
**Pflichtparameter:**
|
||||||
|
- `sensorid` (int) - Sensor-ID
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getoneproperty?sensorid=12345
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": null,
|
||||||
|
"props": {
|
||||||
|
"_id": 12345,
|
||||||
|
"name": [{"name": "Hauptstraße"}],
|
||||||
|
"type": "noise",
|
||||||
|
"location": [{
|
||||||
|
"id": "loc_123",
|
||||||
|
"loc": {
|
||||||
|
"type": "Point",
|
||||||
|
"coordinates": [9.123, 48.456]
|
||||||
|
},
|
||||||
|
"indoor": false
|
||||||
|
}],
|
||||||
|
"othersensors": [
|
||||||
|
{"name": "PM Sensor", "sid": 12346}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 6. **getmapdata**
|
||||||
|
|
||||||
|
Abrufen von Sensordaten für Kartendarstellung mit geografischer Filterung.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getmapdata`
|
||||||
|
|
||||||
|
**Pflichtparameter:**
|
||||||
|
- `type` (string) - Sensortyp (noise, pm, thp, radioactivity)
|
||||||
|
|
||||||
|
**Optionale Parameter (einer muss angegeben werden):**
|
||||||
|
- `box` (string) - Begrenzungsbox: "west,south,east,north"
|
||||||
|
- `poly` (JSON-Array) - Polygon-Koordinaten
|
||||||
|
- `center` (Array) - Mittelpunkt [lng, lat]
|
||||||
|
- `distance` (int, default: 10) - Radius in km (nur mit `center`)
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getmapdata?type=noise&box=9.0,48.0,9.5,48.5
|
||||||
|
```
|
||||||
|
|
||||||
|
**Antwort:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": null,
|
||||||
|
"options": {
|
||||||
|
"lastdate": "2025-11-04T10:30:00.000Z",
|
||||||
|
"count": 25,
|
||||||
|
"data": "map"
|
||||||
|
},
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"location": [9.123, 48.456],
|
||||||
|
"id": 12345,
|
||||||
|
"name": "Hauptstraße",
|
||||||
|
"indoor": false,
|
||||||
|
"lastseen": "2025-11-04T10:25:00.000Z",
|
||||||
|
"value": 75.3,
|
||||||
|
"weeks": 0
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Wert-Kodierung:**
|
||||||
|
- `value >= 0`: Aktueller Messwert
|
||||||
|
- `value = -1`: Daten älter als 2 Stunden
|
||||||
|
- `value = -2`: Daten älter als 7 Tage
|
||||||
|
- `value = -3`: Daten älter als 30 Tage
|
||||||
|
- `value = -4`: Daten älter als 365 Tage
|
||||||
|
- `value = -5`: Keine Daten vorhanden
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 7. **getaddress**
|
||||||
|
|
||||||
|
Geocoding: Koordinaten aus Adresse ermitteln.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getaddress`
|
||||||
|
|
||||||
|
**Parameter:**
|
||||||
|
- `address` (string) - Adresse
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getaddress?address=Hauptstraße+10,+Stuttgart
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 8. **getcitycoords**
|
||||||
|
|
||||||
|
Koordinaten einer Stadt abrufen.
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getcitycoords`
|
||||||
|
|
||||||
|
**Parameter:**
|
||||||
|
- `city` (string) - Stadtname
|
||||||
|
|
||||||
|
**Beispiel:**
|
||||||
|
```
|
||||||
|
GET /api/getcitycoords?city=Stuttgart
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 9. **getakwdata**
|
||||||
|
|
||||||
|
Abrufen von Atomkraftwerks-Daten (spezialisiert).
|
||||||
|
|
||||||
|
**Endpunkt:** `/api/getakwdata`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Zeitformat-Konvertierung
|
||||||
|
|
||||||
|
Die API akzeptiert verschiedene Zeitformate:
|
||||||
|
|
||||||
|
### InfluxDB 2.0 (Flux) Format:
|
||||||
|
```
|
||||||
|
start: -7d
|
||||||
|
stop: now()
|
||||||
|
```
|
||||||
|
|
||||||
|
### InfluxDB 1.8 (InfluxQL) Format:
|
||||||
|
```
|
||||||
|
now() - 7d
|
||||||
|
now()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Absolute Zeitangaben (ISO 8601):
|
||||||
|
```
|
||||||
|
2025-11-04T10:00:00.000Z
|
||||||
|
```
|
||||||
|
|
||||||
|
**Wichtig:** Bei der Verwendung von ISO-Zeitstempeln mit InfluxDB 1.8 werden diese automatisch in Anführungszeichen gesetzt.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Fehlerbehandlung
|
||||||
|
|
||||||
|
Alle Endpunkte geben Fehler im folgenden Format zurück:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": "ERROR_CODE",
|
||||||
|
"values": []
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Häufige Fehlercodes:**
|
||||||
|
- `NODATA` - Keine Daten gefunden
|
||||||
|
- `SYNTAXURL` - Ungültige Query-Syntax
|
||||||
|
- `NOPROPSREAD` - Eigenschaften konnten nicht gelesen werden
|
||||||
|
- `NOPROPSFOUND` - Keine Eigenschaften gefunden
|
||||||
|
- `CMNDUNKNOWN` - Unbekannter Befehl
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Spezielle Berechnungen für Lärmdaten
|
||||||
|
|
||||||
|
### Logarithmische Mittelwertbildung
|
||||||
|
|
||||||
|
Für Dezibel-Werte (LA_max) wird eine korrekte logarithmische Mittelwertbildung durchgeführt:
|
||||||
|
|
||||||
|
1. **Konvertierung zu linear:** `E10tel = 10^(LA_max/10)`
|
||||||
|
2. **Arithmetischer Mittelwert:** `mean_E10tel = sum(E10tel) / count`
|
||||||
|
3. **Rückkonvertierung zu dB:** `n_AVG = 10 * log10(mean_E10tel)`
|
||||||
|
|
||||||
|
### Peak-Zählung
|
||||||
|
|
||||||
|
Bei `getlongavg` mit `long=true` werden Lärmspitzen über einem Schwellenwert gezählt:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"_time": "2025-11-04T12:00:00.000Z",
|
||||||
|
"peakcount": 13,
|
||||||
|
"n_AVG": 78.51,
|
||||||
|
"count": 18
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Datenbank-Konfiguration
|
||||||
|
|
||||||
|
### Umgebungsvariablen (.env)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# InfluxDB 1.8
|
||||||
|
INFLUXHOST=localhost
|
||||||
|
INFLUXPORT=8086
|
||||||
|
INFLUXUSER=username
|
||||||
|
INFLUXPASS=password
|
||||||
|
INFLUXDB=sensor_data
|
||||||
|
|
||||||
|
# InfluxDB 2.0
|
||||||
|
INFLUXTOKEN=your_token_here
|
||||||
|
INFLUXDATABUCKET=sensor_data
|
||||||
|
INFLUXORG=citysensor
|
||||||
|
|
||||||
|
# MongoDB
|
||||||
|
MONGOHOST=localhost
|
||||||
|
MONGOPORT=27017
|
||||||
|
MONGODB=sensordb
|
||||||
|
|
||||||
|
# Datenbank-Auswahl
|
||||||
|
DBASE=mongo # oder 'influx'
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Ausgabeformate
|
||||||
|
|
||||||
|
### JSON (Standard)
|
||||||
|
Alle Endpunkte geben standardmäßig JSON zurück.
|
||||||
|
|
||||||
|
### CSV (Optional)
|
||||||
|
Bei einigen Endpunkten kann `csv=true` übergeben werden:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/getsensordata?sensorid=12345&csv=true
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Paginierung
|
||||||
|
|
||||||
|
Die API verwendet keine explizite Paginierung. Große Datenmengen sollten über `span` und `datetime` Parameter zeitlich eingeschränkt werden.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Rate Limiting
|
||||||
|
|
||||||
|
Aktuell ist kein Rate Limiting implementiert. Dies sollte auf Proxy/Load-Balancer-Ebene erfolgen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Beispiel-Workflows
|
||||||
|
|
||||||
|
### Workflow 1: Karte mit Lärmsensoren anzeigen
|
||||||
|
|
||||||
|
1. Sensoren in Gebiet abrufen:
|
||||||
|
```
|
||||||
|
GET /api/getmapdata?type=noise&box=9.0,48.0,9.5,48.5
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Details für einen Sensor abrufen:
|
||||||
|
```
|
||||||
|
GET /api/getoneproperty?sensorid=12345
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Historische Daten abrufen:
|
||||||
|
```
|
||||||
|
GET /api/getactdata?sensorid=12345&span=7
|
||||||
|
```
|
||||||
|
|
||||||
|
### Workflow 2: Lärmstatistik über eine Woche
|
||||||
|
|
||||||
|
1. Durchschnittswerte mit Lärmspitzen:
|
||||||
|
```
|
||||||
|
GET /api/getsensordata?sensorid=12345&span=7&peak=70&long=true
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Ergebnis analysieren:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"_time": "2025-11-04T00:00:00.000Z",
|
||||||
|
"peakcount": 13,
|
||||||
|
"n_AVG": 78.51,
|
||||||
|
"count": 1440,
|
||||||
|
"LA_max_log_avg": 78.51
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Internationalisierung
|
||||||
|
|
||||||
|
Die API unterstützt mehrere Sprachen über i18next:
|
||||||
|
- Deutsch (de)
|
||||||
|
- Englisch (en)
|
||||||
|
|
||||||
|
Fehlermeldungen werden automatisch in der gewünschten Sprache zurückgegeben.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sicherheit
|
||||||
|
|
||||||
|
### API-Key-Authentifizierung
|
||||||
|
|
||||||
|
Die API unterstützt API-Key-Authentifizierung für programmatischen Zugriff.
|
||||||
|
|
||||||
|
#### Aktivierung
|
||||||
|
|
||||||
|
1. **API-Keys generieren:**
|
||||||
|
```bash
|
||||||
|
node generate-apikey.js 3
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **In .env konfigurieren:**
|
||||||
|
```bash
|
||||||
|
API_AUTH_REQUIRED=true
|
||||||
|
API_KEYS=key1,key2,key3
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Server neu starten**
|
||||||
|
|
||||||
|
#### Verwendung
|
||||||
|
|
||||||
|
API-Keys können auf zwei Arten übergeben werden:
|
||||||
|
|
||||||
|
**Option 1: HTTP-Header (empfohlen)**
|
||||||
|
```bash
|
||||||
|
curl -H "X-API-Key: your-api-key-here" \
|
||||||
|
"http://localhost:3000/api/getactdata?sensorid=12345"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Option 2: Query-Parameter**
|
||||||
|
```bash
|
||||||
|
curl "http://localhost:3000/api/getactdata?sensorid=12345&apikey=your-api-key-here"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Beispiel mit JavaScript/Node.js
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
import axios from 'axios'
|
||||||
|
|
||||||
|
const API_KEY = 'your-api-key-here'
|
||||||
|
const BASE_URL = 'http://localhost:3000/api'
|
||||||
|
|
||||||
|
// Mit Header
|
||||||
|
const response = await axios.get(`${BASE_URL}/getactdata`, {
|
||||||
|
params: { sensorid: 12345 },
|
||||||
|
headers: { 'X-API-Key': API_KEY }
|
||||||
|
})
|
||||||
|
|
||||||
|
// Mit Query-Parameter
|
||||||
|
const response2 = await axios.get(`${BASE_URL}/getactdata`, {
|
||||||
|
params: {
|
||||||
|
sensorid: 12345,
|
||||||
|
apikey: API_KEY
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Beispiel mit Python
|
||||||
|
|
||||||
|
```python
|
||||||
|
import requests
|
||||||
|
|
||||||
|
API_KEY = 'your-api-key-here'
|
||||||
|
BASE_URL = 'http://localhost:3000/api'
|
||||||
|
|
||||||
|
# Mit Header
|
||||||
|
headers = {'X-API-Key': API_KEY}
|
||||||
|
response = requests.get(f'{BASE_URL}/getactdata',
|
||||||
|
params={'sensorid': 12345},
|
||||||
|
headers=headers)
|
||||||
|
|
||||||
|
# Mit Query-Parameter
|
||||||
|
response2 = requests.get(f'{BASE_URL}/getactdata',
|
||||||
|
params={'sensorid': 12345, 'apikey': API_KEY})
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Fehlermeldungen
|
||||||
|
|
||||||
|
**401 Unauthorized - Kein API-Key angegeben:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": "UNAUTHORIZED",
|
||||||
|
"message": "API key required. Provide X-API-Key header or apikey query parameter."
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**403 Forbidden - Ungültiger API-Key:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"err": "FORBIDDEN",
|
||||||
|
"message": "Invalid API key"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Best Practices
|
||||||
|
|
||||||
|
- **Niemals API-Keys im Code committen** - verwenden Sie Umgebungsvariablen
|
||||||
|
- **Verschiedene Keys für verschiedene Clients** - ermöglicht granulare Kontrolle
|
||||||
|
- **Keys regelmäßig rotieren** - besonders nach Mitarbeiterabgängen
|
||||||
|
- **HTTPS verwenden** - schützt Keys bei Übertragung
|
||||||
|
- **Keys sicher speichern** - z.B. in Secret Management Systemen
|
||||||
|
- **Logging aktivieren** - überwachen Sie API-Zugriffe
|
||||||
|
|
||||||
|
### Weitere Sicherheitsmaßnahmen
|
||||||
|
|
||||||
|
Für Produktionsumgebungen sollten zusätzlich implementiert werden:
|
||||||
|
- **HTTPS erzwungen** - verhindert Man-in-the-Middle-Angriffe
|
||||||
|
- **CORS-Regeln konfiguriert** - beschränkt Browser-Zugriffe
|
||||||
|
- **Rate Limiting aktiviert** - schützt vor Missbrauch
|
||||||
|
- **IP-Whitelisting** - zusätzliche Zugriffskontrolle
|
||||||
|
- **Request-Logging** - Audit-Trail für Compliance
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Support und Weiterentwicklung
|
||||||
|
|
||||||
|
Für Fragen, Bugs oder Feature-Requests siehe die Projekt-Repository-Dokumentation.
|
||||||
|
|
||||||
|
**Version History:**
|
||||||
|
- 1.4.1: Unterstützung für InfluxDB 1.8 mit InfluxQL, logarithmische Mittelwertbildung für Lärmdaten
|
||||||
186
AUTH_SETUP.md
Normal file
186
AUTH_SETUP.md
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
# API-Key-Authentifizierung Setup
|
||||||
|
|
||||||
|
## Schnellstart
|
||||||
|
|
||||||
|
### 1. API-Keys generieren
|
||||||
|
|
||||||
|
```bash
|
||||||
|
node generate-apikey.js 3
|
||||||
|
```
|
||||||
|
|
||||||
|
Dies generiert 3 zufällige API-Keys wie:
|
||||||
|
```
|
||||||
|
1. a1b2c3d4e5f6...
|
||||||
|
2. f6e5d4c3b2a1...
|
||||||
|
3. 1a2b3c4d5e6f...
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Konfiguration in .env
|
||||||
|
|
||||||
|
Kopiere `.env.example` nach `.env` und füge deine Keys hinzu:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Bearbeite `.env`:
|
||||||
|
```bash
|
||||||
|
# Aktiviere Authentifizierung
|
||||||
|
API_AUTH_REQUIRED=true
|
||||||
|
|
||||||
|
# Füge generierte Keys hinzu (komma-separiert)
|
||||||
|
API_KEYS=a1b2c3d4e5f6...,f6e5d4c3b2a1...,1a2b3c4d5e6f...
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Server starten/neu starten
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm start
|
||||||
|
```
|
||||||
|
|
||||||
|
## Verwendung
|
||||||
|
|
||||||
|
### Mit curl
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Header-Methode (empfohlen)
|
||||||
|
curl -H "X-API-Key: your-key-here" \
|
||||||
|
"http://localhost:3000/api/getactdata?sensorid=12345"
|
||||||
|
|
||||||
|
# Query-Parameter-Methode
|
||||||
|
curl "http://localhost:3000/api/getactdata?sensorid=12345&apikey=your-key-here"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mit JavaScript/Node.js
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
import axios from 'axios'
|
||||||
|
|
||||||
|
const API_KEY = process.env.SENSOR_API_KEY
|
||||||
|
const client = axios.create({
|
||||||
|
baseURL: 'http://localhost:3000/api',
|
||||||
|
headers: { 'X-API-Key': API_KEY }
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await client.get('/getactdata', {
|
||||||
|
params: { sensorid: 12345 }
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mit Python
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
import requests
|
||||||
|
|
||||||
|
API_KEY = os.getenv('SENSOR_API_KEY')
|
||||||
|
headers = {'X-API-Key': API_KEY}
|
||||||
|
|
||||||
|
response = requests.get(
|
||||||
|
'http://localhost:3000/api/getactdata',
|
||||||
|
params={'sensorid': 12345},
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Verwaltung
|
||||||
|
|
||||||
|
### Neuen Key hinzufügen
|
||||||
|
|
||||||
|
1. Generiere neuen Key: `node generate-apikey.js 1`
|
||||||
|
2. Füge zur `API_KEYS` Liste in `.env` hinzu
|
||||||
|
3. Restart Server
|
||||||
|
|
||||||
|
### Key entfernen
|
||||||
|
|
||||||
|
1. Entferne Key aus `API_KEYS` Liste in `.env`
|
||||||
|
2. Restart Server
|
||||||
|
|
||||||
|
### Authentifizierung deaktivieren
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# In .env
|
||||||
|
API_AUTH_REQUIRED=false
|
||||||
|
# oder
|
||||||
|
API_KEYS=
|
||||||
|
```
|
||||||
|
|
||||||
|
## Sicherheit
|
||||||
|
|
||||||
|
### ✅ Do's
|
||||||
|
|
||||||
|
- ✅ Keys in Umgebungsvariablen speichern
|
||||||
|
- ✅ Unterschiedliche Keys für verschiedene Clients
|
||||||
|
- ✅ HTTPS in Produktion verwenden
|
||||||
|
- ✅ Keys regelmäßig rotieren
|
||||||
|
- ✅ Zugriffe loggen und überwachen
|
||||||
|
|
||||||
|
### ❌ Don'ts
|
||||||
|
|
||||||
|
- ❌ Keys in Git committen
|
||||||
|
- ❌ Keys im Frontend-Code verwenden
|
||||||
|
- ❌ Denselben Key für alle Clients
|
||||||
|
- ❌ Keys über unverschlüsselte Verbindungen senden
|
||||||
|
- ❌ Keys in URLs verwenden (bevorzuge Header)
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### "API key required" Fehler
|
||||||
|
|
||||||
|
- Stelle sicher, dass der Key im Header oder Query-Parameter übergeben wird
|
||||||
|
- Prüfe Schreibweise: `X-API-Key` (Header) oder `apikey` (Query)
|
||||||
|
|
||||||
|
### "Invalid API key" Fehler
|
||||||
|
|
||||||
|
- Prüfe, ob der Key in der `API_KEYS` Liste vorhanden ist
|
||||||
|
- Stelle sicher, dass keine Leerzeichen oder Zeilenumbrüche im Key sind
|
||||||
|
- Prüfe, ob `.env` korrekt geladen wird
|
||||||
|
|
||||||
|
### Keys werden nicht erkannt
|
||||||
|
|
||||||
|
- Server nach `.env` Änderungen neu starten
|
||||||
|
- Prüfe, ob `dotenv` korrekt konfiguriert ist
|
||||||
|
- Teste mit: `node -e "console.log(process.env.API_KEYS)"`
|
||||||
|
|
||||||
|
## Integration in bestehende Systeme
|
||||||
|
|
||||||
|
### Nginx Proxy
|
||||||
|
|
||||||
|
```nginx
|
||||||
|
location /api/ {
|
||||||
|
proxy_pass http://localhost:3000/api/;
|
||||||
|
proxy_set_header X-API-Key $http_x_api_key;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Apache Proxy
|
||||||
|
|
||||||
|
```apache
|
||||||
|
<Location /api/>
|
||||||
|
ProxyPass http://localhost:3000/api/
|
||||||
|
ProxyPassReverse http://localhost:3000/api/
|
||||||
|
RequestHeader set X-API-Key %{HTTP:X-API-Key}e
|
||||||
|
</Location>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Erweiterte Konfiguration
|
||||||
|
|
||||||
|
### Programmatische Key-Verwaltung
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
import { addApiKey, removeApiKey, generateApiKey } from './utilities/apiauth.js'
|
||||||
|
|
||||||
|
// Neuen Key zur Laufzeit hinzufügen
|
||||||
|
const newKey = generateApiKey()
|
||||||
|
addApiKey(newKey)
|
||||||
|
|
||||||
|
// Key zur Laufzeit entfernen
|
||||||
|
removeApiKey('old-key-here')
|
||||||
|
```
|
||||||
|
|
||||||
|
### Logging
|
||||||
|
|
||||||
|
Authentifizierungs-Ereignisse werden automatisch geloggt:
|
||||||
|
- Erfolgreiche Authentifizierung
|
||||||
|
- Fehlgeschlagene Versuche mit IP-Adresse
|
||||||
|
- Key-Verwaltungsoperationen
|
||||||
@@ -2,6 +2,7 @@
|
|||||||
import {DateTime} from "luxon"
|
import {DateTime} from "luxon"
|
||||||
import * as mongo from "../databases/mongo.js"
|
import * as mongo from "../databases/mongo.js"
|
||||||
import { returnOnError } from "../utilities/reporterror.js"
|
import { returnOnError } from "../utilities/reporterror.js"
|
||||||
|
import { fetchLatestLAmaxForChips } from '../databases/influx_sql.js'
|
||||||
|
|
||||||
|
|
||||||
// Default distance for center search ( in km)
|
// Default distance for center search ( in km)
|
||||||
@@ -127,6 +128,8 @@ export var getData4map = async (params) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// Nur Einträge verwenden, die eine ESP-Chipid haben
|
||||||
|
query.chip = { $exists: true }
|
||||||
try {
|
try {
|
||||||
// fetch mapdata from mongodb
|
// fetch mapdata from mongodb
|
||||||
let { properties, err } = await mongo.getallProperties(mongo.properties_collection, query)
|
let { properties, err } = await mongo.getallProperties(mongo.properties_collection, query)
|
||||||
@@ -134,19 +137,26 @@ export var getData4map = async (params) => {
|
|||||||
return returnOnError(ret, 'NOPROPSFOUND', getData4map.name)
|
return returnOnError(ret, 'NOPROPSFOUND', getData4map.name)
|
||||||
}
|
}
|
||||||
let v4map = getValue4Map(typ)
|
let v4map = getValue4Map(typ)
|
||||||
|
|
||||||
|
const chipIds = properties.map(prop => prop.chip?.id).filter(id => id)
|
||||||
|
const result = await fetchLatestLAmaxForChips({ chipids: chipIds})
|
||||||
|
|
||||||
for (let sensor of properties) {
|
for (let sensor of properties) {
|
||||||
|
const resIndex = result.values.findIndex((id) => id.chipid == sensor.chip.id)
|
||||||
|
const chpvalues = result.values[resIndex]
|
||||||
let oneAktData = {}
|
let oneAktData = {}
|
||||||
if (sensor.values !== undefined) {
|
if (!((sensor.values === undefined) || (chpvalues === undefined))) {
|
||||||
oneAktData = {
|
oneAktData = {
|
||||||
location: sensor.location[0].loc.coordinates,
|
location: sensor.location[0].loc.coordinates,
|
||||||
id: sensor._id,
|
id: sensor._id,
|
||||||
name: sensor.name[0].name,
|
name: sensor.chip.name,
|
||||||
indoor: sensor.location[0].indoor,
|
indoor: sensor.location[0].indoor,
|
||||||
lastseen: sensor.values.timestamp
|
lastseen: chpvalues.timestamp
|
||||||
}
|
}
|
||||||
let now = new Date().getTime()
|
let now = new Date().getTime()
|
||||||
if(oneAktData.lastseen !== '') {
|
if(oneAktData.lastseen !== '') {
|
||||||
let diff = now - oneAktData.lastseen.getTime()
|
const dt = new Date(oneAktData.lastseen).getTime()
|
||||||
|
let diff = now - dt
|
||||||
if (diff >= 365 * 24 * 3600 * 1000) {
|
if (diff >= 365 * 24 * 3600 * 1000) {
|
||||||
oneAktData.value = -4
|
oneAktData.value = -4
|
||||||
} else if (diff >= 30 * 24 * 3600 * 1000) {
|
} else if (diff >= 30 * 24 * 3600 * 1000) {
|
||||||
@@ -156,15 +166,15 @@ export var getData4map = async (params) => {
|
|||||||
} else if (diff >= 2 * 3600 * 1000) {
|
} else if (diff >= 2 * 3600 * 1000) {
|
||||||
oneAktData.value = -1
|
oneAktData.value = -1
|
||||||
} else {
|
} else {
|
||||||
if (sensor.values !== undefined) {
|
if (chpvalues !== undefined) {
|
||||||
oneAktData.value = Math.round(sensor.values[v4map] * 100) / 100
|
oneAktData.value = Math.round(chpvalues[v4map] * 100) / 100
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let weeks = Math.round(diff / (7 * 24 * 3600 * 1000))
|
let weeks = Math.round(diff / (7 * 24 * 3600 * 1000))
|
||||||
oneAktData.weeks = weeks
|
oneAktData.weeks = weeks
|
||||||
}
|
}
|
||||||
if (sensor.values.timestamp > lastDate) {
|
if (new Date(oneAktData.lastseen).getTime() > lastDate) {
|
||||||
lastDate = sensor.values.timestamp
|
lastDate = new Date(oneAktData.lastseen).getTime()
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
oneAktData.value = -5
|
oneAktData.value = -5
|
||||||
@@ -176,7 +186,7 @@ export var getData4map = async (params) => {
|
|||||||
ret = {
|
ret = {
|
||||||
err: null,
|
err: null,
|
||||||
options: {
|
options: {
|
||||||
lastdate: lastDate,
|
lastdate: new Date(lastDate).toISOString(),
|
||||||
count: aktData.length,
|
count: aktData.length,
|
||||||
data: 'map'
|
data: 'map'
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import {returnOnError} from "../utilities/reporterror.js"
|
|||||||
import checkParams from "../utilities/checkparams.js"
|
import checkParams from "../utilities/checkparams.js"
|
||||||
|
|
||||||
let readProperties = mongo.readProperties
|
let readProperties = mongo.readProperties
|
||||||
let readChipData = mongo.readChipData
|
|
||||||
|
|
||||||
// Read properties for sensorid and properties for all other sensors on same location
|
// Read properties for sensorid and properties for all other sensors on same location
|
||||||
export const getOneProperty = async (params) => {
|
export const getOneProperty = async (params) => {
|
||||||
@@ -14,11 +13,6 @@ export const getOneProperty = async (params) => {
|
|||||||
if (err) {
|
if (err) {
|
||||||
return returnOnError(properties, err, getOneProperty.name)
|
return returnOnError(properties, err, getOneProperty.name)
|
||||||
}
|
}
|
||||||
// read 'chip'-data (special for noise sensors)
|
|
||||||
const chipdata = await readChipData(opts.sensorid)
|
|
||||||
if (chipdata.err == undefined) {
|
|
||||||
properties.chip = chipdata
|
|
||||||
}
|
|
||||||
let sensorEntries = [];
|
let sensorEntries = [];
|
||||||
try {
|
try {
|
||||||
let pp = await readProperties({sid: opts.sensorid}); // read for given sensorID
|
let pp = await readProperties({sid: opts.sensorid}); // read for given sensorID
|
||||||
|
|||||||
@@ -2,7 +2,8 @@
|
|||||||
|
|
||||||
const DBASE = process.env.DBASE || 'mongo'
|
const DBASE = process.env.DBASE || 'mongo'
|
||||||
import {DateTime} from "luxon"
|
import {DateTime} from "luxon"
|
||||||
import * as influx from "../databases/influx.js"
|
import {logit} from "../utilities/logit.js"
|
||||||
|
import * as influx from "../databases/influx_sql.js"
|
||||||
import * as mongo from "../databases/mongo.js"
|
import * as mongo from "../databases/mongo.js"
|
||||||
import {returnOnError} from "../utilities/reporterror.js"
|
import {returnOnError} from "../utilities/reporterror.js"
|
||||||
import {csv2Json} from "../utilities/csv2json.js"
|
import {csv2Json} from "../utilities/csv2json.js"
|
||||||
@@ -23,6 +24,7 @@ const noiseParams = [
|
|||||||
{name: 'csv', type: 'bool', default: false},
|
{name: 'csv', type: 'bool', default: false},
|
||||||
{name: 'long', type: 'bool', default: false},
|
{name: 'long', type: 'bool', default: false},
|
||||||
{name: 'sort', type: 'int', default: 1},
|
{name: 'sort', type: 'int', default: 1},
|
||||||
|
{name: 'db', type: 'string', default: ''},
|
||||||
{name: 'last_seen', type: 'date', default: '1900-01-01T00:00:00Z'},
|
{name: 'last_seen', type: 'date', default: '1900-01-01T00:00:00Z'},
|
||||||
{name: 'datetime', type: 'date', default: null}
|
{name: 'datetime', type: 'date', default: null}
|
||||||
]
|
]
|
||||||
@@ -146,17 +148,30 @@ export async function getSensorData(params) {
|
|||||||
return returnOnError(ret, 'CMNDUNKNOWN', getActData.name)
|
return returnOnError(ret, 'CMNDUNKNOWN', getActData.name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const getChipPropety = async (opts) => {
|
||||||
|
// read 'chip'-data (special for noise sensors)
|
||||||
|
opts.chipid = null
|
||||||
|
const chipdata = await mongo.readChipData(opts.sensorid)
|
||||||
|
logit('Ergebnis von readChipData:', chipdata)
|
||||||
|
if ((chipdata.err === null) && (chipdata.chipdata !== null)) {
|
||||||
|
opts.chipid = chipdata.chipdata.chip.id
|
||||||
|
}
|
||||||
|
if (opts.db === 'm') {
|
||||||
|
opts.chipid = null
|
||||||
|
}
|
||||||
|
return chipdata.err
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// export const getActData = async (opts) => {
|
// export const getActData = async (opts) => {
|
||||||
export async function getActData(opts) {
|
export async function getActData(opts) {
|
||||||
if (DBASE === 'mongo') {
|
const chiperr = await getChipPropety(opts)
|
||||||
return await mongo.fetchActData(opts)
|
if (opts.chipid) {
|
||||||
} else if (DBASE === 'influx') {
|
|
||||||
return await influx.fetchActData(opts)
|
return await influx.fetchActData(opts)
|
||||||
|
} else {
|
||||||
|
return await mongo.fetchActData(opts)
|
||||||
}
|
}
|
||||||
return {err: 'DBASEUNKNOWN', values: []}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
5
app.js
5
app.js
@@ -1,3 +1,4 @@
|
|||||||
|
import 'dotenv/config'
|
||||||
import createError from 'http-errors'
|
import createError from 'http-errors'
|
||||||
import logger from 'morgan'
|
import logger from 'morgan'
|
||||||
import express from 'express'
|
import express from 'express'
|
||||||
@@ -19,6 +20,7 @@ import indexRouter from './routes/index.js'
|
|||||||
import { apiRouter } from './routes/api.js'
|
import { apiRouter } from './routes/api.js'
|
||||||
import {fileURLToPath} from "url";
|
import {fileURLToPath} from "url";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
|
import { validateApiKey } from './utilities/apiauth.js'
|
||||||
|
|
||||||
|
|
||||||
i18next
|
i18next
|
||||||
@@ -42,7 +44,8 @@ app.use(express.urlencoded({ extended: true }))
|
|||||||
app.use(cookieParser())
|
app.use(cookieParser())
|
||||||
|
|
||||||
app.use('/', indexRouter)
|
app.use('/', indexRouter)
|
||||||
app.use('/api', apiRouter)
|
// API-Key-Authentifizierung für alle /api/* Routen
|
||||||
|
app.use('/api', validateApiKey, apiRouter)
|
||||||
|
|
||||||
|
|
||||||
// catch 404 and forward to error handler
|
// catch 404 and forward to error handler
|
||||||
|
|||||||
@@ -14,8 +14,8 @@
|
|||||||
|
|
||||||
set -x
|
set -x
|
||||||
port=""
|
port=""
|
||||||
orgName=sensorapi
|
orgName=sensorapi_i
|
||||||
name=sensorapi
|
name=sensorapi_i
|
||||||
|
|
||||||
usage()
|
usage()
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -7,6 +7,7 @@
|
|||||||
// This implementation converts LA_max to E10tel_eq at runtime to maintain
|
// This implementation converts LA_max to E10tel_eq at runtime to maintain
|
||||||
// compatibility with the Flux version while ensuring correct logarithmic averaging.
|
// compatibility with the Flux version while ensuring correct logarithmic averaging.
|
||||||
|
|
||||||
|
import 'dotenv/config'
|
||||||
import axios from 'axios'
|
import axios from 'axios'
|
||||||
import { DateTime } from 'luxon'
|
import { DateTime } from 'luxon'
|
||||||
import { logit, logerror } from '../utilities/logit.js'
|
import { logit, logerror } from '../utilities/logit.js'
|
||||||
@@ -31,6 +32,7 @@ const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/write`
|
|||||||
const influxRead = async (query) => {
|
const influxRead = async (query) => {
|
||||||
let start = DateTime.now()
|
let start = DateTime.now()
|
||||||
logit(`ReadInflux from ${INFLUXURL_READ}`)
|
logit(`ReadInflux from ${INFLUXURL_READ}`)
|
||||||
|
logit(`Query: ${query}`)
|
||||||
let erg = { values: [], err: null}
|
let erg = { values: [], err: null}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -54,17 +56,23 @@ const influxRead = async (query) => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
if (ret.status !== 200) {
|
if (ret.status !== 200) {
|
||||||
return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
|
erg.err = `RESPSTATUS: ${ret.status}`
|
||||||
|
logit(`ERROR ${influxRead.name}: ${erg.err}`)
|
||||||
|
return erg
|
||||||
}
|
}
|
||||||
|
|
||||||
// InfluxDB 1.8 returns JSON format
|
// InfluxDB 1.8 returns JSON format
|
||||||
if (ret.data.error) {
|
if (ret.data.error) {
|
||||||
return returnOnError(erg, ret.data.error, influxRead.name)
|
erg.err = ret.data.error
|
||||||
|
logit(`ERROR ${influxRead.name}: ${erg.err}`)
|
||||||
|
return erg
|
||||||
}
|
}
|
||||||
|
|
||||||
erg.values = ret.data.results
|
erg.values = ret.data.results
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return returnOnError(erg, e, influxRead.name)
|
erg.err = e.toString()
|
||||||
|
logit(`ERROR ${influxRead.name}: ${erg.err}`)
|
||||||
|
return erg
|
||||||
}
|
}
|
||||||
|
|
||||||
logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
|
logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
|
||||||
@@ -135,7 +143,10 @@ const transformInfluxResult = (series) => {
|
|||||||
columns.forEach((col, index) => {
|
columns.forEach((col, index) => {
|
||||||
if (col === 'time') {
|
if (col === 'time') {
|
||||||
// Convert timestamp to ISO string for compatibility
|
// Convert timestamp to ISO string for compatibility
|
||||||
record._time = new Date(row[index]).toISOString()
|
record.datetime = new Date(row[index]).toISOString()
|
||||||
|
} else if (col.startsWith('DNMS')) {
|
||||||
|
col = col.slice(11)
|
||||||
|
record[col] = row[index]
|
||||||
} else {
|
} else {
|
||||||
record[col] = row[index]
|
record[col] = row[index]
|
||||||
}
|
}
|
||||||
@@ -156,15 +167,15 @@ const transformInfluxResult = (series) => {
|
|||||||
const fetchFromInflux = async (ret, query) => {
|
const fetchFromInflux = async (ret, query) => {
|
||||||
let { values, err } = await influxRead(query)
|
let { values, err } = await influxRead(query)
|
||||||
if (err) {
|
if (err) {
|
||||||
if (err.toString().includes('400')) {
|
ret.err = err.toString().includes('400') ? 'SYNTAXURL' : err.toString()
|
||||||
return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
|
logit(`ERROR ${fetchFromInflux.name}: ${ret.err}`)
|
||||||
} else {
|
return ret
|
||||||
return returnOnError(ret, err, fetchFromInflux.name)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
logit(`values.length: ${values.length}`)
|
||||||
if (!values || !values.length || !values[0].series) {
|
if (!values || !values.length || !values[0].series) {
|
||||||
return returnOnError(ret, 'NODATA', fetchFromInflux.name)
|
ret.err = 'NODATA'
|
||||||
|
logit(`ERROR ${fetchFromInflux.name}: No data returned from query`)
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
ret.values = transformInfluxResult(values[0].series)
|
ret.values = transformInfluxResult(values[0].series)
|
||||||
@@ -187,6 +198,14 @@ export const fetchActData = async (opts) => {
|
|||||||
let startTime = opts.start.replace('start: ', '').trim()
|
let startTime = opts.start.replace('start: ', '').trim()
|
||||||
let stopTime = opts.stop.replace('stop: ', '').trim()
|
let stopTime = opts.stop.replace('stop: ', '').trim()
|
||||||
|
|
||||||
|
// If time is ISO string, wrap in quotes; if it's a relative time (like now() - 1h), leave as is
|
||||||
|
if (startTime.match(/^\d{4}-\d{2}-\d{2}T/)) {
|
||||||
|
startTime = `'${startTime}'`
|
||||||
|
}
|
||||||
|
if (stopTime.match(/^\d{4}-\d{2}-\d{2}T/)) {
|
||||||
|
stopTime = `'${stopTime}'`
|
||||||
|
}
|
||||||
|
|
||||||
// Build sorting clause
|
// Build sorting clause
|
||||||
let orderClause = ''
|
let orderClause = ''
|
||||||
if (opts.sort) {
|
if (opts.sort) {
|
||||||
@@ -200,9 +219,9 @@ export const fetchActData = async (opts) => {
|
|||||||
// InfluxQL query to get LA_max for a sensor within time range
|
// InfluxQL query to get LA_max for a sensor within time range
|
||||||
// Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
|
// Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
|
||||||
const query = `
|
const query = `
|
||||||
SELECT "LA_max", "LA_min", "LA_eq"
|
SELECT "DNMS_noise_LA_max", "DNMS_noise_LA_min", "DNMS_noise_LAeq"
|
||||||
FROM "measurements"
|
FROM "DNMS"
|
||||||
WHERE "sid" = '${opts.sensorid}'
|
WHERE "node" = '${opts.chipid}'
|
||||||
AND time >= ${startTime}
|
AND time >= ${startTime}
|
||||||
AND time <= ${stopTime}
|
AND time <= ${stopTime}
|
||||||
${orderClause}
|
${orderClause}
|
||||||
@@ -217,12 +236,13 @@ export const fetchActData = async (opts) => {
|
|||||||
|
|
||||||
// Transform data to add E10tel_eq field for compatibility with Flux version
|
// Transform data to add E10tel_eq field for compatibility with Flux version
|
||||||
// E10tel_eq = 10^(LA_max/10)
|
// E10tel_eq = 10^(LA_max/10)
|
||||||
|
if (opts.data !== 'live') {
|
||||||
result.values = result.values.map(record => ({
|
result.values = result.values.map(record => ({
|
||||||
...record,
|
...record,
|
||||||
E10tel_eq: record.LA_max !== null && record.LA_max !== undefined
|
E10tel_eq: record.LA_max !== null && record.LA_max !== undefined
|
||||||
? Math.pow(10, record.LA_max / 10)
|
? Math.pow(10, record.LA_max / 10)
|
||||||
: null
|
: null
|
||||||
}))
|
}))}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
@@ -267,22 +287,30 @@ const calculateLogMean = (values) => {
|
|||||||
export const fetchNoiseAVGData = async (opts) => {
|
export const fetchNoiseAVGData = async (opts) => {
|
||||||
let ret = { err: null, values: [] }
|
let ret = { err: null, values: [] }
|
||||||
|
|
||||||
|
|
||||||
// Convert Flux time format to InfluxQL format
|
// Convert Flux time format to InfluxQL format
|
||||||
let startTime = opts.start.replace('start: ', '').trim()
|
let startTime = opts.start.replace('start: ', '').trim()
|
||||||
let stopTime = opts.stop.replace('stop: ', '').trim()
|
let stopTime = opts.stop.replace('stop: ', '').trim()
|
||||||
|
|
||||||
|
// If time is ISO string, wrap in quotes; if it's a relative time (like now() - 1h), leave as is
|
||||||
|
if (startTime.match(/^\d{4}-\d{2}-\d{2}T/)) {
|
||||||
|
startTime = `'${startTime}'`
|
||||||
|
}
|
||||||
|
if (stopTime.match(/^\d{4}-\d{2}-\d{2}T/)) {
|
||||||
|
stopTime = `'${stopTime}'`
|
||||||
|
}
|
||||||
|
|
||||||
// Since InfluxQL doesn't support complex joins like Flux, we need to make multiple queries
|
// Since InfluxQL doesn't support complex joins like Flux, we need to make multiple queries
|
||||||
// and combine the results in JavaScript
|
// and combine the results in JavaScript
|
||||||
|
|
||||||
// Query 1: Get LA_max data aggregated by hour for E10tel calculation
|
// Query 1: Get LA_max data aggregated by hour for E10tel calculation
|
||||||
// In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
|
// In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
|
||||||
const queryLAmaxForE10 = `
|
const queryLAmaxForE10 = `
|
||||||
SELECT "LA_max", time
|
SELECT "DNMS_noise_LA_max"
|
||||||
FROM "measurements"
|
FROM "DNMS"
|
||||||
WHERE "sid" = '${opts.sensorid}'
|
WHERE "node" = '${opts.chipid}'
|
||||||
AND time >= ${startTime}
|
AND time >= ${startTime}
|
||||||
AND time <= ${stopTime}
|
AND time <= ${stopTime}
|
||||||
AND "LA_max" IS NOT NULL
|
|
||||||
ORDER BY time ASC
|
ORDER BY time ASC
|
||||||
`
|
`
|
||||||
|
|
||||||
@@ -293,11 +321,15 @@ export const fetchNoiseAVGData = async (opts) => {
|
|||||||
// Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
|
// Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
|
||||||
let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
|
let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
|
||||||
if (lamaxErr) {
|
if (lamaxErr) {
|
||||||
return returnOnError(ret, lamaxErr, fetchNoiseAVGData.name)
|
ret.err = lamaxErr.toString()
|
||||||
|
logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
|
if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
|
||||||
return returnOnError(ret, 'NODATA', fetchNoiseAVGData.name)
|
ret.err = 'NODATA'
|
||||||
|
logit(`ERROR ${fetchNoiseAVGData.name}: No data returned from query`)
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
// Transform LA_max results
|
// Transform LA_max results
|
||||||
@@ -323,7 +355,7 @@ export const fetchNoiseAVGData = async (opts) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const lamax = record.LA_max
|
const lamax = record.DNMS_noise_LA_max || record.LA_max
|
||||||
if (lamax !== null && lamax !== undefined) {
|
if (lamax !== null && lamax !== undefined) {
|
||||||
// Store original LA_max value
|
// Store original LA_max value
|
||||||
hourlyData[hourKey].lamaxValues.push(lamax)
|
hourlyData[hourKey].lamaxValues.push(lamax)
|
||||||
@@ -385,7 +417,87 @@ export const fetchNoiseAVGData = async (opts) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return returnOnError(ret, e, fetchNoiseAVGData.name)
|
ret.err = e.toString()
|
||||||
|
logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch latest LA_max values for multiple chip IDs
|
||||||
|
* @param {Object} opts - Options object
|
||||||
|
* @param {Array<string>} opts.chipids - Array of chip IDs
|
||||||
|
* @returns {Object} - {err: null, values: [{chipid, LA_max, timestamp}]}
|
||||||
|
*/
|
||||||
|
export const fetchLatestLAmaxForChips = async (opts) => {
|
||||||
|
let ret = { err: null, values: [] }
|
||||||
|
|
||||||
|
if (!opts.chipids || !Array.isArray(opts.chipids) || opts.chipids.length === 0) {
|
||||||
|
ret.err = 'No chip IDs provided'
|
||||||
|
logit(`ERROR ${fetchLatestLAmaxForChips.name}: ${ret.err}`)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Build WHERE clause with multiple chip IDs using OR (InfluxQL doesn't support IN)
|
||||||
|
const chipIdConditions = opts.chipids.map(id => `"node" = '${id}'`).join(' OR ')
|
||||||
|
|
||||||
|
// Query to get latest LA_max for each chip
|
||||||
|
const query = `SELECT "DNMS_noise_LA_max", "node" FROM "DNMS" WHERE (${chipIdConditions}) AND time >= now() - 24h ORDER BY time DESC`
|
||||||
|
|
||||||
|
let { values: lamaxValues, err: lamaxErr } = await influxRead(query)
|
||||||
|
if (lamaxErr) {
|
||||||
|
ret.err = lamaxErr.toString()
|
||||||
|
logit(`ERROR ${fetchLatestLAmaxForChips.name}: ${ret.err}`)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
|
||||||
|
ret.err = 'NODATA'
|
||||||
|
logit(`ERROR ${fetchLatestLAmaxForChips.name}: No data returned from query`)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transform results
|
||||||
|
const allData = transformInfluxResult(lamaxValues[0].series)
|
||||||
|
|
||||||
|
// Get latest value for each chip (data is already sorted by time DESC)
|
||||||
|
const latestByChip = {}
|
||||||
|
|
||||||
|
allData.forEach(record => {
|
||||||
|
const chipid = record.node
|
||||||
|
const lamax = record.LA_max
|
||||||
|
|
||||||
|
// Only keep the first (latest) value for each chip
|
||||||
|
if (!latestByChip[chipid] && lamax !== null && lamax !== undefined) {
|
||||||
|
latestByChip[chipid] = {
|
||||||
|
chipid: chipid,
|
||||||
|
LA_max: lamax,
|
||||||
|
timestamp: record.datetime
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Convert to array
|
||||||
|
ret.values = Object.values(latestByChip)
|
||||||
|
|
||||||
|
// Add null entries for chips without data
|
||||||
|
opts.chipids.forEach(chipid => {
|
||||||
|
if (!latestByChip[chipid]) {
|
||||||
|
ret.values.push({
|
||||||
|
chipid: chipid,
|
||||||
|
LA_max: null,
|
||||||
|
timestamp: null
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
} catch (e) {
|
||||||
|
ret.err = e.toString()
|
||||||
|
logit(`ERROR ${fetchLatestLAmaxForChips.name}: ${ret.err}`)
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ export const readProperties = async (query, limit = 0) => {
|
|||||||
let client = await connectMongo()
|
let client = await connectMongo()
|
||||||
try {
|
try {
|
||||||
if ("sid" in query) { // if sid is given, read property for sid
|
if ("sid" in query) { // if sid is given, read property for sid
|
||||||
ret.properties = await client.db(MONGOBASE).collection('properties_collection').findOne({_id: query.sid})
|
ret.properties = await client.db(MONGOBASE).collection(properties_collection).findOne({_id: query.sid})
|
||||||
} else { // otherwise read props corresponding to query
|
} else { // otherwise read props corresponding to query
|
||||||
ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
|
ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
|
||||||
}
|
}
|
||||||
@@ -72,7 +72,7 @@ export const readChipData = async (sid) => {
|
|||||||
let ret = { err: null, chipdata: null}
|
let ret = { err: null, chipdata: null}
|
||||||
let client = await connectMongo()
|
let client = await connectMongo()
|
||||||
try {
|
try {
|
||||||
ret.chipdata = await client.db(MONGOBASE).collection('prop_flux').findOne({_id: sid},{projection: {chip: 1, _id: 0}})
|
ret.chipdata = await client.db(MONGOBASE).collection('properties').findOne({_id: sid},{projection: {chip: 1, _id: 0}})
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ret.err = e
|
ret.err = e
|
||||||
}
|
}
|
||||||
@@ -186,7 +186,7 @@ export const fetchActData = async (opts) => {
|
|||||||
LA_minx: '$values.noise_LA_min',
|
LA_minx: '$values.noise_LA_min',
|
||||||
LA_max: '$values.LA_max',
|
LA_max: '$values.LA_max',
|
||||||
LAeq: '$values.LAeq',
|
LAeq: '$values.LAeq',
|
||||||
E10tel_eq: '$values.E10tel_eq' }
|
}
|
||||||
},
|
},
|
||||||
// {$project: {
|
// {$project: {
|
||||||
// datetime: {$dateToString: {format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime'}},
|
// datetime: {$dateToString: {format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime'}},
|
||||||
@@ -202,24 +202,6 @@ export const fetchActData = async (opts) => {
|
|||||||
}
|
}
|
||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
/*
|
|
||||||
Try to connect to mongodb://rexfue:s25BMmW2gg@192.168.51.22:27017
|
|
||||||
Try to connect to mongodb://rexfue:s25BMmW2gg@192.168.51.22:27017
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
let docs = await collection.find(
|
|
||||||
{ datetime:
|
|
||||||
{ $gte: start.toDate(), $lt: end.toDate() }
|
|
||||||
},
|
|
||||||
{ projection:
|
|
||||||
{_id:0, E_eq:0, E_mx:0, E_mi:0, E10tel_mx:0, E10tel_mi:0}, sort: {datetime: sort}
|
|
||||||
},
|
|
||||||
).toArray();
|
|
||||||
*/
|
|
||||||
|
|
||||||
export const fetchgeigerAVGData = async (opts) => {
|
export const fetchgeigerAVGData = async (opts) => {
|
||||||
let docs = []
|
let docs = []
|
||||||
|
|||||||
210
doc/2DomainsfuerContainer.rtf
Normal file
210
doc/2DomainsfuerContainer.rtf
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
{\rtf1\ansi\ansicpg1252\cocoartf2867
|
||||||
|
\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fnil\fcharset0 .SFNS-Regular;\f1\fnil\fcharset0 HelveticaNeue-Bold;\f2\fswiss\fcharset0 Helvetica;
|
||||||
|
\f3\fnil\fcharset0 .SFNS-Semibold;\f4\fnil\fcharset0 .AppleSystemUIFontMonospaced-Regular;\f5\fmodern\fcharset0 Courier;
|
||||||
|
\f6\fnil\fcharset0 .SFNS-Bold;}
|
||||||
|
{\colortbl;\red255\green255\blue255;\red14\green14\blue14;\red111\green90\blue30;\red0\green0\blue0;
|
||||||
|
\red181\green0\blue19;\red20\green0\blue196;\red13\green100\blue1;\red151\green0\blue126;\red135\green5\blue129;
|
||||||
|
\red0\green0\blue0;}
|
||||||
|
{\*\expandedcolortbl;;\cssrgb\c6700\c6700\c6700;\cssrgb\c51373\c42353\c15686;\csgray\c0;
|
||||||
|
\cssrgb\c76863\c10196\c8627;\cssrgb\c10980\c0\c81176;\cssrgb\c0\c45490\c0;\cssrgb\c66667\c5098\c56863;\cssrgb\c60784\c13725\c57647;
|
||||||
|
\cssrgb\c0\c0\c0;}
|
||||||
|
\paperw11900\paperh16840\margl1440\margr1440\vieww29020\viewh16240\viewkind0
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 Ja, das ist absolut m\'f6glich \uc0\u9989 \'97 du kannst
|
||||||
|
\f1\b denselben Docker-Container \'fcber mehrere Domains oder Subdomains
|
||||||
|
\f0\b0 via
|
||||||
|
\f1\b Traefik
|
||||||
|
\f0\b0 erreichbar machen, und
|
||||||
|
\f1\b im Container selbst unterscheiden
|
||||||
|
\f0\b0 , \'fcber welche Domain der Aufruf kam.\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\uc0\u11835 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f3\b\fs44 \cf2 \uc0\u55357 \u56615 \'dcberblick
|
||||||
|
\f0\b0\fs42 \
|
||||||
|
\
|
||||||
|
Traefik ist ein Reverse Proxy, der anhand von Regeln Anfragen an Container weiterleitet.\
|
||||||
|
Du kannst mehrere
|
||||||
|
\f1\b Router-Regeln
|
||||||
|
\f0\b0 definieren, die alle auf denselben
|
||||||
|
\f1\b Service
|
||||||
|
\f0\b0 (also denselben Container) zeigen.\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\uc0\u11835 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f3\b\fs44 \cf2 \uc0\u55358 \u56809 Beispiel: Zwei Domains auf denselben Container
|
||||||
|
\f0\b0\fs42 \
|
||||||
|
\
|
||||||
|
Angenommen, du hast zwei Domains:\
|
||||||
|
\pard\tqr\tx100\tx260\li260\fi-260\sl324\slmult1\sb240\partightenfactor0
|
||||||
|
\cf2 \'95
|
||||||
|
\f4 app.example.com
|
||||||
|
\f0 \
|
||||||
|
\'95
|
||||||
|
\f4 test.example.org
|
||||||
|
\f0 \
|
||||||
|
\
|
||||||
|
Und einen Container
|
||||||
|
\f4 myapp
|
||||||
|
\f0 , der auf Port 8080 lauscht.\
|
||||||
|
\
|
||||||
|
Dann kannst du in deinen
|
||||||
|
\f1\b Traefik-Labels
|
||||||
|
\f0\b0 Folgendes setzen:
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f5\fs28 \cf3 version:\cf4 \cf5 '3.8'\cf4 \
|
||||||
|
\
|
||||||
|
\cf3 services:\cf4 \
|
||||||
|
\cf3 myapp:\cf4 \
|
||||||
|
\cf3 image:\cf4 \cf5 myapp:latest\cf4 \
|
||||||
|
\cf3 labels:\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.enable=true"\cf4 \
|
||||||
|
\
|
||||||
|
\cf7 # Router 1\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp1.rule=Host(`app.example.com`)"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp1.entrypoints=websecure"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp1.tls.certresolver=myresolver"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp1.service=myapp-service"\cf4 \
|
||||||
|
\
|
||||||
|
\cf7 # Router 2\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp2.rule=Host(`test.example.org`)"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp2.entrypoints=websecure"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp2.tls.certresolver=myresolver"\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.routers.myapp2.service=myapp-service"\cf4 \
|
||||||
|
\
|
||||||
|
\cf7 # Gemeinsamer Service\cf4 \
|
||||||
|
\cf6 -\cf4 \cf5 "traefik.http.services.myapp-service.loadbalancer.server.port=8080"
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 Beide Router (
|
||||||
|
\f4 myapp1
|
||||||
|
\f0 ,
|
||||||
|
\f4 myapp2
|
||||||
|
\f0 ) leiten an denselben Service (
|
||||||
|
\f4 myapp-service
|
||||||
|
\f0 ) weiter, der den Container auf Port 8080 anspricht.\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\uc0\u11835 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f3\b\fs44 \cf2 \uc0\u55358 \u56800 Im Container unterscheiden, welche Domain aufgerufen wurde
|
||||||
|
\f0\b0\fs42 \
|
||||||
|
\
|
||||||
|
Sobald eine Anfrage im Container landet, kannst du anhand des
|
||||||
|
\f1\b HTTP-Headers Host
|
||||||
|
\f0\b0 erkennen, \'fcber welche Domain sie kam.\
|
||||||
|
\
|
||||||
|
Beispiel (z. B. in einer Node.js-, PHP-, Python- oder Go-App):
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f5\fs28 \cf7 # Beispiel Flask / Python\cf4 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
\cf8 from\cf4 flask \cf8 import\cf4 Flask, request\
|
||||||
|
\
|
||||||
|
app = Flask(__name__)\
|
||||||
|
\
|
||||||
|
\cf9 @app.route(\cf5 '/'\cf9 )\cf4 \
|
||||||
|
\cf8 def\cf4 index():\
|
||||||
|
host = request.headers.get(\cf5 'Host'\cf4 )\
|
||||||
|
\cf8 return\cf4 \cf5 f"Aufgerufen \'fcber: \cf10 \{host\}\cf5 "\cf4 \
|
||||||
|
\
|
||||||
|
app.run(host=\cf5 '0.0.0.0'\cf4 , port=\cf6 8080\cf4 )
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 Oder in Node.js:
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f5\fs28 \cf4 app.get(\cf5 '/'\cf4 , (req, res) => \{\
|
||||||
|
res.send(\cf5 `Aufgerufen \'fcber: \cf10 $\{req.headers.host\}\cf5 `\cf4 );\
|
||||||
|
\});
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 Das Feld
|
||||||
|
\f4 req.headers.host
|
||||||
|
\f0 enth\'e4lt genau den Hostnamen, den Traefik aus der Anfrage durchreicht (
|
||||||
|
\f4 app.example.com
|
||||||
|
\f0 oder
|
||||||
|
\f4 test.example.org
|
||||||
|
\f0 ).\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\uc0\u11835 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f3\b\fs44 \cf2 \uc0\u9989 Zusammengefasst
|
||||||
|
\f2\b0\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f6\b\fs40 \cf2 Ziel
|
||||||
|
\f2\b0\fs24 \cf0
|
||||||
|
\f6\b\fs40 \cf2 Vorgehen
|
||||||
|
\f2\b0\fs24 \cf0 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs40 \cf2 Mehrere Domains/Subdomains auf denselben Container routen
|
||||||
|
\f2\fs24 \cf0
|
||||||
|
\f0\fs40 \cf2 Mehrere Traefik-Router mit unterschiedlichen
|
||||||
|
\f4 Host()
|
||||||
|
\f0 -Regeln, aber demselben Service
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
|
||||||
|
\f0\fs40 \cf2 Domain im Container unterscheiden
|
||||||
|
\f2\fs24 \cf0
|
||||||
|
\f0\fs40 \cf2 Den
|
||||||
|
\f4 Host
|
||||||
|
\f0 -Header der Anfrage auslesen
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
|
||||||
|
\f0\fs40 \cf2 SSL-Zertifikate
|
||||||
|
\f2\fs24 \cf0
|
||||||
|
\f0\fs40 \cf2 F\'fcr jede Domain einen Router mit
|
||||||
|
\f4 tls.certresolver
|
||||||
|
\f0 definieren (Traefik k\'fcmmert sich um Let\'92s Encrypt automatisch)
|
||||||
|
\f2\fs24 \cf0 \
|
||||||
|
\
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\partightenfactor0
|
||||||
|
\cf0 \
|
||||||
|
\uc0\u11835 \
|
||||||
|
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\sl324\slmult1\pardirnatural\partightenfactor0
|
||||||
|
|
||||||
|
\f0\fs42 \cf2 \
|
||||||
|
Wenn du magst, kann ich dir ein vollst\'e4ndiges
|
||||||
|
\f4 docker-compose.yml
|
||||||
|
\f0 -Beispiel mit Traefik + deinem App-Container (inkl. HTTPS via Let\'92s Encrypt) zusammenstellen.\
|
||||||
|
\uc0\u55357 \u56393 Soll ich das machen?}
|
||||||
9
doc/ToDo.md
Normal file
9
doc/ToDo.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# ToDos
|
||||||
|
|
||||||
|
2025-11-09 rxf
|
||||||
|
|
||||||
|
### Allgemain
|
||||||
|
|
||||||
|
* Daten, die via Mongo geholt werden (insbesondere die properties) cachen
|
||||||
|
* Vergleich auf die sensorid. Wenn die sich geändert hat, dann neu holen. Ansonsten aus dem Cache nehmen.
|
||||||
|
|
||||||
18
generate-apikey.js
Executable file
18
generate-apikey.js
Executable file
@@ -0,0 +1,18 @@
|
|||||||
|
#!/usr/bin/env node

// Script to generate API keys for the SensorAPI.
// Usage: ./generate-apikey.js [count]   (default: one key)

import crypto from 'crypto'

// Always pass an explicit radix to parseInt; a missing or non-numeric
// argument yields NaN, which falls back to 1 via ||.
const count = Number.parseInt(process.argv[2], 10) || 1

console.log(`Generating ${count} API Key(s):\n`)

for (let i = 0; i < count; i++) {
  // 32 random bytes -> 64 lowercase hex characters per key
  const apiKey = crypto.randomBytes(32).toString('hex')
  console.log(`${i + 1}. ${apiKey}`)
}

console.log('\nAdd these keys to your .env file:')
console.log('API_KEYS=key1,key2,key3')
console.log('\nOr set API_AUTH_REQUIRED=true to enable authentication')
|
||||||
18
package-lock.json
generated
18
package-lock.json
generated
@@ -14,6 +14,7 @@
|
|||||||
"cookie-parser": "~1.4.7",
|
"cookie-parser": "~1.4.7",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"debug": "~4.4.3",
|
"debug": "~4.4.3",
|
||||||
|
"dotenv": "^17.2.3",
|
||||||
"express": "^5.1.0",
|
"express": "^5.1.0",
|
||||||
"http-errors": "~2.0.0",
|
"http-errors": "~2.0.0",
|
||||||
"i18next": "^25.5.2",
|
"i18next": "^25.5.2",
|
||||||
@@ -2117,6 +2118,18 @@
|
|||||||
"integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==",
|
"integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/dotenv": {
|
||||||
|
"version": "17.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz",
|
||||||
|
"integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==",
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://dotenvx.com"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/dunder-proto": {
|
"node_modules/dunder-proto": {
|
||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||||
@@ -5980,6 +5993,11 @@
|
|||||||
"resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz",
|
||||||
"integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ=="
|
"integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ=="
|
||||||
},
|
},
|
||||||
|
"dotenv": {
|
||||||
|
"version": "17.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz",
|
||||||
|
"integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="
|
||||||
|
},
|
||||||
"dunder-proto": {
|
"dunder-proto": {
|
||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
{
|
{
|
||||||
"name": "sensorapi",
|
"name": "sensorapi_influx",
|
||||||
"version": "1.4.1",
|
"version": "1.5.0",
|
||||||
"date": "2023-11-29 16:00 UTC",
|
"date": "2025-11-22 10:00 UTC",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "node ./bin/www.js >>/var/log/sensorapi.log 2>&1",
|
"start": "node ./bin/www.js >>/var/log/sensorapi_i.log 2>&1",
|
||||||
"test": "mocha ./test/test.js"
|
"test": "mocha ./test/test.js"
|
||||||
},
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
@@ -18,6 +18,7 @@
|
|||||||
"cookie-parser": "~1.4.7",
|
"cookie-parser": "~1.4.7",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"debug": "~4.4.3",
|
"debug": "~4.4.3",
|
||||||
|
"dotenv": "^17.2.3",
|
||||||
"express": "^5.1.0",
|
"express": "^5.1.0",
|
||||||
"http-errors": "~2.0.0",
|
"http-errors": "~2.0.0",
|
||||||
"i18next": "^25.5.2",
|
"i18next": "^25.5.2",
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import { getActData, getAvgData, getLongAvg, calcRange} from "../actions/getsens
|
|||||||
import checkParams from "../utilities/checkparams.js";
|
import checkParams from "../utilities/checkparams.js";
|
||||||
import {DateTime} from 'luxon'
|
import {DateTime} from 'luxon'
|
||||||
import { translate as trans } from '../routes/api.js'
|
import { translate as trans } from '../routes/api.js'
|
||||||
import * as influx from "../databases/influx.js"
|
import * as influx from "../databases/influx_sql.js"
|
||||||
import * as mongo from "../databases/mongo.js"
|
import * as mongo from "../databases/mongo.js"
|
||||||
import { setoptionfromtable } from "../utilities/chartoptions.js"
|
import { setoptionfromtable } from "../utilities/chartoptions.js"
|
||||||
|
|
||||||
@@ -42,6 +42,7 @@ export const getNoiseData = async (params, possibles, props) => {
|
|||||||
ret = {
|
ret = {
|
||||||
err: erg.err,
|
err: erg.err,
|
||||||
options: {
|
options: {
|
||||||
|
dbase: opts.db === 'm' ? 'Mongo' : 'Influx',
|
||||||
sid: opts.sensorid,
|
sid: opts.sensorid,
|
||||||
indoor: props.location[0].indoor,
|
indoor: props.location[0].indoor,
|
||||||
span: opts.span,
|
span: opts.span,
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ async function testInfluxSQL() {
|
|||||||
|
|
||||||
// Test options similar to what would be used in the application
|
// Test options similar to what would be used in the application
|
||||||
const testOpts = {
|
const testOpts = {
|
||||||
sensorid: 'test_sensor_001',
|
sensorid: 'esp8266-5829557',
|
||||||
start: 'now() - 1h',
|
start: 'now() - 1h',
|
||||||
stop: 'now()',
|
stop: 'now()',
|
||||||
sort: 1,
|
sort: 1,
|
||||||
|
|||||||
94
utilities/apiauth.js
Normal file
94
utilities/apiauth.js
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
// API Key Authentication Middleware
|
||||||
|
|
||||||
|
import crypto from 'crypto'
|
||||||
|
import { logit, logerror } from './logit.js'
|
||||||
|
|
||||||
|
// Allowed API keys, loaded from the environment (comma-separated list);
// surrounding whitespace is stripped and empty entries are dropped.
const configuredKeys = (process.env.API_KEYS || '')
  .split(',')
  .map((entry) => entry.trim())
  .filter((entry) => entry.length > 0)
const API_KEYS = new Set(configuredKeys)

// Opt-in mode: unless API_AUTH_REQUIRED=true or keys are configured,
// no authentication is performed at all.
const AUTH_REQUIRED = (process.env.API_AUTH_REQUIRED || '').trim().toLowerCase() === 'true'

// Report the effective authentication mode once at startup.
const authEnabled = AUTH_REQUIRED || API_KEYS.size > 0
if (authEnabled) {
  logit(`API Authentication: ENABLED (${API_KEYS.size} keys configured)`)
} else {
  logit(`API Authentication: DISABLED`)
}
|
||||||
|
|
||||||
|
/**
 * Generate a new API key (for administrative purposes).
 * @returns {string} freshly generated key: 32 random bytes as 64 hex characters
 */
export const generateApiKey = () => {
  const keyBytes = crypto.randomBytes(32)
  return keyBytes.toString('hex')
}
|
||||||
|
|
||||||
|
/**
 * Express middleware that validates the client's API key.
 *
 * Accepts the key from either:
 * - Header: X-API-Key
 * - Query parameter: apikey
 *   (NOTE(review): keys passed in the query string end up in access logs and
 *   browser history — prefer the header; confirm the query fallback is needed)
 *
 * Responds 401 when no key is supplied and 403 when the key is unknown;
 * otherwise hands control to the next middleware.
 *
 * @param {object} req - Express request
 * @param {object} res - Express response
 * @param {Function} next - next middleware in the chain
 */
export const validateApiKey = (req, res, next) => {
  // Authentication is opt-in: with no keys configured and no explicit
  // requirement, every request passes straight through.
  if (!AUTH_REQUIRED && API_KEYS.size === 0) {
    return next()
  }

  // Extract the API key from the header or, as a fallback, the query string.
  const apiKey = req.header('X-API-Key') || req.query.apikey

  if (!apiKey) {
    // Use the dedicated error logger instead of logit() with a hand-written
    // "ERROR" prefix — logerror is imported at the top of this file for
    // exactly this purpose.
    logerror(`API Auth: No API key provided - ${req.ip}`)
    return res.status(401).json({
      err: 'UNAUTHORIZED',
      message: 'API key required. Provide X-API-Key header or apikey query parameter.'
    })
  }

  // Reject keys that are not in the configured allow-list.
  if (!API_KEYS.has(apiKey)) {
    logerror(`API Auth: Invalid API key - ${req.ip}`)
    return res.status(403).json({
      err: 'FORBIDDEN',
      message: 'Invalid API key'
    })
  }

  // Trace successful authentications (one log line per authenticated request).
  logit(`API Auth: Valid request from ${req.ip}`)

  next()
}
|
||||||
|
|
||||||
|
/**
 * Add API key to the allowed list (for runtime management).
 * Only the first 8 characters of the key are logged.
 * @param {string} apiKey - API key to add
 */
export const addApiKey = (apiKey) => {
  API_KEYS.add(apiKey)
  const keyPrefix = apiKey.substring(0, 8)
  logit(`API Key added: ${keyPrefix}...`)
}
|
||||||
|
|
||||||
|
/**
 * Remove API key from the allowed list.
 * Only the first 8 characters of the key are logged.
 * @param {string} apiKey - API key to remove
 */
export const removeApiKey = (apiKey) => {
  API_KEYS.delete(apiKey)
  const keyPrefix = apiKey.substring(0, 8)
  logit(`API Key removed: ${keyPrefix}...`)
}
|
||||||
|
|
||||||
|
/**
 * Check if API authentication is active.
 * @returns {boolean} true when auth is explicitly required or at least one key is configured
 */
export const isAuthActive = () => API_KEYS.size > 0 || AUTH_REQUIRED
|
||||||
Reference in New Issue
Block a user