First commit of the test version

This commit is contained in:
2025-10-10 16:27:06 +00:00
commit 2308aa56a3
37 changed files with 10988 additions and 0 deletions

9
.dockerignore Normal file
View File

@@ -0,0 +1,9 @@
build_and_copy.sh
Dockerfile_sensorapi
docker-compose.yml
.vscode
log
mocks
test
node_modules
doc

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
node_modules/
.env*
.DS_Store
sensorapi.log

75
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,75 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch reception",
"skipFiles": [
"<node_internals>/**"
],
"env": {
"DEBUG": "sensorapi:*",
"INFLUXHOST": "192.168.178.190",
"INFLUXTOKEN": "gHGGgjaK0lmM6keMa01JeuDpqOE_vRq8UimsU4QKb2miI5BDh2PfWynEbwKizdJapXy8jVbTat5mVZLQTNmSdw==",
"MONGOHOST": "192.168.178.190",
"MONGOAUTH": "true",
"MONGOUSRP": "admin:mongo4noise",
// "DBASE": "influx",
},
"program": "${workspaceFolder}/bin/www.js"
},
{
"type": "node",
"request": "launch",
"name": "Launch esprimo",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/bin/www.js",
"env": {
"DEBUG": "sensorapi:*",
"MONGOUSRP": "admin:mongo4noise",
"MONGOPORT": "27037",
"MONGOHOST": "217.72.203.152",
//"MONGOUSRP": "rexfue:5g2h4j3XC$$C$§442dcdsvDCx",
"MONGOAUTH": "true",
"INFLUXTOKEN": "q35XUBaElzcy8dDd9HF2_mpeHvYCampZg_9mJNP5jeBQRopq3EWIzNTZ555QLSIXhZC05RXCoFgjiaT7VzyNkQ==",
"DEVELOP": "true",
}
},
{
"type": "node",
"request": "launch",
"name": "Ralf",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/bin/www.js",
"env": {
"MONGOHOST": "192.168.51.22",
"MONGOAUTH": "true",
"MONGOUSRP": "rexfue:s25BMmW2gg",
}
},
{
"type": "node",
"request": "launch",
"name": "Launch localhost",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/bin/www.js",
"env": {
"INFLUXHOST": "localhost",
"INFLUXTOKEN": "Pt7cDkKS1pAI2a0qsAhfSY97EVsfeNwJxo-ZdiNvfwC4kBiqxmoj7WbR7XkNRr23YELydv_9HXrN2SMofq9vhQ==",
"MONGOHOST": "localhost",
"MONGOPORT": "27017",
"DEBUG": "sensorapi:*"
}
}
]
}

23
Dockerfile_sensorapi Normal file
View File

@@ -0,0 +1,23 @@
FROM node:alpine
ADD package.json /tmp/package.json
RUN cd /tmp && npm install
RUN mkdir -p /opt/app && cp -a /tmp/node_modules /opt/app/
WORKDIR /opt/app
ADD . /opt/app
# RUN apk add busybox-initscripts
RUN apk add --no-cache tzdata
ENV TZ Europe/Berlin
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN deluser --remove-home node
RUN touch cmds.sh \
&& echo 'npm start' >>cmds.sh
CMD sh ./cmds.sh

158
INFLUX_SQL_DOCUMENTATION.md Normal file
View File

@@ -0,0 +1,158 @@
# InfluxDB 1.8 SQL Implementation - Documentation
## Overview
This file (`databases/influx_sql.js`) implements the same functionality as `databases/influx_flux.js`, but for InfluxDB 1.8 using InfluxQL instead of Flux queries.
## Key Differences from InfluxDB 2.0/Flux
### Data Schema Differences
- **InfluxDB 1.8**: Stores only `LA_max`, `LA_min`, `LA_eq` (all in dB)
- **InfluxDB 2.0**: Additionally stores `E10tel_eq` as a pre-computed linear value (`10^(LA_max/10)`)
### Data Conversion
Since InfluxDB 1.8 has no `E10tel_eq` field, it is computed at runtime:
```javascript
E10tel_eq = Math.pow(10, LA_max / 10)
```
### Configuration
- **InfluxDB 1.8**: Uses username/password authentication
- **InfluxDB 2.0**: Uses token-based authentication
- **Endpoints**: `/query` for reads, `/write` for writes (instead of `/api/v2/...`)
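As a minimal sketch of what a read against the 1.8 endpoint looks like (hypothetical host and credentials; the query parameters `db`, `q`, `u`, and `p` match those used by `databases/influx_sql.js`):
```bash
# Hypothetical example: InfluxQL read via the 1.8 /query endpoint.
# Credentials go in the u/p query parameters; token auth is 2.0-only.
curl -G "http://localhost:8086/query" \
  --data-urlencode "db=sensor_data" \
  --data-urlencode "u=username" \
  --data-urlencode "p=password" \
  --data-urlencode "q=SELECT \"LA_max\" FROM \"measurements\" WHERE \"sid\" = 'noise_001' LIMIT 5"
```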
### Environment Variables
```bash
# InfluxDB 1.8 configuration
INFLUXHOST=localhost
INFLUXPORT=8086
INFLUXUSER=username     # new for 1.8
INFLUXPASS=password     # new for 1.8
INFLUXDB=sensor_data    # database instead of bucket
```
## Logarithmic Averaging
### Problem
LAmax values are logarithmic quantities (decibels). Taking a plain arithmetic mean of them is mathematically wrong.
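A quick numeric check with two hypothetical readings shows the difference:
```javascript
// Two hypothetical readings: 50 dB and 70 dB.
const values = [50, 70]

// Naive arithmetic mean of the dB values: (50 + 70) / 2 = 60 dB
const naive = values.reduce((s, v) => s + v, 0) / values.length

// Logarithmic (energetic) mean: convert to linear, average, convert back.
const linear = values.map(v => Math.pow(10, v / 10))
const logMean = 10 * Math.log10(linear.reduce((s, v) => s + v, 0) / linear.length)

console.log(naive)   // 60
console.log(logMean) // ≈ 67.03 — the louder event dominates, as it should
```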
### Data Schema Differences
- **InfluxDB 1.8**: Only `LA_max` (dB) is available → `E10tel_eq` is computed at runtime
- **InfluxDB 2.0**: `E10tel_eq` is already stored as `10^(LA_max/10)`
### Implementation in InfluxDB 1.8
```javascript
// 1. Convert LA_max → E10tel_eq at runtime
const e10tel = Math.pow(10, lamax / 10)
// 2. Average the E10tel values
const e10telMean = e10telValues.reduce((sum, val) => sum + val, 0) / e10telValues.length
// 3. Convert back to dB for n_AVG
const n_AVG = 10.0 * Math.log10(e10telMean)
```
This matches the Flux version exactly:
```flux
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|> map(fn: (r) => ({r with _value: (10.0 * math.log10(x: r._value))}))
```
## Functions
### fetchActData(opts)
**Purpose**: Fetch current/historical sensor data
**Parameters**:
- `opts.sensorid`: sensor ID
- `opts.start`: start time (e.g. "now() - 1h")
- `opts.stop`: end time (e.g. "now()")
- `opts.sort`: sort order (1 = ascending, -1 = descending)
**InfluxQL query**:
```sql
SELECT *
FROM "measurements"
WHERE "sid" = 'sensor_id'
AND time >= start_time
AND time <= stop_time
ORDER BY time ASC/DESC
```
### fetchNoiseAVGData(opts)
**Purpose**: Compute noise statistics with correct logarithmic averaging
**Parameters**:
- `opts.sensorid`: sensor ID
- `opts.start`: start time
- `opts.stop`: end time
- `opts.peak`: threshold for peak counting (dB)
- `opts.long`: full data (true) or summary only (false)
**Notes**:
- Uses multiple InfluxQL queries (InfluxQL does not support complex joins the way Flux does)
- Combines the results in JavaScript
- Correct logarithmic averaging of LAmax values
## Flux vs. InfluxQL Mapping
| Flux operation | InfluxQL equivalent |
|----------------|---------------------|
| `from(bucket)` | `FROM "measurement"` |
| `range(start, stop)` | `WHERE time >= start AND time <= stop` |
| `filter(fn: (r) => r.sid == "x")` | `WHERE "sid" = 'x'` |
| `aggregateWindow(every: 1h, fn: mean)` | `GROUP BY time(1h)` with `mean()` |
| `pivot()` | multiple columns in SELECT |
| `join()` | multiple queries + combining in JavaScript |
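As a concrete illustration (a sketch, not taken verbatim from the code), the hourly mean of `LA_max` translates like this:
```flux
from(bucket: "sensor_data")
  |> range(start: -24h)
  |> filter(fn: (r) => r.sid == "noise_001" and r._field == "LA_max")
  |> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
```
```sql
SELECT mean("LA_max")
FROM "measurements"
WHERE "sid" = 'noise_001' AND time >= now() - 24h
GROUP BY time(1h)
```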
## Example Usage
```javascript
import { fetchActData, fetchNoiseAVGData } from './databases/influx_sql.js'
// Fetch current data
const actData = await fetchActData({
sensorid: 'noise_001',
start: 'now() - 1h',
stop: 'now()',
sort: 1
})
// Noise statistics with peak counting
const noiseStats = await fetchNoiseAVGData({
sensorid: 'noise_001',
start: 'now() - 24h',
stop: 'now()',
peak: 70,
long: false
})
```
## Return Values
Both functions return an object:
```javascript
{
err: null, // error or null
values: [ // array of results
{
_time: "2025-10-10T10:00:00.000Z",
// ... further fields depending on the function
}
]
}
```
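Callers are expected to check `err` before using `values`; a minimal sketch, using the same options as the example above:
```javascript
const { err, values } = await fetchActData({
    sensorid: 'noise_001',
    start: 'now() - 1h',
    stop: 'now()',
    sort: 1
})
if (err) {
    console.error('query failed:', err)
} else {
    for (const row of values) {
        console.log(row._time, row.LA_max)
    }
}
```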
## Migration from Flux to InfluxQL
1. **Adjust the environment variables** (see above)
2. **Change the import**: `from './databases/influx_flux.js'` → `from './databases/influx_sql.js'` (see the sketch after this list)
3. **Function calls stay identical** - same parameters and return values
4. **Switch the InfluxDB server to version 1.8**
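If both backends need to coexist during the migration, the choice can be driven by an environment variable, mirroring the `DBASE` switch already used in `actions/getsensorData.js` (a sketch; the variable values are illustrative):
```javascript
// Sketch: select the backend at startup via an environment variable.
const DBASE = process.env.DBASE || 'influx'

const db = DBASE === 'influx_sql'
    ? await import('./databases/influx_sql.js')  // InfluxDB 1.8 / InfluxQL
    : await import('./databases/influx_flux.js') // InfluxDB 2.0 / Flux

// Same call signature in both implementations:
const { err, values } = await db.fetchActData({
    sensorid: 'noise_001',
    start: 'now() - 1h',
    stop: 'now()',
    sort: 1
})
```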
## Advantages of the InfluxQL Implementation
- **Compatibility**: Works with older InfluxDB 1.8 installations
- **Correct math**: Logarithmic averaging of decibel values
- **Identical API**: Drop-in replacement for the Flux version
- **Performance**: Queries optimized for InfluxDB 1.8

190
actions/data4map.js Normal file
View File

@@ -0,0 +1,190 @@
// Fetch the actual (= newest) data out of the dbase to show it on the map
import {DateTime} from "luxon"
import * as mongo from "../databases/mongo.js"
import { returnOnError } from "../utilities/reporterror.js"
// Default distance for center search ( in km)
const DEFAULT_DISTANCE = 10
// Value to use for the map
const value4map = [
{ typ: 'noise', value: 'LA_max'},
{ typ: 'pm', value: 'P1'},
{ typ: 'thp', value: 'temperature'},
{ typ: 'radioactivity', value: 'counts_per_minute'},
]
// get the value for the map from the table
const getValue4Map = (t) => {
for(let x of value4map) {
if(x.typ == t) {
return x.value
}
}
return ''
}
// Relations between types and value type
const vtype2measurement = {
P1: 'pm', P2: 'pm', P0: 'pm',
temperature: 'thp', humidity: 'thp', pressure: 'thp',
LAeq: 'noise',
counts_per_minute: 'radioactivity'
};
// find first value type from measurement
const getfieldfromtype = (typ) => {
for (const [key, value] of Object.entries(vtype2measurement)) {
if (value === typ) {
return key
}
}
return ' '
}
// read the last entries from the influx database
const readLastDates = async (typ) => {
let ret = {values: [], err: null}
let query = `
from(bucket: "sensor_data")
|> range(start: -2h)
|> filter(fn: (r) => r._measurement == "${typ}" and r._field == "${getValue4Map(typ)}")
|> last()
|> group()
|> map(fn: (r) => ({r with sid: int(v: r.sid)}))
|> sort(columns: ["sid"])
|> keep(columns: ["_time","sid","_value"])
`
return await fetchFromInflux(ret, query)
}
export var getData4map = async (params) => {
let start = DateTime.now()
let ret = {err: null}
// ***** This function will (at the moment) only be called by internal routines, so there is no need to check the parameters !
const typ = params.type
let poly = []
let south = null, north = null, east = null, west = null, center = null
let distance = DEFAULT_DISTANCE
if(params.box !== undefined) {
let val = params.box.split(',')
for (let i = 0; i < val.length; i++) {
val[i] = parseFloat(val[i])
}
south = parseFloat(val[1])
north = parseFloat(val[3])
east = parseFloat(val[2])
west = parseFloat(val[0])
// logit(`getData4map: S=${south} N=${north} E=${east} W=${west}`)
}
if (!((params.poly === undefined) || (params.poly === ' '))){
poly = JSON.parse(params.poly)
}
if (params.center !== undefined) {
center = params.center
if ((params.distance !== undefined) &&
(params.distance >= 1) && (params.distance <= 1000)) {
distance = params.distance
}
}
const aktData = []
let lastDate = 0
let query = {type: typ}
// if polyline or box were given, set query
if (poly.length !== 0) { // polyline given
query.location = {
$geoWithin: {
$geometry: {
type: "Polygon",
coordinates: [poly],
}
}
}
} else if (south !== null) { // box given
query["location.loc"] = {
$geoWithin: {
$box: [
[west, south],
[east, north]
]
}
}
} else if (center !== null) { // center point given
query["location.loc"] = {
$nearSphere: {
$geometry: {
type: "Point",
coordinates: center
},
$maxDistance: distance * 1000
}
}
}
try {
// fetch mapdata from mongodb
let { properties, err } = await mongo.getallProperties(mongo.properties_collection, query)
if(err) {
return returnOnError(ret, 'NOPROPSFOUND', getData4map.name)
}
let v4map = getValue4Map(typ)
for (let sensor of properties) {
let oneAktData = {}
if (sensor.values !== undefined) {
oneAktData = {
location: sensor.location[0].loc.coordinates,
id: sensor._id,
name: sensor.name[0].name,
indoor: sensor.location[0].indoor,
lastseen: sensor.values.timestamp
}
let now = new Date().getTime()
if(oneAktData.lastseen !== '') {
let diff = now - oneAktData.lastseen.getTime()
if (diff >= 365 * 24 * 3600 * 1000) {
oneAktData.value = -4
} else if (diff >= 30 * 24 * 3600 * 1000) {
oneAktData.value = -3
} else if (diff >= 7 * 24 * 3600 * 1000) {
oneAktData.value = -2
} else if (diff >= 2 * 3600 * 1000) {
oneAktData.value = -1
} else {
if (sensor.values !== undefined) {
oneAktData.value = Math.round(sensor.values[v4map] * 100) / 100
}
}
let weeks = Math.round(diff / (7 * 24 * 3600 * 1000))
oneAktData.weeks = weeks
}
if (sensor.values.timestamp > lastDate) {
lastDate = sensor.values.timestamp
}
} else {
oneAktData.value = -5
oneAktData.weeks = 0
oneAktData.lastseen = ''
}
aktData.push(oneAktData)
}
ret = {
err: null,
options: {
lastdate: lastDate,
count: aktData.length,
data: 'map'
},
values: aktData
}
return ret
}
catch(e) {
return returnOnError(ret, `catch\n${e}`, getData4map.name)
}
}

62
actions/getAKWData.js Normal file
View File

@@ -0,0 +1,62 @@
// Fetch data about nuclear power plants (AKWs)
import {DateTime} from "luxon"
import { logit, logerror } from '../utilities/logit.js'
import * as mongo from "../databases/mongo.js"
import {reportError, returnOnError} from "../utilities/reporterror.js"
import {csv2Json} from "../utilities/csv2json.js"
// Fetch all akw data out of the dbase
// router.get('/getakwdata/', async function (req, res) {
export const getakwdata = async (options) => {
let data = {err: null, ...options, count: 0, values: []}
let erg = []
try {
let rawAKWs = await mongo.readAKWs(options)
if (rawAKWs.err) {
return returnOnError(data, rawAKWs.err, getakwdata.name)
}
for (let item of rawAKWs.values.akws) {
var oneAktData = {};
oneAktData['location'] = {
type: 'Point',
coordinates: [item.lon, item.lat]
};
oneAktData['name'] = item.Name;
oneAktData['active'] = item.Status == 'aktiv';
oneAktData['start'] = item.Baujahr;
oneAktData['end'] = item.Stillgeleg;
oneAktData['type'] = item.Status === 'aktiv' ? 'akw_a' : 'akw_s';
oneAktData['link'] = item.Wiki_Link;
erg.push(oneAktData); // push all these values into the array
}
for (let item of rawAKWs.values.th1_akws) {
let oneAktData = {};
let loc = item.geo.substr(6).split(' ');
let lon = parseFloat(loc[0]);
let lat = parseFloat(loc[1]);
oneAktData['location'] = {
type: 'Point',
coordinates: [lon, lat]
};
oneAktData['name'] = item.name;
oneAktData['typeText'] = item.types;
oneAktData['type'] = item.types == "Nuclear power plant" ? 'akw_a' : 'other';
oneAktData['link'] = item.item;
if (item.itemServiceretirement != undefined) {
oneAktData['ende'] = item.itemServiceretirement.substr(0, 4);
}
if (item.itemServiceentry != undefined) {
oneAktData['begin'] = item.itemServiceentry.substr(0, 4);
}
erg.push(oneAktData);
}
data.values = erg
data.count = erg.length
} catch (e) {
return returnOnError(data, e, getakwdata.name)
}
return data
}

80
actions/getaddress.js Normal file
View File

@@ -0,0 +1,80 @@
import {returnOnError} from "../utilities/reporterror.js"
import axios from 'axios'
import {logit} from "../utilities/logit.js"
import {getOneProperty} from "./getproperties.js"
const NOMINATIM_URL = `https://nominatim.openstreetmap.org/reverse?lat=${'xx'}&lon=${'yy'}&format=json`
const NOMINATIM_CITY_URL = `https://nominatim.openstreetmap.org/?q="${'xx'}"&format=json`
export const getCityCoords = async (params) => {
let ret = {coords: [], city: params.city, err: null}
let url = NOMINATIM_CITY_URL.replace('xx', params.city)
// let url = 'https://nominatim.openstreetmap.org/?q="K%C3%B6ln"&format=json'
try {
const response = await axios(url)
if (response.status !== 200) {
return returnOnError(ret, 'RESPSTATUS', getCityCoords.name, response.status)
}
ret.coords = [response.data[0].lat,response.data[0].lon]
logit(JSON.stringify(ret.coords))
} catch (e) {
return returnOnError(ret, e, getCityCoords.name)
}
return ret
}
export const getAddress = async (params) => {
let ret = {address: "", err: null}
let {props, err} = await getOneProperty(params)
if (err) {
return returnOnError(ret, err, getAddress.name)
}
let coord = props.location[0].loc.coordinates
let url = NOMINATIM_URL.replace('xx', coord[1]).replace('yy', coord[0])
try {
const response = await axios(encodeURI(url),{
headers: {
'User-Agent': 'Firefox 132.0.1'
}
});
if (response.status !== 200) {
return returnOnError(ret, 'RESPSTATUS', getAddress.name, response.status)
}
let akt = response.data.address
// logit(JSON.stringify(akt))
const CITY = ['city', 'town', 'village', 'suburb', 'county']
let city = "unknown"
for (let c of CITY) {
if(akt[c] !== undefined) {
city = akt[c]
break
}
}
// ret.address = `${(akt.road ? akt.road : "")} ${(akt.postcode ? akt.postcode : "")} ${city}`;
ret.address = {
street: `${(akt.road ? akt.road : "")}`,
plz: `${(akt.postcode ? akt.postcode : "")}`,
city: `${city}`}
} catch (e) {
return returnOnError(ret, e, getAddress.name)
}
return ret
}
/*
let addr = "Addr";
try {
let ret = await $.get("api/getprops?sensorid=" + marker.options.name);
if(ret.values[0].address.city == null) {
addr += " unbekannt !"
} else {
let akt = ret.values[0].address;
addr = (akt.street ? akt.street : "") + "&nbsp;&nbsp;" + (akt.plz ? akt.plz : "") + " " + akt.city;
}
} catch (e) {
console.log("onMarker - getpops", e)
}
console.log("addr:", addr);
return addr;
*/

48
actions/getproperties.js Normal file
View File

@@ -0,0 +1,48 @@
// Fetch the properties for the given sensor
import * as mongo from "../databases/mongo.js"
import {returnOnError} from "../utilities/reporterror.js"
import checkParams from "../utilities/checkparams.js"
let readProperties = mongo.readProperties
let readChipData = mongo.readChipData
// Read properties for sensorid and properties for all other sensors on same location
export const getOneProperty = async (params) => {
let properties = {err: null, props: {}, chip: {}}
let {opts, err} = checkParams(params, {mandatory:[{name:'sensorid', type: 'int'}], optional:[]})
if (err) {
return returnOnError(properties, err, getOneProperty.name)
}
// read 'chip'-data (special for noise sensors)
const chipdata = await readChipData(opts.sensorid)
if (chipdata.err == undefined) {
properties.chip = chipdata.chipdata
}
let sensorEntries = [];
try {
let pp = await readProperties({sid: opts.sensorid}); // read for given sensorID
if ((pp.properties == null) || (pp.err)) {
return returnOnError(properties, 'NOPROPSREAD', getOneProperty.name, opts.sensorid)
}
// now find sensors with same location
let query = {"location.0.id": pp.properties.location[0].id}
let others = await readProperties(query)
if (others.err) {
return returnOnError(properties, 'NOPROPSREAD', getOneProperty.name, others.err)
}
if (others.properties.length > 0) {
for (const x of others.properties) {
if(x._id === pp.properties._id) {
continue
}
sensorEntries.push({name: x.name[0].name, sid: x._id})
}
}
properties.props = pp.properties
properties.props.othersensors = sensorEntries;
} catch (e) {
return returnOnError(properties, e, getOneProperty.name)
}
return properties
}

308
actions/getsensorData.js Normal file
View File

@@ -0,0 +1,308 @@
// get data for one sensor
const DBASE = process.env.DBASE || 'mongo'
import {DateTime} from "luxon"
import * as influx from "../databases/influx.js"
import * as mongo from "../databases/mongo.js"
import {returnOnError} from "../utilities/reporterror.js"
import {csv2Json} from "../utilities/csv2json.js"
import checkParams from "../utilities/checkparams.js"
import {getOneProperty} from "./getproperties.js"
import {getNoiseData} from "../sensorspecials/noise.js"
import {getgeigerData} from "../sensorspecials/geigeract.js"
// Possible params for the different sensor types
const noiseParams = [
{name:'data', type: 'string', default: 'live'},
{name: 'span', type: 'int', default: 1},
{name: 'daystart', type: 'bool', default: null},
{name: 'peak', type: 'int', default: 70},
{name: 'since', type: 'date', default: '1900-01-01T00:00:00Z'},
{name: 'box', type: 'array', default: null},
{name: 'out', type: 'string', default: ''},
{name: 'csv', type: 'bool', default: false},
{name: 'long', type: 'bool', default: false},
{name: 'sort', type: 'int', default: 1},
{name: 'last_seen', type: 'date', default: '1900-01-01T00:00:00Z'},
{name: 'datetime', type: 'date', default: null}
]
const thpParams = []
const pmParams = []
const geigerParams = [
{name: 'what', type: 'string', default: 'day'},
{name: 'span', type: 'int', default: 1},
{name: 'daystart', type: 'bool', default: null},
{name: 'avg', type: 'int', default: 1},
{name: 'since', type: 'date', default: '1900-01-01T00:00:00Z'},
{name: 'box', type: 'array', default: null},
{name: 'out', type: 'string', default: ''},
{name: 'csv', type: 'bool', default: false},
{name: 'long', type: 'bool', default: false},
{name: 'sort', type: 'int', default: 1},
{name: 'last_seen', type: 'date', default: '1900-01-01T00:00:00Z'},
{name: 'datetime', type: 'date', default: null},
{name: 'moving', type: 'bool', default: false},
]
// >>>>>>>>>>>>>>>>>>>>> DUMMIES
async function getPmData(opts) {
}
async function getThpData(opts) {
}
// <<<<<<<<<<<<<<<<<<<<< DUMMIES
// Table for the different sensor types
const sensorTypeTable = [
{ typ: 'thp', possibleParams: thpParams, func: getThpData},
{ typ: 'noise', possibleParams: noiseParams, func: getNoiseData},
{ typ: 'pm', possibleParams: pmParams, func: getPmData},
{ typ: 'radioactivity', possibleParams: geigerParams, func: getgeigerData}
]
/* Units:
* span -> days
* avg -> minutes
*/
// *********************************************
// calcRange
//
// Calculate the date/time range to read the data from
//
// If 'datetime' is not given, use 'now()' as end and 'now()' - span as start
// If 'datetime' is given, use it as start and 'datetime' + span as end
// If 'daystart' == true, shift start and end so that they begin at 00:00:00
// If 'avg' has a value, move start backward by 'avg' minutes
//
// params:
// opts: Object with different options, specially
// datetime, avg, daystart, span
//
// return:
// Object with start and stop in ISO-Format
// *********************************************
export const calcRange = (opts) => {
let start, end
let ret = { start: start, stop: end, err: null}
if((opts.datetime === null) || (opts.datetime === undefined) || (opts.datetime === '')) {
start = end = DateTime.local().toUTC()
start = start.minus({days: opts.span})
} else {
start = end = DateTime.fromISO(opts.datetime).toUTC()
end = end.plus({days: opts.span})
}
if(opts.daystart) {
start = start.startOf("day")
end = end.startOf("day")
}
// start = start.toUTC()
// end = end.toUTC()
// if(opts.avg !== undefined) {
// start = start.minus({minutes: opts.avg})
// }
ret.start = `start: ${start.toISO()}`
ret.stop = `stop: ${end.toISO()}`
return ret
}
// *********************************************
// getSensorData
//
// Depending on the parameter 'sensorid', distribute to the special routines for
// the sensors
//
// params:
// params: all parameters from the url
//
// return:
// Returns from the special routines
// *********************************************
// export const getSensorData = async (params) => {
export async function getSensorData(params) {
let ret = {err: null}
let {opts, err} = checkParams(params, { // check for sensorid
mandatory: [{name: 'sensorid', type: 'int'}],
optional: []
})
if (err) {
return returnOnError(ret, err, getSensorData.name)
}
// with the sensorid get the type of this sensor
let erg = await getOneProperty({sensorid: opts.sensorid})
if (erg.err) {
return returnOnError(ret, erg.err, getSensorData.name)
}
// distribute to the right routine
for(let item of sensorTypeTable) {
if(item.typ === erg.props.type) {
ret = await item.func(params, item.possibleParams, erg.props) // get the values from database
return ret
}
}
return returnOnError(ret, 'CMNDUNKNOWN', getSensorData.name)
}
// export const getActData = async (opts) => {
export async function getActData(opts) {
if (DBASE === 'mongo') {
return await mongo.fetchActData(opts)
} else if (DBASE === 'influx') {
return await influx.fetchActData(opts)
}
return {err: 'DBASEUNKNOWN', values: []}
}
// ..../api/getavgdata?
// ToDo: check UTC !!!!
export var getAvgData = async (params) => {
let ret = {data: {count: 0, values: []}, err: null}
let {opts, err} = checkParams(params,
{
mandatory:
[
{name: 'sensorid', type: 'int'},
],
optional:
[
{name: 'span', type: 'int', default: 1},
{name: 'datetime', type: 'date', default: null},
{name: 'avg', type: 'int', default: 10},
{name: 'moving', type: 'bool', default: true},
]
}
)
if (err) {
return returnOnError(ret, err, getAvgData.name)
}
let {start, stop} = calcRange(opts)
if (stop === '') {
ret.data.start = DateTime.now().toUTC().minus({days: `${opts.span}`}).toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
} else {
ret.data.start = DateTime.fromISO(opts.datetime, {zone: 'utc'}).toISO()
}
ret.data.span = opts.span
ret.data.moving = opts.moving
ret.data.avg = opts.avg
let every = opts.moving ? '150s' : `${opts.avg}m`
let period = `${opts.avg}m`
let query = `
from(bucket: "sensor_data")
|> range(${start}, ${stop})
|> filter(fn: (r) => r.sid == "${opts.sensorid}")
|> timedMovingAverage(every: ${every}, period: ${period})
|> keep(columns: ["_time","_field","_value"])
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
`
return await fetchFromInflux(ret, query)
}
// ..../api/getlongavg?sensorid=123&span=2
export var getLongAvg = async (params) => {
let ret = {data: {count: 0, values: []}, err: null}
let {opts, err} = checkParams(params,
{
mandatory:
[
{name: 'sensorid', type: 'int'},
],
optional:
[
{name: 'span', type: 'int', default: 2},
]
}
)
if (err) {
return returnOnError(ret, err, getLongAvg.name)
}
ret.data.span = opts.span
let query = `
from(bucket: "sensor_data")
|> range(start: -${opts.span}d)
|> filter(fn: (r) => r.sid == "${opts.sensorid}")
|> mean()
|> drop(columns: ["_start","_measurement", "sid"])
|> pivot(rowKey:["_stop"], columnKey: ["_field"], valueColumn: "_value")
`
return await fetchFromInflux(ret, query)
}
// *********************************************
// function getMAPaktData() {
//
// Get the actual data from the database within the bounds for the given sensor type
//
// params:
// opt: opt.box => region for which to find sensor data
// opt.typ: type of sensor
//
// return
// JSON
// { avgs: [
// { location: [ 9.00, 48.80 ], id: 29174, lastSeen: "2019-10-23T12:03:00.000Z", max: "65" }
// { location: [ 9.10, 49.80 ], id: 28194, lastSeen: "2019-10-22T16:03:00.000Z", max: "45" }
// .........
// ], lastDate: "2019-10-29T15:05:59.000Z" }
//
// *********************************************
export const getMAPaktData = async (opt) => {
let box = opt.box
if ((box == "") || (box == undefined)) {
return {"avgs": [], "lastDate": null}
}
let south = parseFloat(box[0][1])
let north = parseFloat(box[1][1])
let east = parseFloat(box[1][0])
let west = parseFloat(box[0][0])
let aktData = []
let lastDate = 0
let loc = {
location: {
$geoWithin: {
$box: [
[west, south],
[east, north]
]
}
},
typ: opt.typ,
}
let { mapdata: docs } = await mongo.readMapdata(loc, 0)
// console.log(docs)
const now = new Date().getTime() // current time, used for the age checks below
for (let i = 0; i < docs.length; i++) {
let item = docs[i]
let oneAktData = {}
oneAktData['location'] = item.location.coordinates
oneAktData['id'] = item._id // get the sensor's ID
oneAktData['lastSeen'] = item.values.datetime
oneAktData['indoor'] = item.indoor
let dati = item.values.datetime
let dt = new Date(dati)
if ((now - dt) >= 31 * 24 * 3600 * 1000) { // older than 1 month ->
oneAktData['noise_max'] = -2 // return -2
} else if ((now - dt) >= 3600 * 1000) { // older than 1 hour ->
oneAktData['noise_max'] = -1 // return -1
} else {
oneAktData['noise_max'] = -5 // means -> do not display
if (item.values.hasOwnProperty('LA_max')) {
oneAktData['noise_max'] = item.values.LA_max.toFixed(0) // and keep it
}
if (dati > lastDate) {
lastDate = dati
}
}
aktData.push(oneAktData) // push all these values into the array
}
return {"avgs": aktData, "lastDate": lastDate} // everything processed -> send the array
}

66
app.js Normal file
View File

@@ -0,0 +1,66 @@
import createError from 'http-errors'
import logger from 'morgan'
import express from 'express'
import cookieParser from 'cookie-parser'
import cors from 'cors'
import i18next from 'i18next'
import i18nextMiddleware from 'i18next-http-middleware'
import Backend from 'i18next-node-fs-backend'
import {logit} from "./utilities/logit.js"
import pkg from './package.json' with { type: "json" }
const app = express()
import { fileURLToPath } from "url";
import path from "path";
import indexRouter from './routes/index.js'
import { apiRouter } from './routes/api.js'
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
i18next
.use(Backend)
.use(i18nextMiddleware.LanguageDetector)
.init({
backend: {
loadPath: __dirname + '/locales/{{lng}}/{{ns}}.json'
},
fallbackLng: 'de',
debug: false,
preload: ['de', 'en']
});
app.use(i18nextMiddleware.handle(i18next));
app.use(cors())
app.use(logger('dev'))
app.use(express.json())
app.use(express.urlencoded({ extended: true }))
app.use(cookieParser())
app.use('/', indexRouter)
app.use('/api', apiRouter)
// catch 404 and forward to error handler
app.use(function(req, res, next) {
next(createError(404))
})
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message
res.locals.error = req.app.get('env') === 'development' ? err : {}
// render the error page
res.status(err.status || 500)
res.send(`ERROR: ${err.status}, ${err.stack}`)
})
logit(`Start of Program Version: ${pkg.version} from ${pkg.date}`)
export default app

94
bin/www.js Normal file
View File

@@ -0,0 +1,94 @@
#!/usr/bin/env node
/**
* Module dependencies.
*/
import app from '../app.js'
import Debug from 'debug'
import http from 'http'
const debug = Debug('sensorapi:*')
/**
* Get port from environment and store in Express.
*/
const port = normalizePort(process.env.PORT || '3005');
app.set('port', port);
/**
* Create HTTP server.
*/
const server = http.createServer(app);
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
/**
* Normalize a port into a number, string, or false.
*/
function normalizePort(val) {
const port = parseInt(val, 10);
if (isNaN(port)) {
// named pipe
return val;
}
if (port >= 0) {
// port number
return port;
}
return false;
}
/**
* Event listener for HTTP server "error" event.
*/
function onError(error) {
if (error.syscall !== 'listen') {
throw error;
}
const bind = typeof port === 'string'
? 'Pipe ' + port
: 'Port ' + port;
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
console.error(bind + ' requires elevated privileges');
process.exit(1);
break;
case 'EADDRINUSE':
console.error(bind + ' is already in use');
process.exit(1);
break;
default:
throw error;
}
}
/**
* Event listener for HTTP server "listening" event.
*/
function onListening() {
console.log('DEBUG:', process.env.DEBUG, process.env.INFLUXHOST)
const addr = server.address();
const bind = typeof addr === 'string'
? 'pipe ' + addr
: 'port ' + addr.port;
debug('Listening on ' + bind);
console.log('Listening on ' + bind);
}

63
build_and_copy.sh Executable file
View File

@@ -0,0 +1,63 @@
#!/bin/bash
# Build Docker-Container
#
# Call: build_and_copy.sh [-p port] [-n name] target
#
# The Dockerfile must be named like Dockerfile_name
#
# 2018-09-20 rxf
# - before sending docker image to remote, tag actual remote image
#
# 2018-09-14 rxf
# - first Version
#
set -x
port=""
orgName=sensorapi
name=sensorapi
usage()
{
echo "Usage build_and_copy.sh [-p port] [-n name] target"
echo " Build docker container $name and copy to target"
echo "Params:"
echo " target: Where to copy the container to "
echo " -p port: ssh port (default 22)"
echo " -n name: new name for container (default: $orgName)"
}
while getopts n:p:h? o
do
case "$o" in
n) name="$OPTARG";;
p) port="-p $OPTARG";;
h) usage; exit 0;;
*) usage; exit 1;;
esac
done
shift $((OPTIND-1))
while [ $# -gt 0 ]; do
if [[ -z "$target" ]]; then
target=$1
shift
else
echo "bad option $1"
# exit 1
shift
fi
done
docker build -f Dockerfile_$orgName -t $name .
dat=`date +%Y%m%d%H%M`
if [ "$target" == "localhost" ]
then
docker tag $name $name:V_$dat
exit
fi
ssh $port $target "docker tag $name $name:V_$dat"
docker save $name | bzip2 | pv | ssh $port $target 'bunzip2 | docker load'

166
databases/influx_flux.js Normal file
View File

@@ -0,0 +1,166 @@
// Access to InfluxDB via HTTP
import axios from 'axios'
import { DateTime } from 'luxon'
// import csvParse from 'csv-parser'
import { logit, logerror } from '../utilities/logit.js'
import {returnOnError} from "../utilities/reporterror.js";
import {csv2Json} from "../utilities/csv2json.js";
let INFLUXHOST = process.env.INFLUXHOST || "localhost"
let INFLUXPORT = process.env.INFLUXPORT || 8086
let INFLUXTOKEN = process.env.INFLUXTOKEN || ""
//"rklEClT22KfdXZhA47eyJhbqcvekb8bcKCqlUG7n72uDSmR2xGvif0CmGJe0WQtXB96y29mmt-9BdsgWA5npfg=="
//"BNR6cGdb006O1T6hQkGcfB8tgH-UPO6QkOPToeAvrP7LATJbCuWi1wYf3HBpVdZQEBxHxNSrNenZsOSMogX-lg=="
let INFLUXDATABUCKET = process.env.INFLUXDATABUCKET || "sensor_data"
let INFLUXORG = process.env.INFLUXORG || "citysensor"
const INFLUXURL_READ = `http://${INFLUXHOST}:${INFLUXPORT}/api/v2/query?org=${INFLUXORG}`
const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/api/v2/write?org=${INFLUXORG}&bucket=${INFLUXDATABUCKET}`
const influxRead = async (query) => {
let start = DateTime.now()
logit(`ReadInflux from ${INFLUXURL_READ}`)
let erg = { values: [], err: null}
try {
let ret = await axios({
method: 'post',
url: INFLUXURL_READ,
data: query,
headers: {
Authorization: `Token ${INFLUXTOKEN}`,
Accept: 'application/csv',
'Content-type': 'application/vnd.flux'
},
timeout: 10000,
})
if (ret.status !== 200) {
return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
}
erg.values = ret.data
} catch (e) {
return returnOnError(erg, e, influxRead.name)
}
// logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
return erg
}
const influxWrite = async (data) => {
let start = DateTime.now()
let ret
try {
ret = await axios({
method: 'post',
url: INFLUXURL_WRITE,
data: data,
headers: {
Authorization: `Token ${INFLUXTOKEN}`,
Accept: 'application/json',
'Content-Type': 'text/plain; charset=utf-8'
},
timeout: 10000,
})
if (ret.status !== 204) {
logerror(`doWrite2API Status: ${ret.status}`)
}
} catch (e) {
logerror(`doWrite2API ${e}`)
}
logit(`Influx-Write-Time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
return ret
}
const fetchFromInflux = async (ret, query) => {
let { values, err} = await influxRead(query)
if(err) {
if(err.toString().includes('400')) {
return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
} else {
return returnOnError(ret, err, fetchFromInflux.name)
}
}
if (values.length <= 2) {
return returnOnError(ret, 'NODATA', fetchFromInflux.name)
}
ret.values = csv2Json(values)
return ret
}
export const fetchActData = async (opts) => {
let ret = {err: null, values: []}
let sorting = ''
if(opts.sort) {
if (opts.sort === 1) {
sorting = '|> sort(columns: ["_time"], desc: false)'
} else if (opts.sort === -1) {
sorting = '|> sort(columns: ["_time"], desc: true)'
}
}
// build the flux query
let query = `
from(bucket: "sensor_data")
|> range(${opts.start}, ${opts.stop})
|> filter(fn: (r) => r.sid == "${opts.sensorid}")
${sorting}
|> keep(columns: ["_time","_field","_value"])
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
`
return await fetchFromInflux(ret, query)
}
export const fetchNoiseAVGData = async (opts) => {
let ret = {err: null, values: []}
let small = '|> keep(columns: ["_time", "peakcount", "n_AVG"])'
if (opts.long) {
small = ''
}
let queryAVG = `
import "math"
threshold = ${opts.peak}
data = from(bucket: "sensor_data")
|> range(${opts.start}, ${opts.stop})
|> filter(fn: (r) => r["sid"] == "${opts.sensorid}")
e10 = data
|> filter(fn: (r) => r._field == "E10tel_eq")
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|> map(fn: (r) => ({r with _value: (10.0 * math.log10(x: r._value))}))
|> keep(columns: ["_time","_field","_value"])
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
|> rename(columns: {"E10tel_eq" : "n_AVG"})
ecnt = data
|> filter(fn: (r) => r._field == "E10tel_eq")
|> aggregateWindow(every: 1h, fn: count, createEmpty: false)
|> keep(columns: ["_time","_field","_value"])
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
|> rename(columns: {"E10tel_eq" : "count"})
esum = data
|> filter(fn: (r) => r._field == "E10tel_eq")
|> aggregateWindow(every: 1h, fn: sum, createEmpty: false)
|> keep(columns: ["_time","_field","_value"])
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
|> rename(columns: {"E10tel_eq" : "n_sum"})
peak = data
|> filter(fn: (r) => r._field == "LA_max")
|> aggregateWindow(
every: 1h,
fn: (column, tables=<-) => tables
|> reduce(
identity: {peakcount: 0.0},
fn: (r, accumulator) => ({
peakcount: if r._value >= threshold then
accumulator.peakcount + 1.0
else
accumulator.peakcount + 0.0,
}),
),
)
|> keep(columns: ["_time","peakcount"])
part1 = join( tables: {e10: e10, ecnt: ecnt}, on: ["_time"])
part2 = join( tables: {esum: esum, peak: peak}, on: ["_time"])
join( tables: {P1: part1, P2: part2}, on: ["_time"])
${small}
`
return await fetchFromInflux(ret, queryAVG)
}

395
databases/influx_sql.js Normal file
View File

@@ -0,0 +1,395 @@
// Access to InfluxDB 1.8 via HTTP using InfluxQL
//
// IMPORTANT: InfluxDB 1.8 vs 2.0 Data Schema Differences:
// - InfluxDB 1.8: Only stores LA_max, LA_min, LA_eq (all in dB)
// - InfluxDB 2.0: Additionally stores E10tel_eq as pre-calculated linear value (10^(LA_max/10))
//
// This implementation converts LA_max to E10tel_eq at runtime to maintain
// compatibility with the Flux version while ensuring correct logarithmic averaging.
import axios from 'axios'
import { DateTime } from 'luxon'
import { logit, logerror } from '../utilities/logit.js'
import { returnOnError } from "../utilities/reporterror.js"
// InfluxDB 1.8 Configuration
let INFLUXHOST = process.env.INFLUXHOST || "localhost"
let INFLUXPORT = process.env.INFLUXPORT || 8086
let INFLUXUSER = process.env.INFLUXUSER || ""
let INFLUXPASS = process.env.INFLUXPASS || ""
let INFLUXDB = process.env.INFLUXDB || "sensor_data"
// InfluxDB 1.8 URLs
const INFLUXURL_READ = `http://${INFLUXHOST}:${INFLUXPORT}/query`
const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/write`
/**
* Execute InfluxQL query against InfluxDB 1.8
* @param {string} query - InfluxQL query string
* @returns {Object} - {values: [], err: null}
*/
const influxRead = async (query) => {
let start = DateTime.now()
logit(`ReadInflux from ${INFLUXURL_READ}`)
let erg = { values: [], err: null}
try {
const params = {
db: INFLUXDB,
q: query,
epoch: 'ms' // Return timestamps in milliseconds
}
// Add authentication if provided
if (INFLUXUSER && INFLUXPASS) {
params.u = INFLUXUSER
params.p = INFLUXPASS
}
let ret = await axios({
method: 'get',
url: INFLUXURL_READ,
params: params,
timeout: 10000,
})
if (ret.status !== 200) {
return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
}
// InfluxDB 1.8 returns JSON format
if (ret.data.error) {
return returnOnError(erg, ret.data.error, influxRead.name)
}
erg.values = ret.data.results
} catch (e) {
return returnOnError(erg, e, influxRead.name)
}
logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
return erg
}
/**
* Write data to InfluxDB 1.8
* @param {string} data - Line protocol data
* @returns {Object} - Response object
*/
const influxWrite = async (data) => {
let start = DateTime.now()
let ret
try {
const params = {
db: INFLUXDB,
precision: 'ms'
}
// Add authentication if provided
if (INFLUXUSER && INFLUXPASS) {
params.u = INFLUXUSER
params.p = INFLUXPASS
}
ret = await axios({
method: 'post',
url: INFLUXURL_WRITE,
params: params,
data: data,
headers: {
'Content-Type': 'text/plain; charset=utf-8'
},
timeout: 10000,
})
if (ret.status !== 204) {
logerror(`doWrite2API Status: ${ret.status}`)
}
} catch (e) {
logerror(`doWrite2API ${e}`)
}
logit(`Influx-Write-Time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
return ret
}
/**
* Helper function to transform InfluxDB 1.8 result to format compatible with Flux version
* @param {Array} series - InfluxDB series data
* @returns {Array} - Transformed data array
*/
const transformInfluxResult = (series) => {
if (!series || !series.length) return []
const result = []
series.forEach(serie => {
if (!serie.values) return
const columns = serie.columns
const timeIndex = columns.indexOf('time')
serie.values.forEach(row => {
const record = {}
columns.forEach((col, index) => {
if (col === 'time') {
// Convert timestamp to ISO string for compatibility
record._time = new Date(row[index]).toISOString()
} else {
record[col] = row[index]
}
})
result.push(record)
})
})
return result
}
/**
* Execute query and transform results
* @param {Object} ret - Return object
* @param {string} query - InfluxQL query
* @returns {Object} - Transformed result
*/
const fetchFromInflux = async (ret, query) => {
let { values, err } = await influxRead(query)
if (err) {
if (err.toString().includes('400')) {
return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
} else {
return returnOnError(ret, err, fetchFromInflux.name)
}
}
if (!values || !values.length || !values[0].series) {
return returnOnError(ret, 'NODATA', fetchFromInflux.name)
}
ret.values = transformInfluxResult(values[0].series)
return ret
}
/**
* Fetch current/historical sensor data from InfluxDB 1.8
* @param {Object} opts - Options object
* @param {string} opts.sensorid - Sensor ID
* @param {string} opts.start - Start time (e.g., "start: -1h")
* @param {string} opts.stop - Stop time (e.g., "stop: now()")
* @param {number} opts.sort - Sort order (1 for ascending, -1 for descending)
* @returns {Object} - {err: null, values: []}
*/
export const fetchActData = async (opts) => {
let ret = { err: null, values: [] }
// Convert Flux time format to InfluxQL format
let startTime = opts.start.replace('start: ', '').trim()
let stopTime = opts.stop.replace('stop: ', '').trim()
// Build sorting clause
let orderClause = ''
if (opts.sort) {
if (opts.sort === 1) {
orderClause = 'ORDER BY time ASC'
} else if (opts.sort === -1) {
orderClause = 'ORDER BY time DESC'
}
}
// InfluxQL query to get LA_max for a sensor within time range
// Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
const query = `
SELECT "LA_max", "LA_min", "LA_eq"
FROM "measurements"
WHERE "sid" = '${opts.sensorid}'
AND time >= ${startTime}
AND time <= ${stopTime}
${orderClause}
`
// Get the data and transform it to include E10tel_eq equivalent
const result = await fetchFromInflux(ret, query)
if (result.err) {
return result
}
// Transform data to add E10tel_eq field for compatibility with Flux version
// E10tel_eq = 10^(LA_max/10)
result.values = result.values.map(record => ({
...record,
E10tel_eq: record.LA_max !== null && record.LA_max !== undefined
? Math.pow(10, record.LA_max / 10)
: null
}))
return result
}
/**
* Helper function to calculate logarithmic average for decibel values
* For decibel values, we need to:
* 1. Convert dB to linear scale (10^(dB/10))
* 2. Calculate arithmetic mean of linear values
* 3. Convert back to dB (10 * log10(mean))
* @param {Array} values - Array of decibel values
* @returns {number} - Logarithmic average in dB
*/
const calculateLogMean = (values) => {
if (!values || values.length === 0) return null
// Convert dB to linear scale, calculate mean, convert back to dB
const linearSum = values.reduce((sum, val) => {
if (val !== null && val !== undefined) {
return sum + Math.pow(10, val / 10)
}
return sum
}, 0)
const validCount = values.filter(val => val !== null && val !== undefined).length
if (validCount === 0) return null
const linearMean = linearSum / validCount
return 10 * Math.log10(linearMean)
}
/**
* Fetch noise averaging data from InfluxDB 1.8 with proper logarithmic averaging for LAmax
* @param {Object} opts - Options object
* @param {string} opts.sensorid - Sensor ID
* @param {string} opts.start - Start time
* @param {string} opts.stop - Stop time
* @param {number} opts.peak - Peak threshold for counting
* @param {boolean} opts.long - Return full data or just summarized
* @returns {Object} - {err: null, values: []}
*/
export const fetchNoiseAVGData = async (opts) => {
let ret = { err: null, values: [] }
// Convert Flux time format to InfluxQL format
let startTime = opts.start.replace('start: ', '').trim()
let stopTime = opts.stop.replace('stop: ', '').trim()
// Since InfluxQL doesn't support complex joins like Flux, we need to make multiple queries
// and combine the results in JavaScript
// Query 1: Get LA_max data aggregated by hour for E10tel calculation
// In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
const queryLAmaxForE10 = `
SELECT "LA_max", time
FROM "measurements"
WHERE "sid" = '${opts.sensorid}'
AND time >= ${startTime}
AND time <= ${stopTime}
AND "LA_max" IS NOT NULL
ORDER BY time ASC
`
// Query 2: Same query for peak counting (we'll process the same data)
const queryLAmaxForPeaks = queryLAmaxForE10
try {
// Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
if (lamaxErr) {
return returnOnError(ret, lamaxErr, fetchNoiseAVGData.name)
}
if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
return returnOnError(ret, 'NODATA', fetchNoiseAVGData.name)
}
// Transform LA_max results
const lamaxData = transformInfluxResult(lamaxValues[0].series)
// Group LA_max data by hour and calculate:
// 1. E10tel equivalent values (10^(LA_max/10))
// 2. Peak counting
// 3. Statistics for n_AVG calculation
const hourlyData = {}
lamaxData.forEach(record => {
const timestamp = new Date(record._time)
const hourKey = new Date(timestamp.getFullYear(), timestamp.getMonth(),
timestamp.getDate(), timestamp.getHours()).toISOString()
if (!hourlyData[hourKey]) {
hourlyData[hourKey] = {
time: hourKey,
lamaxValues: [],
e10telValues: [], // Converted LA_max to E10tel equivalent
peakCount: 0
}
}
const lamax = record.LA_max
if (lamax !== null && lamax !== undefined) {
// Store original LA_max value
hourlyData[hourKey].lamaxValues.push(lamax)
// Convert LA_max (dB) to E10tel equivalent: 10^(LA_max/10)
const e10tel = Math.pow(10, lamax / 10)
hourlyData[hourKey].e10telValues.push(e10tel)
// Count peaks
if (lamax >= opts.peak) {
hourlyData[hourKey].peakCount++
}
}
})
// Calculate final results for each hour
const combinedResults = []
Object.values(hourlyData).forEach(hourData => {
const result = {
_time: hourData.time,
count: hourData.e10telValues.length,
peakcount: hourData.peakCount
}
// Calculate E10tel statistics
if (hourData.e10telValues.length > 0) {
// Sum of E10tel values
result.n_sum = hourData.e10telValues.reduce((sum, val) => sum + val, 0)
// Mean of E10tel values, then convert back to dB for n_AVG
// This matches the Flux version: mean(E10tel_eq) then 10*log10(mean)
const e10telMean = result.n_sum / hourData.e10telValues.length
result.n_AVG = 10.0 * Math.log10(e10telMean)
}
// Add additional fields if opts.long is true
if (opts.long) {
result.LA_max_values = hourData.lamaxValues
result.LA_max_log_avg = calculateLogMean(hourData.lamaxValues)
result.E10tel_values = hourData.e10telValues
}
combinedResults.push(result)
})
// Sort by time
combinedResults.sort((a, b) => new Date(a._time) - new Date(b._time))
// Filter results based on opts.long
if (!opts.long) {
ret.values = combinedResults.map(record => ({
_time: record._time,
peakcount: record.peakcount,
n_AVG: record.n_AVG
}))
} else {
ret.values = combinedResults
}
} catch (e) {
return returnOnError(ret, e, fetchNoiseAVGData.name)
}
return ret
}
// Export write function for compatibility
export { influxWrite }

424
databases/mongo.js Normal file
View File

@@ -0,0 +1,424 @@
/* Interface for MongoDB
*/
import { MongoClient } from 'mongodb'
import { logit, logerror } from '../utilities/logit.js'
import { DateTime } from 'luxon'
import {returnOnError} from "../utilities/reporterror.js";
// const nodemailer = require('nodemailer');
let MONGOHOST = process.env.MONGOHOST;
let MONGOPORT = process.env.MONGOPORT;
let MONGOAUTH = process.env.MONGOAUTH;
let MONGOUSRP = process.env.MONGOUSRP;
let MONGOBASE = process.env.MONGOBASE;
if (MONGOHOST === undefined) { MONGOHOST = 'localhost';}
if (MONGOPORT === undefined) { MONGOPORT = 27017; }
if (MONGOAUTH === undefined) { MONGOAUTH = 'false'; }
if (MONGOBASE === undefined) { MONGOBASE = 'sensor_data'; }
let MONGO_URL = 'mongodb://'+MONGOHOST+':'+MONGOPORT; // URL to mongo database
if (MONGOAUTH === 'true') {
// MONGO_URL = 'mongodb://'+MONGOUSRP+'@' + MONGOHOST + ':' + MONGOPORT + '/?authSource=' + MONGOBASE; // URL to mongo database
MONGO_URL = 'mongodb://'+MONGOUSRP+'@' + MONGOHOST + ':' + MONGOPORT + '/?authSource=admin'; // URL to mongo database
}
export const properties_collection = 'properties'
export const connectMongo = async () => {
try {
logit(`Try to connect to ${MONGO_URL}`)
let client = await MongoClient.connect(MONGO_URL)
logit(`Mongodbase connected to ${MONGO_URL}`)
return client
}
catch(error){
throw(error)
}
}
const listDatabases = async (client) => {
let databasesList = await client.db().admin().listDatabases();
console.log("Databases:");
databasesList.databases.forEach(db => console.log(` - ${db.name}`));
}
/* ***************************************************
// READ routines
******************************************************/
// Read properties from the database
export const readProperties = async (query, limit = 0) => {
let ret = {err: null, properties: null}
let client = await connectMongo()
try {
if ("sid" in query) { // if sid is given, read property for sid
ret.properties = await client.db(MONGOBASE).collection(properties_collection).findOne({_id: query.sid})
} else { // otherwise read props corresponding to query
ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
}
} catch (e) {
ret.err = e
}
finally {
client.close()
}
return ret
}
export const readChipData = async (sid) => {
let ret = { err: null, chipdata: null}
let client = await connectMongo()
try {
ret.chipdata = await client.db(MONGOBASE).collection('prop_flux').findOne({_id: sid},{projection: {chip: 1, _id: 0}})
} catch (e) {
ret.err = e
}
finally {
client.close()
}
return ret
}
// read mapdata from database
export const readMapdata = async (query, limit) => {
let ret = {err: null, mapdata: []}
let client = await connectMongo()
try {
ret.mapdata = await client.db(MONGOBASE).collection("mapdata").find(query).limit(limit).toArray()
} catch (e) {
ret.err = e
}
finally {
client.close()
}
return ret
}
export const getallProperties = async (coll, query) => {
let ret = {err: null, properties: []}
let client = await connectMongo()
try {
ret.properties = await client.db(MONGOBASE).collection(coll)
.find(query).toArray()
} catch (e) {
ret.err = e
}
finally {
client.close()
}
return ret
}
export const getOneproperty = async (sid) => {
let ret = {error: false}
let client = await connectMongo()
try {
ret.properties = await client.db(MONGOBASE).collection(properties_collection)
.findOne({_id: sid})
} catch (e) {
ret = {error: true, errortext: e}
}
finally {
client.close()
}
return ret
}
export const readAKWs = async (options) => {
let ret = {values: { akws: [], th1_akws: []}, err: null}
let erg = []
let client = await connectMongo()
try {
let docs = await client.db(MONGOBASE).collection("akws")
.find().toArray()
if(docs == null) {
return returnOnError(ret, 'akws - docs == null', readAKWs.name)
}
logit(`getawkdata: data fetched from akws, length= ${docs.length}`);
ret.values.akws = docs
let docs1 = await client.db(MONGOBASE).collection("th1_akws")
.find().toArray()
if(docs1 == null) {
return returnOnError(ret, 'th1_akws - docs == null', readAKWs.name)
}
logit(`getawkdata: data fetched from th1_akws, length= ${docs1.length}`)
ret.values.th1_akws = docs1
} catch (e) {
return returnOnError(ret, e, readAKWs.name)
}
finally {
client.close()
}
return ret
}
export const fetchActData = async (opts) => {
let ret = {err: null, values: []}
let start = opts.start.slice(7)
let end = opts.stop.slice(6)
start = DateTime.fromISO(start).toJSDate()
end = DateTime.fromISO(end).toJSDate()
let query = {sensorid: opts.sensorid, datetime: {$gte: start, $lt: end}}
let options = { projection: {_id: 0, values: 1, datetime: 1}, sort: {datetime: 1}}
let client = await connectMongo()
try {
// ret.values = await client.db(MONGOBASE).collection('noise_sensors')
// .find(query, options).toArray()
ret.values = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
{$match: query},
{$sort: { datetime: 1}},
// {$replaceWith:
// {
// '$values.LA_min': '$values.noise_LA_min'
// }
// },
{$replaceWith:
{
datetime: {$dateToString: {format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime'}},
LA_min: '$values.LA_min',
LA_minx: '$values.noise_LA_min',
LA_max: '$values.LA_max',
LAeq: '$values.LAeq',
E10tel_eq: '$values.E10tel_eq' }
},
// {$project: {
// datetime: {$dateToString: {format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime'}},
// _id: 0, values:1
// }},
]).toArray()
}
catch(e) {
ret.err = e
}
finally {
client.close()
}
return ret
}
/*
let docs = await collection.find(
{ datetime:
{ $gte: start.toDate(), $lt: end.toDate() }
},
{ projection:
{_id:0, E_eq:0, E_mx:0, E_mi:0, E10tel_mx:0, E10tel_mi:0}, sort: {datetime: sort}
},
).toArray();
*/
export const fetchgeigerAVGData = async (opts) => {
let docs = []
let ret = {err: null, values: []}
let start = opts.start.slice(7)
let end = opts.stop.slice(6)
start = DateTime.fromISO(start).toJSDate()
end = DateTime.fromISO(end).toJSDate()
let datRange = {sensorid: opts.sensorid, datetime: {$gte: start, $lt: end}}
let sorting = {datetime: opts.sort};
let client = await connectMongo()
try {
if(opts.moving) {
docs = await client.db(MONGOBASE).collection('sensors').aggregate([
{
$sort: sorting
}, // sort by date
{
$match: {sensorid: opts.sensorid}
}, // select only values for given sensor
{
$match: datRange
}, // select only values in the given date range
{
$setWindowFields: {
sortBy: {datetime: 1},
output: {
cpm_avg: {
$avg: "$values.counts_per_minute",
window: {
range: [-60, 0],
unit: "minute"
}
}
}
}
},
{
$project: {_id:0, cpm_avg: 1, datetime:1, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor]}}
},
{
$sort: {datetime: 1}
}
]).toArray();
} else {
docs = await client.db(MONGOBASE).collection('sensors').aggregate([
{
$sort: sorting
}, // sort by date
{
$match: {sensorid: opts.sensorid}
}, // select only values for given sensor
{
$match: datRange
}, // select only values in the given date range
{ $group: {
_id: {$dateTrunc: {
date: "$datetime",
unit: "minute",
binSize: 60
}},
cpm_avg: {$avg: "$values.counts_per_minute"}, // calculate the average
}
},
{ $addFields: { datetime: "$_id"}}, // change '_id' to 'datetime'
{
$project: {_id:0, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor]}, datetime: 1, cpm_avg: 1}
},
{
$sort: {datetime: 1}
}
]).toArray();
}
} catch(e) { // if there was an error
ret.err = e // log it to console
}
finally {
client.close()
}
ret.values = docs
return ret
}
export const fetchNoiseAVGData = async (opts) => {
let docs = []
let ret = {err: null, values: []}
let start = opts.start.slice(7)
let end = opts.stop.slice(6)
start = DateTime.fromISO(start).toJSDate()
end = DateTime.fromISO(end).toJSDate()
let peak = opts.peak; // threshold for peak count
let datRange = {sensorid: opts.sensorid, datetime: {$gte: start, $lt: end}}
let sorting = {datetime: opts.sort};
let grpId = {$dateToString: {format: '%Y-%m-%dT%H:00:00Z', date: '$datetime'}}
let client = await connectMongo()
try {
docs = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
{$sort: sorting}, // sort by date
{$match: datRange}, // select only values in the given date range
{
$group: {
_id: grpId,
n_average: {$avg: "$values.E10tel_eq"}, // calculate the average
n_sum: {$sum: "$values.E10tel_eq"}, // calculate the sum
peakcount: {$sum: {$cond: [{$gte: ["$values.LA_max", peak]}, 1, 0]}}, // count peaks
count: {$sum: 1}, // count entries
}
},
{$sort: {_id: 1}}, // sort by result dates
{ $addFields: { datetime: "$_id"}}, // change '_id' to 'datetime'
{$project: opts.long ? { _id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1, count:1, n_sum:1} :
{_id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1}}
]).toArray(); // return not all fields, depending on 'long'
} catch(e) { // if there was an error
ret.err = e // log it to console
}
finally {
client.close()
}
ret.values = docs
return ret
}
export const fetchAVGData = async (opts) => {
}
/*
// *********************************************
// getAverageData
//
// Calculate different values per hour
// average of E10tel_eq ( E10tel_eq => 10 ^(LAeq/10) )
// sum of E10tel_eq, to calculate day, night and evening averages
// count, how many values are used for average/sum
// peakcount, how many values of LAmax are over the defined peak value in every hour
//
// params:
// db: Database
// opt: different options (see further down)
//
// return
// depending on the calling parameter 'what', not all values will be sent in 'values'
// JSON
// {[
// { datetime: "2019-10-23T00:00:00Z" , n_AVG: 67.22, n_sum: 32783, count: 24, peakcount: 6 }.
// { datetime: "2019-10-23T01:00:00Z" , n_AVG: 52.89, n_sum: 23561, count: 26, peakcount: 5 }.
// .........
// ]}
//
// *********************************************
async function getAverageData(db,opt) {
let start = opt.start;
let end = opt.end; // start and end time for aggregation
let docs = []; // collect data here
const collection = db.collection('data_' + opt.sid);
let span = opt.span // date range in days
let peak = opt.peak; // threshold for peak count
let long = opt.long; // true => give extra output
let nbrOfHours = opt.end.diff(opt.start,'hours') + 24;
let datRange = {datetime: {$gte: opt.start.toDate(), $lt: opt.end.toDate()}};
let sorting = {datetime: opt.sort};
let grpId = {$dateToString: {format: '%Y-%m-%dT%H:00:00Z', date: '$datetime'}};
try {
docs = await collection.aggregate([
{$sort: sorting}, // sort by date
{$match: datRange}, // select only values in give data range
{
$group: {
_id: grpId,
n_average: {$avg: '$E10tel_eq'}, // calculate the average
n_sum: {$sum: '$E10tel_eq'}, // calculate the sum
peakcount: {$sum: {$cond: [{$gte: ["$LA_max", peak]}, 1, 0]}}, // count peaks
count: {$sum: 1}, // count entries
}
},
{$sort: {_id: 1}}, // sort by result dates
{ $addFields: { datetime: "$_id"}}, // change '_id' to 'date'
{$project: opt.long ? { _id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1, count:1, n_sum:1} :
{_id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1}}
]).toArray(); // return not all fields, depending on 'long'
} catch(e) { // if there was an error
console.log(e); // log it to console
}
// To easily extract the values, we copy the data from docs into a new array, so that the
// hour in an element in docs becomes the index into the new array (for every new day this
// index will be incremented by 24). Missing values are marked by: {n_sum=-1, n_AVG=-1}.
let hoursArr = new Array(nbrOfHours); // generate new array
let emptyValues = opt.long ? {n_sum: -1, n_AVG:-1} : {n_AVG:-1};
hoursArr.fill(emptyValues); // fill with 'empty' value
let startDay = moment.utc(docs[0].datetime).date(); // calc first day
let k = 0;
for (let i=0; i<docs.length; i++) { // loop through docs
let stunde = moment.utc(docs[i].datetime).hours(); // extract current hour
let day = moment.utc(docs[i].datetime).date(); // and current day
if (day != startDay) { // if date has changed
k += 24; // increment index by 24
startDay = day;
}
hoursArr[k+stunde] = docs[i]; // copy date into hourArray
}
return hoursArr;
}
*/

53
deploy.sh Executable file
View File

@@ -0,0 +1,53 @@
# Deploy the Docker project to the docker registry (docker.citysensor.de)
#
# V 1.1 2024-09-25 rxf
#   - now works for different files; the name is passed as a parameter
#
# v 1.0 2024-09-01 rxf
#   first version
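#
# Example (image name taken from docker-compose.yml; invocation assumed):
#    ./deploy.sh sensorapi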
#set -x
registry=docker.citysensor.de
name=''
usage()
{
echo "Usage ./deploy.sh fname"
echo " Build docker container 'fname' and deploy to $registry"
echo " Params:"
echo " -h show this usage"
}
while getopts h? o
do
case "$o" in
h) usage; exit 0;;
*) usage; exit 1;;
esac
done
shift $((OPTIND-1))
while [ $# -gt 0 ]; do
if [[ -z "$fname" ]]; then
name=$1
shift
else
echo "bad option $1"
# exit 1
shift
fi
done
if [[ -z "$name" ]]; then
echo "No name given"
usage
exit 1
fi
./build_and_copy.sh localhost
docker tag $name docker.citysensor.de/$name:latest
dat=`date +%Y%m%d%H%M`
docker tag $name docker.citysensor.de/$name:V_$dat
docker push docker.citysensor.de/$name

BIN
doc/Planung.jpg Normal file

Binary file not shown.


23
doc/Planung.md Normal file
View File

@@ -0,0 +1,23 @@
## Planning, Ideas and Decisions
### Websocket
Since websockets only bring advantages when the server has something 'to say' on its own, this project
does without websockets and the traffic between the API and the GUI runs over normal HTTP calls.
### Planning
![Planung](Planung.jpg "Text to show on mouseover")
#### Additions to the diagram (2022-05-26)
* One separate container for each of the 3 GUIs, just as now (Feinstaub, Geiger, Laerm)
* From now on (i.e. as soon as possible) run the **influx** and **mongo** databases in parallel
* After about 1 month use **only mongo**
#### Version history
|Version | Date | Author
|--------|------|-------
|1.0 | 2022-05-27 | rxf

6
doc/Readne.md Normal file
View File

@@ -0,0 +1,6 @@
# Sensor-API
### General
* **ALL** times are output in UTC.
If the time is passed in ISO format **without** a zone designator, it is converted to UTC and output that way as well. If the input already is UTC ('Z' as the last character), it stays unchanged.
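A minimal sketch of this rule with luxon (already a dependency in `package.json`); the helper name `toUtcIso` is illustrative only:
```javascript
import { DateTime } from 'luxon'

// Without a zone designator, luxon interprets the time as local and
// toUTC() converts it; with a trailing 'Z' it is already UTC and stays so.
const toUtcIso = (s) => DateTime.fromISO(s).toUTC().toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
```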

19
docker-compose.yml Normal file
View File

@@ -0,0 +1,19 @@
version: '3.9'
services:
node:
image: sensorapi
environment:
DEVELOP: "true"
MONGOHOST: ${MONGOHOST}
MONGOPORT: ${MONGOPORT}
INFLUXHOST: ${INFLUXHOST}
INFLUXTOKEN: ${INFLUXTOKEN}
LIVE: "true"
PORT: 3004
ports:
- "3004:3004"
volumes:
- ${PWD}/log/sensorapi:/var/log
container_name: node_sensorapi
restart: unless-stopped

View File

@@ -0,0 +1,16 @@
{
"CMNDUNKNOWN": "Unbekanntes Kommando",
"NOTHING": "Nichts anzuzeigen",
"PARAMNONUM": "Parameter \"xxx\" ist keine Zahl",
"NOTYP": "KeinTyp angegeben",
"NOSENSFOUND": "Kein passender Sensor in den Properties gefunden",
"NOMANDPARAM": "Nitewendiger Parameter xxx nicht angegeben",
"NOPROPSREAD": "Keine properties für Sensor xxx eingelesen",
"NOPARAMETER": "Keine Parameter übergene",
"RESPSTATUS": "Rückgabe-Status: xxx",
"NODATA": "keine Daten gefunden",
"SYNTAXURL": "Syntax Fehler beim Aufruf der URL",
"WRONGTYPE": "Sensor xxx ist kein yyy Sensor",
"NOLASTDATES": "Probleme bein abholen der letzten Daten",
"NOPROPSFOUND": "Properties Collection nicht gefunden"
}

View File

@@ -0,0 +1,16 @@
{
"CMNDUNKNOWN": "Command not known",
"NOTHING": "Nothing to show",
"PARAMNONUM": "Parameter 'xxx' is not a number",
"NOTYP": "No type given",
"NOSENSFOUND": "No suitable sensors found in properties",
"NOMANDPARAM": "Mandatory parameter xxx not given",
"NOPROPSREAD": "No properties read for sensor xxx",
"NOPARAMETER": "No parameter given",
"RESPSTATUS": "Returned status: xxxx",
"NODATA": "No data found",
"SYNTAXURL": "Syntax error in calling url!",
"WRONGTYPE": "Sensor xxx is not of type yyy",
"NOLASTDATES": "Problems fetching last dates from database",
"NOPROPSFOUND": "Properties collection not found"
}

74
mocks/mongo_mock.js Normal file
View File

@@ -0,0 +1,74 @@
// Always returns the 140 sensor for any property request
// rxf 2022-05-25
export const readProperties = async (query, limit = 0) => {
let ret = {error: false}
if ("sid" in query) { // if sid is given, read property for sid
ret.values = {
"_id" : 140,
"location_id" : 65,
"name" : "SDS011",
"since" : "2021-04-21T09:44:12.888Z",
"location" : [
{
"loc" :
{
"type" : "Point",
"coordinates" : [ 9.16, 48.778 ]
},
"id" : 65,
"altitude" : 282,
"since" : "2021-04-21T09:44:12.888Z",
"address" : { },
"exact_loc" : 0,
"indoor" : 0
}
]
}
} else { // otherwise read props corresponding to query
ret.values = [
{
"_id" : 140,
"location_id" : 65,
"name" : "SDS011",
"since" : "2021-04-21T09:44:12.888Z",
"location" : [
{
"loc" :
{
"type" : "Point",
"coordinates" : [ 9.16, 48.778 ]
},
"id" : 65,
"altitude" : 282,
"since" : "2021-04-21T09:44:12.888Z",
"address" : { },
"exact_loc" : 0,
"indoor" : 0
}
]
},{
"_id" : 141,
"location_id" : 65,
"name" : "BME280",
"since" : "2021-04-21T09:44:12.888Z",
"location" : [
{
"loc" :
{
"type" : "Point",
"coordinates" : [ 9.16, 48.778 ]
},
"id" : 65,
"altitude" : 282,
"since" : "2021-04-21T09:44:12.888Z",
"address" : { },
"exact_loc" : 0,
"indoor" : 0
}
]
}
]
}
return ret
}

7506
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

35
package.json Normal file
View File

@@ -0,0 +1,35 @@
{
"name": "sensorapi",
"version": "1.4.1",
"date": "2023-11-29 16:00 UTC",
"private": true,
"scripts": {
"start": "node ./bin/www.js >>/var/log/sensorapi.log 2>&1",
"test": "mocha ./test/test.js"
},
"type": "module",
"bin": {
"www": "./bin/www.js"
},
"dependencies": {
"@influxdata/influxdb-client": "^1.35.0",
"@influxdata/influxdb-client-apis": "^1.35.0",
"axios": "^1.12.2",
"cookie-parser": "~1.4.7",
"cors": "^2.8.5",
"debug": "~4.4.3",
"express": "^5.1.0",
"http-errors": "~2.0.0",
"i18next": "^25.5.2",
"i18next-http-middleware": "^3.8.0",
"i18next-node-fs-backend": "^2.1.3",
"luxon": "^3.7.2",
"mongodb": "^6.19.0",
"morgan": "~1.10.1",
"pug": "^3.0.3",
"ws": "^8.18.3"
},
"devDependencies": {
"mocha": "^11.7.2"
}
}

54
routes/api.js Normal file
View File

@@ -0,0 +1,54 @@
import express from 'express'
import {getData4map} from "../actions/data4map.js"
import * as getData from "../actions/getsensorData.js";
import * as getProps from "../actions/getproperties.js";
import * as getAKWs from "../actions/getAKWData.js";
import * as holAddr from "../actions/getaddress.js";
export const apiRouter = express.Router();
const cmdTable = [
{cmd: 'getactdata', func: getData.getActData},
{cmd: 'getlongavg', func: getData.getLongAvg},
{cmd: 'getavgdata', func: getData.getAvgData},
{cmd: 'getoneproperty', func: getProps.getOneProperty},
{cmd: 'getakwdata', func: getAKWs.getakwdata},
{cmd: 'getaddress', func: holAddr.getAddress},
{cmd: 'getcitycoords', func: holAddr.getCityCoords},
{cmd: 'getsensordata', func: getData.getSensorData},
{cmd: 'getmapdata', func: getData4map}
]
let i18n;
export const translate = (x) => {
return i18n.t(x)
}
export const dispatchCommand = async (cmd, table, params, res) => {
let notfound = true
for (let c of table) {
if (c.cmd === cmd) {
notfound = false
let erg = await c.func(params)
if (typeof erg === 'string') {
res.type('text')
res.send(erg)
} else {
res.json(erg)
}
}
}
if (notfound) {
res.json({err: translate('CMNDUNKNOWN')})
}
}
// normal routes called from javascript client
apiRouter.get('/:cmd', async (req, res) => {
const params = req.query
params.chart = false
i18n = req.i18n
await dispatchCommand(req.params.cmd, cmdTable, params, res)
})

12
routes/index.js Normal file
View File

@@ -0,0 +1,12 @@
import express from 'express'
import { translate as trans } from '../routes/api.js'
const router = express.Router();
/* GET home page. */
router.get('/', function(req, res, next) {
res.status(200).json({message: trans('NOTHING')})
});
export default router

152
sensorspecials/geigeract.js Normal file
View File

@@ -0,0 +1,152 @@
// Data preparation for fetching geigeractivity data
// rxf 2022-06-24
import {returnOnError} from "../utilities/reporterror.js";
import { calcRange, getActData, getAvgData, getLongAvg } from "../actions/getsensorData.js"
import checkParams from "../utilities/checkparams.js";
import { setoptionfromtable } from "../utilities/chartoptions.js"
import {DateTime} from 'luxon'
import { fetchgeigerAVGData } from "../databases/mongo.js";
import { translate as trans } from '../routes/api.js'
const geigeractFilter = (data, opts, actual) => {
let erg = {}
erg.sid = opts.sensorid
erg.sname = opts.sname
erg.values = []
for (let x of data.values) {
let entry = {}
entry.datetime = x.datetime
if(actual) {
entry.cpm = x.counts_per_minute
entry.uSvh = x.counts_per_minute / 60 * opts.factor
} else {
entry.cpmAvg = x.counts_per_minute
entry.uSvhAvg = x.counts_per_minute / 60 * opts.factor
}
erg.values.push(entry)
}
return erg
}
const getgeigerDWMData = async (opts) => {
if(opts.avg === 1) {
const erg = await getActData(opts)
return erg
} else {
const erg = await fetchgeigerAVGData(opts)
return erg
}
}
const factorTable = [
{name: 'SBM-20', factor: 1 / 2.47},
{name: 'SBM-19', factor: 1 / 9.81888},
{name: 'Si22G', factor: 0.081438},
{name: 'J306', factor: 0.06536}
]
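// Derive the µSv/h conversion factor from the tube type encoded in the sensor
// name: slice(10) drops a fixed-length prefix (assumed to be something like
// 'Radiation '), leaving the tube type, e.g. 'SBM-20'.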
const getfactor = (name) => {
const typ = name.slice(10)
for(const x of factorTable) {
if (x.name === typ) {
return 1 / 60 * x.factor
}
}
return 1
}
export const getgeigerData = async (params, possibles, props) => {
let ret = {err: null}
let {opts, err} = checkParams(params, {
mandatory:[
{name:'sensorid', type: 'int'},
],
optional: possibles
})
// To be compatible with old API:
if (opts.out === 'csv') {
opts.csv = true
}
if (err) {
return returnOnError(ret, err, getgeigerData.name)
}
opts.factor = getfactor(props.name[0].name)
opts.span = setoptionfromtable(opts.span, 1)
opts.daystart = setoptionfromtable(opts.daystart, false)
opts.avg = setoptionfromtable(opts.avg, 1)
opts.moving = setoptionfromtable(opts.moving, false)
let {start, stop} = calcRange(opts) // calc time range
opts.start = start
opts.stop = stop
let erg = await getgeigerDWMData(opts) // get the data
ret = {
err: erg.err,
options: {
sid: opts.sensorid,
indoor: props.location[0].indoor,
span: opts.span,
start: DateTime.fromISO(opts.start.slice(7)),
data: opts.data,
count: erg.values.length,
avg: opts.avg,
moving: opts.moving
},
values: erg.values,
}
if(ret.values.length === 0) {
ret.err = trans('NODATA')
}
return ret
}
/*
let erg = { err: null, data: {}}
let params = {
sensorid: opts.sensorid,
avg: opts.avg,
datetime: opts.start
}
if (opts.what === 'oneday') {
params.span = 1
} else if (opts.what === 'oneweek') {
params.span = 7
} else {
params.span = 31
params.moving = false
params.avg = 1440
}
let { data, err } = await getAvgData(params)
if (err != null) {
return returnOnError(erg, err, getgeigerData.name)
}
erg.data.geigermovavg = geigeractFilter(data, opts, false)
if (opts.what === 'oneday') {
const { data, err} = await getActData(params)
if (err != null) {
return returnOnError(erg, err, getgeigerData.name)
}
erg.data.geigeractual = geigeractFilter(data, opts, true)
}
if(opts.climatesid && ((opts.what === 'oneday') || (opts.what === 'oneweek'))) {
params.sensorid = opts.climatesid
params.avg = 10
const { data, err} = await getAvgData(params)
if (err != null) {
return returnOnError(erg, err, getgeigerData.name)
}
data.sid = opts.climatesid
data.sname = opts.climatesname
erg.data.climate = data
}
return erg
}
*/

499
sensorspecials/noise.js Normal file
View File

@@ -0,0 +1,499 @@
// Data preparation for fetching noise data
// rxf 2023-03-05
const DBASE = process.env.DBASE || 'mongo'
import {returnOnError} from "../utilities/reporterror.js";
import { getActData, getAvgData, getLongAvg, calcRange} from "../actions/getsensorData.js"
import checkParams from "../utilities/checkparams.js";
import {DateTime} from 'luxon'
import { translate as trans } from '../routes/api.js'
import * as influx from "../databases/influx.js"
import * as mongo from "../databases/mongo.js"
import { setoptionfromtable } from "../utilities/chartoptions.js"
export const getNoiseData = async (params, possibles, props) => {
let ret = {err: null}
let {opts, err} = checkParams(params, {
mandatory:[
{name:'sensorid', type: 'int'},
],
optional: possibles
})
// To be compatible with old API:
if (opts.out === 'csv') {
opts.csv = true
}
if (err) {
return returnOnError(ret, err, getNoiseData.name)
}
// execute function depending on given 'data'
for(let x of whatTable) {
if (x.what === opts.data) {
opts.span = setoptionfromtable(opts.span, x.span)
opts.daystart = setoptionfromtable(opts.daystart, x.daystart)
let {start, stop} = calcRange(opts) // calc time range
opts.start = start
opts.stop = stop
let erg = await x.func(opts) // get the data
if (opts.csv === true) {
ret = erg
} else {
ret = {
err: erg.err,
options: {
sid: opts.sensorid,
indoor: props.location[0].indoor,
span: opts.span,
start: DateTime.fromISO(opts.start.slice(7)).toUTC().toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'"),
data: opts.data,
peak: opts.peak,
count: erg.values.length,
},
values: erg.values,
}
if (!x.peak) {
delete ret.options.peak
}
if(ret.values.length === 0) {
ret.err = trans('NODATA')
}
}
return ret
}
}
return returnOnError(ret, 'CMNDUNKNOWN', getNoiseData.name)
}
// *********************************************
// getLiveData
//
// Get all actual data from database. Values are stored every 2.5min
//
// params:
// db: Database
// opt: different options (see further down)
//
// return:
// JSON:
// { sid: 29212, span: 1, start: "2019-10-23T00:00", count: 381, values: [
// { datetime: "2019-10-22T22:05:34.000Z", LAeq: 42.22, LA_min: 39.91, LA_max: 45.18, E10tel_eq: 16672.47212551061 },
// { datetime: "2019-10-22T22:07:59.000Z", LAeq: 53.72, LA_min: 39.97, LA_max: 63.54, E10tel_eq: 235504.9283896009 },
// .........
// ]}
// CSV
// datetime,LAeq,LAmax,LAmin,"10^(LAeq/10)"
// 2019-10-22T22:05:34.000Z,42.22,45.18,39.91,16672.47212551061
// 2019-10-22T22:07:59.000Z,53.72,63.54,39.97,235504.9283896009
// 2019-10-22T22:15:16.000Z,44.02,48.99,42.14,25234.807724805756
// ....
//
// *********************************************
const getLiveData = async (opts) => {
const erg = await getActData(opts)
if (opts.csv) {
let csvStr = "datetime,LAeq,LAmax,LAmin,10^(LAeq/10)\n"
if (!erg.err) {
for (let item of erg.values) {
if (item.n_AVG != -1) {
csvStr += item.datetime + ','
+ item.LAeq + ','
+ item.LA_max + ','
+ item.LA_min+ ','
+ item.E10tel_eq + '\n'
}
}
}
return csvStr
}
return erg
}
// *********************************************
// gethavgData
//
// Get average per hour, default: 5 days
//
// params:
// db: Database
// opt: different options (see further down)
//
// return:
// JSON:
// { sid: 29212, span: 5, start: "2019-11-01T23:00:00Z", average: 'hour', peak: 70, count: 120, values: [
// { datetime: "2019-10-22T23:00:00.000Z", n_AVG: 58.27, peakcount: 3 },
// { datetime: "2019-10-23T00:00:00.000Z", n_AVG: 45.77, peakcount: 4 },
// { datetime: "2019-10-23T01:00:00.000Z", n_AVG: 62.34, peakcount: 6 },
// .........
// ]}
// CSV:
// datetime,n_AVG,peakcount
// 2019-10-22T23:00:00.000Z,58.27,3
// 2019-10-23T00:00:00.000Z,45.77,4
// 2019-10-23T01:00:00.000Z,62.34,6
// ....
//
// *********************************************
const gethavgData = async (opts, props) => {
let erg = await getNoiseAVGData(opts)
if (opts.csv) {
let csvStr = "datetime,n_AVG,peakcount\n"
if (!erg.err) {
for (let item of erg.values) {
if (item.n_AVG != -1) {
csvStr += item.datetime + ',' + item.n_AVG + ',' + item.peakcount + '\n'
}
}
}
return csvStr
}
return {err: erg.err, values: erg.values}
}
// *********************************************
// getdavgData
//
// Get average per day , default: 30 days
//
// params:
// db: Database
// opt: different options (see further down)
//
// return:
// JSON:
// { sid: 29212, span: 30, start: "2019-10-23T00:00", average: 'day', peak: 70, count: 30, values: [
// { datetime: "2019-10-22T23:00:00.000Z", n_AVG: 58.27, peakcount: 300 },
// { datetime: "2019-10-23T23:00:00.000Z", n_AVG: 62.34, peakcount: 245 },
// .........
// ]}
//
// CSV:
// datetime,n_AVG,peakcount
// 2019-10-22T23:00:00.000Z,58.27,300
// 2019-10-23T23:00:00.000Z,62.34,245
// ....
//
// *********************************************
async function getdavgData(opts) {
opts.long = true;
let erg = await getNoiseAVGData(opts);
let val = [];
let csvStr = 'datetime,n_AVG,peakcount\n';
if (!erg.err) {
for (let i = 0; i < erg.values.length; i += 24) {
let sum = 0;
let count = 0;
let pk = 0;
let werte = {};
for (let k = 0; k < 24; k++) {
const item = erg.values[i + k]
if ((item != null) && (item.n_sum != -1)) {
sum += item.n_sum;
count += item.count;
pk += item.peakcount;
if (werte.datetime === undefined) {
let dt = DateTime.fromISO(item.datetime)
werte.datetime = dt.startOf('day').toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
}
}
}
if (count === 0) {
werte.n_AVG = -1
} else {
werte.n_AVG = 10 * Math.log10(sum / count);
}
werte.peakcount = pk;
if (opts.csv) {
csvStr += werte.datetime + ',' + werte.n_AVG + ',' + werte.peakcount + '\n'
} else {
val.push(werte);
}
}
}
if (opts.csv) {
return csvStr;
}
return {err: erg.err, values: val}
}
// addDatetime
// add the date of 'item' to 'werte' if werte.datetime is not yet set
const addDatetime = (werte, item) => {
if (werte.datetime === undefined) {
let dt = DateTime.fromISO(item.datetime)
werte.datetime = dt.startOf('day').toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
}
}
// *********************************************
// getdaynightData
//
// Get average for day (6h00 - 22h00) and night (22h00 - 6h00) separated
// Use the hour average calculation, which brings the sum and the count for every hour
// then add these values up for the desired time range and calculate the average.
//
// The night-value of the last day is always 0, because the night is not complete (the day is
// over at 24:00 and the night lasts until 6:00)
//
// params:
// db: Database
// opt: different options (see further down)
//
// return
// JSON
// { sid: 29212, span: 30, start: "2019-09-29", count: 30, values: [
// { date: "2019-09-29", n_dayAVG: 49.45592437272605, n_nightAVG: 53.744277577490614 },
// { date: "2019-09-30", n_dayAVG: 51.658169450663465, n_nightAVG: 47.82407695888631 },
// .........
// ]}
// CSV
// datetime,n_dayAVG,n_nightAVG
// 2019-09-29,49.45592437272605,53.744277577490614
// 2019-09-30,51.658169450663465,47.82407695888631
// ....
//
// *********************************************
async function getdaynightData(opts) {
opts.long = true;
let erg = await getNoiseAVGData(opts);
let val = [];
let csvStr = 'datetime,n_dayAVG,n_nightAVG\n';
if (!erg.err) {
let done = false;
let dt;
// The received hourly data array always (!!) starts at 0h00 local (!) time.
// So to calculate day values, we skip the first 6 hours and start from there;
// then we add 16 hours for the day and the following 8 hours for the night.
const length = erg.values.length
for (let i = 6; i < length;) {
let dsum = 0, dcnt = 0;
let nsum = 0, ncnt = 0;
let werte = {};
for (let k = 0; k < 16; k++, i++) {
if ( i < length) {
const item = erg.values[i]
if (item.n_sum != -1) {
addDatetime(werte, item)
dsum += item.n_sum;
dcnt += item.count;
}
}
}
if (i < (length - 8)) {
for (let k = 0; k < 8; k++, i++) {
if (i < length) {
const item = erg.values[i] // read the current item inside the loop
if (item.n_sum != -1) {
addDatetime(werte, item)
nsum += item.n_sum;
ncnt += item.count;
}
}
}
} else {
done = true;
}
if (dcnt != 0) {
werte.n_dayAVG = 10 * Math.log10(dsum / dcnt);
} else {
werte.n_dayAVG = 0;
}
if (ncnt != 0) {
werte.n_nightAVG = 10 * Math.log10(nsum / ncnt);
} else {
werte.n_nightAVG = 0;
}
if (opts.csv) {
csvStr += werte.datetime + ',' + werte.n_dayAVG + ',' + werte.n_nightAVG + '\n'
} else {
val.push(werte);
}
if (done) {
break;
}
}
}
if (opts.csv) {
return csvStr;
}
return {err: erg.err, values: val}
}
// *********************************************
// getLdenData
//
// Use hour averages to calculate the LDEN.
// Formula:
// LDEN = 10 * log10( 1/24 * ( 12*10^(Lday/10) + 4*10^((Levn+5)/10) + 8*10^((Lnight+10)/10) ) )
//
// params:
// db: Database
// sid: sensor number
// opt: different options (see further down)
//
// return:
// JSON:
// { sid: 29212, span: 30, start: "2019-09-29", count: 30, values: [
// { lden: 59.53553743437777, date: "2019-09-29" },
// { lden: 55.264733497513554, date: "2019-09-30" },
// .........
// ]}
// CSV
// datetime,lden
// 2019-09-29,59.53553743437777
// 2019-09-30,55.264733497513554
// ....
//
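//    Worked example (illustrative values): Lday = 60 dB, Levn = 55 dB, Lnight = 50 dB
//      day   = 12 * 10^(60/10)      = 12,000,000
//      evn   =  4 * 10^((55+5)/10)  =  4,000,000
//      night =  8 * 10^((50+10)/10) =  8,000,000
//      LDEN  = 10 * log10(24,000,000 / 24) = 60.0 dB
//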
// *********************************************
async function getLdenData(opts) {
opts.long = true;
let erg = await getNoiseAVGData(opts);
let val = [];
let csvStr = 'datetime,lden\n';
if (!erg.err) {
let done = false;
const calcAVG = (sum, cnt) => {
if (cnt != 0) {
return (10 * Math.log10(sum / cnt));
} else {
return 0;
}
}
// The received hourly data array always (!!) starts at 0h00 local (!) time.
// So to calculate day values, we skip the first 6 hours and start from there;
// then we add 12 hours for the day, the following 4 hours for the evening,
// and an additional 8 hours for the night.
const length = erg.values.length
for (let i = 6; i < length;) {
let dsum = 0, dcnt = 0;
let nsum = 0, ncnt = 0;
let esum = 0, ecnt = 0;
let werte = {};
let dayAVG = 0, evnAVG = 0, nightAVG = 0;
for (let k = 0; k < 12; k++, i++) {
if ( i < length) {
const item = erg.values[i]
if (item.n_sum != -1) {
addDatetime(werte, item)
dsum += item.n_sum;
dcnt += item.count;
}
}
}
for (let k = 0; k < 4; k++, i++) {
if ( i < length) {
const item = erg.values[i]
if (item.n_sum != -1) {
addDatetime(werte, item)
esum += item.n_sum;
ecnt += item.count;
}
}
}
if (i < (erg.values.length - 8)) {
for (let k = 0; k < 8; k++, i++) {
if (i < length) {
const item = erg.values[i]
if (item.n_sum != -1) {
addDatetime(werte, item)
nsum += item.n_sum;
ncnt += item.count;
}
}
}
} else {
done = true;
}
dayAVG = calcAVG(dsum, dcnt);
evnAVG = calcAVG(esum, ecnt);
nightAVG = calcAVG(nsum, ncnt);
// Calculate LDEN:
let day = 12 * Math.pow(10, dayAVG / 10); // ... and calculate the LDEN values following ...
let evn = 4 * Math.pow(10, (evnAVG + 5) / 10); // ... the LDEN formula (see function description)
let night = 8 * Math.pow(10, (nightAVG + 10) / 10);
werte.lden = 10 * Math.log10((day + evn + night) / 24);
if (opts.csv) {
csvStr += werte.datetime + ',' + werte.lden + '\n'
} else {
val.push(werte);
}
if (done) {
break;
}
}
}
if (opts.csv) {
return csvStr;
}
return {err: erg.err, values: val}
}
const getAPIprops = (opt) => {
}
const getNoiseAVGData = async (opts) => {
let ret = {err: null, values: []}
if (DBASE === 'mongo') {
ret = await mongo.fetchNoiseAVGData(opts)
} else if (DBASE === 'influx') {
ret = await influx.fetchNoiseAVGData(opts)
// Influx stores the average from 00:00h to 01:00h as 01:00h, so we have to shift the time 1 hour back
for (let x=0; x < ret.values.length; x++) {
ret.values[x].datetime = DateTime.fromISO(ret.values[x].datetime).toUTC().minus({hours:1}).toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
}
} else {
ret.err = 'DBASEUNKNOWN'
}
if(ret.err) {
return returnOnError(ret, ret.err, getNoiseAVGData.name)
}
if(ret.values.length === 0) {
return returnOnError(ret, 'NODATA', getNoiseAVGData.name)
}
// The times are always the END of the period (so: period from 00:00h to 01:00h -> time is 01:00)
// To easily extract the values, we copy the data from docs into a new array, so that the
// hour in an element in docs becomes the index into the new array (for every new day this
// index will be incremented by 24). Missing values are marked by: {n_sum=-1, n_AVG=-1}.
// For havg, add the missing hours to the array
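// Example: with the range starting on day 1, a value stamped 05:00 UTC on the
// second day of the range lands at index 24 + 5 = 29 of hoursArr.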
let emptyValues = {n_AVG: -1}
if (opts.long) {
emptyValues.n_sum = -1
}
const misshours = DateTime.fromISO(ret.values[0].datetime).toUTC().get('hour')
let hoursArr = new Array(opts.span * 24 + misshours); // generate new array
hoursArr.fill(emptyValues) // fill array with 'empty' values
let startDay = DateTime.fromISO(ret.values[0].datetime).toUTC().get('day') // calc first day
let k = 0
for (let d of ret.values) { // loop through docs
let stunde = DateTime.fromISO(d.datetime).toUTC().get('hour') // get current hour
let day = DateTime.fromISO(d.datetime).toUTC().get('day') // get current day
if (day != startDay) { // if date has changed
k += 24 // increment index by 24
startDay = day
}
hoursArr[k + stunde] = d // copy date into hourArray
}
return { err: ret.err, values: hoursArr}
}
const whatTable = [
{'what':'live', 'span': 1, 'daystart': false, peak: false, 'func': getLiveData},
{'what':'havg', 'span': 7, 'daystart': false, peak: true, 'func': gethavgData},
{'what':'davg', 'span': 30, 'daystart': true, peak: true, 'func': getdavgData},
{'what':'daynight', 'span': 30, 'daystart': true, peak: false, 'func': getdaynightData},
{'what':'lden', 'span': 30, 'daystart': true, peak: false, 'func': getLdenData},
{'what':'props', 'span': 0, 'daystart': true, peak:false, 'func': getAPIprops},
];
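// Example (the dispatch from the URL down to getNoiseData is assumed from routes/api.js):
//   GET /getsensordata?sensorid=140&data=havg&span=7
// would match the 'havg' entry above and run gethavgData with a 7-day span.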

29
test/test.js Normal file
View File

@@ -0,0 +1,29 @@
import {getData4map} from '../actions/data4map.js'
import assert from 'assert/strict'
describe("get data for map - test", function() {
const box = {
"east": 9.322391662597672,
"north": 48.86726810417461,
"south": 48.69057640500091,
"west": 8.99760833740236
}
it("geigeractivity should return at least 4 elements", async function() {
const erg = await getData4map(
{
"type": "radioactivity",
"box": box,
test: true
})
assert.ok(erg.count >= 4)
})
it("pm should return at least 270 elements", async function() {
const erg = await getData4map(
{
"type": "pm",
"box": box
})
assert.ok(erg.count >= 270)
})
})

54
test_influx_sql.js Normal file
View File

@@ -0,0 +1,54 @@
// Simple test for influx_sql.js functions
import { fetchActData, fetchNoiseAVGData } from './databases/influx_sql.js'
async function testInfluxSQL() {
console.log('Testing InfluxDB 1.8 SQL implementation...')
// Test options similar to what would be used in the application
const testOpts = {
sensorid: 'test_sensor_001',
start: 'now() - 1h',
stop: 'now()',
sort: 1,
peak: 70, // dB threshold for noise peaks
long: false
}
try {
console.log('\n1. Testing fetchActData...')
const actResult = await fetchActData(testOpts)
console.log('fetchActData result structure:', {
err: actResult.err,
valuesCount: actResult.values ? actResult.values.length : 0,
sampleValue: actResult.values && actResult.values.length > 0 ? actResult.values[0] : null
})
console.log('\n2. Testing fetchNoiseAVGData...')
const noiseResult = await fetchNoiseAVGData(testOpts)
console.log('fetchNoiseAVGData result structure:', {
err: noiseResult.err,
valuesCount: noiseResult.values ? noiseResult.values.length : 0,
sampleValue: noiseResult.values && noiseResult.values.length > 0 ? noiseResult.values[0] : null
})
console.log('\n3. Testing fetchNoiseAVGData with long format...')
testOpts.long = true
const noiseLongResult = await fetchNoiseAVGData(testOpts)
console.log('fetchNoiseAVGData (long) result structure:', {
err: noiseLongResult.err,
valuesCount: noiseLongResult.values ? noiseLongResult.values.length : 0,
sampleValue: noiseLongResult.values && noiseLongResult.values.length > 0 ? noiseLongResult.values[0] : null
})
} catch (error) {
console.error('Test error:', error)
}
}
// Export for use in other test files
export { testInfluxSQL }
// Run test if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
testInfluxSQL()
}

148
utilities/chartoptions.js Normal file
View File

@@ -0,0 +1,148 @@
// Utility routines for plotting the data
import moment from 'moment'   // used by calcDays below; note: not yet listed in package.json
export const colors = {'eq': '#0000FF', 'max': '#FF0000', 'min': '#008000', 'peaks': '#DAA520'};
export const noise_ymin = 30;
export function createGlobObtions() {
// Options that are identical for all plots
let globObject = {
chart: {
spacingRight: 20,
spacingLeft: 20,
spacingTop: 25,
backgroundColor: {
linearGradient: [0, 400, 0, 0],
stops: [
[0, '#eee'],//[0, '#ACD0AA'], //[0, '#A18D99'], // [0, '#886A8B'], // [0, '#F2D0B5'],
[1, '#fff']
]
},
type: 'line',
borderWidth: '2',
// events: {
// selection: function (event) {
// if (event.xAxis) {
// doUpdate = false;
// } else {
// doUpdate = true;
// }
// }
// }
},
title: {
align: 'left',
style: {'fontSize': '25px'},
useHTML: true,
},
subtitle: {
align: 'left',
},
tooltip: {
valueDecimals: 1,
backgroundColor: '#ffffff',
borderWidth: 0,
borderRadius: 0,
useHTML: true,
},
xAxis: {
type: 'datetime',
title: {
text: 'date/time',
},
gridLineWidth: 2,
labels: {
formatter: function () {
let v = this.axis.defaultLabelFormatter.call(this);
if (v.indexOf(':') == -1) {
return '<span style="font-weight:bold;color:red">' + v + '<span>';
} else {
return v;
}
}
},
},
legend: {
enabled: true,
layout: 'horizontal',
// verticalAlign: 'top',
borderWidth: 1,
align: 'center',
},
plotOptions: {
series: {
animation: false,
turboThreshold: 0,
marker: {
enabled: false,
},
},
}
};
return globObject;
}
export function calcWeekends(data, isyear) {
/* let weekend = [];
let oldDay = 8;
for (let i = 0; i < data.length; i++) {
let mom = moment(data[i].date);
if (isyear) {
mom = moment(data[i]._id)
}
let day = mom.day();
let st = mom.startOf('day');
if (day != oldDay) {
if (day == 6) {
weekend.push({
color: 'rgba(169,235,158,0.4)',
from: st.valueOf(),
to: st.add(1, 'days').valueOf(),
zIndex: 0
})
} else if (day == 0) {
weekend.push({
color: 'rgba(169,235,158,0.4)',
from: st.valueOf(),
to: st.add(1, 'days').valueOf(),
zIndex: 0
})
}
oldDay = day;
}
}
return weekend;
*/}
export function calcDays(data, isyear) {
let days = [];
if (data.length == 0) {
return days
}
let oldday = moment(data[0].date).day();
if (isyear) {
oldday = moment(data[0]._id).day();
}
for (let i = 0; i < data.length; i++) {
let m = moment(data[i].date);
if (isyear) {
m = moment(data[i]._id);
}
let tag = m.day()
if (tag != oldday) {
m.startOf('day');
days.push({color: 'lightgray', value: m.valueOf(), width: 1, zIndex: 2});
oldday = tag;
}
}
return days;
};
export const setoptionfromtable = (opt,tabval) => {
let ret = opt
if ((opt === null) || (opt === '') || (opt === undefined) || (opt < tabval)){
ret = tabval
}
return ret
}

54
utilities/checkparams.js Normal file
View File

@@ -0,0 +1,54 @@
// parse the params from http call
import {returnOnError} from "./reporterror.js"
const checkParams = (params, mo) => {
let o = {opts: {}, err: null}
if ((mo.mandatory.length !== 0) && (params === undefined)) {
return returnOnError(o, 'NOPARAMETER', checkParams.name )
}
for (let p of mo.mandatory) {
if (!(p.name in params)) {
return returnOnError(o, 'NOMANDPARAM', checkParams.name, p.name)
}
if (p.type === 'int') {
let x = parseInt(params[p.name])
if (isNaN(x)) {
return returnOnError(o, 'PARAMNONUM', checkParams.name, p.name)
} else {
o.opts[p.name] = x
continue
}
} else if (p.type === 'float') {
let x = parseFloat(params[p.name])
if (isNaN(x)) {
return returnOnError(o, 'PARAMNONUM', checkParams.name, p.name)
} else {
o.opts[p.name] = x
continue
}
}
o.opts[p.name] = params[p.name]
}
for(let p of mo.optional) {
if (!(p.name in params)) {
o.opts[p.name] = p.default
} else {
if (p.type === 'int') {
let x = parseInt(params[p.name])
if (isNaN(x)) {
o.opts[p.name] = p.default
} else {
o.opts[p.name] = x
}
} else if (p.type === 'bool') {
o.opts[p.name] = params[p.name] === 'true'
} else {
o.opts[p.name] = params[p.name]
}
}
}
return o
}
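// Example call (shape mirrors the usage in sensorspecials/noise.js; values illustrative):
//   checkParams({sensorid: '140', span: '7'}, {
//       mandatory: [{name: 'sensorid', type: 'int'}],
//       optional:  [{name: 'span', type: 'int', default: 1}]
//   })
//   -> { opts: {sensorid: 140, span: 7}, err: null }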
export default checkParams

35
utilities/csv2json.js Normal file
View File

@@ -0,0 +1,35 @@
// convert influx csv output to JSON
export function csv2Json(str, delimiter = ",") {
// slice from start of text to the first \n index
// use split to create an array from string by delimiter
const headers = str.slice(0, str.indexOf("\r\n")).split(delimiter).slice(3);
let x = headers.findIndex((x) => (x === '_time') || (x === '_stop'))
if (x != -1) {
headers[x] = 'datetime'
}
// slice from \n index + 1 to the end of the text
// use split to create an array of each csv value row
const rows = str.slice(str.indexOf("\r\n") + 2).split("\r\n").slice(0,-2)
// Map the rows
// split values from each row into an array
// use headers.reduce to create an object
// object properties derived from headers:values
// the object passed as an element of the array
const arr = rows.map(function (row) {
const values = row.split(delimiter).slice(3);
const el = headers.reduce(function (object, header, index) {
if(header !== 'datetime') {
object[header] = parseFloat(values[index]);
} else {
object[header] = values[index];
}
return object;
}, {});
return el;
});
// return the array
return arr;
}
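// Example (input shape inferred from the slice(3) calls, i.e. three leading
// bookkeeping columns as in Influx annotated CSV; values illustrative):
//   ",result,table,_time,LA_max\r\n,_result,0,2023-11-29T16:00:00Z,63.5\r\n\r\n"
//   -> [ { datetime: '2023-11-29T16:00:00Z', LA_max: 63.5 } ]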

16
utilities/logit.js Normal file
View File

@@ -0,0 +1,16 @@
import { DateTime} from 'luxon'
const MOCHA_TEST = process.env.MOCHA_TEST || false
export function logit(str) {
if(MOCHA_TEST) return
let s = `${DateTime.now().toISO()} => ${str}`;
console.log(s);
}
export function logerror(str) {
if(MOCHA_TEST) return
let s = `${DateTime.now().toISO()} => *** ERROR *** ${str}`;
console.log(s);
}

22
utilities/reporterror.js Normal file
View File

@@ -0,0 +1,22 @@
import {logit} from "./logit.js";
import { translate as trans } from '../routes/api.js'
export const reportError = (message, errortext) => {
message.error = true
message.errortext = errortext
return message
}
export const returnOnError = (pr, error, name, p1='', p2='') => {
error = trans(error)
if (error.indexOf('xxx') !== -1) {
error = error.replace('xxx', p1)
}
if (error.indexOf('yyy') !== -1) {
error = error.replace('yyy', p2)
}
pr.err = error
logit(`${name}: ${error}`)
return pr
}