Anpassungen, damit INFLUX mindestens mal ausgelesen werden kann

This commit is contained in:
2025-11-03 18:45:07 +00:00
parent 2308aa56a3
commit 6d9d94f2fa
7 changed files with 63 additions and 24 deletions

View File

@@ -7,6 +7,7 @@
// This implementation converts LA_max to E10tel_eq at runtime to maintain
// compatibility with the Flux version while ensuring correct logarithmic averaging.
import 'dotenv/config'
import axios from 'axios'
import { DateTime } from 'luxon'
import { logit, logerror } from '../utilities/logit.js'
@@ -31,6 +32,7 @@ const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/write`
const influxRead = async (query) => {
let start = DateTime.now()
logit(`ReadInflux from ${INFLUXURL_READ}`)
logit(`Query: ${query}`)
let erg = { values: [], err: null}
try {
@@ -54,17 +56,23 @@ const influxRead = async (query) => {
})
if (ret.status !== 200) {
return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
erg.err = `RESPSTATUS: ${ret.status}`
logit(`ERROR ${influxRead.name}: ${erg.err}`)
return erg
}
// InfluxDB 1.8 returns JSON format
if (ret.data.error) {
return returnOnError(erg, ret.data.error, influxRead.name)
erg.err = ret.data.error
logit(`ERROR ${influxRead.name}: ${erg.err}`)
return erg
}
erg.values = ret.data.results
} catch (e) {
return returnOnError(erg, e, influxRead.name)
erg.err = e.toString()
logit(`ERROR ${influxRead.name}: ${erg.err}`)
return erg
}
logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
@@ -156,15 +164,15 @@ const transformInfluxResult = (series) => {
const fetchFromInflux = async (ret, query) => {
let { values, err } = await influxRead(query)
if (err) {
if (err.toString().includes('400')) {
return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
} else {
return returnOnError(ret, err, fetchFromInflux.name)
}
ret.err = err.toString().includes('400') ? 'SYNTAXURL' : err.toString()
logit(`ERROR ${fetchFromInflux.name}: ${ret.err}`)
return ret
}
if (!values || !values.length || !values[0].series) {
return returnOnError(ret, 'NODATA', fetchFromInflux.name)
ret.err = 'NODATA'
logit(`ERROR ${fetchFromInflux.name}: No data returned from query`)
return ret
}
ret.values = transformInfluxResult(values[0].series)
@@ -200,9 +208,9 @@ export const fetchActData = async (opts) => {
// InfluxQL query to get LA_max for a sensor within time range
// Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
const query = `
SELECT "LA_max", "LA_min", "LA_eq"
FROM "measurements"
WHERE "sid" = '${opts.sensorid}'
SELECT "DNMS_noise_LA_max", "DNMS_noise_LA_min", "DNMS_noise_LA_eq"
FROM "DNMS"
WHERE "node" = '${opts.sensorid}'
AND time >= ${startTime}
AND time <= ${stopTime}
${orderClause}
@@ -267,6 +275,13 @@ const calculateLogMean = (values) => {
export const fetchNoiseAVGData = async (opts) => {
let ret = { err: null, values: [] }
// convert sensorID to esp-chip-id, if possible
const convert2espid = (opts) => {
const sid = opts.sensorid
}
// Convert Flux time format to InfluxQL format
let startTime = opts.start.replace('start: ', '').trim()
let stopTime = opts.stop.replace('stop: ', '').trim()
@@ -277,12 +292,11 @@ export const fetchNoiseAVGData = async (opts) => {
// Query 1: Get LA_max data aggregated by hour for E10tel calculation
// In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
const queryLAmaxForE10 = `
SELECT "LA_max", time
FROM "measurements"
WHERE "sid" = '${opts.sensorid}'
SELECT "DNMS_noise_LA_max"
FROM "DNMS"
WHERE "node" = '${opts.sensorid}'
AND time >= ${startTime}
AND time <= ${stopTime}
AND "LA_max" IS NOT NULL
ORDER BY time ASC
`
@@ -293,11 +307,15 @@ export const fetchNoiseAVGData = async (opts) => {
// Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
if (lamaxErr) {
return returnOnError(ret, lamaxErr, fetchNoiseAVGData.name)
ret.err = lamaxErr.toString()
logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
return ret
}
if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
return returnOnError(ret, 'NODATA', fetchNoiseAVGData.name)
ret.err = 'NODATA'
logit(`ERROR ${fetchNoiseAVGData.name}: No data returned from query`)
return ret
}
// Transform LA_max results
@@ -323,7 +341,7 @@ export const fetchNoiseAVGData = async (opts) => {
}
}
const lamax = record.LA_max
const lamax = record.DNMS_noise_LA_max || record.LA_max
if (lamax !== null && lamax !== undefined) {
// Store original LA_max value
hourlyData[hourKey].lamaxValues.push(lamax)
@@ -385,7 +403,9 @@ export const fetchNoiseAVGData = async (opts) => {
}
} catch (e) {
return returnOnError(ret, e, fetchNoiseAVGData.name)
ret.err = e.toString()
logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
return ret
}
return ret

View File

@@ -55,7 +55,7 @@ export const readProperties = async (query, limit = 0) => {
let client = await connectMongo()
try {
if ("sid" in query) { // if sid is given, read property for sid
ret.properties = await client.db(MONGOBASE).collection('properties_collection').findOne({_id: query.sid})
ret.properties = await client.db(MONGOBASE).collection(properties_collection).findOne({_id: query.sid})
} else { // otherwise read props corresponding to query
ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
}