Adjustments so that INFLUX can at least be read
@@ -2,7 +2,7 @@

 const DBASE = process.env.DBASE || 'mongo'
 import {DateTime} from "luxon"
-import * as influx from "../databases/influx.js"
+import * as influx from "../databases/influx_sql.js"
 import * as mongo from "../databases/mongo.js"
 import {returnOnError} from "../utilities/reporterror.js"
 import {csv2Json} from "../utilities/csv2json.js"

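The only change in this file is the import path: the Flux-based influx.js is swapped for the new InfluxQL-based influx_sql.js. The swap is safe only because the new module exposes the same API; a minimal check of the contract the callers rely on (function names taken from the hunks below, the assertions themselves are illustrative):

    import * as influx from "../databases/influx_sql.js"
    console.assert(typeof influx.fetchActData === 'function')
    console.assert(typeof influx.fetchNoiseAVGData === 'function')
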
@@ -7,6 +7,7 @@
 // This implementation converts LA_max to E10tel_eq at runtime to maintain
 // compatibility with the Flux version while ensuring correct logarithmic averaging.

+import 'dotenv/config'
 import axios from 'axios'
 import { DateTime } from 'luxon'
 import { logit, logerror } from '../utilities/logit.js'

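Loading 'dotenv/config' as the first import populates process.env from a local .env file before INFLUXHOST, INFLUXPORT, and friends are read. A hypothetical .env for this setup, using only variable names that appear in this commit (values are illustrative):

    # values illustrative; variable names taken from this commit
    DBASE=influx
    INFLUXHOST=localhost
    INFLUXPORT=8086
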
@@ -31,6 +32,7 @@ const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/write`
 const influxRead = async (query) => {
   let start = DateTime.now()
   logit(`ReadInflux from ${INFLUXURL_READ}`)
+  logit(`Query: ${query}`)
   let erg = { values: [], err: null}

   try {

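For orientation, a hedged sketch of the request influxRead presumably sends: InfluxDB 1.8 serves InfluxQL over GET /query with the database and query passed as db and q parameters. Only INFLUXURL_READ and the handling of ret.status and ret.data are visible in this diff; the database name constant is an assumption:

    // INFLUXDB (the database name) is assumed, not shown in this diff
    const ret = await axios.get(INFLUXURL_READ, {
      params: { db: INFLUXDB, q: query, epoch: 'ms' },
    })
    // on success ret.data.results holds the result series;
    // ret.data.error is set when InfluxDB rejects the query
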
@@ -54,17 +56,23 @@ const influxRead = async (query) => {
     })

     if (ret.status !== 200) {
-      return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
+      erg.err = `RESPSTATUS: ${ret.status}`
+      logit(`ERROR ${influxRead.name}: ${erg.err}`)
+      return erg
     }

     // InfluxDB 1.8 returns JSON format
     if (ret.data.error) {
-      return returnOnError(erg, ret.data.error, influxRead.name)
+      erg.err = ret.data.error
+      logit(`ERROR ${influxRead.name}: ${erg.err}`)
+      return erg
     }

     erg.values = ret.data.results
   } catch (e) {
-    return returnOnError(erg, e, influxRead.name)
+    erg.err = e.toString()
+    logit(`ERROR ${influxRead.name}: ${erg.err}`)
+    return erg
   }

   logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)

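All three error paths now follow the same inline pattern instead of delegating to returnOnError: set erg.err, log it, return the result object. Callers only need to check err, for example:

    // usage sketch; the query string is illustrative
    const { values, err } = await influxRead('SELECT * FROM "DNMS" LIMIT 1')
    if (err) {
      // err is 'RESPSTATUS: <code>', InfluxDB's error text, or a stringified exception
    }
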
@@ -156,15 +164,15 @@ const transformInfluxResult = (series) => {
 const fetchFromInflux = async (ret, query) => {
   let { values, err } = await influxRead(query)
   if (err) {
-    if (err.toString().includes('400')) {
-      return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
-    } else {
-      return returnOnError(ret, err, fetchFromInflux.name)
-    }
+    ret.err = err.toString().includes('400') ? 'SYNTAXURL' : err.toString()
+    logit(`ERROR ${fetchFromInflux.name}: ${ret.err}`)
+    return ret
   }

   if (!values || !values.length || !values[0].series) {
-    return returnOnError(ret, 'NODATA', fetchFromInflux.name)
+    ret.err = 'NODATA'
+    logit(`ERROR ${fetchFromInflux.name}: No data returned from query`)
+    return ret
   }

   ret.values = transformInfluxResult(values[0].series)

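The values[0].series guard mirrors the JSON shape of an InfluxDB 1.x response, where each statement result carries a series array with parallel columns and values arrays. transformInfluxResult itself is not shown in this diff; a sketch of what it presumably does, given that later code reads fields like record.DNMS_noise_LA_max by name:

    // hedged sketch; the real transformInfluxResult body is outside this diff
    const transformSketch = (series) =>
      series[0].values.map((row) =>
        Object.fromEntries(series[0].columns.map((col, i) => [col, row[i]])))
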
@@ -200,9 +208,9 @@ export const fetchActData = async (opts) => {
   // InfluxQL query to get LA_max for a sensor within time range
   // Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
   const query = `
-    SELECT "LA_max", "LA_min", "LA_eq"
-    FROM "measurements"
-    WHERE "sid" = '${opts.sensorid}'
+    SELECT "DNMS_noise_LA_max", "DNMS_noise_LA_min", "DNMS_noise_LA_eq"
+    FROM "DNMS"
+    WHERE "node" = '${opts.sensorid}'
     AND time >= ${startTime}
     AND time <= ${stopTime}
     ${orderClause}

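The query moves from the generic schema (measurement "measurements", tag "sid", fields LA_max/LA_min/LA_eq) to the DNMS schema (measurement "DNMS", tag "node", DNMS_noise_* fields). With the sensor id from the test file at the end of this commit, the template renders to (time bounds illustrative):

    SELECT "DNMS_noise_LA_max", "DNMS_noise_LA_min", "DNMS_noise_LA_eq"
    FROM "DNMS"
    WHERE "node" = 'esp8266-5829557'
    AND time >= now() - 1h
    AND time <= now()
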
@@ -267,6 +275,13 @@ const calculateLogMean = (values) => {
 export const fetchNoiseAVGData = async (opts) => {
   let ret = { err: null, values: [] }

+  // convert sensorID to esp-chip-id, if possible
+  const convert2espid = (opts) => {
+    const sid = opts.sensorid
+
+
+  }
+
   // Convert Flux time format to InfluxQL format
   let startTime = opts.start.replace('start: ', '').trim()
   let stopTime = opts.stop.replace('stop: ', '').trim()

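Note that convert2espid is added only as an empty stub; this commit does not implement it. Going by the id format in the test file ('esp8266-5829557'), a purely hypothetical implementation could look like this (not from the diff):

    // hypothetical sketch - the commit leaves the stub body empty
    const convert2espid = (opts) => {
      const sid = opts.sensorid
      return /^\d+$/.test(sid) ? `esp8266-${sid}` : sid   // assumed mapping
    }
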
@@ -277,12 +292,11 @@ export const fetchNoiseAVGData = async (opts) => {
   // Query 1: Get LA_max data aggregated by hour for E10tel calculation
   // In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
   const queryLAmaxForE10 = `
-    SELECT "LA_max", time
-    FROM "measurements"
-    WHERE "sid" = '${opts.sensorid}'
+    SELECT "DNMS_noise_LA_max"
+    FROM "DNMS"
+    WHERE "node" = '${opts.sensorid}'
     AND time >= ${startTime}
     AND time <= ${stopTime}
-    AND "LA_max" IS NOT NULL
     ORDER BY time ASC
   `

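The "E10tel equivalent" mentioned above is the linear energy 10^(L/10) of a level L in dB; averaging those energies and converting back gives the logarithmic mean that the file header calls "correct logarithmic averaging". A minimal sketch of that calculation (calculateLogMean's actual body is outside the hunks shown here):

    // energy mean of dB levels: dB -> 10^(L/10) -> mean -> dB
    const logMean = (dbLevels) => {
      const energies = dbLevels.map((l) => 10 ** (l / 10))
      return 10 * Math.log10(energies.reduce((a, b) => a + b, 0) / energies.length)
    }
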
@@ -293,11 +307,15 @@ export const fetchNoiseAVGData = async (opts) => {
   // Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
   let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
   if (lamaxErr) {
-    return returnOnError(ret, lamaxErr, fetchNoiseAVGData.name)
+    ret.err = lamaxErr.toString()
+    logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
+    return ret
   }

   if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
-    return returnOnError(ret, 'NODATA', fetchNoiseAVGData.name)
+    ret.err = 'NODATA'
+    logit(`ERROR ${fetchNoiseAVGData.name}: No data returned from query`)
+    return ret
   }

   // Transform LA_max results

@@ -323,7 +341,7 @@ export const fetchNoiseAVGData = async (opts) => {
       }
     }

-    const lamax = record.LA_max
+    const lamax = record.DNMS_noise_LA_max || record.LA_max
     if (lamax !== null && lamax !== undefined) {
       // Store original LA_max value
       hourlyData[hourKey].lamaxValues.push(lamax)

@@ -385,7 +403,9 @@ export const fetchNoiseAVGData = async (opts) => {
     }

   } catch (e) {
-    return returnOnError(ret, e, fetchNoiseAVGData.name)
+    ret.err = e.toString()
+    logit(`ERROR ${fetchNoiseAVGData.name}: ${ret.err}`)
+    return ret
   }

   return ret

@@ -55,7 +55,7 @@ export const readProperties = async (query, limit = 0) => {
   let client = await connectMongo()
   try {
     if ("sid" in query) { // if sid is given, read property for sid
-      ret.properties = await client.db(MONGOBASE).collection('properties_collection').findOne({_id: query.sid})
+      ret.properties = await client.db(MONGOBASE).collection(properties_collection).findOne({_id: query.sid})
     } else { // otherwise read props corresponding to query
       ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
     }

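The actual fix here is small but real: the findOne branch passed the string literal 'properties_collection' while the find branch already used the properties_collection variable, so the two branches could read from different collections whenever the variable names something else. The variable's definition is not part of this diff; presumably something along these lines:

    // assumed shape of the definition elsewhere in mongo.js (not in this diff)
    const properties_collection = 'properties'
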
package-lock.json (generated; 18 additions)

@@ -14,6 +14,7 @@
         "cookie-parser": "~1.4.7",
         "cors": "^2.8.5",
         "debug": "~4.4.3",
+        "dotenv": "^17.2.3",
         "express": "^5.1.0",
         "http-errors": "~2.0.0",
         "i18next": "^25.5.2",

@@ -2117,6 +2118,18 @@
       "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==",
       "license": "MIT"
     },
+    "node_modules/dotenv": {
+      "version": "17.2.3",
+      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz",
+      "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==",
+      "license": "BSD-2-Clause",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://dotenvx.com"
+      }
+    },
     "node_modules/dunder-proto": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",

@@ -5980,6 +5993,11 @@
       "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz",
       "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ=="
     },
+    "dotenv": {
+      "version": "17.2.3",
+      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz",
+      "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="
+    },
     "dunder-proto": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",

|
|||||||
"cookie-parser": "~1.4.7",
|
"cookie-parser": "~1.4.7",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"debug": "~4.4.3",
|
"debug": "~4.4.3",
|
||||||
|
"dotenv": "^17.2.3",
|
||||||
"express": "^5.1.0",
|
"express": "^5.1.0",
|
||||||
"http-errors": "~2.0.0",
|
"http-errors": "~2.0.0",
|
||||||
"i18next": "^25.5.2",
|
"i18next": "^25.5.2",
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import { getActData, getAvgData, getLongAvg, calcRange} from "../actions/getsens
 import checkParams from "../utilities/checkparams.js";
 import {DateTime} from 'luxon'
 import { translate as trans } from '../routes/api.js'
-import * as influx from "../databases/influx.js"
+import * as influx from "../databases/influx_sql.js"
 import * as mongo from "../databases/mongo.js"
 import { setoptionfromtable } from "../utilities/chartoptions.js"

@@ -6,7 +6,7 @@ async function testInfluxSQL() {

   // Test options similar to what would be used in the application
   const testOpts = {
-    sensorid: 'test_sensor_001',
+    sensorid: 'esp8266-5829557',
     start: 'now() - 1h',
     stop: 'now()',
     sort: 1,

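With a real esp-chip id the test can now hit actual DNMS data. The start/stop values are already valid InfluxQL time expressions, so they drop straight into the WHERE clauses built above; a hedged sketch of driving the rewritten reader with these opts:

    // usage sketch; the return shape follows the { err, values }
    // pattern visible throughout this commit
    const res = await influx.fetchActData(testOpts)
    if (res.err) console.error(res.err)
    else console.log(`${res.values.length} rows for ${testOpts.sensorid}`)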