// Data preparation for fetching noise data
//
// rxf 2023-03-05

const DBASE = process.env.DBASE || 'mongo'

import { returnOnError } from "../utilities/reporterror.js";
import { getActData, getAvgData, getLongAvg, calcRange } from "../actions/getsensorData.js"
import checkParams from "../utilities/checkparams.js";
import { DateTime } from 'luxon'
import { translate as trans } from '../routes/api.js'
import * as influx from "../databases/influx.js"
import * as mongo from "../databases/mongo.js"
import { setoptionfromtable } from "../utilities/chartoptions.js"

export const getNoiseData = async (params, possibles, props) => {
  let ret = {err: null}

  let {opts, err} = checkParams(params, {
    mandatory: [
      {name: 'sensorid', type: 'int'},
    ],
    optional: possibles
  })
  if (err) {
    return returnOnError(ret, err, getNoiseData.name)
  }
  // To stay compatible with the old API: 'out=csv' is treated like 'csv=true'
  if (opts.out === 'csv') {
    opts.csv = true
  }
  // Dispatch to the handler that matches the requested 'data' type (see whatTable below)
  for (let x of whatTable) {
    if (x.what === opts.data) {
      opts.span = setoptionfromtable(opts.span, x.span)
      opts.daystart = setoptionfromtable(opts.daystart, x.daystart)
      let {start, stop} = calcRange(opts)               // calc time range
      opts.start = start
      opts.stop = stop
      let erg = await x.func(opts)                      // get the data
      if (opts.csv === true) {
        ret = erg
      } else {
        ret = {
          err: erg.err,
          options: {
            sid: opts.sensorid,
            indoor: props.location[0].indoor,
            span: opts.span,
            start: DateTime.fromISO(opts.start.slice(7)).toUTC().toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'"),
            data: opts.data,
            peak: opts.peak,
            count: erg.values.length,
          },
          values: erg.values,
        }
        if (!x.peak) {
          delete ret.options.peak
        }
        if (ret.values.length === 0) {
          ret.err = trans('NODATA')
        }
      }
      return ret
    }
  }
  return returnOnError(ret, 'CMNDUNKNOWN', getNoiseData.name)
}

// *********************************************
// getLiveData
//
// Get the current measurement data from the database. Values are stored every 2.5 min.
//
// params:
//    opts: request options (see getNoiseData)
//
// return:
//  JSON:
//  { sid: 29212, span: 1, start: "2019-10-23T00:00", count: 381, values: [
//    { datetime: "2019-10-22T22:05:34.000Z", LAeq: 42.22, LA_min: 39.91, LA_max: 45.18, E10tel_eq: 16672.47212551061 },
//    { datetime: "2019-10-22T22:07:59.000Z", LAeq: 53.72, LA_min: 39.97, LA_max: 63.54, E10tel_eq: 235504.9283896009 },
//    .........
//  ]}
//  CSV:
//  datetime,LAeq,LAmax,LAmin,"10^(LAeq/10)"
//  2019-10-22T22:05:34.000Z,42.22,45.18,39.91,16672.47212551061
//  2019-10-22T22:07:59.000Z,53.72,63.54,39.97,235504.9283896009
//  2019-10-22T22:15:16.000Z,44.02,48.99,42.14,25234.807724805756
//  ....
//
// *********************************************
const getLiveData = async (opts) => {
  const erg = await getActData(opts)
  if (opts.csv) {
    let csvStr = "datetime,LAeq,LAmax,LAmin,10^(LAeq/10)\n"
    if (!erg.err) {
      for (let item of erg.values) {
        if (item.n_AVG != -1) {                 // skip placeholder entries for missing values
          csvStr += item.datetime + ','
            + item.LAeq + ','
            + item.LA_max + ','
            + item.LA_min + ','
            + item.E10tel_eq + '\n'
        }
      }
    }
    return csvStr
  }
  return erg
}

// *********************************************
// gethavgData
//
// Get the average per hour, default span: 7 days (see whatTable)
//
// params:
//    opts: request options (see getNoiseData)
//
// return:
//  JSON:
//  { sid: 29212, span: 5, start: "2019-11-01T23:00:00Z", average: 'hour', peak: 70, count: 120, values: [
//    { datetime: "2019-10-22T23:00:00.000Z", n_AVG: 58.27, peakcount: 3 },
//    { datetime: "2019-10-23T00:00:00.000Z", n_AVG: 45.77, peakcount: 4 },
//    { datetime: "2019-10-23T01:00:00.000Z", n_AVG: 62.34, peakcount: 6 },
//    .........
//  ]}
//
//  CSV:
//  datetime,n_AVG,peakcount
//  2019-10-22T23:00:00.000Z,58.27,3
//  2019-10-23T00:00:00.000Z,45.77,4
//  2019-10-23T01:00:00.000Z,62.34,6
//  ....
//
// *********************************************
const gethavgData = async (opts, props) => {
  let erg = await getNoiseAVGData(opts)
  if (opts.csv) {
    let csvStr = "datetime,n_AVG,peakcount\n"
    if (!erg.err) {
      for (let item of erg.values) {
        if (item.n_AVG != -1) {                 // skip hours without data
          csvStr += item.datetime + ',' + item.n_AVG + ',' + item.peakcount + '\n'
        }
      }
    }
    return csvStr
  }
  return {err: erg.err, values: erg.values}
}

// *********************************************
// getdavgData
//
// Get the average per day, default span: 30 days
//
// params:
//    opts: request options (see getNoiseData)
//
// return:
//  JSON:
//  { sid: 29212, span: 30, start: "2019-10-23T00:00", average: 'day', peak: 70, count: 30, values: [
//    { datetime: "2019-10-22T23:00:00.000Z", n_AVG: 58.27, peakcount: 300 },
//    { datetime: "2019-10-23T23:00:00.000Z", n_AVG: 62.34, peakcount: 245 },
//    .........
//  ]}
//
//  CSV:
//  datetime,n_AVG,peakcount
//  2019-10-22T23:00:00.000Z,58.27,300
//  2019-10-23T23:00:00.000Z,62.34,245
//  ....
//
// *********************************************
async function getdavgData(opts) {
  opts.long = true;                                   // request hourly sums and counts as well
  let erg = await getNoiseAVGData(opts);
  let val = [];
  let csvStr = 'datetime,n_AVG,peakcount\n';
  if (!erg.err) {
    // The hourly array starts at 0h00 local time, so every 24 entries form one day
    for (let i = 0; i < erg.values.length; i += 24) {
      let sum = 0;
      let count = 0;
      let pk = 0;
      let werte = {};
      for (let k = 0; k < 24; k++) {
        const item = erg.values[i + k]
        if ((item != null) && (item.n_sum != -1)) {   // skip missing hours
          sum += item.n_sum;
          count += item.count;
          pk += item.peakcount;
          if (werte.datetime === undefined) {
            let dt = DateTime.fromISO(item.datetime)
            werte.datetime = dt.startOf('day').toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
          }
        }
      }
      if (count === 0) {
        werte.n_AVG = -1                              // no data for this day
      } else {
        werte.n_AVG = 10 * Math.log10(sum / count);   // energetic (Leq) average of the day
      }
      werte.peakcount = pk;
      if (opts.csv) {
        csvStr += werte.datetime + ',' + werte.n_AVG + ',' + werte.peakcount + '\n'
      } else {
        val.push(werte);
      }
    }
  }
  if (opts.csv) {
    return csvStr;
  }
  return {err: erg.err, values: val}
}
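
// Worked example (illustrative only): the daily value above is an energetic (Leq)
// average, not an arithmetic mean of dB values. Assume two hours, each with a
// single sample (count = 1), so n_sum = 10^(LAeq/10):
//
//    hour 1: LAeq = 50 dB  ->  10^(50/10) =  100000
//    hour 2: LAeq = 60 dB  ->  10^(60/10) = 1000000
//
//    n_AVG = 10 * log10((100000 + 1000000) / 2) ≈ 57.4 dB
//
// whereas the arithmetic mean of 50 dB and 60 dB would give 55 dB.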
// addDatetime
// Set werte.datetime to the start of the item's day, if it has not been set yet
const addDatetime = (werte, item) => {
  if (werte.datetime === undefined) {
    let dt = DateTime.fromISO(item.datetime)
    werte.datetime = dt.startOf('day').toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
  }
}

// *********************************************
// getdaynightData
//
// Get separate averages for day (06:00 - 22:00) and night (22:00 - 06:00).
// Uses the hourly average calculation, which returns the sum and the count for every
// hour; these values are added up for the desired time range and then averaged.
//
// The night value of the last day is always 0, because that night is not complete:
// the day ends at 24:00, but the night lasts until 06:00 of the following day.
//
// params:
//    opts: request options (see getNoiseData)
//
// return:
//  JSON:
//  { sid: 29212, span: 30, start: "2019-09-29", count: 30, values: [
//    { date: "2019-09-29", n_dayAVG: 49.45592437272605, n_nightAVG: 53.744277577490614 },
//    { date: "2019-09-30", n_dayAVG: 51.658169450663465, n_nightAVG: 47.82407695888631 },
//    .........
//  ]}
//  CSV:
//  datetime,n_dayAVG,n_nightAVG
//  2019-09-29,49.45592437272605,53.744277577490614
//  2019-09-30,51.658169450663465,47.82407695888631
//  ....
//
// *********************************************
async function getdaynightData(opts) {
  opts.long = true;
  let erg = await getNoiseAVGData(opts);
  let val = [];
  let csvStr = 'datetime,n_dayAVG,n_nightAVG\n';
  if (!erg.err) {
    let done = false;
    // The received hourly data array always (!!) starts at 0h00 local (!) time.
    // To calculate day values we skip the first 6 hours, then take 16 hours for the
    // day and the following 8 hours for the night.
    const length = erg.values.length
    for (let i = 6; i < length;) {
      let dsum = 0, dcnt = 0;
      let nsum = 0, ncnt = 0;
      let werte = {};
      for (let k = 0; k < 16; k++, i++) {               // day: 06:00 - 22:00
        if (i < length) {
          const item = erg.values[i]
          if (item.n_sum != -1) {
            addDatetime(werte, item)
            dsum += item.n_sum;
            dcnt += item.count;
          }
        }
      }
      if (i < (length - 8)) {
        for (let k = 0; k < 8; k++, i++) {              // night: 22:00 - 06:00
          if (i < length) {
            const item = erg.values[i]
            if (item.n_sum != -1) {
              addDatetime(werte, item)
              nsum += item.n_sum;
              ncnt += item.count;
            }
          }
        }
      } else {
        done = true;
      }
      if (dcnt != 0) {
        werte.n_dayAVG = 10 * Math.log10(dsum / dcnt);
      } else {
        werte.n_dayAVG = 0;
      }
      if (ncnt != 0) {
        werte.n_nightAVG = 10 * Math.log10(nsum / ncnt);
      } else {
        werte.n_nightAVG = 0;
      }
      if (opts.csv) {
        csvStr += werte.datetime + ',' + werte.n_dayAVG + ',' + werte.n_nightAVG + '\n'
      } else {
        val.push(werte);
      }
      if (done) {
        break;
      }
    }
  }
  if (opts.csv) {
    return csvStr;
  }
  return {err: erg.err, values: val}
}
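
// Index sketch (illustrative only): getdaynightData walks the hourly array in
// 16 + 8 hour windows, starting at index 6 (= 06:00 local time of the first day):
//
//    indices  6..21  ->  day   06:00 - 22:00  (16 hours)
//    indices 22..29  ->  night 22:00 - 06:00  ( 8 hours, reaching into the next day)
//    indices 30..45  ->  day of the following day, and so on.
//
// The last day of the range therefore never has a complete night, which is why its
// n_nightAVG is reported as 0.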
// *********************************************
// getLdenData
//
// Use the hourly averages to calculate the LDEN (day-evening-night level).
// Formula:
//    LDEN = 10 * log10( 1/24 * ( 12 * 10^(Lday/10) + 4 * 10^((Levening+5)/10) + 8 * 10^((Lnight+10)/10) ) )
//
// params:
//    opts: request options (see getNoiseData)
//
// return:
//  JSON:
//  { sid: 29212, span: 30, start: "2019-09-29", count: 30, values: [
//    { lden: 59.53553743437777, date: "2019-09-29" },
//    { lden: 55.264733497513554, date: "2019-09-30" },
//    .........
//  ]}
//  CSV:
//  datetime,lden
//  2019-09-29,59.53553743437777
//  2019-09-30,55.264733497513554
//  ....
//
// *********************************************
async function getLdenData(opts) {
  opts.long = true;
  let erg = await getNoiseAVGData(opts);
  let val = [];
  let csvStr = 'datetime,lden\n';
  if (!erg.err) {
    let done = false;
    const calcAVG = (sum, cnt) => {
      if (cnt != 0) {
        return (10 * Math.log10(sum / cnt));
      } else {
        return 0;
      }
    }

    // The received hourly data array always (!!) starts at 0h00 local (!) time.
    // To calculate day values we skip the first 6 hours, then take 12 hours for the
    // day, the following 4 hours for the evening and an additional 8 hours for the night.
    const length = erg.values.length
    for (let i = 6; i < length;) {
      let dsum = 0, dcnt = 0;
      let nsum = 0, ncnt = 0;
      let esum = 0, ecnt = 0;
      let werte = {};
      let dayAVG = 0, evnAVG = 0, nightAVG = 0;
      for (let k = 0; k < 12; k++, i++) {               // day: 06:00 - 18:00
        if (i < length) {
          const item = erg.values[i]
          if (item.n_sum != -1) {
            addDatetime(werte, item)
            dsum += item.n_sum;
            dcnt += item.count;
          }
        }
      }
      for (let k = 0; k < 4; k++, i++) {                // evening: 18:00 - 22:00
        if (i < length) {
          const item = erg.values[i]
          if (item.n_sum != -1) {
            addDatetime(werte, item)
            esum += item.n_sum;
            ecnt += item.count;
          }
        }
      }
      if (i < (length - 8)) {
        for (let k = 0; k < 8; k++, i++) {              // night: 22:00 - 06:00
          if (i < length) {
            const item = erg.values[i]
            if (item.n_sum != -1) {
              addDatetime(werte, item)
              nsum += item.n_sum;
              ncnt += item.count;
            }
          }
        }
      } else {
        done = true;
      }
      dayAVG = calcAVG(dsum, dcnt);
      evnAVG = calcAVG(esum, ecnt);
      nightAVG = calcAVG(nsum, ncnt);
      // Calculate the LDEN values following the formula in the function description
      let day = 12 * Math.pow(10, dayAVG / 10);
      let evn = 4 * Math.pow(10, (evnAVG + 5) / 10);
      let night = 8 * Math.pow(10, (nightAVG + 10) / 10);
      werte.lden = 10 * Math.log10((day + evn + night) / 24);
      if (opts.csv) {
        csvStr += werte.datetime + ',' + werte.lden + '\n'
      } else {
        val.push(werte);
      }
      if (done) {
        break;
      }
    }
  }
  if (opts.csv) {
    return csvStr;
  }
  return {err: erg.err, values: val}
}
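
// Worked example (illustrative only), applying the LDEN formula above to assumed
// averages Lday = 55 dB, Levening = 50 dB and Lnight = 45 dB:
//
//    day   = 12 * 10^(55/10)      = 12 * 316227.8 ≈ 3794733
//    evn   =  4 * 10^((50+5)/10)  =  4 * 316227.8 ≈ 1264911
//    night =  8 * 10^((45+10)/10) =  8 * 316227.8 ≈ 2529822
//
//    LDEN  = 10 * log10((3794733 + 1264911 + 2529822) / 24) = 55.0 dB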
// getAPIprops
// Placeholder for the 'props' entry in whatTable – not implemented yet
const getAPIprops = (opt) => {

}

const getNoiseAVGData = async (opts) => {
  let ret = {}
  if (DBASE === 'mongo') {
    ret = await mongo.fetchNoiseAVGData(opts)
  } else if (DBASE === 'influx') {
    ret = await influx.fetchNoiseAVGData(opts)
    // Influx stores the average from 00:00h to 01:00h as 01:00h, so we have to shift the time back one hour
    for (let x = 0; x < ret.values.length; x++) {
      ret.values[x].datetime = DateTime.fromISO(ret.values[x].datetime).toUTC().minus({hours: 1}).toFormat("yyyy-LL-dd'T'HH:mm:ss'Z'")
    }
  } else {
    ret.err = 'DBASEUNKNOWN'
  }

  if (ret.err) {
    return returnOnError(ret, ret.err, getNoiseAVGData.name)
  }
  if (ret.values.length === 0) {
    return returnOnError(ret, 'NODATA', getNoiseAVGData.name)
  }
  // The times are always the END of the period (so: period from 00:00h to 01:00h -> time is 01:00)

  // To easily extract the values, we copy the data from docs into a new array, so that the
  // hour of an element in docs becomes the index into the new array (for every new day this
  // index is incremented by 24). Missing values are marked by {n_sum: -1, n_AVG: -1}.
  // For havg the missed hours are added to the array.
  let emptyValues = {n_AVG: -1}
  if (opts.long) {
    emptyValues.n_sum = -1
  }
  const misshours = DateTime.fromISO(ret.values[0].datetime).toUTC().get('hour')
  let hoursArr = new Array(opts.span * 24 + misshours);             // generate new array
  hoursArr.fill(emptyValues)                                        // fill with 'empty' values (all empty slots share this object; it is only read)
  let startDay = DateTime.fromISO(ret.values[0].datetime).toUTC().get('day')   // calc first day
  let k = 0
  for (let d of ret.values) {                                       // loop through docs
    let stunde = DateTime.fromISO(d.datetime).toUTC().get('hour')   // get current hour
    let day = DateTime.fromISO(d.datetime).toUTC().get('day')       // get current day
    if (day != startDay) {                                          // if the date has changed
      k += 24                                                       // increment index by 24
      startDay = day
    }
    hoursArr[k + stunde] = d                                        // copy data into hoursArr
  }
  return {err: ret.err, values: hoursArr}
}
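
// Layout sketch (illustrative only): with opts.span = 2 and the first returned value
// at 03:00 UTC, misshours = 3 and hoursArr gets 2 * 24 + 3 = 51 slots.
// An hourly document for 05:00 of the first day lands at index 0 + 5, the same hour
// of the next day at index 24 + 5; slots without data keep the {n_AVG: -1} placeholder.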
// Dispatch table: maps the 'data' request parameter to its default span,
// daystart behaviour, peak flag and handler function
const whatTable = [
  {what: 'live',     span: 1,  daystart: false, peak: false, func: getLiveData},
  {what: 'havg',     span: 7,  daystart: false, peak: true,  func: gethavgData},
  {what: 'davg',     span: 30, daystart: true,  peak: true,  func: getdavgData},
  {what: 'daynight', span: 30, daystart: true,  peak: false, func: getdaynightData},
  {what: 'lden',     span: 30, daystart: true,  peak: false, func: getLdenData},
  {what: 'props',    span: 0,  daystart: true,  peak: false, func: getAPIprops},
];
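
// Usage sketch (illustrative only, not part of this module): a route handler could
// call getNoiseData roughly like this; req, possibles and props are assumptions
// about the caller, not defined here.
//
//    const result = await getNoiseData(req.query, possibles, props)
//    // result is {err, options, values} – or a CSV string if csv/out=csv was requested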