First commit of the test version
databases/influx_flux.js (new file, 166 lines)
@@ -0,0 +1,166 @@
// Access to InfluxDB via HTTP

import axios from 'axios'
import { DateTime } from 'luxon'
// import csvParse from 'csv-parser'
import { logit, logerror } from '../utilities/logit.js'
import { returnOnError } from "../utilities/reporterror.js";
import { csv2Json } from "../utilities/csv2json.js";

let INFLUXHOST = process.env.INFLUXHOST || "localhost"
let INFLUXPORT = process.env.INFLUXPORT || 8086
let INFLUXTOKEN = process.env.INFLUXTOKEN || ""
//"rklEClT22KfdXZhA47eyJhbqcvekb8bcKCqlUG7n72uDSmR2xGvif0CmGJe0WQtXB96y29mmt-9BdsgWA5npfg=="
//"BNR6cGdb006O1T6hQkGcfB8tgH-UPO6QkOPToeAvrP7LATJbCuWi1wYf3HBpVdZQEBxHxNSrNenZsOSMogX-lg=="
let INFLUXDATABUCKET = process.env.INFLUXDATABUCKET || "sensor_data"
let INFLUXORG = process.env.INFLUXORG || "citysensor"

const INFLUXURL_READ = `http://${INFLUXHOST}:${INFLUXPORT}/api/v2/query?org=${INFLUXORG}`
const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/api/v2/write?org=${INFLUXORG}&bucket=${INFLUXDATABUCKET}`

const influxRead = async (query) => {
    let start = DateTime.now()
    logit(`ReadInflux from ${INFLUXURL_READ}`)
    let erg = { values: [], err: null }
    try {
        let ret = await axios({
            method: 'post',
            url: INFLUXURL_READ,
            data: query,
            headers: {
                Authorization: `Token ${INFLUXTOKEN}`,
                Accept: 'application/csv',
                'Content-type': 'application/vnd.flux'
            },
            timeout: 10000,
        })
        if (ret.status !== 200) {
            return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
        }
        erg.values = ret.data
    } catch (e) {
        return returnOnError(erg, e, influxRead.name)
    }
    // logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
    return erg
}


const influxWrite = async (data) => {
    let start = DateTime.now()
    let ret
    try {
        ret = await axios({
            method: 'post',
            url: INFLUXURL_WRITE,
            data: data,
            headers: {
                Authorization: `Token ${INFLUXTOKEN}`,
                Accept: 'application/json',
                'Content-Type': 'text/plain; charset=utf-8'
            },
            timeout: 10000,
        })
        if (ret.status !== 204) {
            logerror(`doWrite2API Status: ${ret.status}`)
        }
    } catch (e) {
        logerror(`doWrite2API ${e}`)
    }
    logit(`Influx-Write-Time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
    return ret
}

const fetchFromInflux = async (ret, query) => {
    let { values, err } = await influxRead(query)
    if (err) {
        if (err.toString().includes('400')) {
            return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
        } else {
            return returnOnError(ret, err, fetchFromInflux.name)
        }
    }
    if (values.length <= 2) {
        return returnOnError(ret, 'NODATA', fetchFromInflux.name)
    }
    ret.values = csv2Json(values)
    return ret
}

export const fetchActData = async (opts) => {
    let ret = { err: null, values: [] }
    let sorting = ''
    if (opts.sort) {
        if (opts.sort === 1) {
            sorting = '|> sort(columns: ["_time"], desc: false)'
        } else if (opts.sort === -1) {
            sorting = '|> sort(columns: ["_time"], desc: true)'
        }
    }
    // build the Flux query
    let query = `
        from(bucket: "sensor_data")
        |> range(${opts.start}, ${opts.stop})
        |> filter(fn: (r) => r.sid == "${opts.sensorid}")
        ${sorting}
        |> keep(columns: ["_time","_field","_value"])
        |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
    `
    return await fetchFromInflux(ret, query)
}

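For illustration, a hedged usage sketch of fetchActData: the sensor id and the Flux range fragments below are assumptions taken from the JSDoc of the InfluxQL variant further down, not values shipped in this commit.

// Hypothetical call — opts format assumed from the fetchActData JSDoc in influx_sql.js
const opts = {
    sensorid: '12345',      // assumed sensor id, illustration only
    start: 'start: -1h',    // becomes |> range(start: -1h, stop: now())
    stop: 'stop: now()',
    sort: 1                 // ascending by _time
}
const { err, values } = await fetchActData(opts)
if (!err) {
    // values holds the pivoted rows, e.g. [{ _time: '...', LA_max: 55.2, ... }]
    console.log(values.length, 'rows')
}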
export const fetchNoiseAVGData = async (opts) => {
    let ret = { err: null, values: [] }
    let small = '|> keep(columns: ["_time", "peakcount", "n_AVG"])'
    if (opts.long) {
        small = ''
    }
    let queryAVG = `
        import "math"
        threshold = ${opts.peak}

        data = from(bucket: "sensor_data")
            |> range(${opts.start}, ${opts.stop})
            |> filter(fn: (r) => r["sid"] == "${opts.sensorid}")
        e10 = data
            |> filter(fn: (r) => r._field == "E10tel_eq")
            |> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
            |> map(fn: (r) => ({r with _value: (10.0 * math.log10(x: r._value))}))
            |> keep(columns: ["_time","_field","_value"])
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            |> rename(columns: {"E10tel_eq" : "n_AVG"})
        ecnt = data
            |> filter(fn: (r) => r._field == "E10tel_eq")
            |> aggregateWindow(every: 1h, fn: count, createEmpty: false)
            |> keep(columns: ["_time","_field","_value"])
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            |> rename(columns: {"E10tel_eq" : "count"})
        esum = data
            |> filter(fn: (r) => r._field == "E10tel_eq")
            |> aggregateWindow(every: 1h, fn: sum, createEmpty: false)
            |> keep(columns: ["_time","_field","_value"])
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            |> rename(columns: {"E10tel_eq" : "n_sum"})
        peak = data
            |> filter(fn: (r) => r._field == "LA_max")
            |> aggregateWindow(
                every: 1h,
                fn: (column, tables=<-) => tables
                    |> reduce(
                        identity: {peakcount: 0.0},
                        fn: (r, accumulator) => ({
                            peakcount: if r._value >= threshold then
                                accumulator.peakcount + 1.0
                            else
                                accumulator.peakcount + 0.0,
                        }),
                    ),
            )
            |> keep(columns: ["_time","peakcount"])
        part1 = join(tables: {e10: e10, ecnt: ecnt}, on: ["_time"])
        part2 = join(tables: {esum: esum, peak: peak}, on: ["_time"])
        join(tables: {P1: part1, P2: part2}, on: ["_time"])
        ${small}
    `
    return await fetchFromInflux(ret, queryAVG)
}

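A matching usage sketch for the hourly noise aggregation; the option names follow the fetchNoiseAVGData JSDoc of the InfluxQL variant below and the values are illustrative only.

// Hypothetical call
const noiseOpts = {
    sensorid: '12345',   // assumed sensor id
    start: 'start: -24h',
    stop: 'stop: now()',
    peak: 70,            // LA_max threshold (dB) for the hourly peak counter
    long: false          // false => only _time, peakcount and n_AVG are kept
}
const noise = await fetchNoiseAVGData(noiseOpts)
// noise.values: one row per hour, e.g. { _time: '...', peakcount: 4, n_AVG: 57.3 }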
databases/influx_sql.js (new file, 395 lines)
@@ -0,0 +1,395 @@
// Access to InfluxDB 1.8 via HTTP using InfluxQL
//
// IMPORTANT: InfluxDB 1.8 vs 2.0 Data Schema Differences:
// - InfluxDB 1.8: Only stores LA_max, LA_min, LA_eq (all in dB)
// - InfluxDB 2.0: Additionally stores E10tel_eq as pre-calculated linear value (10^(LA_max/10))
//
// This implementation converts LA_max to E10tel_eq at runtime to maintain
// compatibility with the Flux version while ensuring correct logarithmic averaging.

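As a quick sanity check of that conversion (a minimal sketch, not part of the commit):

// E10tel_eq is the linear ("energy") form of the dB value described above
const toLinear = (dB) => Math.pow(10, dB / 10)   // LA_max = 60 dB -> 1e6
const toDb = (lin) => 10 * Math.log10(lin)       // 1e6 -> 60 dB
console.log(toLinear(60), toDb(1e6))             // 1000000  60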
import axios from 'axios'
import { DateTime } from 'luxon'
import { logit, logerror } from '../utilities/logit.js'
import { returnOnError } from "../utilities/reporterror.js"

// InfluxDB 1.8 Configuration
let INFLUXHOST = process.env.INFLUXHOST || "localhost"
let INFLUXPORT = process.env.INFLUXPORT || 8086
let INFLUXUSER = process.env.INFLUXUSER || ""
let INFLUXPASS = process.env.INFLUXPASS || ""
let INFLUXDB = process.env.INFLUXDB || "sensor_data"

// InfluxDB 1.8 URLs
const INFLUXURL_READ = `http://${INFLUXHOST}:${INFLUXPORT}/query`
const INFLUXURL_WRITE = `http://${INFLUXHOST}:${INFLUXPORT}/write`

/**
 * Execute InfluxQL query against InfluxDB 1.8
 * @param {string} query - InfluxQL query string
 * @returns {Object} - {values: [], err: null}
 */
const influxRead = async (query) => {
    let start = DateTime.now()
    logit(`ReadInflux from ${INFLUXURL_READ}`)
    let erg = { values: [], err: null }

    try {
        const params = {
            db: INFLUXDB,
            q: query,
            epoch: 'ms' // Return timestamps in milliseconds
        }

        // Add authentication if provided
        if (INFLUXUSER && INFLUXPASS) {
            params.u = INFLUXUSER
            params.p = INFLUXPASS
        }

        let ret = await axios({
            method: 'get',
            url: INFLUXURL_READ,
            params: params,
            timeout: 10000,
        })

        if (ret.status !== 200) {
            return returnOnError(erg, 'RESPSTATUS', influxRead.name, ret.status)
        }

        // InfluxDB 1.8 returns JSON format
        if (ret.data.error) {
            return returnOnError(erg, ret.data.error, influxRead.name)
        }

        erg.values = ret.data.results
    } catch (e) {
        return returnOnError(erg, e, influxRead.name)
    }

    logit(`Influx read time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
    return erg
}

/**
 * Write data to InfluxDB 1.8
 * @param {string} data - Line protocol data
 * @returns {Object} - Response object
 */
const influxWrite = async (data) => {
    let start = DateTime.now()
    let ret

    try {
        const params = {
            db: INFLUXDB,
            precision: 'ms'
        }

        // Add authentication if provided
        if (INFLUXUSER && INFLUXPASS) {
            params.u = INFLUXUSER
            params.p = INFLUXPASS
        }

        ret = await axios({
            method: 'post',
            url: INFLUXURL_WRITE,
            params: params,
            data: data,
            headers: {
                'Content-Type': 'text/plain; charset=utf-8'
            },
            timeout: 10000,
        })

        if (ret.status !== 204) {
            logerror(`doWrite2API Status: ${ret.status}`)
        }
    } catch (e) {
        logerror(`doWrite2API ${e}`)
    }

    logit(`Influx-Write-Time: ${start.diffNow('seconds').toObject().seconds * -1} sec`)
    return ret
}

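influxWrite expects InfluxDB line protocol with millisecond precision. A hedged sketch of one write; the measurement name "measurements", the tag "sid" and the LA_* fields are taken from the queries in this file, while the concrete values are invented for illustration.

// measurement,tag=value field=value,... timestamp(ms)
const line = 'measurements,sid=12345 LA_max=55.2,LA_min=32.1,LA_eq=44.0 1700000000000'
await influxWrite(line)   // POSTs to /write?db=sensor_data&precision=ms, expects HTTP 204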
/**
 * Helper function to transform InfluxDB 1.8 result to format compatible with Flux version
 * @param {Array} series - InfluxDB series data
 * @returns {Array} - Transformed data array
 */
const transformInfluxResult = (series) => {
    if (!series || !series.length) return []

    const result = []

    series.forEach(serie => {
        if (!serie.values) return

        const columns = serie.columns
        const timeIndex = columns.indexOf('time')

        serie.values.forEach(row => {
            const record = {}
            columns.forEach((col, index) => {
                if (col === 'time') {
                    // Convert timestamp to ISO string for compatibility
                    record._time = new Date(row[index]).toISOString()
                } else {
                    record[col] = row[index]
                }
            })
            result.push(record)
        })
    })

    return result
}

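For reference, the shape this helper consumes and produces; the field values are invented, only the columns/values nesting reflects the InfluxDB 1.8 /query response format.

// InfluxDB 1.8 returns rows as a columns array plus value arrays:
const exampleSeries = [{
    name: 'measurements',
    columns: ['time', 'LA_max', 'LA_min'],
    values: [[1700000000000, 55.2, 32.1]]
}]
// transformInfluxResult(exampleSeries) yields one flat record per row:
// [{ _time: '2023-11-14T22:13:20.000Z', LA_max: 55.2, LA_min: 32.1 }]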
/**
 * Execute query and transform results
 * @param {Object} ret - Return object
 * @param {string} query - InfluxQL query
 * @returns {Object} - Transformed result
 */
const fetchFromInflux = async (ret, query) => {
    let { values, err } = await influxRead(query)
    if (err) {
        if (err.toString().includes('400')) {
            return returnOnError(ret, 'SYNTAXURL', fetchFromInflux.name)
        } else {
            return returnOnError(ret, err, fetchFromInflux.name)
        }
    }

    if (!values || !values.length || !values[0].series) {
        return returnOnError(ret, 'NODATA', fetchFromInflux.name)
    }

    ret.values = transformInfluxResult(values[0].series)
    return ret
}

/**
 * Fetch current/historical sensor data from InfluxDB 1.8
 * @param {Object} opts - Options object
 * @param {string} opts.sensorid - Sensor ID
 * @param {string} opts.start - Start time (e.g., "start: -1h")
 * @param {string} opts.stop - Stop time (e.g., "stop: now()")
 * @param {number} opts.sort - Sort order (1 for ascending, -1 for descending)
 * @returns {Object} - {err: null, values: []}
 */
export const fetchActData = async (opts) => {
    let ret = { err: null, values: [] }

    // Convert Flux time format to InfluxQL format
    let startTime = opts.start.replace('start: ', '').trim()
    let stopTime = opts.stop.replace('stop: ', '').trim()

    // Build sorting clause
    let orderClause = ''
    if (opts.sort) {
        if (opts.sort === 1) {
            orderClause = 'ORDER BY time ASC'
        } else if (opts.sort === -1) {
            orderClause = 'ORDER BY time DESC'
        }
    }

    // InfluxQL query to get LA_max for a sensor within time range
    // Note: In InfluxDB 1.8 we only have LA_max, not E10tel_eq like in 2.0
    const query = `
        SELECT "LA_max", "LA_min", "LA_eq"
        FROM "measurements"
        WHERE "sid" = '${opts.sensorid}'
        AND time >= ${startTime}
        AND time <= ${stopTime}
        ${orderClause}
    `

    // Get the data and transform it to include E10tel_eq equivalent
    const result = await fetchFromInflux(ret, query)

    if (result.err) {
        return result
    }

    // Transform data to add E10tel_eq field for compatibility with Flux version
    // E10tel_eq = 10^(LA_max/10)
    result.values = result.values.map(record => ({
        ...record,
        E10tel_eq: record.LA_max !== null && record.LA_max !== undefined
            ? Math.pow(10, record.LA_max / 10)
            : null
    }))

    return result
}

/**
 * Helper function to calculate logarithmic average for decibel values
 * For decibel values, we need to:
 * 1. Convert dB to linear scale (10^(dB/10))
 * 2. Calculate arithmetic mean of linear values
 * 3. Convert back to dB (10 * log10(mean))
 * @param {Array} values - Array of decibel values
 * @returns {number} - Logarithmic average in dB
 */
const calculateLogMean = (values) => {
    if (!values || values.length === 0) return null

    // Convert dB to linear scale, calculate mean, convert back to dB
    const linearSum = values.reduce((sum, val) => {
        if (val !== null && val !== undefined) {
            return sum + Math.pow(10, val / 10)
        }
        return sum
    }, 0)

    const validCount = values.filter(val => val !== null && val !== undefined).length
    if (validCount === 0) return null

    const linearMean = linearSum / validCount
    return 10 * Math.log10(linearMean)
}

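A small worked example of why this helper averages on the linear scale (hypothetical call, numbers chosen for illustration):

// calculateLogMean([60, 70, null])
// null entries are skipped; linear mean = (1e6 + 1e7) / 2 = 5.5e6
// 10 * log10(5.5e6) ≈ 67.4 dB
// (an arithmetic mean of the dB values would give 65 dB, which underestimates the level)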
/**
 * Fetch noise averaging data from InfluxDB 1.8 with proper logarithmic averaging for LAmax
 * @param {Object} opts - Options object
 * @param {string} opts.sensorid - Sensor ID
 * @param {string} opts.start - Start time
 * @param {string} opts.stop - Stop time
 * @param {number} opts.peak - Peak threshold for counting
 * @param {boolean} opts.long - Return full data or just summarized
 * @returns {Object} - {err: null, values: []}
 */
export const fetchNoiseAVGData = async (opts) => {
    let ret = { err: null, values: [] }

    // Convert Flux time format to InfluxQL format
    let startTime = opts.start.replace('start: ', '').trim()
    let stopTime = opts.stop.replace('stop: ', '').trim()

    // Since InfluxQL doesn't support complex joins like Flux, we need to make multiple queries
    // and combine the results in JavaScript

    // Query 1: Get LA_max data for the hourly E10tel calculation
    // In InfluxDB 1.8, we only have LA_max (dB), need to convert to E10tel equivalent
    // (no null filter needed: InfluxDB does not store null field values, and
    // InfluxQL has no IS NOT NULL operator)
    const queryLAmaxForE10 = `
        SELECT "LA_max", time
        FROM "measurements"
        WHERE "sid" = '${opts.sensorid}'
        AND time >= ${startTime}
        AND time <= ${stopTime}
        ORDER BY time ASC
    `

    // Query 2: Same query for peak counting (we'll process the same data)
    const queryLAmaxForPeaks = queryLAmaxForE10

    try {
        // Execute LA_max query (we use the same data for both E10tel calculation and peak counting)
        let { values: lamaxValues, err: lamaxErr } = await influxRead(queryLAmaxForE10)
        if (lamaxErr) {
            return returnOnError(ret, lamaxErr, fetchNoiseAVGData.name)
        }

        if (!lamaxValues || !lamaxValues.length || !lamaxValues[0].series) {
            return returnOnError(ret, 'NODATA', fetchNoiseAVGData.name)
        }

        // Transform LA_max results
        const lamaxData = transformInfluxResult(lamaxValues[0].series)

        // Group LA_max data by hour and calculate:
        // 1. E10tel equivalent values (10^(LA_max/10))
        // 2. Peak counting
        // 3. Statistics for n_AVG calculation
        const hourlyData = {}

        lamaxData.forEach(record => {
            const timestamp = new Date(record._time)
            // group by UTC hour so the buckets line up with the Flux implementation
            const hourKey = new Date(Date.UTC(timestamp.getUTCFullYear(), timestamp.getUTCMonth(),
                timestamp.getUTCDate(), timestamp.getUTCHours())).toISOString()

            if (!hourlyData[hourKey]) {
                hourlyData[hourKey] = {
                    time: hourKey,
                    lamaxValues: [],
                    e10telValues: [], // Converted LA_max to E10tel equivalent
                    peakCount: 0
                }
            }

            const lamax = record.LA_max
            if (lamax !== null && lamax !== undefined) {
                // Store original LA_max value
                hourlyData[hourKey].lamaxValues.push(lamax)

                // Convert LA_max (dB) to E10tel equivalent: 10^(LA_max/10)
                const e10tel = Math.pow(10, lamax / 10)
                hourlyData[hourKey].e10telValues.push(e10tel)

                // Count peaks
                if (lamax >= opts.peak) {
                    hourlyData[hourKey].peakCount++
                }
            }
        })

        // Calculate final results for each hour
        const combinedResults = []

        Object.values(hourlyData).forEach(hourData => {
            const result = {
                _time: hourData.time,
                count: hourData.e10telValues.length,
                peakcount: hourData.peakCount
            }

            // Calculate E10tel statistics
            if (hourData.e10telValues.length > 0) {
                // Sum of E10tel values
                result.n_sum = hourData.e10telValues.reduce((sum, val) => sum + val, 0)

                // Mean of E10tel values, then convert back to dB for n_AVG
                // This matches the Flux version: mean(E10tel_eq) then 10*log10(mean)
                const e10telMean = result.n_sum / hourData.e10telValues.length
                result.n_AVG = 10.0 * Math.log10(e10telMean)
            }

            // Add additional fields if opts.long is true
            if (opts.long) {
                result.LA_max_values = hourData.lamaxValues
                result.LA_max_log_avg = calculateLogMean(hourData.lamaxValues)
                result.E10tel_values = hourData.e10telValues
            }

            combinedResults.push(result)
        })

        // Sort by time
        combinedResults.sort((a, b) => new Date(a._time) - new Date(b._time))

        // Filter results based on opts.long
        if (!opts.long) {
            ret.values = combinedResults.map(record => ({
                _time: record._time,
                peakcount: record.peakcount,
                n_AVG: record.n_AVG
            }))
        } else {
            ret.values = combinedResults
        }

    } catch (e) {
        return returnOnError(ret, e, fetchNoiseAVGData.name)
    }

    return ret
}

// Export write function for compatibility
export { influxWrite }
databases/mongo.js (new file, 424 lines)
@@ -0,0 +1,424 @@
/* Interface for MongoDB
 */
import { MongoClient } from 'mongodb'
import { logit, logerror } from '../utilities/logit.js'
import { DateTime } from 'luxon'
import { returnOnError } from "../utilities/reporterror.js";

// const nodemailer = require('nodemailer');

let MONGOHOST = process.env.MONGOHOST;
let MONGOPORT = process.env.MONGOPORT;
let MONGOAUTH = process.env.MONGOAUTH;
let MONGOUSRP = process.env.MONGOUSRP;
let MONGOBASE = process.env.MONGOBASE;

if (MONGOHOST === undefined) { MONGOHOST = 'localhost'; }
if (MONGOPORT === undefined) { MONGOPORT = 27017; }
if (MONGOAUTH === undefined) { MONGOAUTH = 'false'; }
if (MONGOBASE === undefined) { MONGOBASE = 'sensor_data'; }

let MONGO_URL = 'mongodb://' + MONGOHOST + ':' + MONGOPORT; // URL to mongo database
if (MONGOAUTH === 'true') {
    // MONGO_URL = 'mongodb://' + MONGOUSRP + '@' + MONGOHOST + ':' + MONGOPORT + '/?authSource=' + MONGOBASE; // URL to mongo database
    MONGO_URL = 'mongodb://' + MONGOUSRP + '@' + MONGOHOST + ':' + MONGOPORT + '/?authSource=admin'; // URL to mongo database
}

export const properties_collection = 'properties'

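For reference, the connection strings built above look like the following; this assumes MONGOUSRP holds a 'user:password' pair, which is an inference from the URL concatenation, not documented in the commit.

// MONGOAUTH !== 'true'  ->  mongodb://localhost:27017
// MONGOAUTH === 'true'  ->  mongodb://user:password@localhost:27017/?authSource=admin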
export const connectMongo = async () => {
    try {
        logit(`Try to connect to ${MONGO_URL}`)
        let client = await MongoClient.connect(MONGO_URL)
        logit(`Mongo database connected to ${MONGO_URL}`)
        return client
    }
    catch (error) {
        throw (error)
    }
}

const listDatabases = async (client) => {
    let databasesList = await client.db().admin().listDatabases();

    console.log("Databases:");
    databasesList.databases.forEach(db => console.log(` - ${db.name}`));
}

/* ***************************************************
// READ routines
******************************************************/

// Read properties from the database
export const readProperties = async (query, limit = 0) => {
    let ret = { err: null, properties: null }
    let client = await connectMongo()
    try {
        if ("sid" in query) { // if sid is given, read the property document for that sid
            ret.properties = await client.db(MONGOBASE).collection(properties_collection).findOne({ _id: query.sid })
        } else { // otherwise read props corresponding to query
            ret.properties = await client.db(MONGOBASE).collection(properties_collection).find(query).limit(limit).toArray()
        }
    } catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}

export const readChipData = async (sid) => {
    let ret = { err: null, chipdata: null }
    let client = await connectMongo()
    try {
        ret.chipdata = await client.db(MONGOBASE).collection('prop_flux').findOne({ _id: sid }, { projection: { chip: 1, _id: 0 } })
    } catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}


// read mapdata from database
export const readMapdata = async (query, limit) => {
    let ret = { err: null, mapdata: [] }
    let client = await connectMongo()
    try {
        ret.mapdata = await client.db(MONGOBASE).collection("mapdata").find(query).limit(limit).toArray()
    } catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}


export const getallProperties = async (coll, query) => {
    let ret = { err: null, properties: [] }
    let client = await connectMongo()
    try {
        ret.properties = await client.db(MONGOBASE).collection(coll)
            .find(query).toArray()
    } catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}


export const getOneproperty = async (sid) => {
    let ret = { error: false }
    let client = await connectMongo()
    try {
        ret.properties = await client.db(MONGOBASE).collection(properties_collection)
            .findOne({ _id: sid })
    } catch (e) {
        ret = { error: true, errortext: e }
    }
    finally {
        client.close()
    }
    return ret
}


export const readAKWs = async (options) => {
    let ret = { values: { akws: [], th1_akws: [] }, err: null }
    let erg = []
    let client = await connectMongo()
    try {
        let docs = await client.db(MONGOBASE).collection("akws")
            .find().toArray()
        if (docs == null) {
            return returnOnError(ret, 'akws - docs == null', readAKWs.name)
        }
        logit(`getawkdata: data fetched from akws, length= ${docs.length}`);
        ret.values.akws = docs
        let docs1 = await client.db(MONGOBASE).collection("th1_akws")
            .find().toArray()
        if (docs1 == null) {
            return returnOnError(ret, 'th1_akws - docs == null', readAKWs.name)
        }
        logit(`getawkdata: data fetched from th1_akws, length= ${docs1.length}`)
        ret.values.th1_akws = docs1
    } catch (e) {
        return returnOnError(ret, e, readAKWs.name)
    }
    finally {
        client.close()
    }
    return ret
}

export const fetchActData = async (opts) => {
    let ret = { err: null, values: [] }
    let start = opts.start.slice(7)
    let end = opts.stop.slice(6)
    start = DateTime.fromISO(start).toJSDate()
    end = DateTime.fromISO(end).toJSDate()
    let query = { sensorid: opts.sensorid, datetime: { $gte: start, $lt: end } }
    let options = { projection: { _id: 0, values: 1, datetime: 1 }, sort: { datetime: 1 } }
    let client = await connectMongo()
    try {
        // ret.values = await client.db(MONGOBASE).collection('noise_sensors')
        //     .find(query, options).toArray()
        ret.values = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
            { $match: query },
            { $sort: { datetime: 1 } },
            // {$replaceWith:
            //     {
            //         '$values.LA_min': '$values.noise_LA_min'
            //     }
            // },
            {
                $replaceWith: {
                    datetime: { $dateToString: { format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime' } },
                    LA_min: '$values.LA_min',
                    LA_minx: '$values.noise_LA_min',
                    LA_max: '$values.LA_max',
                    LAeq: '$values.LAeq',
                    E10tel_eq: '$values.E10tel_eq'
                }
            },
            // {$project: {
            //     datetime: {$dateToString: {format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime'}},
            //     _id: 0, values: 1
            // }},
        ]).toArray()
    }
    catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}
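As implied by the pipeline above, a noise_sensors document presumably nests the readings under values and is flattened by $replaceWith; the shape and numbers below are an inference for illustration, not taken from the commit.

// Assumed input document:
// {
//     sensorid: 12345,
//     datetime: ISODate('2023-11-14T22:13:20Z'),
//     values: { LA_min: 32.1, noise_LA_min: 32.1, LA_max: 55.2, LAeq: 44.0, E10tel_eq: 25118.9 }
// }
// After $replaceWith the caller receives flat records such as:
// { datetime: '2023-11-14T22:13:20Z', LA_min: 32.1, LA_minx: 32.1, LA_max: 55.2, LAeq: 44.0, E10tel_eq: 25118.9 }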
/*
let docs = await collection.find(
    { datetime:
        { $gte: start.toDate(), $lt: end.toDate() }
    },
    { projection:
        {_id:0, E_eq:0, E_mx:0, E_mi:0, E10tel_mx:0, E10tel_mi:0}, sort: {datetime: sort}
    },
).toArray();
*/

export const fetchgeigerAVGData = async (opts) => {
    let docs = []
    let ret = { err: null, values: [] }
    let start = opts.start.slice(7)
    let end = opts.stop.slice(6)
    start = DateTime.fromISO(start).toJSDate()
    end = DateTime.fromISO(end).toJSDate()
    let datRange = { sensorid: opts.sensorid, datetime: { $gte: start, $lt: end } }
    let sorting = { datetime: opts.sort };
    let client = await connectMongo()
    try {
        if (opts.moving) {
            docs = await client.db(MONGOBASE).collection('sensors').aggregate([
                {
                    $sort: sorting
                }, // sort by date
                {
                    $match: { sensorid: opts.sensorid }
                }, // select only values for the given sensor
                {
                    $match: datRange
                }, // select only values in the given date range
                {
                    $setWindowFields: {
                        sortBy: { datetime: 1 },
                        output: {
                            cpm_avg: {
                                $avg: "$values.counts_per_minute",
                                window: {
                                    range: [-60, 0],
                                    unit: "minute"
                                }
                            }
                        }
                    }
                },
                {
                    $project: { _id: 0, cpm_avg: 1, datetime: 1, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor] } }
                },
                {
                    $sort: { datetime: 1 }
                }
            ]).toArray();
        } else {
            docs = await client.db(MONGOBASE).collection('sensors').aggregate([
                {
                    $sort: sorting
                }, // sort by date
                {
                    $match: { sensorid: opts.sensorid }
                }, // select only values for the given sensor
                {
                    $match: datRange
                }, // select only values in the given date range
                {
                    $group: {
                        _id: {
                            $dateTrunc: {
                                date: "$datetime",
                                unit: "minute",
                                binSize: 60
                            }
                        },
                        cpm_avg: { $avg: "$values.counts_per_minute" }, // calculate the average
                    }
                },
                { $addFields: { datetime: "$_id" } }, // change '_id' to 'datetime'
                {
                    $project: { _id: 0, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor] }, datetime: 1, cpm_avg: 1 }
                },
                {
                    $sort: { datetime: 1 }
                }
            ]).toArray();
        }
    } catch (e) { // if there was an error
        ret.err = e // store it in the result
    }
    finally {
        client.close()
    }
    ret.values = docs
    return ret
}

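A hedged usage sketch of fetchgeigerAVGData: the "start: "/"stop: " prefixes are implied by the slice(7)/slice(6) calls above, and the sensor id and cpm-to-µSv/h factor are invented example values.

// Hypothetical call
const geigerOpts = {
    sensorid: 70001,                         // assumed sensor id
    start: 'start: 2024-01-01T00:00:00Z',
    stop: 'stop: 2024-01-02T00:00:00Z',
    sort: 1,
    moving: false,                           // false => fixed 60-minute bins via $dateTrunc
    factor: 0.0057                           // example conversion factor, tube-specific
}
const geiger = await fetchgeigerAVGData(geigerOpts)
// geiger.values: [{ datetime, cpm_avg, uSvph_avg }, ...]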
export const fetchNoiseAVGData = async (opts) => {
    let docs = []
    let ret = { err: null, values: [] }
    let start = opts.start.slice(7)
    let end = opts.stop.slice(6)
    start = DateTime.fromISO(start).toJSDate()
    end = DateTime.fromISO(end).toJSDate()
    let peak = opts.peak; // threshold for peak count
    let datRange = { sensorid: opts.sensorid, datetime: { $gte: start, $lt: end } }
    let sorting = { datetime: opts.sort };
    let grpId = { $dateToString: { format: '%Y-%m-%dT%H:00:00Z', date: '$datetime' } }
    let client = await connectMongo()
    try {
        docs = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
            { $sort: sorting }, // sort by date
            { $match: datRange }, // select only values in the given date range
            {
                $group: {
                    _id: grpId,
                    n_average: { $avg: "$values.E10tel_eq" }, // calculate the average
                    n_sum: { $sum: "$values.E10tel_eq" }, // calculate the sum
                    peakcount: { $sum: { $cond: [{ $gte: ["$values.LA_max", peak] }, 1, 0] } }, // count peaks
                    count: { $sum: 1 }, // count entries
                }
            },
            { $sort: { _id: 1 } }, // sort by result dates
            { $addFields: { datetime: "$_id" } }, // change '_id' to 'datetime'
            {
                $project: opts.long
                    ? { _id: 0, n_AVG: { $multiply: [10, { $log10: "$n_average" }] }, datetime: 1, peakcount: 1, count: 1, n_sum: 1 }
                    : { _id: 0, n_AVG: { $multiply: [10, { $log10: "$n_average" }] }, datetime: 1, peakcount: 1 }
            }
        ]).toArray(); // return not all fields, depending on 'long'
    } catch (e) { // if there was an error
        ret.err = e // store it in the result
    }
    finally {
        client.close()
    }
    ret.values = docs
    return ret
}

export const fetchAVGData = async (opts) => {
}
/*
// *********************************************
// getAverageData
//
// Calculate different values per hour
//   average of E10tel_eq  ( E10tel_eq => 10 ^(LAeq/10) )
//   sum of E10tel_eq, to calculate day, night and evening averages
//   count, how many values are used for average/sum
//   peakcount, how many values of LAmax are over the defined peak value in every hour
//
// params:
//   db:  Database
//   opt: different options (see further down)
//
// return
//   depending on calling parameter 'what', not all values will be sent in 'values'
//  JSON
//  {[
//      { datetime: "2019-10-23T00:00:00Z" , n_AVG: 67.22, n_sum: 32783, count: 24, peakcount: 6 },
//      { datetime: "2019-10-23T01:00:00Z" , n_AVG: 52.89, n_sum: 23561, count: 26, peakcount: 5 },
//      .........
//  ]}
//
// *********************************************
async function getAverageData(db, opt) {
    let start = opt.start;
    let end = opt.end; // start and end time for aggregation
    let docs = []; // collect data here
    const collection = db.collection('data_' + opt.sid);
    let span = opt.span // date range in days
    let peak = opt.peak; // threshold for peak count
    let long = opt.long; // true => give extra output
    let nbrOfHours = opt.end.diff(opt.start, 'hours') + 24;
    let datRange = { datetime: { $gte: opt.start.toDate(), $lt: opt.end.toDate() } };
    let sorting = { datetime: opt.sort };
    let grpId = { $dateToString: { format: '%Y-%m-%dT%H:00:00Z', date: '$datetime' } };
    try {
        docs = await collection.aggregate([
            { $sort: sorting }, // sort by date
            { $match: datRange }, // select only values in the given date range
            {
                $group: {
                    _id: grpId,
                    n_average: { $avg: '$E10tel_eq' }, // calculate the average
                    n_sum: { $sum: '$E10tel_eq' }, // calculate the sum
                    peakcount: { $sum: { $cond: [{ $gte: ["$LA_max", peak] }, 1, 0] } }, // count peaks
                    count: { $sum: 1 }, // count entries
                }
            },
            { $sort: { _id: 1 } }, // sort by result dates
            { $addFields: { datetime: "$_id" } }, // change '_id' to 'datetime'
            {
                $project: opt.long
                    ? { _id: 0, n_AVG: { $multiply: [10, { $log10: "$n_average" }] }, datetime: 1, peakcount: 1, count: 1, n_sum: 1 }
                    : { _id: 0, n_AVG: { $multiply: [10, { $log10: "$n_average" }] }, datetime: 1, peakcount: 1 }
            }
        ]).toArray(); // return not all fields, depending on 'long'
    } catch (e) { // if there was an error
        console.log(e); // log it to console
    }
    // To easily extract the values, we copy the data from docs into a new array, so that the
    // hour in an element in docs becomes the index into the new array (for every new day this
    // index will be incremented by 24). Missing values are marked by: {n_sum=-1, n_AVG=-1}.
    let hoursArr = new Array(nbrOfHours); // generate new array
    let emptyValues = opt.long ? { n_sum: -1, n_AVG: -1 } : { n_AVG: -1 };
    hoursArr.fill(emptyValues); // fill with 'empty' value
    let startDay = moment.utc(docs[0].datetime).date(); // calc first day
    let k = 0;
    for (let i = 0; i < docs.length; i++) { // loop through docs
        let stunde = moment.utc(docs[i].datetime).hours(); // extract current hour
        let day = moment.utc(docs[i].datetime).date(); // and current day
        if (day != startDay) { // if date has changed
            k += 24; // increment index by 24
            startDay = day;
        }
        hoursArr[k + stunde] = docs[i]; // copy data into hourArray
    }
    return hoursArr;
}
*/