Erster Commit der test-Version
This commit is contained in:
424
databases/mongo.js
Normal file
424
databases/mongo.js
Normal file
@@ -0,0 +1,424 @@
|
||||
/* Interface for MongoDB
|
||||
*/
|
||||
import { MongoClient } from 'mongodb'
|
||||
import { logit, logerror } from '../utilities/logit.js'
|
||||
import { DateTime } from 'luxon'
|
||||
import {returnOnError} from "../utilities/reporterror.js";
|
||||
|
||||
// const nodemailer = require('nodemailer');
|
||||
|
||||
// Connection settings, taken from the environment with sane fallbacks.
const MONGOHOST = process.env.MONGOHOST ?? 'localhost';
const MONGOPORT = process.env.MONGOPORT ?? 27017;
const MONGOAUTH = process.env.MONGOAUTH ?? 'false';   // 'true' enables authentication
const MONGOUSRP = process.env.MONGOUSRP;              // expected as "user:password" — TODO confirm format
const MONGOBASE = process.env.MONGOBASE ?? 'sensor_data';

// URL to the mongo database; rebuilt below when authentication is enabled.
let MONGO_URL = `mongodb://${MONGOHOST}:${MONGOPORT}`;
if (MONGOAUTH === 'true') {
    // NOTE(review): credentials end up in the URL (and in any log of it) —
    // consider passing auth options separately instead.
    MONGO_URL = `mongodb://${MONGOUSRP}@${MONGOHOST}:${MONGOPORT}/?authSource=admin`;
}
|
||||
|
||||
// Name of the MongoDB collection holding the per-sensor property documents.
export const properties_collection = 'properties'
|
||||
|
||||
/**
 * Open a new connection to the MongoDB server at MONGO_URL.
 * Returns the connected MongoClient; the returned promise rejects if the
 * connection fails. (The previous catch-and-rethrow was a no-op and was removed.)
 */
export const connectMongo = async () => {
    logit(`Try to connect to ${MONGO_URL}`)
    let client = await MongoClient.connect(MONGO_URL)
    logit(`Mongodbase connected to ${MONGO_URL}`)
    return client
}
|
||||
|
||||
/**
 * Debug helper: print the name of every database on the connected server.
 * @param client - a connected MongoClient
 */
const listDatabases = async (client) => {
    const { databases } = await client.db().admin().listDatabases();
    console.log("Databases:");
    for (const db of databases) {
        console.log(` - ${db.name}`);
    }
}
|
||||
|
||||
/* ***************************************************
|
||||
// READ routines
|
||||
******************************************************/
|
||||
|
||||
// Read properties from the database
|
||||
/**
 * Read sensor properties from the 'properties' collection.
 *
 * @param query - if it contains 'sid', the single document { _id: query.sid }
 *                is returned in ret.properties; otherwise all documents
 *                matching the query are returned as an array.
 * @param limit - maximum number of documents for the query branch (0 = no limit).
 * @returns { err, properties } — err is the caught error or null.
 */
export const readProperties = async (query, limit = 0) => {
    let ret = { err: null, properties: null }
    let client = await connectMongo()
    try {
        // BUG FIX: the sid branch previously queried the collection named by
        // the string literal 'properties_collection' instead of the exported
        // constant (i.e. the wrong collection).
        const coll = client.db(MONGOBASE).collection(properties_collection)
        if ("sid" in query) { // if sid is given, read the single property document
            ret.properties = await coll.findOne({ _id: query.sid })
        } else {              // otherwise read all documents matching the query
            ret.properties = await coll.find(query).limit(limit).toArray()
        }
    } catch (e) {
        ret.err = e
    } finally {
        client.close()
    }
    return ret
}
|
||||
|
||||
/**
 * Fetch only the 'chip' field of the document for sensor `sid` from the
 * 'prop_flux' collection.
 * @returns { err, chipdata } — chipdata is { chip: ... } (or null if not
 *          found), err the caught error or null.
 */
export const readChipData = async (sid) => {
    const ret = { err: null, chipdata: null }
    const client = await connectMongo()
    try {
        const fluxColl = client.db(MONGOBASE).collection('prop_flux')
        const projection = { projection: { chip: 1, _id: 0 } }
        ret.chipdata = await fluxColl.findOne({ _id: sid }, projection)
    } catch (e) {
        ret.err = e
    } finally {
        client.close()
    }
    return ret
}
|
||||
|
||||
|
||||
// read mapdata from database
|
||||
/**
 * Read map data documents matching `query` from the 'mapdata' collection.
 *
 * @param query - MongoDB filter document.
 * @param limit - maximum number of documents (0 = no limit). Now defaults
 *                to 0, consistent with readProperties(); previously a call
 *                without `limit` passed undefined to cursor.limit().
 * @returns { err, mapdata } — err is the caught error or null.
 */
export const readMapdata = async (query, limit = 0) => {
    let ret = { err: null, mapdata: [] }
    let client = await connectMongo()
    try {
        ret.mapdata = await client.db(MONGOBASE).collection("mapdata").find(query).limit(limit).toArray()
    } catch (e) {
        ret.err = e
    } finally {
        client.close()
    }
    return ret
}
|
||||
|
||||
|
||||
/**
 * Fetch all documents matching `query` from the collection named `coll`.
 * @returns { err, properties } — properties is the result array, err the
 *          caught error or null.
 */
export const getallProperties = async (coll, query) => {
    const ret = { err: null, properties: [] }
    const client = await connectMongo()
    try {
        const cursor = client.db(MONGOBASE).collection(coll).find(query)
        ret.properties = await cursor.toArray()
    } catch (e) {
        ret.err = e
    } finally {
        client.close()
    }
    return ret
}
|
||||
|
||||
|
||||
/**
 * Look up the property document for a single sensor id.
 * On success returns { error: false, properties: <doc|null> };
 * on failure returns { error: true, errortext: <caught error> }.
 */
export const getOneproperty = async (sid) => {
    let ret = { error: false }
    const client = await connectMongo()
    try {
        const props = client.db(MONGOBASE).collection(properties_collection)
        ret.properties = await props.findOne({ _id: sid })
    } catch (e) {
        ret = { error: true, errortext: e }
    } finally {
        client.close()
    }
    return ret
}
|
||||
|
||||
|
||||
/**
 * Read all documents from the 'akws' and 'th1_akws' collections.
 *
 * @param options - currently unused; kept for interface compatibility.
 * @returns { values: { akws, th1_akws }, err } on success; on a null result
 *          or exception the value of returnOnError() is returned instead.
 *
 * (Removed the unused local 'erg'.)
 */
export const readAKWs = async (options) => {
    let ret = { values: { akws: [], th1_akws: [] }, err: null }
    let client = await connectMongo()
    try {
        let docs = await client.db(MONGOBASE).collection("akws")
            .find().toArray()
        if (docs == null) {
            return returnOnError(ret, 'akws - docs == null', readAKWs.name)
        }
        logit(`getawkdata: data fetched from akws, length= ${docs.length}`);
        ret.values.akws = docs

        let docs1 = await client.db(MONGOBASE).collection("th1_akws")
            .find().toArray()
        if (docs1 == null) {
            return returnOnError(ret, 'th1_akws - docs == null', readAKWs.name)
        }
        logit(`getawkdata: data fetched from th1_akws, length= ${docs1.length}`)
        ret.values.th1_akws = docs1
    } catch (e) {
        return returnOnError(ret, e, readAKWs.name)
    }
    finally {
        // finally also runs on the early returns above, so the client is
        // always closed.
        client.close()
    }
    return ret
}
|
||||
|
||||
/**
 * Fetch raw noise measurements for one sensor in a given time range.
 *
 * @param opts.sensorid - sensor id to match
 * @param opts.start    - string whose first 7 characters are a prefix
 *                        (presumably caller query syntax — TODO confirm),
 *                        followed by an ISO date string
 * @param opts.stop     - same, with a 6-character prefix
 * @returns { err, values } — values is an array of
 *          { datetime, LA_min, LA_minx, LA_max, LAeq, E10tel_eq } with
 *          datetime rendered as a UTC ISO-style string; err is the caught
 *          error or null.
 *
 * (Removed the unused local 'options' and the commented-out find()/
 * pipeline experiments; the active pipeline is unchanged.)
 */
export const fetchActData = async (opts) => {
    let ret = { err: null, values: [] }
    // strip the query-syntax prefixes, then convert to JS Date objects
    let start = DateTime.fromISO(opts.start.slice(7)).toJSDate()
    let end = DateTime.fromISO(opts.stop.slice(6)).toJSDate()
    let query = { sensorid: opts.sensorid, datetime: { $gte: start, $lt: end } }
    let client = await connectMongo()
    try {
        ret.values = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
            { $match: query },
            { $sort: { datetime: 1 } },
            // flatten the 'values' sub-document into top-level fields
            { $replaceWith:
                {
                    datetime: { $dateToString: { format: '%Y-%m-%dT%H:%M:%SZ', date: '$datetime' } },
                    LA_min: '$values.LA_min',
                    LA_minx: '$values.noise_LA_min',
                    LA_max: '$values.LA_max',
                    LAeq: '$values.LAeq',
                    E10tel_eq: '$values.E10tel_eq' }
            },
        ]).toArray()
    }
    catch (e) {
        ret.err = e
    }
    finally {
        client.close()
    }
    return ret
}
|
||||
/*
Sample log output (credentials redacted — never commit secrets):
Try to connect to mongodb://<user>:<REDACTED>@192.168.51.22:27017
Try to connect to mongodb://<user>:<REDACTED>@192.168.51.22:27017
*/
|
||||
|
||||
|
||||
|
||||
|
||||
/*
|
||||
let docs = await collection.find(
|
||||
{ datetime:
|
||||
{ $gte: start.toDate(), $lt: end.toDate() }
|
||||
},
|
||||
{ projection:
|
||||
{_id:0, E_eq:0, E_mx:0, E_mi:0, E10tel_mx:0, E10tel_mi:0}, sort: {datetime: sort}
|
||||
},
|
||||
).toArray();
|
||||
*/
|
||||
|
||||
// Fetch hourly-averaged Geiger counter data for one sensor from 'sensors'.
//
// opts.sensorid : sensor id to match
// opts.start    : string with a 7-char prefix followed by an ISO date
//                 (prefix presumably caller query syntax — TODO confirm)
// opts.stop     : same, with a 6-char prefix
// opts.sort     : direction for the initial $sort stage (1 or -1)
// opts.factor   : counts-per-minute -> uSv/h conversion factor
// opts.moving   : true  => 60-minute moving average via $setWindowFields
//                 false => fixed 60-minute buckets via $group/$dateTrunc
//
// Returns { err, values }: values is an array of
// { datetime, cpm_avg, uSvph_avg }; err is the caught error or null.
export const fetchgeigerAVGData = async (opts) => {
    let docs = []
    let ret = {err: null, values: []}
    // strip query-syntax prefixes, then convert to JS Date objects
    let start = opts.start.slice(7)
    let end = opts.stop.slice(6)
    start = DateTime.fromISO(start).toJSDate()
    end = DateTime.fromISO(end).toJSDate()
    let datRange = {sensorid: opts.sensorid, datetime: {$gte: start, $lt: end}}
    let sorting = {datetime: opts.sort};
    let client = await connectMongo()
    try {
        if(opts.moving) {
            // Moving average: each document gets the average of
            // counts_per_minute over the preceding 60 minutes.
            docs = await client.db(MONGOBASE).collection('sensors').aggregate([
                {
                    $sort: sorting
                }, // sort by date
                {
                    $match: {sensorid: opts.sensorid}
                }, // select only values for given sensor
                {
                    $match: datRange
                }, // select only values in given date range
                {
                    $setWindowFields: {
                        sortBy: {datetime: 1},
                        output: {
                            cpm_avg: {
                                $avg: "$values.counts_per_minute",
                                window: {
                                    range: [-60, 0],   // trailing 60-minute window
                                    unit: "minute"
                                }
                            }
                        }
                    }
                },
                {
                    // derive uSv/h from the cpm average via opts.factor
                    $project: {_id:0, cpm_avg: 1, datetime:1, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor]}}
                },
                {
                    $sort: {datetime: 1}
                }
            ]).toArray();
        } else {
            // Bucketed average: group readings into fixed 60-minute bins.
            docs = await client.db(MONGOBASE).collection('sensors').aggregate([
                {
                    $sort: sorting
                }, // sort by date
                {
                    $match: {sensorid: opts.sensorid}
                }, // select only values for given sensor
                {
                    $match: datRange
                }, // select only values in given date range
                { $group: {
                    _id: {$dateTrunc: {
                        date: "$datetime",
                        unit: "minute",
                        binSize: 60    // one bucket per hour
                    }},
                    cpm_avg: {$avg: "$values.counts_per_minute"}, // calculate the average
                    }
                },
                { $addFields: { datetime: "$_id"}}, // rename '_id' (the bucket start) to 'datetime'
                {
                    $project: {_id:0, uSvph_avg: { $multiply: ["$cpm_avg", opts.factor]}, datetime: 1, cpm_avg: 1}
                },
                {
                    $sort: {datetime: 1}
                }
            ]).toArray();
        }
    } catch(e) {           // if there was an error,
        ret.err = e        // report it to the caller in ret.err
    }
    finally {
        client.close()
    }
    ret.values = docs
    return ret
}
|
||||
|
||||
// Fetch hourly noise averages for one sensor from 'noise_sensors'.
//
// opts.sensorid : sensor id to match
// opts.start    : string with a 7-char prefix followed by an ISO date
//                 (prefix presumably caller query syntax — TODO confirm)
// opts.stop     : same, with a 6-char prefix
// opts.sort     : direction for the initial $sort stage (1 or -1)
// opts.peak     : LA_max threshold above which a reading counts as a peak
// opts.long     : true => include count and n_sum in the output documents
//
// Returns { err, values }: values is an array of
// { datetime, n_AVG, peakcount [, count, n_sum] } per hour, where
// n_AVG = 10 * log10(avg of values.E10tel_eq); err is the caught error
// or null.
export const fetchNoiseAVGData = async (opts) => {
    let docs = []
    let ret = {err: null, values: []}
    // strip query-syntax prefixes, then convert to JS Date objects
    let start = opts.start.slice(7)
    let end = opts.stop.slice(6)
    start = DateTime.fromISO(start).toJSDate()
    end = DateTime.fromISO(end).toJSDate()
    let peak = opts.peak; // threshold for peak count
    let datRange = {sensorid: opts.sensorid, datetime: {$gte: start, $lt: end}}
    let sorting = {datetime: opts.sort};
    // group key: the reading's hour, rendered as an ISO-style UTC string
    let grpId = {$dateToString: {format: '%Y-%m-%dT%H:00:00Z', date: '$datetime'}}
    let client = await connectMongo()
    try {
        docs = await client.db(MONGOBASE).collection('noise_sensors').aggregate([
            {$sort: sorting}, // sort by date
            {$match: datRange}, // select only values in given date range
            {
                $group: {
                    _id: grpId,
                    n_average: {$avg: "$values.E10tel_eq"}, // calculate the average
                    n_sum: {$sum: "$values.E10tel_eq"}, // calculate the sum
                    peakcount: {$sum: {$cond: [{$gte: ["$values.LA_max", peak]}, 1, 0]}}, // count peaks
                    count: {$sum: 1}, // count entries
                }
            },
            {$sort: {_id: 1}}, // sort by result dates
            { $addFields: { datetime: "$_id"}}, // rename '_id' (the hour string) to 'datetime'
            // project either the long or the short output shape
            {$project: opts.long ? { _id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1, count:1, n_sum:1} :
                {_id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1}}
        ]).toArray(); // return not all fields, depending on 'long'
    } catch(e) {           // if there was an error,
        ret.err = e        // report it to the caller in ret.err
    }
    finally {
        client.close()
    }
    ret.values = docs
    return ret
}
|
||||
|
||||
// TODO: unimplemented stub — currently resolves to undefined for any input.
export const fetchAVGData = async (opts) => {
}
|
||||
/*
|
||||
// *********************************************
|
||||
// getAverageData
|
||||
//
|
||||
// Calculate different values per hour
|
||||
// average of E10tel_eq ( E10tel_eq => 10 ^(LAeq/10) )
|
||||
// sum of E10tel_eq, to calculate day, night and eveniung averages
|
||||
// count, how many values are used for average/sum
|
||||
// paeakcount, how many values of LAmax are over defined peak value in every hour
|
||||
//
|
||||
// params:
|
||||
// db: Database
|
||||
// opt: different options (see further down)
|
||||
//
|
||||
// return
|
||||
// depending an calling parameter 'what', not all values will be sent in 'values'
|
||||
// JSON
|
||||
// {[
|
||||
// { datetime: "2019-10-23T00:00:00Z" , n_AVG: 67.22, n_sum: 32783, count: 24, peakcount: 6 }.
|
||||
// { datetime: "2019-10-23T01:00:00Z" , n_AVG: 52.89, n_sum: 23561, count: 26, peakcount: 5 }.
|
||||
// .........
|
||||
// ]}
|
||||
//
|
||||
// *********************************************
|
||||
async function getAverageData(db,opt) {
|
||||
let start = opt.start;
|
||||
let end = opt.end; // start and ent time for aggregation
|
||||
let docs = []; // collect data here
|
||||
const collection = db.collection('data_' + opt.sid);;
|
||||
let span = opt.span // date range in days
|
||||
let peak = opt.peak; // threshold for peak count
|
||||
let long = opt.long; // true => give extra output
|
||||
let nbrOfHours = opt.end.diff(opt.start,'hours') + 24;
|
||||
let datRange = {datetime: {$gte: opt.start.toDate(), $lt: opt.end.toDate()}};
|
||||
let sorting = {datetime: opt.sort};
|
||||
let grpId = {$dateToString: {format: '%Y-%m-%dT%H:00:00Z', date: '$datetime'}};
|
||||
try {
|
||||
docs = await collection.aggregate([
|
||||
{$sort: sorting}, // sort by date
|
||||
{$match: datRange}, // select only values in give data range
|
||||
{
|
||||
$group: {
|
||||
_id: grpId,
|
||||
n_average: {$avg: '$E10tel_eq'}, // calculate the average
|
||||
n_sum: {$sum: '$E10tel_eq'}, // calculate the sum
|
||||
peakcount: {$sum: {$cond: [{$gte: ["$LA_max", peak]}, 1, 0]}}, // count peaks
|
||||
count: {$sum: 1}, // count entries
|
||||
}
|
||||
},
|
||||
{$sort: {_id: 1}}, // sort by result dates
|
||||
{ $addFields: { datetime: "$_id"}}, // change '_id' to 'date'
|
||||
{$project: opt.long ? { _id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1, count:1, n_sum:1} :
|
||||
{_id:0, n_AVG: { $multiply: [10, {$log10: "$n_average"}]}, datetime:1, peakcount:1}}
|
||||
]).toArray(); // return not all fields, depending on 'long'
|
||||
} catch(e) { // if there was an error
|
||||
console.log(e); // log it to console
|
||||
}
|
||||
// To easily extract the values, we copy the data from docs into a new array, so that the
|
||||
// hour in an element in docs becomes the index into the new array (for every new day this
|
||||
// index will be incremented by 24). Missing values are marked by: {n_sum=-1, n_AVG=-1}.
|
||||
let hoursArr = new Array(nbrOfHours); // generate new array
|
||||
let emptyValues = opt.long ? {n_sum: -1, n_AVG:-1} : {n_AVG:-1};
|
||||
hoursArr.fill(emptyValues); // fill with 'empty' value
|
||||
let startDay = moment.utc(docs[0].datetime).date(); // calc first day
|
||||
let k = 0;
|
||||
for (let i=0; i<docs.length; i++) { // loop through docs
|
||||
let stunde = moment.utc(docs[i].datetime).hours(); // extract current hour
|
||||
let day = moment.utc(docs[i].datetime).date(); // and curren t day
|
||||
if (day != startDay) { // if date has changed
|
||||
k += 24; // increment index by 24
|
||||
startDay = day;
|
||||
}
|
||||
hoursArr[k+stunde] = docs[i]; // copy date into hourArray
|
||||
}
|
||||
return hoursArr;
|
||||
}
|
||||
*/
|
||||
Reference in New Issue
Block a user