V 1.4.0 Komplette Absicherung mit Hilfe von Claude

This commit is contained in:
2026-04-26 12:23:17 +02:00
parent 8961b9237c
commit 035c21ba23
11 changed files with 2114 additions and 860 deletions

View File

@@ -1,4 +1,5 @@
from fastapi import FastAPI, HTTPException, Query
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, Query, Depends
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field, ConfigDict
from typing import List, Optional
@@ -8,6 +9,7 @@ from pathlib import Path
from dotenv import load_dotenv
import psycopg
from psycopg.rows import dict_row
from psycopg_pool import ConnectionPool
import logging
# Logging konfigurieren
@@ -28,21 +30,75 @@ DB_NAME = os.getenv('DB_NAME', 'wetterstation')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
# --------------------------------------------------------------------------- #
# Connection Pool + Lifespan
# --------------------------------------------------------------------------- #
DB_POOL_MIN = int(os.getenv("DB_POOL_MIN", 2))
DB_POOL_MAX = int(os.getenv("DB_POOL_MAX", 10))
pool: Optional["ConnectionPool"] = None
def _build_conninfo() -> str:
return (
f"host={DB_HOST} port={DB_PORT} dbname={DB_NAME} "
f"user={DB_USER} password={DB_PASSWORD}"
)
@asynccontextmanager
async def lifespan(app: FastAPI):
global pool
if not DB_USER or not DB_PASSWORD:
raise RuntimeError("DB_USER/DB_PASSWORD nicht gesetzt")
pool = ConnectionPool(
conninfo=_build_conninfo(),
min_size=DB_POOL_MIN,
max_size=DB_POOL_MAX,
timeout=10,
kwargs={"row_factory": dict_row, "autocommit": True},
)
pool.wait()
logger.info("DB-Pool initialisiert (min=%d, max=%d)", DB_POOL_MIN, DB_POOL_MAX)
try:
yield
finally:
if pool is not None:
pool.close()
logger.info("DB-Pool geschlossen")
# FastAPI App erstellen
app = FastAPI(
title="Wetterstation API",
description="API zum Auslesen von Wetterdaten",
version="1.0.0"
version="1.1.0",
lifespan=lifespan,
)
# CORS Middleware
# CORS Middleware — auf bekannte Frontend-Domains beschraenkt.
# Zusaetzliche Origins koennen via ENV CORS_EXTRA_ORIGINS (Komma-separiert) gesetzt werden.
_default_origins = [
"https://stwwetter.fuerst-stuttgart.de",
"https://sternwarte-welzheim.de",
"http://localhost:3000",
"http://localhost:5173",
]
_extra = os.getenv("CORS_EXTRA_ORIGINS", "")
_extra_list = [o.strip() for o in _extra.split(",") if o.strip()]
ALLOWED_ORIGINS = _default_origins + _extra_list
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
allow_origins=ALLOWED_ORIGINS,
allow_credentials=False, # API liest nur, keine Cookies/Auth noetig
allow_methods=["GET", "OPTIONS"], # API ist read-only
allow_headers=["Content-Type"],
max_age=600,
)
logger.info("CORS aktiv fuer: %s", ALLOWED_ORIGINS)
# Pydantic Models
@@ -81,22 +137,17 @@ class HealthResponse(BaseModel):
timestamp: datetime
# Datenbankverbindung
def get_db_connection():
"""Erstellt eine Datenbankverbindung"""
# Datenbankverbindung — aus dem Pool, als FastAPI-Dependency.
def get_db_conn():
"""Yieldet eine Connection aus dem Pool und gibt sie automatisch zurueck."""
if pool is None:
raise HTTPException(status_code=503, detail="DB-Pool nicht initialisiert")
try:
conn = psycopg.connect(
host=DB_HOST,
port=DB_PORT,
dbname=DB_NAME,
user=DB_USER,
password=DB_PASSWORD,
row_factory=dict_row
)
return conn
except Exception as e:
logger.error(f"Datenbankverbindungsfehler: {e}")
raise HTTPException(status_code=500, detail="Datenbankverbindung fehlgeschlagen")
with pool.connection() as conn:
yield conn
except psycopg.Error:
logger.exception("DB-Fehler beim Pool-Zugriff")
raise HTTPException(status_code=503, detail="Datenbank nicht erreichbar")
# API Endpoints
@@ -105,7 +156,7 @@ async def root():
"""Root Endpoint"""
return {
"message": "Wetterstation API",
"version": "1.0.0",
"version": "1.1.0",
"docs": "/docs"
}
@@ -113,14 +164,15 @@ async def root():
@app.get("/health", response_model=HealthResponse, tags=["General"])
async def health_check():
"""Health Check Endpoint"""
db_status = "disconnected"
try:
conn = get_db_connection()
with conn.cursor() as cursor:
cursor.execute("SELECT 1")
conn.close()
db_status = "connected"
if pool is not None:
with pool.connection() as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT 1")
db_status = "connected"
except Exception:
db_status = "disconnected"
logger.exception("Health-Check DB-Test fehlgeschlagen")
return {
"status": "ok" if db_status == "connected" else "error",
@@ -130,297 +182,232 @@ async def health_check():
@app.get("/weather/latest", response_model=WeatherData, tags=["Weather Data"])
async def get_latest_weather():
async def get_latest_weather(conn = Depends(get_db_conn)):
"""Gibt die neuesten Wetterdaten zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
ORDER BY datetime DESC
LIMIT 1
""")
result = cursor.fetchone()
if not result:
raise HTTPException(status_code=404, detail="Keine Daten verfügbar")
return dict(result)
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
ORDER BY datetime DESC
LIMIT 1
""")
result = cursor.fetchone()
if not result:
raise HTTPException(status_code=404, detail="Keine Daten verfügbar")
return dict(result)
@app.get("/weather/current", response_model=WeatherData, tags=["Weather Data"])
async def get_current_weather():
async def get_current_weather(conn = Depends(get_db_conn)):
"""Alias für /weather/latest - gibt aktuelle Wetterdaten zurück"""
return await get_latest_weather()
return await get_latest_weather(conn=conn)
@app.get("/weather/history", response_model=List[WeatherData], tags=["Weather Data"])
async def get_weather_history(
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück (max 168 = 7 Tage)"),
limit: int = Query(1000, ge=1, le=10000, description="Maximale Anzahl Datensätze")
limit: int = Query(1000, ge=1, le=10000, description="Maximale Anzahl Datensätze"),
conn = Depends(get_db_conn),
):
"""Gibt historische Wetterdaten der letzten X Stunden zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime DESC
LIMIT %s
""", (hours, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime DESC
LIMIT %s
""", (hours, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/range", response_model=List[WeatherData], tags=["Weather Data"])
async def get_weather_by_date_range(
start: datetime = Query(..., description="Startdatum (ISO 8601)"),
end: datetime = Query(..., description="Enddatum (ISO 8601)"),
limit: int = Query(10000, ge=1, le=50000, description="Maximale Anzahl Datensätze")
limit: int = Query(10000, ge=1, le=50000, description="Maximale Anzahl Datensätze"),
conn = Depends(get_db_conn),
):
"""Gibt Wetterdaten für einen bestimmten Zeitraum zurück"""
if start >= end:
raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT * FROM weather_data
WHERE datetime BETWEEN %s AND %s
ORDER BY datetime ASC
LIMIT %s
""", (start, end, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT * FROM weather_data
WHERE datetime BETWEEN %s AND %s
ORDER BY datetime ASC
LIMIT %s
""", (start, end, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/stats", response_model=WeatherStats, tags=["Statistics"])
async def get_weather_statistics(
hours: int = Query(24, ge=1, le=168, description="Zeitraum in Stunden für Statistiken")
hours: int = Query(24, ge=1, le=168, description="Zeitraum in Stunden für Statistiken"),
conn = Depends(get_db_conn),
):
"""Gibt aggregierte Statistiken für den angegebenen Zeitraum zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
AVG(temperature) as avg_temperature,
MIN(temperature) as min_temperature,
MAX(temperature) as max_temperature,
AVG(humidity) as avg_humidity,
AVG(pressure) as avg_pressure,
AVG(wind_speed * 1.60934) as avg_wind_speed,
MAX(wind_gust * 1.60934) as max_wind_gust,
SUM(rain) as total_rain,
COUNT(*) as data_points
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
""", (hours,))
result = cursor.fetchone()
if not result or result['data_points'] == 0:
raise HTTPException(status_code=404, detail="Keine Daten für den Zeitraum verfügbar")
return dict(result)
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
AVG(temperature) as avg_temperature,
MIN(temperature) as min_temperature,
MAX(temperature) as max_temperature,
AVG(humidity) as avg_humidity,
AVG(pressure) as avg_pressure,
AVG(wind_speed * 1.60934) as avg_wind_speed,
MAX(wind_gust * 1.60934) as max_wind_gust,
SUM(rain) as total_rain,
COUNT(*) as data_points
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
""", (hours,))
result = cursor.fetchone()
if not result or result['data_points'] == 0:
raise HTTPException(status_code=404, detail="Keine Daten für den Zeitraum verfügbar")
return dict(result)
@app.get("/weather/daily", response_model=List[WeatherStats], tags=["Statistics"])
async def get_daily_statistics(
days: int = Query(7, ge=1, le=90, description="Anzahl Tage zurück (max 90)")
days: int = Query(7, ge=1, le=90, description="Anzahl Tage zurück (max 90)"),
conn = Depends(get_db_conn),
):
"""Gibt tägliche Statistiken für die letzten X Tage zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
DATE(datetime) as date,
AVG(temperature) as avg_temperature,
MIN(temperature) as min_temperature,
MAX(temperature) as max_temperature,
AVG(humidity) as avg_humidity,
AVG(pressure) as avg_pressure,
AVG(wind_speed * 1.60934) as avg_wind_speed,
MAX(wind_gust * 1.60934) as max_wind_gust,
SUM(rain) as total_rain,
COUNT(*) as data_points
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY DATE(datetime)
ORDER BY date DESC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
DATE(datetime) as date,
AVG(temperature) as avg_temperature,
MIN(temperature) as min_temperature,
MAX(temperature) as max_temperature,
AVG(humidity) as avg_humidity,
AVG(pressure) as avg_pressure,
AVG(wind_speed * 1.60934) as avg_wind_speed,
MAX(wind_gust * 1.60934) as max_wind_gust,
SUM(rain) as total_rain,
COUNT(*) as data_points
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY DATE(datetime)
ORDER BY date DESC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/temperature", response_model=List[dict], tags=["Weather Data"])
async def get_temperature_data(
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück"),
conn = Depends(get_db_conn),
):
"""Gibt nur Temperatur-Zeitreihen zurück (optimiert für Diagramme)"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime, temperature
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
AND temperature IS NOT NULL
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime, temperature
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
AND temperature IS NOT NULL
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/wind", response_model=List[dict], tags=["Weather Data"])
async def get_wind_data(
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück"),
conn = Depends(get_db_conn),
):
"""Gibt nur Wind-Daten zurück (Geschwindigkeit, Richtung, Böen)"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/rain", response_model=List[dict], tags=["Weather Data"])
async def get_rain_data(
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück"),
conn = Depends(get_db_conn),
):
"""Gibt nur Regen-Daten zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime, rain, rain_rate
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT datetime, rain, rain_rate
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime ASC
""", (hours,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/hourly-aggregated", response_model=List[WeatherData], tags=["Aggregated Data"])
async def get_hourly_aggregated_data(
days: int = Query(7, ge=1, le=60, description="Anzahl Tage zurück (max 60)")
days: int = Query(7, ge=1, le=60, description="Anzahl Tage zurück (max 60)"),
conn = Depends(get_db_conn),
):
"""Gibt stündlich aggregierte Wetterdaten zurück (Stundenmittel)"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
0 as id,
date_trunc('hour', datetime) as datetime,
AVG(temperature) as temperature,
ROUND(AVG(humidity)) as humidity,
AVG(pressure) as pressure,
AVG(wind_speed * 1.60934) as wind_speed,
MAX(wind_gust * 1.60934) as wind_gust,
AVG(wind_dir) as wind_dir,
AVG(rain) as rain,
AVG(rain_rate) as rain_rate,
MAX(received_at) as received_at
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('hour', datetime)
ORDER BY datetime ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
0 as id,
date_trunc('hour', datetime) as datetime,
AVG(temperature) as temperature,
ROUND(AVG(humidity)) as humidity,
AVG(pressure) as pressure,
AVG(wind_speed * 1.60934) as wind_speed,
MAX(wind_gust * 1.60934) as wind_gust,
AVG(wind_dir) as wind_dir,
AVG(rain) as rain,
AVG(rain_rate) as rain_rate,
MAX(received_at) as received_at
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('hour', datetime)
ORDER BY datetime ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/daily-aggregated", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_aggregated_data(
days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück (max 730)")
days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück (max 730)"),
conn = Depends(get_db_conn),
):
"""Gibt täglich aggregierte Wetterdaten zurück (Tagesmittel mit Min/Max-Temperaturen)"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as datetime,
AVG(temperature)::float as temperature,
MIN(temperature)::float as min_temperature,
MAX(temperature)::float as max_temperature,
ROUND(AVG(humidity))::int as humidity,
MIN(humidity)::int as min_humidity,
MAX(humidity)::int as max_humidity,
AVG(pressure)::float as pressure,
MIN(pressure)::float as min_pressure,
MAX(pressure)::float as max_pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir,
SUM(rain)::float as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('day', datetime)
ORDER BY datetime ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
@app.get("/weather/daily-with-minmax", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_with_minmax_data(
days: int = Query(30, ge=1, le=90, description="Anzahl Tage zurück (max 90)")
):
"""Gibt täglich aggregierte Wetterdaten mit Min/Max-Temperaturen zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as datetime,
AVG(temperature)::float as temperature,
@@ -441,143 +428,162 @@ async def get_daily_with_minmax_data(
GROUP BY date_trunc('day', datetime)
ORDER BY datetime ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/daily-with-minmax", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_with_minmax_data(
days: int = Query(30, ge=1, le=90, description="Anzahl Tage zurück (max 90)"),
conn = Depends(get_db_conn),
):
"""Gibt täglich aggregierte Wetterdaten mit Min/Max-Temperaturen zurück"""
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as datetime,
AVG(temperature)::float as temperature,
MIN(temperature)::float as min_temperature,
MAX(temperature)::float as max_temperature,
ROUND(AVG(humidity))::int as humidity,
MIN(humidity)::int as min_humidity,
MAX(humidity)::int as max_humidity,
AVG(pressure)::float as pressure,
MIN(pressure)::float as min_pressure,
MAX(pressure)::float as max_pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir,
SUM(rain)::float as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('day', datetime)
ORDER BY datetime ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/rain-daily", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_rain_data(
days: int = Query(30, ge=1, le=365, description="Anzahl Tage zurück")
days: int = Query(30, ge=1, le=365, description="Anzahl Tage zurück"),
conn = Depends(get_db_conn),
):
"""Gibt tägliche Regensummen zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as date,
SUM(rain) as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('day', datetime)
ORDER BY date ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as date,
SUM(rain) as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('day', datetime)
ORDER BY date ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/rain-weekly", response_model=List[dict], tags=["Aggregated Data"])
async def get_weekly_rain_data(
days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück")
days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück"),
conn = Depends(get_db_conn),
):
"""Gibt wöchentliche Regensummen zurück (Woche = Mo-So)"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
# Bei 365 Tagen: alle verfügbaren Daten zurückgeben
if days >= 365:
cursor.execute("""
SELECT
date_trunc('week', datetime) as week_start,
SUM(rain) as total_rain
FROM weather_data
GROUP BY date_trunc('week', datetime)
ORDER BY week_start ASC
""")
else:
cursor.execute("""
SELECT
date_trunc('week', datetime) as week_start,
SUM(rain) as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('week', datetime)
ORDER BY week_start ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
# Bei 365 Tagen: alle verfügbaren Daten zurückgeben
if days >= 365:
cursor.execute("""
SELECT
date_trunc('week', datetime) as week_start,
SUM(rain) as total_rain
FROM weather_data
GROUP BY date_trunc('week', datetime)
ORDER BY week_start ASC
""")
else:
cursor.execute("""
SELECT
date_trunc('week', datetime) as week_start,
SUM(rain) as total_rain
FROM weather_data
WHERE datetime >= NOW() - make_interval(days => %s)
GROUP BY date_trunc('week', datetime)
ORDER BY week_start ASC
""", (days,))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/hourly-aggregated-range", response_model=List[dict], tags=["Aggregated Data"])
async def get_hourly_aggregated_range(
start: datetime = Query(..., description="Startdatum (ISO 8601)"),
end: datetime = Query(..., description="Enddatum (ISO 8601)")
end: datetime = Query(..., description="Enddatum (ISO 8601)"),
conn = Depends(get_db_conn),
):
"""Gibt stündlich aggregierte Wetterdaten für einen bestimmten Zeitraum zurück"""
if start >= end:
raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('hour', datetime) as datetime,
AVG(temperature)::float as temperature,
ROUND(AVG(humidity))::int as humidity,
AVG(pressure)::float as pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir
FROM weather_data
WHERE datetime BETWEEN %s AND %s
GROUP BY date_trunc('hour', datetime)
ORDER BY datetime ASC
""", (start, end))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('hour', datetime) as datetime,
AVG(temperature)::float as temperature,
ROUND(AVG(humidity))::int as humidity,
AVG(pressure)::float as pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir
FROM weather_data
WHERE datetime BETWEEN %s AND %s
GROUP BY date_trunc('hour', datetime)
ORDER BY datetime ASC
""", (start, end))
results = cursor.fetchall()
return [dict(row) for row in results]
@app.get("/weather/daily-aggregated-range", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_aggregated_range(
start: datetime = Query(..., description="Startdatum (ISO 8601)"),
end: datetime = Query(..., description="Enddatum (ISO 8601)")
end: datetime = Query(..., description="Enddatum (ISO 8601)"),
conn = Depends(get_db_conn),
):
"""Gibt täglich aggregierte Wetterdaten mit Min/Max-Temperaturen für einen bestimmten Zeitraum zurück"""
if start >= end:
raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as datetime,
AVG(temperature)::float as temperature,
MIN(temperature)::float as min_temperature,
MAX(temperature)::float as max_temperature,
ROUND(AVG(humidity))::int as humidity,
MIN(humidity)::int as min_humidity,
MAX(humidity)::int as max_humidity,
AVG(pressure)::float as pressure,
MIN(pressure)::float as min_pressure,
MAX(pressure)::float as max_pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir,
SUM(rain)::float as total_rain
FROM weather_data
WHERE datetime BETWEEN %s AND %s
GROUP BY date_trunc('day', datetime)
ORDER BY datetime ASC
""", (start, end))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
with conn.cursor() as cursor:
cursor.execute("""
SELECT
date_trunc('day', datetime) as datetime,
AVG(temperature)::float as temperature,
MIN(temperature)::float as min_temperature,
MAX(temperature)::float as max_temperature,
ROUND(AVG(humidity))::int as humidity,
MIN(humidity)::int as min_humidity,
MAX(humidity)::int as max_humidity,
AVG(pressure)::float as pressure,
MIN(pressure)::float as min_pressure,
MAX(pressure)::float as max_pressure,
AVG(wind_speed * 1.60934)::float as wind_speed,
MAX(wind_gust * 1.60934)::float as wind_gust,
AVG(wind_dir)::float as wind_dir,
SUM(rain)::float as total_rain
FROM weather_data
WHERE datetime BETWEEN %s AND %s
GROUP BY date_trunc('day', datetime)
ORDER BY datetime ASC
""", (start, end))
results = cursor.fetchall()
return [dict(row) for row in results]
if __name__ == "__main__":

598
api/main.py_org Normal file
View File

@@ -0,0 +1,598 @@
from fastapi import FastAPI, HTTPException, Query
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field, ConfigDict
from typing import List, Optional
from datetime import datetime, timedelta
import os
from pathlib import Path
from dotenv import load_dotenv
import psycopg
from psycopg.rows import dict_row
import logging

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Load environment variables from the .env file in the project root
# (one directory above this module).
env_path = Path(__file__).parent.parent / '.env'
load_dotenv(dotenv_path=env_path)

# Database configuration. Host/port/name have development defaults;
# DB_USER and DB_PASSWORD intentionally have none and must be supplied
# via the environment (they are None otherwise).
DB_HOST = os.getenv('DB_HOST', 'localhost')
DB_PORT = int(os.getenv('DB_PORT', 5432))
DB_NAME = os.getenv('DB_NAME', 'wetterstation')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
# FastAPI App erstellen
app = FastAPI(
title="Wetterstation API",
description="API zum Auslesen von Wetterdaten",
version="1.0.0"
)
# CORS Middleware — auf bekannte Frontend-Domains beschraenkt.
# Zusaetzliche Origins koennen via ENV CORS_EXTRA_ORIGINS (Komma-separiert) gesetzt werden.
_default_origins = [
"https://stwwetter.fuerst-stuttgart.de",
"https://sternwarte-welzheim.de",
"http://localhost:3000",
"http://localhost:5173",
]
_extra = os.getenv("CORS_EXTRA_ORIGINS", "")
_extra_list = [o.strip() for o in _extra.split(",") if o.strip()]
ALLOWED_ORIGINS = _default_origins + _extra_list
app.add_middleware(
CORSMiddleware,
allow_origins=ALLOWED_ORIGINS,
allow_credentials=False, # API liest nur, keine Cookies/Auth noetig
allow_methods=["GET", "OPTIONS"], # API ist read-only
allow_headers=["Content-Type"],
max_age=600,
)
logger.info("CORS aktiv fuer: %s", ALLOWED_ORIGINS)
# Pydantic Models
class WeatherData(BaseModel):
    """A single weather observation as served by the API endpoints.

    Built from ``weather_data`` table rows; ``from_attributes=True``
    allows construction from row objects as well as plain dicts.
    """
    model_config = ConfigDict(from_attributes=True)

    id: int
    # Observation timestamp (the "datetime" column).
    datetime: datetime
    temperature: Optional[float] = None
    humidity: Optional[int] = None
    pressure: Optional[float] = None
    # The SQL queries multiply wind_speed/wind_gust by 1.60934 before
    # they reach this model (mph -> km/h, presumably — confirm units).
    wind_speed: Optional[float] = None
    wind_gust: Optional[float] = None
    wind_dir: Optional[float] = None
    rain: Optional[float] = None
    rain_rate: Optional[float] = None
    bar_trend: Optional[int] = None
    # Secondary timestamp column; presumably when the row was ingested.
    received_at: datetime
class WeatherStats(BaseModel):
    """Aggregated statistics over a time window.

    Used by /weather/stats (single window) and /weather/daily
    (one instance per day).
    """
    avg_temperature: Optional[float] = None
    min_temperature: Optional[float] = None
    max_temperature: Optional[float] = None
    avg_humidity: Optional[float] = None
    avg_pressure: Optional[float] = None
    avg_wind_speed: Optional[float] = None
    max_wind_gust: Optional[float] = None
    total_rain: Optional[float] = None
    # Number of rows in the aggregation window (COUNT(*)); never None.
    data_points: int
class HealthResponse(BaseModel):
    """Response shape for the /health endpoint."""
    # "ok" or "error" (as produced by health_check).
    status: str
    # "connected" or "disconnected".
    database: str
    timestamp: datetime
# Datenbankverbindung
def get_db_connection():
    """Open a new PostgreSQL connection with dict rows.

    Returns:
        psycopg.Connection: an open connection whose rows come back as
        dicts (``row_factory=dict_row``). The caller must close it.

    Raises:
        HTTPException: 500 if the connection cannot be established.
    """
    try:
        return psycopg.connect(
            host=DB_HOST,
            port=DB_PORT,
            dbname=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD,
            row_factory=dict_row,
        )
    except psycopg.Error as e:
        # Narrowed from a blanket `except Exception`: only database
        # errors become a 500; programming errors propagate. Lazy
        # %-formatting and `from e` keep the original traceback chained.
        logger.error("Datenbankverbindungsfehler: %s", e)
        raise HTTPException(status_code=500, detail="Datenbankverbindung fehlgeschlagen") from e
# API Endpoints
@app.get("/", tags=["General"])
async def root():
"""Root Endpoint"""
return {
"message": "Wetterstation API",
"version": "1.0.0",
"docs": "/docs"
}
@app.get("/health", response_model=HealthResponse, tags=["General"])
async def health_check():
"""Health Check Endpoint"""
try:
conn = get_db_connection()
with conn.cursor() as cursor:
cursor.execute("SELECT 1")
conn.close()
db_status = "connected"
except Exception:
db_status = "disconnected"
return {
"status": "ok" if db_status == "connected" else "error",
"database": db_status,
"timestamp": datetime.now()
}
@app.get("/weather/latest", response_model=WeatherData, tags=["Weather Data"])
async def get_latest_weather():
"""Gibt die neuesten Wetterdaten zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
ORDER BY datetime DESC
LIMIT 1
""")
result = cursor.fetchone()
if not result:
raise HTTPException(status_code=404, detail="Keine Daten verfügbar")
return dict(result)
finally:
conn.close()
@app.get("/weather/current", response_model=WeatherData, tags=["Weather Data"])
async def get_current_weather():
"""Alias für /weather/latest - gibt aktuelle Wetterdaten zurück"""
return await get_latest_weather()
@app.get("/weather/history", response_model=List[WeatherData], tags=["Weather Data"])
async def get_weather_history(
hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück (max 168 = 7 Tage)"),
limit: int = Query(1000, ge=1, le=10000, description="Maximale Anzahl Datensätze")
):
"""Gibt historische Wetterdaten der letzten X Stunden zurück"""
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT id, datetime, temperature, humidity, pressure,
wind_speed * 1.60934 as wind_speed,
wind_gust * 1.60934 as wind_gust,
wind_dir, rain, rain_rate, bar_trend, received_at
FROM weather_data
WHERE datetime >= NOW() - make_interval(hours => %s)
ORDER BY datetime DESC
LIMIT %s
""", (hours, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
@app.get("/weather/range", response_model=List[WeatherData], tags=["Weather Data"])
async def get_weather_by_date_range(
start: datetime = Query(..., description="Startdatum (ISO 8601)"),
end: datetime = Query(..., description="Enddatum (ISO 8601)"),
limit: int = Query(10000, ge=1, le=50000, description="Maximale Anzahl Datensätze")
):
"""Gibt Wetterdaten für einen bestimmten Zeitraum zurück"""
if start >= end:
raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
conn = get_db_connection()
try:
with conn.cursor() as cursor:
cursor.execute("""
SELECT * FROM weather_data
WHERE datetime BETWEEN %s AND %s
ORDER BY datetime ASC
LIMIT %s
""", (start, end, limit))
results = cursor.fetchall()
return [dict(row) for row in results]
finally:
conn.close()
@app.get("/weather/stats", response_model=WeatherStats, tags=["Statistics"])
async def get_weather_statistics(
    hours: int = Query(24, ge=1, le=168, description="Zeitraum in Stunden für Statistiken")
):
    """Return aggregated statistics (avg/min/max, rain sum) for the last *hours* hours."""
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    AVG(temperature) as avg_temperature,
                    MIN(temperature) as min_temperature,
                    MAX(temperature) as max_temperature,
                    AVG(humidity) as avg_humidity,
                    AVG(pressure) as avg_pressure,
                    AVG(wind_speed * 1.60934) as avg_wind_speed,
                    MAX(wind_gust * 1.60934) as max_wind_gust,
                    SUM(rain) as total_rain,
                    COUNT(*) as data_points
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(hours => %s)
            """, (hours,))
            row = cur.fetchone()
    finally:
        connection.close()
    # Aggregates over an empty window come back as NULLs with data_points == 0.
    if not row or row['data_points'] == 0:
        raise HTTPException(status_code=404, detail="Keine Daten für den Zeitraum verfügbar")
    return dict(row)
@app.get("/weather/daily", response_model=List[WeatherStats], tags=["Statistics"])
async def get_daily_statistics(
    days: int = Query(7, ge=1, le=90, description="Anzahl Tage zurück (max 90)")
):
    """Return per-day statistics (grouped by calendar date) for the last *days* days."""
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    DATE(datetime) as date,
                    AVG(temperature) as avg_temperature,
                    MIN(temperature) as min_temperature,
                    MAX(temperature) as max_temperature,
                    AVG(humidity) as avg_humidity,
                    AVG(pressure) as avg_pressure,
                    AVG(wind_speed * 1.60934) as avg_wind_speed,
                    MAX(wind_gust * 1.60934) as max_wind_gust,
                    SUM(rain) as total_rain,
                    COUNT(*) as data_points
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(days => %s)
                GROUP BY DATE(datetime)
                ORDER BY date DESC
            """, (days,))
            daily_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(day) for day in daily_rows]
@app.get("/weather/temperature", response_model=List[dict], tags=["Weather Data"])
async def get_temperature_data(
    hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
):
    """Temperature time series only (NULL readings filtered out; chart-friendly)."""
    db = get_db_connection()
    try:
        with db.cursor() as cur:
            cur.execute("""
                SELECT datetime, temperature
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(hours => %s)
                AND temperature IS NOT NULL
                ORDER BY datetime ASC
            """, (hours,))
            series = [dict(point) for point in cur.fetchall()]
    finally:
        db.close()
    return series
@app.get("/weather/wind", response_model=List[dict], tags=["Weather Data"])
async def get_wind_data(
    hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
):
    """Wind time series only: speed, gusts (both * 1.60934) and direction."""
    db = get_db_connection()
    try:
        with db.cursor() as cur:
            cur.execute("""
                SELECT datetime,
                       wind_speed * 1.60934 as wind_speed,
                       wind_gust * 1.60934 as wind_gust,
                       wind_dir
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(hours => %s)
                ORDER BY datetime ASC
            """, (hours,))
            series = [dict(point) for point in cur.fetchall()]
    finally:
        db.close()
    return series
@app.get("/weather/rain", response_model=List[dict], tags=["Weather Data"])
async def get_rain_data(
    hours: int = Query(24, ge=1, le=168, description="Anzahl Stunden zurück")
):
    """Rain time series only (rain amount and rain rate)."""
    db = get_db_connection()
    try:
        with db.cursor() as cur:
            cur.execute("""
                SELECT datetime, rain, rain_rate
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(hours => %s)
                ORDER BY datetime ASC
            """, (hours,))
            series = [dict(point) for point in cur.fetchall()]
    finally:
        db.close()
    return series
@app.get("/weather/hourly-aggregated", response_model=List[WeatherData], tags=["Aggregated Data"])
async def get_hourly_aggregated_data(
    days: int = Query(7, ge=1, le=60, description="Anzahl Tage zurück (max 60)")
):
    """Hourly means over the last *days* days.

    A synthetic id of 0 is emitted because aggregated rows have no single
    source row but the WeatherData model requires the field.
    """
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    0 as id,
                    date_trunc('hour', datetime) as datetime,
                    AVG(temperature) as temperature,
                    ROUND(AVG(humidity)) as humidity,
                    AVG(pressure) as pressure,
                    AVG(wind_speed * 1.60934) as wind_speed,
                    MAX(wind_gust * 1.60934) as wind_gust,
                    AVG(wind_dir) as wind_dir,
                    AVG(rain) as rain,
                    AVG(rain_rate) as rain_rate,
                    MAX(received_at) as received_at
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(days => %s)
                GROUP BY date_trunc('hour', datetime)
                ORDER BY datetime ASC
            """, (days,))
            hourly_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(hour) for hour in hourly_rows]
@app.get("/weather/daily-aggregated", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_aggregated_data(
    days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück (max 730)")
):
    """Daily aggregates (means plus min/max for temperature, humidity, pressure)."""
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    date_trunc('day', datetime) as datetime,
                    AVG(temperature)::float as temperature,
                    MIN(temperature)::float as min_temperature,
                    MAX(temperature)::float as max_temperature,
                    ROUND(AVG(humidity))::int as humidity,
                    MIN(humidity)::int as min_humidity,
                    MAX(humidity)::int as max_humidity,
                    AVG(pressure)::float as pressure,
                    MIN(pressure)::float as min_pressure,
                    MAX(pressure)::float as max_pressure,
                    AVG(wind_speed * 1.60934)::float as wind_speed,
                    MAX(wind_gust * 1.60934)::float as wind_gust,
                    AVG(wind_dir)::float as wind_dir,
                    SUM(rain)::float as total_rain
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(days => %s)
                GROUP BY date_trunc('day', datetime)
                ORDER BY datetime ASC
            """, (days,))
            daily_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(day) for day in daily_rows]
@app.get("/weather/daily-with-minmax", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_with_minmax_data(
    days: int = Query(30, ge=1, le=90, description="Anzahl Tage zurück (max 90)")
):
    """Daily aggregates with min/max temperatures.

    NOTE(review): the query is identical to /weather/daily-aggregated; only the
    `days` default and upper bound differ. Candidate for consolidation.
    """
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    date_trunc('day', datetime) as datetime,
                    AVG(temperature)::float as temperature,
                    MIN(temperature)::float as min_temperature,
                    MAX(temperature)::float as max_temperature,
                    ROUND(AVG(humidity))::int as humidity,
                    MIN(humidity)::int as min_humidity,
                    MAX(humidity)::int as max_humidity,
                    AVG(pressure)::float as pressure,
                    MIN(pressure)::float as min_pressure,
                    MAX(pressure)::float as max_pressure,
                    AVG(wind_speed * 1.60934)::float as wind_speed,
                    MAX(wind_gust * 1.60934)::float as wind_gust,
                    AVG(wind_dir)::float as wind_dir,
                    SUM(rain)::float as total_rain
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(days => %s)
                GROUP BY date_trunc('day', datetime)
                ORDER BY datetime ASC
            """, (days,))
            daily_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(day) for day in daily_rows]
@app.get("/weather/rain-daily", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_rain_data(
    days: int = Query(30, ge=1, le=365, description="Anzahl Tage zurück")
):
    """Daily rain totals for the last *days* days."""
    db = get_db_connection()
    try:
        with db.cursor() as cur:
            cur.execute("""
                SELECT
                    date_trunc('day', datetime) as date,
                    SUM(rain) as total_rain
                FROM weather_data
                WHERE datetime >= NOW() - make_interval(days => %s)
                GROUP BY date_trunc('day', datetime)
                ORDER BY date ASC
            """, (days,))
            totals = cur.fetchall()
    finally:
        db.close()
    return [dict(entry) for entry in totals]
@app.get("/weather/rain-weekly", response_model=List[dict], tags=["Aggregated Data"])
async def get_weekly_rain_data(
    days: int = Query(365, ge=1, le=730, description="Anzahl Tage zurück")
):
    """Weekly rain totals (ISO weeks, Mon–Sun via date_trunc('week', ...)).

    For days >= 365 the time filter is dropped entirely and all stored weeks
    are returned.
    """
    db = get_db_connection()
    try:
        with db.cursor() as cur:
            if days >= 365:
                cur.execute("""
                    SELECT
                        date_trunc('week', datetime) as week_start,
                        SUM(rain) as total_rain
                    FROM weather_data
                    GROUP BY date_trunc('week', datetime)
                    ORDER BY week_start ASC
                """)
            else:
                cur.execute("""
                    SELECT
                        date_trunc('week', datetime) as week_start,
                        SUM(rain) as total_rain
                    FROM weather_data
                    WHERE datetime >= NOW() - make_interval(days => %s)
                    GROUP BY date_trunc('week', datetime)
                    ORDER BY week_start ASC
                """, (days,))
            weekly = cur.fetchall()
    finally:
        db.close()
    return [dict(week) for week in weekly]
@app.get("/weather/hourly-aggregated-range", response_model=List[dict], tags=["Aggregated Data"])
async def get_hourly_aggregated_range(
    start: datetime = Query(..., description="Startdatum (ISO 8601)"),
    end: datetime = Query(..., description="Enddatum (ISO 8601)")
):
    """Hourly aggregates restricted to an explicit [start, end] range.

    Raises HTTP 400 when start >= end.
    """
    if start >= end:
        raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    date_trunc('hour', datetime) as datetime,
                    AVG(temperature)::float as temperature,
                    ROUND(AVG(humidity))::int as humidity,
                    AVG(pressure)::float as pressure,
                    AVG(wind_speed * 1.60934)::float as wind_speed,
                    MAX(wind_gust * 1.60934)::float as wind_gust,
                    AVG(wind_dir)::float as wind_dir
                FROM weather_data
                WHERE datetime BETWEEN %s AND %s
                GROUP BY date_trunc('hour', datetime)
                ORDER BY datetime ASC
            """, (start, end))
            hourly_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(hour) for hour in hourly_rows]
@app.get("/weather/daily-aggregated-range", response_model=List[dict], tags=["Aggregated Data"])
async def get_daily_aggregated_range(
    start: datetime = Query(..., description="Startdatum (ISO 8601)"),
    end: datetime = Query(..., description="Enddatum (ISO 8601)")
):
    """Daily aggregates with min/max values for an explicit [start, end] range.

    Raises HTTP 400 when start >= end.
    """
    if start >= end:
        raise HTTPException(status_code=400, detail="Startdatum muss vor Enddatum liegen")
    connection = get_db_connection()
    try:
        cur = connection.cursor()
        with cur:
            cur.execute("""
                SELECT
                    date_trunc('day', datetime) as datetime,
                    AVG(temperature)::float as temperature,
                    MIN(temperature)::float as min_temperature,
                    MAX(temperature)::float as max_temperature,
                    ROUND(AVG(humidity))::int as humidity,
                    MIN(humidity)::int as min_humidity,
                    MAX(humidity)::int as max_humidity,
                    AVG(pressure)::float as pressure,
                    MIN(pressure)::float as min_pressure,
                    MAX(pressure)::float as max_pressure,
                    AVG(wind_speed * 1.60934)::float as wind_speed,
                    MAX(wind_gust * 1.60934)::float as wind_gust,
                    AVG(wind_dir)::float as wind_dir,
                    SUM(rain)::float as total_rain
                FROM weather_data
                WHERE datetime BETWEEN %s AND %s
                GROUP BY date_trunc('day', datetime)
                ORDER BY datetime ASC
            """, (start, end))
            daily_rows = cur.fetchall()
    finally:
        connection.close()
    return [dict(day) for day in daily_rows]
if __name__ == "__main__":
    # Direct-run entry point for local development: serve the app with uvicorn
    # on all interfaces, port 8000.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)

View File

@@ -1,5 +1,6 @@
fastapi>=0.115.0
uvicorn[standard]>=0.32.0
psycopg[binary]>=3.2.0
python-dotenv>=1.0.0
pydantic>=2.10.0
fastapi==0.115.5
uvicorn[standard]==0.34.0
psycopg[binary]==3.2.3
psycopg_pool==3.2.4
python-dotenv==1.0.1
pydantic==2.10.3

View File

@@ -3,177 +3,305 @@
import os
import json
import logging
import secrets
from contextlib import asynccontextmanager
from datetime import datetime
from pathlib import Path
from typing import Optional
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel
import psycopg2
from psycopg2.extras import RealDictCursor
from fastapi import FastAPI, HTTPException, Request, Header, Depends, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from pydantic import BaseModel, ConfigDict, field_validator
import psycopg
from psycopg_pool import ConnectionPool
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address
import uvicorn
# Logging konfigurieren
# --------------------------------------------------------------------------- #
# Konfiguration
# --------------------------------------------------------------------------- #
env_path = Path(__file__).parent.parent / ".env"
load_dotenv(dotenv_path=env_path)
COLLECTOR_PORT = int(os.getenv("COLLECTOR_PORT", 8001))
DB_HOST = os.getenv("DB_HOST", "localhost")
DB_PORT = int(os.getenv("DB_PORT", 5432))
DB_NAME = os.getenv("DB_NAME", "wetterstation")
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
# Sicherheit
COLLECTOR_API_KEY = os.getenv("COLLECTOR_API_KEY")
ENVIRONMENT = os.getenv("ENVIRONMENT", "production").lower()
IS_DEV = ENVIRONMENT in ("dev", "development", "local")
# Limits
MAX_BODY_BYTES = int(os.getenv("COLLECTOR_MAX_BODY_BYTES", 16 * 1024)) # 16 KiB
RATE_LIMIT = os.getenv("COLLECTOR_RATE_LIMIT", "30/minute")
# Logging — keine Rohdaten auf INFO mehr
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
level=logging.DEBUG if IS_DEV else logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)
# Umgebungsvariablen laden - eine Ebene höher
env_path = Path(__file__).parent.parent / '.env'
load_dotenv(dotenv_path=env_path)
# Konfiguration
COLLECTOR_PORT = int(os.getenv('COLLECTOR_PORT', 8001))
# --------------------------------------------------------------------------- #
# Connection Pool
# --------------------------------------------------------------------------- #
DB_HOST = os.getenv('DB_HOST', 'localhost')
DB_PORT = int(os.getenv('DB_PORT', 5432))
DB_NAME = os.getenv('DB_NAME', 'wetterstation')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
# FastAPI App
app = FastAPI(title="Weather Data Collector API")
def _build_conninfo() -> str:
return (
f"host={DB_HOST} port={DB_PORT} dbname={DB_NAME} "
f"user={DB_USER} password={DB_PASSWORD}"
)
# Pydantic Models
pool: Optional[ConnectionPool] = None
# --------------------------------------------------------------------------- #
# Rate-Limiter
# --------------------------------------------------------------------------- #
def _limit_key(request: Request) -> str:
"""Rate-Limit-Key: bei API-Key danach, sonst nach IP.
Hinter Traefik nutzt slowapi standardmaessig die Peer-IP, was der
Proxy-IP entspricht. Wenn ein API-Key da ist, bevorzugen wir den.
"""
api_key = request.headers.get("x-api-key")
if api_key:
# nur Praefix einsetzen, damit der volle Key nicht in Logs landet
return f"key:{api_key[:8]}"
fwd = request.headers.get("x-forwarded-for")
if fwd:
return f"ip:{fwd.split(',')[0].strip()}"
return f"ip:{get_remote_address(request)}"
limiter = Limiter(key_func=_limit_key, default_limits=[])
# --------------------------------------------------------------------------- #
# Auth-Dependency
# --------------------------------------------------------------------------- #
async def require_api_key(x_api_key: Optional[str] = Header(default=None)) -> None:
"""Prueft den API-Key timing-safe gegen die Konfiguration."""
if not COLLECTOR_API_KEY:
# Fail-closed: wenn kein Key konfiguriert ist, ist die API gesperrt.
logger.error(
"COLLECTOR_API_KEY ist nicht gesetzt - alle Schreibzugriffe blockiert."
)
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Service not configured",
)
if not x_api_key or not secrets.compare_digest(x_api_key, COLLECTOR_API_KEY):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid or missing API key",
)
# --------------------------------------------------------------------------- #
# Pydantic Models mit Plausibilitaetspruefung
# --------------------------------------------------------------------------- #
class WeatherDataInput(BaseModel):
# Zeitstempel: ISO-String (time), datetime-String oder Unix-Timestamp
time: str | None = None
datetime: str | None = None
dateTime: int | None = None
# extra-Felder verwerfen statt akzeptieren -> kein Pollution
model_config = ConfigDict(extra="ignore")
# Außentemperatur (Celsius): tempOut, temperature oder outTemp (Fahrenheit)
tempOut: float | None = None # Celsius (neues Format)
temperature: float | None = None
outTemp: float | None = None # Fahrenheit (altes Format)
# Zeitstempel
time: Optional[str] = None
datetime: Optional[str] = None
dateTime: Optional[int] = None
# Aussentemperatur
tempOut: Optional[float] = None # Celsius (neu)
temperature: Optional[float] = None # Celsius
outTemp: Optional[float] = None # Fahrenheit (alt)
# Innentemperatur
tempIn: float | None = None # Celsius
tempIn: Optional[float] = None # Celsius
# Außenfeuchte
humOut: int | None = None
humidity: int | None = None
outHumidity: float | None = None
# Aussenfeuchte
humOut: Optional[int] = None
humidity: Optional[int] = None
outHumidity: Optional[float] = None
# Innenfeuchte
humIn: int | None = None
humIn: Optional[int] = None
# Luftdruck
pressure: float | None = None
barometer: float | None = None # inHg
barTrend: int | None = None # hPa/Stunde
pressure: Optional[float] = None # hPa
barometer: Optional[float] = None # inHg
barTrend: Optional[int] = None
# Wind
windAvg: float | None = None # m/s Durchschnitt (neues Format)
windSpeed: float | None = None
wind_speed: float | None = None
windGust: float | None = None
wind_gust: float | None = None
windDir: float | None = None
wind_dir: float | None = None
windAvg: Optional[float] = None
windSpeed: Optional[float] = None
wind_speed: Optional[float] = None
windGust: Optional[float] = None
wind_gust: Optional[float] = None
windDir: Optional[float] = None
wind_dir: Optional[float] = None
# Niederschlag
rain: float | None = None
rainRate: float | None = None
rain_rate: float | None = None
rain: Optional[float] = None
rainRate: Optional[float] = None
rain_rate: Optional[float] = None
# Vorhersage
forecast: int | None = None
forecast: Optional[int] = None
model_config = {"extra": "allow"}
# ---- Validatoren -----------------------------------------------------
@field_validator("tempOut", "temperature", "tempIn")
@classmethod
def _temp_celsius_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (-90.0 <= v <= 70.0):
raise ValueError("temperature out of plausible range (Celsius)")
return v
@field_validator("outTemp")
@classmethod
def _temp_fahrenheit_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (-130.0 <= v <= 160.0):
raise ValueError("outTemp out of plausible range (Fahrenheit)")
return v
@field_validator("humOut", "humidity", "humIn")
@classmethod
def _humidity_int_range(cls, v: Optional[int]) -> Optional[int]:
if v is not None and not (0 <= v <= 100):
raise ValueError("humidity out of range")
return v
@field_validator("outHumidity")
@classmethod
def _humidity_float_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (0.0 <= v <= 100.0):
raise ValueError("outHumidity out of range")
return v
@field_validator("pressure")
@classmethod
def _pressure_hpa_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (800.0 <= v <= 1100.0):
raise ValueError("pressure (hPa) out of plausible range")
return v
@field_validator("barometer")
@classmethod
def _pressure_inhg_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (23.0 <= v <= 32.5):
raise ValueError("barometer (inHg) out of plausible range")
return v
@field_validator("windAvg", "windSpeed", "wind_speed", "windGust", "wind_gust")
@classmethod
def _wind_speed_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (0.0 <= v <= 120.0):
raise ValueError("wind speed out of plausible range")
return v
@field_validator("windDir", "wind_dir")
@classmethod
def _wind_dir_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (0.0 <= v <= 360.0):
raise ValueError("wind_dir out of range")
return v
@field_validator("rain", "rainRate", "rain_rate")
@classmethod
def _rain_range(cls, v: Optional[float]) -> Optional[float]:
if v is not None and not (0.0 <= v <= 1000.0):
raise ValueError("rain value out of plausible range")
return v
# ---- Konvertierungen -------------------------------------------------
def get_datetime_string(self) -> str:
"""Zeitstempel als String zurückgeben"""
if self.time:
return self.time
elif self.datetime:
if self.datetime:
return self.datetime
elif self.dateTime:
from datetime import datetime as dt
return dt.fromtimestamp(self.dateTime).strftime('%Y-%m-%d %H:%M:%S')
if self.dateTime is not None:
# Plausibilitaet: 2000-01-01 .. 2100-01-01
if not (946684800 <= self.dateTime <= 4102444800):
raise ValueError("dateTime timestamp out of plausible range")
return datetime.fromtimestamp(self.dateTime).strftime("%Y-%m-%d %H:%M:%S")
raise ValueError("Kein Zeitstempel vorhanden (time, datetime oder dateTime)")
def get_temperature_celsius(self) -> float | None:
"""Außentemperatur in Celsius"""
def get_temperature_celsius(self) -> Optional[float]:
if self.tempOut is not None:
return self.tempOut
elif self.temperature is not None:
if self.temperature is not None:
return self.temperature
elif self.outTemp is not None:
if self.outTemp is not None:
return (self.outTemp - 32) * 5 / 9
return None
def get_temp_in(self) -> float | None:
"""Innentemperatur in Celsius"""
def get_temp_in(self) -> Optional[float]:
return self.tempIn
def get_humidity_int(self) -> int | None:
"""Außenfeuchte"""
def get_humidity_int(self) -> Optional[int]:
if self.humOut is not None:
return int(self.humOut)
elif self.humidity is not None:
if self.humidity is not None:
return int(self.humidity)
elif self.outHumidity is not None:
if self.outHumidity is not None:
return int(self.outHumidity)
return None
def get_humidity_in(self) -> int | None:
"""Innenfeuchte"""
def get_humidity_in(self) -> Optional[int]:
return int(self.humIn) if self.humIn is not None else None
def get_pressure_hpa(self) -> float | None:
"""Luftdruck in hPa"""
def get_pressure_hpa(self) -> Optional[float]:
if self.pressure is not None:
return self.pressure
elif self.barometer is not None:
if self.barometer is not None:
return self.barometer * 33.8639
return None
def get_wind_speed(self) -> float | None:
"""Durchschnittliche Windgeschwindigkeit"""
def get_wind_speed(self) -> Optional[float]:
if self.windAvg is not None:
return self.windAvg
elif self.windSpeed is not None:
if self.windSpeed is not None:
return self.windSpeed
return self.wind_speed
def get_wind_gust(self) -> float | None:
"""Windböe"""
def get_wind_gust(self) -> Optional[float]:
return self.windGust if self.windGust is not None else self.wind_gust
def get_wind_dir(self) -> float | None:
"""Windrichtung"""
def get_wind_dir(self) -> Optional[float]:
return self.windDir if self.windDir is not None else self.wind_dir
def get_rain_rate(self) -> float | None:
"""Regenrate"""
def get_rain_rate(self) -> Optional[float]:
return self.rainRate if self.rainRate is not None else self.rain_rate
# Datenbankverbindung
def get_db_connection():
"""Datenbankverbindung herstellen"""
try:
conn = psycopg2.connect(
host=DB_HOST,
port=DB_PORT,
database=DB_NAME,
user=DB_USER,
password=DB_PASSWORD
)
return conn
except Exception as e:
logger.error(f"Datenbankverbindungsfehler: {e}")
raise HTTPException(status_code=500, detail="Datenbankverbindung fehlgeschlagen")
# --------------------------------------------------------------------------- #
# Datenbank-Setup
# --------------------------------------------------------------------------- #
def setup_database():
"""Tabelle erstellen und fehlende Spalten ergänzen"""
try:
conn = get_db_connection()
def setup_database() -> None:
"""Tabelle, fehlende Spalten und Index anlegen (idempotent)."""
assert pool is not None
with pool.connection() as conn:
with conn.cursor() as cursor:
cursor.execute("""
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS weather_data (
id SERIAL PRIMARY KEY,
datetime TIMESTAMPTZ NOT NULL,
@@ -188,177 +316,320 @@ def setup_database():
received_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE(datetime)
)
""")
# Neue Spalten ergänzen (idempotent)
cursor.execute("ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS temp_in FLOAT")
cursor.execute("ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS humidity_in INTEGER")
cursor.execute("ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS forecast INTEGER")
cursor.execute("ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS bar_trend INTEGER")
conn.commit()
logger.info("Tabelle weather_data bereit (inkl. neuer Spalten)")
conn.close()
except Exception as e:
logger.error(f"Fehler bei Datenbanksetup: {e}")
raise
"""
)
cursor.execute(
"ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS temp_in FLOAT"
)
cursor.execute(
"ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS humidity_in INTEGER"
)
cursor.execute(
"ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS forecast INTEGER"
)
cursor.execute(
"ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS bar_trend INTEGER"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_weather_datetime_desc "
"ON weather_data (datetime DESC)"
)
conn.commit()
logger.info("Tabelle weather_data und Index bereit")
# API Endpoints
@app.on_event("startup")
async def startup_event():
"""Bei Start die Datenbank initialisieren"""
logger.info("Collector API startet...")
# --------------------------------------------------------------------------- #
# FastAPI Lifespan
# --------------------------------------------------------------------------- #
@asynccontextmanager
async def lifespan(app: FastAPI):
global pool
# Pflicht-Variablen pruefen — fail fast
missing = [v for v in ("DB_USER", "DB_PASSWORD") if not os.getenv(v)]
if missing:
raise RuntimeError(
f"Fehlende Umgebungsvariablen: {', '.join(missing)}"
)
if not COLLECTOR_API_KEY:
raise RuntimeError(
"COLLECTOR_API_KEY ist nicht gesetzt. "
"Mindestens 32 Zeichen empfohlen (z.B. via 'openssl rand -hex 32')."
)
if len(COLLECTOR_API_KEY) < 16:
raise RuntimeError(
"COLLECTOR_API_KEY ist zu kurz (Minimum 16 Zeichen)."
)
pool = ConnectionPool(
conninfo=_build_conninfo(),
min_size=1,
max_size=5,
timeout=10,
kwargs={"autocommit": False},
)
pool.wait()
logger.info("Connection Pool initialisiert (min=1, max=5)")
setup_database()
logger.info(f"API läuft auf Port {COLLECTOR_PORT}")
logger.info("Collector laeuft auf Port %d (env=%s)", COLLECTOR_PORT, ENVIRONMENT)
try:
yield
finally:
if pool is not None:
pool.close()
logger.info("Connection Pool geschlossen")
# --------------------------------------------------------------------------- #
# FastAPI App
# --------------------------------------------------------------------------- #
app = FastAPI(
title="Weather Data Collector API",
docs_url="/docs" if IS_DEV else None,
redoc_url=None,
openapi_url="/openapi.json" if IS_DEV else None,
lifespan=lifespan,
)
# Rate-Limiter an die App binden
app.state.limiter = limiter
@app.exception_handler(RateLimitExceeded)
async def _rate_limit_handler(request: Request, exc: RateLimitExceeded):
return JSONResponse(
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
content={"detail": "Too many requests"},
)
@app.exception_handler(RequestValidationError)
async def _validation_handler(request: Request, exc: RequestValidationError):
# Details ins Log, generische Antwort an den Client.
logger.warning("Validation error on %s: %s", request.url.path, exc.errors())
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content={"detail": "Validation error"},
)
@app.exception_handler(Exception)
async def _unhandled_handler(request: Request, exc: Exception):
# NIE Stacktraces oder str(exc) an den Client zurueckgeben.
logger.exception("Unhandled error on %s", request.url.path)
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content={"detail": "Internal server error"},
)
# --------------------------------------------------------------------------- #
# Body-Size-Middleware
# --------------------------------------------------------------------------- #
@app.middleware("http")
async def _limit_body_size(request: Request, call_next):
if request.method in ("POST", "PUT", "PATCH"):
cl = request.headers.get("content-length")
if cl is not None:
try:
if int(cl) > MAX_BODY_BYTES:
return JSONResponse(
status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
content={"detail": "Payload too large"},
)
except ValueError:
return JSONResponse(
status_code=status.HTTP_400_BAD_REQUEST,
content={"detail": "Invalid Content-Length"},
)
return await call_next(request)
# --------------------------------------------------------------------------- #
# Endpoints
# --------------------------------------------------------------------------- #
@app.get("/")
async def root():
"""Root Endpoint - GET zeigt Info"""
"""Info-Endpunkt (kein Auth noetig)."""
return {
"message": "Weather Data Collector API",
"version": "1.0.0",
"endpoint": "POST /weather or POST /"
"service": "Weather Data Collector",
"version": "2.0.0",
"endpoint": "POST /weather (X-API-Key required)",
}
@app.post("/")
async def root_post(request: Request):
"""Root Endpoint - POST akzeptiert Wetterdaten (Alias für /weather)"""
try:
# Rohen Body lesen
body = await request.body()
body_str = body.decode('utf-8')
logger.info(f"POST auf Root - Raw Body: {body_str}")
# Als JSON parsen
data_dict = json.loads(body_str)
logger.info(f"POST auf Root - Parsed JSON: {data_dict}")
# Zu Pydantic Model konvertieren
data = WeatherDataInput(**data_dict)
return await receive_weather_data(data)
except json.JSONDecodeError as e:
logger.error(f"JSON Parse Error: {e}")
raise HTTPException(status_code=400, detail=f"Invalid JSON: {str(e)}")
except Exception as e:
logger.error(f"Fehler bei Root POST: {e}")
raise HTTPException(status_code=422, detail=f"Validation error: {str(e)}")
@app.post("/debug")
async def debug_post(request: dict):
"""Debug Endpoint - akzeptiert beliebige JSON und loggt sie"""
logger.info(f"Debug: Empfangene Rohdaten: {request}")
return {"status": "logged", "data": request}
@app.get("/health")
async def health_check():
"""Health Check"""
"""Health-Check ohne Auth, aber ohne sensitive Details."""
try:
conn = get_db_connection()
with conn.cursor() as cursor:
cursor.execute("SELECT 1")
conn.close()
return {"status": "healthy", "database": "connected"}
except Exception as e:
raise HTTPException(status_code=503, detail=f"Database error: {str(e)}")
@app.post("/weather")
async def receive_weather_data(data: WeatherDataInput):
"""Wetterdaten empfangen und speichern"""
logger.info(f"Empfangene Daten: {data.model_dump()}")
try:
conn = get_db_connection()
try:
# Konvertiere zu den richtigen Werten
dt_string = data.get_datetime_string()
temp_c = data.get_temperature_celsius()
temp_in = data.get_temp_in()
humidity = data.get_humidity_int()
humidity_in = data.get_humidity_in()
pressure = data.get_pressure_hpa()
bar_trend = data.barTrend
wind_speed = data.get_wind_speed()
wind_gust = data.get_wind_gust()
wind_dir = data.get_wind_dir()
rain = data.rain
rain_rate = data.get_rain_rate()
forecast = data.forecast
logger.info(
f"Konvertierte Daten - datetime: {dt_string}, "
f"tempOut: {temp_c}°C, tempIn: {temp_in}°C, "
f"humOut: {humidity}%, humIn: {humidity_in}%, "
f"pressure: {pressure} hPa, barTrend: {bar_trend}"
)
assert pool is not None
with pool.connection() as conn:
with conn.cursor() as cursor:
cursor.execute("""
INSERT INTO weather_data
cursor.execute("SELECT 1")
return {"status": "healthy"}
except Exception:
logger.exception("Health-Check fehlgeschlagen")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Service unavailable",
)
def _store_weather(data: WeatherDataInput) -> dict:
"""Schreibt einen Datenpunkt; setzt voraus, dass `data` validiert ist."""
assert pool is not None
dt_string = data.get_datetime_string()
values = (
dt_string,
data.get_temperature_celsius(),
data.get_temp_in(),
data.get_humidity_int(),
data.get_humidity_in(),
data.get_pressure_hpa(),
data.barTrend,
data.get_wind_speed(),
data.get_wind_gust(),
data.get_wind_dir(),
data.rain,
data.get_rain_rate(),
data.forecast,
)
with pool.connection() as conn:
with conn.cursor() as cursor:
cursor.execute(
"""
INSERT INTO weather_data
(datetime, temperature, temp_in, humidity, humidity_in,
pressure, bar_trend, wind_speed, wind_gust, wind_dir,
rain, rain_rate, forecast)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (datetime) DO UPDATE SET
temperature = EXCLUDED.temperature,
temp_in = EXCLUDED.temp_in,
humidity = EXCLUDED.humidity,
humidity_in = EXCLUDED.humidity_in,
pressure = EXCLUDED.pressure,
bar_trend = EXCLUDED.bar_trend,
wind_speed = EXCLUDED.wind_speed,
wind_gust = EXCLUDED.wind_gust,
wind_dir = EXCLUDED.wind_dir,
rain = EXCLUDED.rain,
rain_rate = EXCLUDED.rain_rate,
forecast = EXCLUDED.forecast
""", (
dt_string,
temp_c,
temp_in,
humidity,
humidity_in,
pressure,
bar_trend,
wind_speed,
wind_gust,
wind_dir,
rain,
rain_rate,
forecast
))
conn.commit()
logger.info(f"Daten gespeichert für {dt_string} (UTC)")
return {
"status": "success",
"message": f"Weather data for {dt_string} saved successfully"
}
finally:
conn.close()
except Exception as e:
logger.error(f"Fehler beim Speichern: {e}")
raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (datetime) DO UPDATE SET
temperature = EXCLUDED.temperature,
temp_in = EXCLUDED.temp_in,
humidity = EXCLUDED.humidity,
humidity_in = EXCLUDED.humidity_in,
pressure = EXCLUDED.pressure,
bar_trend = EXCLUDED.bar_trend,
wind_speed = EXCLUDED.wind_speed,
wind_gust = EXCLUDED.wind_gust,
wind_dir = EXCLUDED.wind_dir,
rain = EXCLUDED.rain,
rain_rate = EXCLUDED.rain_rate,
forecast = EXCLUDED.forecast
""",
values,
)
conn.commit()
logger.info("Datenpunkt gespeichert fuer %s", dt_string)
return {"status": "success", "datetime": dt_string}
def main():
"""Hauptfunktion"""
# Prüfen ob alle nötigen Umgebungsvariablen gesetzt sind
required_vars = ['DB_USER', 'DB_PASSWORD']
missing_vars = [var for var in required_vars if not os.getenv(var)]
if missing_vars:
logger.error(f"Fehlende Umgebungsvariablen: {', '.join(missing_vars)}")
logger.error("Bitte .env Datei mit den erforderlichen Werten erstellen")
return
uvicorn.run(app, host="0.0.0.0", port=COLLECTOR_PORT)
@app.post("/weather", dependencies=[Depends(require_api_key)])
@limiter.limit(RATE_LIMIT)
async def receive_weather_data(request: Request, data: WeatherDataInput):
    """Receive and store one weather datapoint (API key + rate limit enforced).

    Client-side conversion problems map to 400, database failures to 503;
    neither response echoes internal details back to the caller.
    """
    try:
        result = _store_weather(data)
    except ValueError as e:
        # Conversion problems (e.g. missing timestamp) are the client's fault.
        logger.warning("Bad request on /weather: %s", e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid input",
        )
    except psycopg.Error:
        logger.exception("DB-Fehler beim Speichern")
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Storage unavailable",
        )
    return result
@app.post("/", dependencies=[Depends(require_api_key)])
@limiter.limit(RATE_LIMIT)
async def root_post(request: Request):
    """Alias for POST /weather (API key + rate limit enforced).

    Parses the raw body by hand so size and JSON errors can be rejected
    with precise status codes before Pydantic validation runs.
    """
    raw = await request.body()
    if len(raw) > MAX_BODY_BYTES:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail="Payload too large",
        )
    try:
        parsed = json.loads(raw.decode("utf-8"))
    except (UnicodeDecodeError, json.JSONDecodeError):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid JSON",
        )
    try:
        data = WeatherDataInput(**parsed)
    except Exception:
        # Pydantic errors may contain values from the body — never forward them.
        logger.warning("Validation failed on POST /")
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Validation error",
        )
    try:
        return _store_weather(data)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid input",
        )
    except psycopg.Error:
        logger.exception("DB-Fehler beim Speichern")
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Storage unavailable",
        )
# Debug endpoint: only registered in DEV mode, and even then it requires the API key.
if IS_DEV:
    @app.post("/debug", dependencies=[Depends(require_api_key)])
    async def debug_post(request: Request):
        """Log an arbitrary JSON payload and acknowledge it (DEV only)."""
        body = await request.body()
        # Same body-size cap as the other POST endpoints.
        if len(body) > MAX_BODY_BYTES:
            raise HTTPException(
                status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                detail="Payload too large",
            )
        try:
            payload = json.loads(body.decode("utf-8"))
        except Exception:
            raise HTTPException(status_code=400, detail="Invalid JSON")
        # Logged at DEBUG level so the payload stays out of production logs.
        logger.debug("Debug payload: %s", payload)
        return {"status": "logged"}
# --------------------------------------------------------------------------- #
# Entry-Point
# --------------------------------------------------------------------------- #
def main() -> None:
    """Start the collector with uvicorn.

    In production Traefik sits in front and terminates TLS. X-Forwarded-*
    headers are only trustworthy when they come from that proxy, so the
    trusted range is configurable via FORWARDED_ALLOW_IPS. The default "*"
    preserves the previous behaviour but should be narrowed to the proxy's
    address when the port is reachable from anywhere else.
    """
    uvicorn.run(
        app,
        host="0.0.0.0",
        port=COLLECTOR_PORT,
        proxy_headers=True,
        # "*" lets ANY direct client spoof X-Forwarded-* (and thereby the
        # rate-limit key) — acceptable only behind a closed Docker network.
        forwarded_allow_ips=os.getenv("FORWARDED_ALLOW_IPS", "*"),
    )
if __name__ == "__main__":
main()
main()

364
collector/main.py_old Normal file
View File

@@ -0,0 +1,364 @@
# HTTP API that receives weather data via POST and stores in PostgreSQL
import os
import json
import logging
from datetime import datetime
from pathlib import Path
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel
import psycopg2
from psycopg2.extras import RealDictCursor
import uvicorn
# Logging konfigurieren
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# Umgebungsvariablen laden - eine Ebene höher
env_path = Path(__file__).parent.parent / '.env'
load_dotenv(dotenv_path=env_path)
# Konfiguration
COLLECTOR_PORT = int(os.getenv('COLLECTOR_PORT', 8001))
DB_HOST = os.getenv('DB_HOST', 'localhost')
DB_PORT = int(os.getenv('DB_PORT', 5432))
DB_NAME = os.getenv('DB_NAME', 'wetterstation')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
# FastAPI App
app = FastAPI(title="Weather Data Collector API")
# Pydantic Models
class WeatherDataInput(BaseModel):
    """Incoming weather payload.

    Accepts several historical field spellings (new Celsius format, old
    WeeWX/Fahrenheit format, snake_case variants) and normalises them via
    the ``get_*`` helper methods. Unknown extra fields are allowed and
    ignored.
    """

    # Timestamp: ISO string (time), datetime string, or Unix timestamp
    time: str | None = None
    datetime: str | None = None
    dateTime: int | None = None
    # Outdoor temperature: tempOut/temperature in Celsius, outTemp in Fahrenheit
    tempOut: float | None = None  # Celsius (new format)
    temperature: float | None = None
    outTemp: float | None = None  # Fahrenheit (old format)
    # Indoor temperature (Celsius)
    tempIn: float | None = None
    # Outdoor humidity
    humOut: int | None = None
    humidity: int | None = None
    outHumidity: float | None = None
    # Indoor humidity
    humIn: int | None = None
    # Air pressure
    pressure: float | None = None
    barometer: float | None = None  # inHg
    barTrend: int | None = None  # hPa/hour
    # Wind
    windAvg: float | None = None  # m/s average (new format)
    windSpeed: float | None = None
    wind_speed: float | None = None
    windGust: float | None = None
    wind_gust: float | None = None
    windDir: float | None = None
    wind_dir: float | None = None
    # Precipitation
    rain: float | None = None
    rainRate: float | None = None
    rain_rate: float | None = None
    # Forecast code
    forecast: int | None = None

    model_config = {"extra": "allow"}

    def get_datetime_string(self) -> str:
        """Return the timestamp as a string.

        Raises:
            ValueError: if none of time/datetime/dateTime is present.
        """
        if self.time:
            return self.time
        if self.datetime:
            return self.datetime
        # BUGFIX: the original used truthiness here, so a Unix timestamp of 0
        # (the epoch) was treated as "missing". Compare against None instead.
        if self.dateTime is not None:
            from datetime import datetime as dt
            # NOTE(review): fromtimestamp() converts using the host's LOCAL
            # timezone — confirm the collector host TZ matches what the
            # datetime column is expected to store.
            return dt.fromtimestamp(self.dateTime).strftime('%Y-%m-%d %H:%M:%S')
        raise ValueError("Kein Zeitstempel vorhanden (time, datetime oder dateTime)")

    def get_temperature_celsius(self) -> float | None:
        """Outdoor temperature in Celsius (outTemp is converted from Fahrenheit)."""
        if self.tempOut is not None:
            return self.tempOut
        if self.temperature is not None:
            return self.temperature
        if self.outTemp is not None:
            return (self.outTemp - 32) * 5 / 9
        return None

    def get_temp_in(self) -> float | None:
        """Indoor temperature in Celsius."""
        return self.tempIn

    def get_humidity_int(self) -> int | None:
        """Outdoor humidity as an integer percentage."""
        if self.humOut is not None:
            return int(self.humOut)
        if self.humidity is not None:
            return int(self.humidity)
        if self.outHumidity is not None:
            return int(self.outHumidity)
        return None

    def get_humidity_in(self) -> int | None:
        """Indoor humidity as an integer percentage."""
        return int(self.humIn) if self.humIn is not None else None

    def get_pressure_hpa(self) -> float | None:
        """Air pressure in hPa (barometer is converted from inHg)."""
        if self.pressure is not None:
            return self.pressure
        if self.barometer is not None:
            return self.barometer * 33.8639
        return None

    def get_wind_speed(self) -> float | None:
        """Average wind speed, preferring the newest field spelling."""
        if self.windAvg is not None:
            return self.windAvg
        if self.windSpeed is not None:
            return self.windSpeed
        return self.wind_speed

    def get_wind_gust(self) -> float | None:
        """Wind gust speed."""
        return self.windGust if self.windGust is not None else self.wind_gust

    def get_wind_dir(self) -> float | None:
        """Wind direction in degrees."""
        return self.windDir if self.windDir is not None else self.wind_dir

    def get_rain_rate(self) -> float | None:
        """Rain rate."""
        return self.rainRate if self.rainRate is not None else self.rain_rate
# Datenbankverbindung
def get_db_connection():
    """Open and return a new PostgreSQL connection.

    Raises:
        HTTPException: 500 when the connection cannot be established;
            the underlying cause is logged, not returned to the client.
    """
    try:
        return psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD,
        )
    except Exception as e:
        logger.error(f"Datenbankverbindungsfehler: {e}")
        raise HTTPException(status_code=500, detail="Datenbankverbindung fehlgeschlagen")
def setup_database():
    """Create the weather_data table and add newer columns (idempotent).

    Safe to run on every startup: CREATE TABLE IF NOT EXISTS plus
    ADD COLUMN IF NOT EXISTS for columns introduced after the first release.
    Re-raises on failure so startup aborts rather than running without schema.
    """
    try:
        conn = get_db_connection()
        with conn.cursor() as cursor:
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS weather_data (
                    id SERIAL PRIMARY KEY,
                    datetime TIMESTAMPTZ NOT NULL,
                    temperature FLOAT,
                    humidity INTEGER,
                    pressure FLOAT,
                    wind_speed FLOAT,
                    wind_gust FLOAT,
                    wind_dir FLOAT,
                    rain FLOAT,
                    rain_rate FLOAT,
                    received_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(datetime)
                )
            """)
            # Columns added after the initial schema; each statement is a no-op
            # when the column already exists.
            for ddl in (
                "ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS temp_in FLOAT",
                "ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS humidity_in INTEGER",
                "ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS forecast INTEGER",
                "ALTER TABLE weather_data ADD COLUMN IF NOT EXISTS bar_trend INTEGER",
            ):
                cursor.execute(ddl)
            conn.commit()
            logger.info("Tabelle weather_data bereit (inkl. neuer Spalten)")
        conn.close()
    except Exception as e:
        logger.error(f"Fehler bei Datenbanksetup: {e}")
        raise
# API Endpoints
@app.on_event("startup")
async def startup_event():
    """Initialise the database schema when the API process starts.

    Runs setup_database() once; that function re-raises on failure, so a
    broken database aborts application startup instead of serving errors.
    """
    logger.info("Collector API startet...")
    setup_database()
    logger.info(f"API läuft auf Port {COLLECTOR_PORT}")
@app.get("/")
async def root():
    """Service info: name, version, and where to POST data."""
    info = {
        "message": "Weather Data Collector API",
        "version": "1.0.0",
        "endpoint": "POST /weather or POST /",
    }
    return info
@app.post("/")
async def root_post(request: Request):
    """POST alias for /weather: parse the raw JSON body and delegate.

    JSON parse failures become 400; anything else (including Pydantic
    validation errors) becomes 422.
    """
    try:
        raw = await request.body()
        body_str = raw.decode('utf-8')
        logger.info(f"POST auf Root - Raw Body: {body_str}")
        data_dict = json.loads(body_str)
        logger.info(f"POST auf Root - Parsed JSON: {data_dict}")
        payload = WeatherDataInput(**data_dict)
        return await receive_weather_data(payload)
    except json.JSONDecodeError as e:
        logger.error(f"JSON Parse Error: {e}")
        raise HTTPException(status_code=400, detail=f"Invalid JSON: {str(e)}")
    except Exception as e:
        logger.error(f"Fehler bei Root POST: {e}")
        raise HTTPException(status_code=422, detail=f"Validation error: {str(e)}")
@app.post("/debug")
async def debug_post(request: dict):
    """Debug endpoint: accept arbitrary JSON, log it, and echo it back."""
    logger.info(f"Debug: Empfangene Rohdaten: {request}")
    response = {"status": "logged", "data": request}
    return response
@app.get("/health")
async def health_check():
    """Health check: verify a database round-trip.

    Returns 200 when `SELECT 1` succeeds, 503 otherwise. The failure
    detail is kept generic — the original echoed str(e) to clients,
    leaking connection-string/internal information. The connection is now
    also closed on the failure path (the original leaked it when
    cursor.execute raised).
    """
    conn = None
    try:
        conn = get_db_connection()
        with conn.cursor() as cursor:
            cursor.execute("SELECT 1")
        return {"status": "healthy", "database": "connected"}
    except Exception:
        # Full cause goes to the server log only.
        logger.exception("Health-Check fehlgeschlagen")
        raise HTTPException(status_code=503, detail="Database error")
    finally:
        if conn is not None:
            conn.close()
@app.post("/weather")
async def receive_weather_data(data: WeatherDataInput):
    """Receive one weather datapoint and upsert it into weather_data.

    Fixes over the original: a missing/unusable timestamp (ValueError from
    get_datetime_string) now maps to HTTP 400 instead of 500, and database
    error details are logged server-side instead of being echoed to the
    client in the 500 response.
    """
    logger.info(f"Empfangene Daten: {data.model_dump()}")
    try:
        # The only conversion step that can fail on bad client input.
        dt_string = data.get_datetime_string()
    except ValueError as e:
        logger.warning(f"Ungültige Eingabe: {e}")
        raise HTTPException(status_code=400, detail="Invalid input")
    # Normalise the many accepted field spellings into canonical values.
    temp_c = data.get_temperature_celsius()
    temp_in = data.get_temp_in()
    humidity = data.get_humidity_int()
    humidity_in = data.get_humidity_in()
    pressure = data.get_pressure_hpa()
    bar_trend = data.barTrend
    wind_speed = data.get_wind_speed()
    wind_gust = data.get_wind_gust()
    wind_dir = data.get_wind_dir()
    rain = data.rain
    rain_rate = data.get_rain_rate()
    forecast = data.forecast
    logger.info(
        f"Konvertierte Daten - datetime: {dt_string}, "
        f"tempOut: {temp_c}°C, tempIn: {temp_in}°C, "
        f"humOut: {humidity}%, humIn: {humidity_in}%, "
        f"pressure: {pressure} hPa, barTrend: {bar_trend}"
    )
    conn = get_db_connection()
    try:
        with conn.cursor() as cursor:
            # Upsert keyed on datetime: a resent datapoint overwrites the old row.
            cursor.execute("""
                INSERT INTO weather_data
                (datetime, temperature, temp_in, humidity, humidity_in,
                 pressure, bar_trend, wind_speed, wind_gust, wind_dir,
                 rain, rain_rate, forecast)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (datetime) DO UPDATE SET
                    temperature = EXCLUDED.temperature,
                    temp_in = EXCLUDED.temp_in,
                    humidity = EXCLUDED.humidity,
                    humidity_in = EXCLUDED.humidity_in,
                    pressure = EXCLUDED.pressure,
                    bar_trend = EXCLUDED.bar_trend,
                    wind_speed = EXCLUDED.wind_speed,
                    wind_gust = EXCLUDED.wind_gust,
                    wind_dir = EXCLUDED.wind_dir,
                    rain = EXCLUDED.rain,
                    rain_rate = EXCLUDED.rain_rate,
                    forecast = EXCLUDED.forecast
            """, (
                dt_string, temp_c, temp_in, humidity, humidity_in,
                pressure, bar_trend, wind_speed, wind_gust, wind_dir,
                rain, rain_rate, forecast,
            ))
        conn.commit()
        logger.info(f"Daten gespeichert für {dt_string} (UTC)")
        return {
            "status": "success",
            "message": f"Weather data for {dt_string} saved successfully"
        }
    except Exception as e:
        # Full cause goes to the server log; clients get a generic message.
        logger.error(f"Fehler beim Speichern: {e}")
        raise HTTPException(status_code=500, detail="Database error")
    finally:
        conn.close()
def main():
    """Entry point: refuse to start without DB credentials, then run uvicorn."""
    missing_vars = [var for var in ('DB_USER', 'DB_PASSWORD') if not os.getenv(var)]
    if not missing_vars:
        uvicorn.run(app, host="0.0.0.0", port=COLLECTOR_PORT)
        return
    logger.error(f"Fehlende Umgebungsvariablen: {', '.join(missing_vars)}")
    logger.error("Bitte .env Datei mit den erforderlichen Werten erstellen")
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,6 @@
fastapi==0.115.5
uvicorn==0.34.0
psycopg2-binary==2.9.10
python-dotenv==1.0.0
uvicorn[standard]==0.34.0
psycopg[binary]==3.2.3
psycopg_pool==3.2.4
python-dotenv==1.0.1
slowapi==0.1.9

View File

@@ -1,41 +1,130 @@
# Nginx-Konfiguration fuer das Frontend (Container).
# TLS wird von Traefik vorne dran terminiert; dieser Server lauscht nur auf HTTP intern.
# Nginx-Version aus Headern und Fehlerseiten raus
server_tokens off;
server {
listen 80;
server_name localhost;
root /usr/share/nginx/html;
index index.html;
# Docker DNS resolver für dynamische Service-Auflösung
# Body-Limit (Frontend braucht keine grossen POSTs)
client_max_body_size 1m;
# Docker DNS resolver fuer dynamische Service-Aufloesung
resolver 127.0.0.11 valid=30s;
resolver_timeout 5s;
# Gzip compression
# Gzip
gzip on;
gzip_vary on;
gzip_min_length 1024;
gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json;
gzip_types
text/plain
text/css
text/xml
text/javascript
application/x-javascript
application/xml+rss
application/json;
# API proxy (wird im Docker-Compose-Netzwerk aufgelöst)
# ----------------------------------------------------------------- #
# Security-Header — gelten fuer alle Antworten dieses Servers.
# 'always' sorgt dafuer, dass sie auch bei 4xx/5xx ausgeliefert werden.
# ----------------------------------------------------------------- #
# HSTS: ein Jahr, inkl. Subdomains. Wenn die Domain noch nicht zu 100%
# auf HTTPS laeuft, kann der Wert auf "max-age=300" reduziert werden,
# bis sicher ist, dass nichts mehr ueber HTTP geht. preload weglassen,
# solange die Domain nicht in der Preload-Liste eingetragen werden soll.
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
# MIME-Sniffing aus
add_header X-Content-Type-Options "nosniff" always;
# Clickjacking-Schutz: keine Einbettung als iframe
# Referrer nur an gleiche Origin senden
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
# Browser-APIs deaktivieren, die das Frontend nicht benoetigt
add_header Permissions-Policy "camera=(), microphone=(), geolocation=(), payment=(), usb=(), magnetometer=(), gyroscope=(), accelerometer=()" always;
# Content Security Policy — strict, keine externen Quellen
# 'unsafe-inline' fuer style-src ist noetig, weil Highcharts inline-styles
# fuer dynamische Diagramme setzt. script-src bleibt strikt.
# TODO: http://test.sternwarte-welzheim.de entfernen, sobald der Test-Server
# auf HTTPS umgestellt ist. Drei Stellen: server-Block + zwei locations.
add_header Content-Security-Policy "default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; connect-src 'self'; frame-ancestors 'self' https://sternwarte-welzheim.de https://www.sternwarte-welzheim.de https://test.sternwarte-welzheim.de http://test.sternwarte-welzheim.de; base-uri 'self'; form-action 'self'; object-src 'none'" always;
# ----------------------------------------------------------------- #
# API-Proxy
# ----------------------------------------------------------------- #
location /api/ {
set $upstream_api api:8000;
proxy_pass http://$upstream_api/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
# Standard-Header
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Timeouts: lieber sichtbar fehlschlagen als ewig haengen
proxy_connect_timeout 5s;
proxy_send_timeout 15s;
proxy_read_timeout 15s;
# Wenn das Upstream tot ist, sofort 502 statt Retry-Loops
proxy_next_upstream off;
# WebSockets/Upgrade-Pfad behalten, falls spaeter noch gebraucht
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
}
# Frontend routes
location / {
try_files $uri $uri/ /index.html;
}
# Cache static assets
# ----------------------------------------------------------------- #
# Statische Assets — lange Cache-Zeit, da mit Hash im Dateinamen
# ----------------------------------------------------------------- #
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
expires 1y;
add_header Cache-Control "public, immutable";
add_header Cache-Control "public, immutable" always;
# nginx-Quirk: sobald ein add_header in einem location-Block steht,
# werden ALLE add_header der server-Ebene ignoriert. Daher hier
# alle Security-Header noch einmal explizit.
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
add_header X-Content-Type-Options "nosniff" always;
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
add_header Permissions-Policy "camera=(), microphone=(), geolocation=(), payment=(), usb=(), magnetometer=(), gyroscope=(), accelerometer=()" always;
add_header Content-Security-Policy "default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; connect-src 'self'; frame-ancestors 'self' https://sternwarte-welzheim.de https://www.sternwarte-welzheim.de https://test.sternwarte-welzheim.de http://test.sternwarte-welzheim.de; base-uri 'self'; form-action 'self'; object-src 'none'" always;
}
}
# ----------------------------------------------------------------- #
# Frontend-Routing (SPA)
# ----------------------------------------------------------------- #
location / {
try_files $uri $uri/ /index.html;
# index.html selbst nicht aggressiv cachen, sonst sehen Nutzer
# nach einem Deploy alte Asset-Hashes
add_header Cache-Control "no-cache" always;
# Security-Header hier nochmal explizit (nginx-Quirk, s.o.)
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
add_header X-Content-Type-Options "nosniff" always;
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
add_header Permissions-Policy "camera=(), microphone=(), geolocation=(), payment=(), usb=(), magnetometer=(), gyroscope=(), accelerometer=()" always;
add_header Content-Security-Policy "default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; connect-src 'self'; frame-ancestors 'self' https://sternwarte-welzheim.de https://www.sternwarte-welzheim.de https://test.sternwarte-welzheim.de http://test.sternwarte-welzheim.de; base-uri 'self'; form-action 'self'; object-src 'none'" always;
}
# Versteckte/Punktdateien blocken (z.B. .env, .git versehentlich im Build)
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
}

View File

@@ -1,7 +1,7 @@
{
"name": "wetterstation-frontend",
"private": true,
"version": "1.3.1",
"version": "1.4.0",
"type": "module",
"scripts": {
"dev": "vite",

View File

@@ -1,23 +1,91 @@
import { useState, useEffect } from 'react'
import { useState, useEffect, useRef } from 'react'
import WeatherDashboard from './components/WeatherDashboard'
import './App.css'
// API-Basis-URL: in Dev direkt auf Backend, in Prod ueber Nginx-Proxy
const API_BASE = import.meta.env.DEV ? 'http://localhost:8000' : '/api'
// 24-Stunden-URL fuer "Aktuell"-Anzeige (auch bei laengeren Zeitraeumen gebraucht)
const CURRENT_URL = `${API_BASE}/weather/history?hours=24&limit=5000`
// JSON-Fetch-Helfer: liefert {ok, data} oder wirft bei Netzfehler.
// Per signal kann der Request abgebrochen werden, wenn timeRange wechselt.
async function fetchJson(url, signal) {
const res = await fetch(url, { signal })
if (!res.ok) throw new Error(`HTTP ${res.status} bei ${url}`)
return res.json()
}
// Bestimmt die URLs fuer den gewaehlten Zeitbereich.
// Returns: { weatherUrl, rainUrl, needsCurrent }
function buildUrls(timeRange) {
// Custom-Range
if (typeof timeRange === 'object' && timeRange.type === 'custom') {
const start = encodeURIComponent(timeRange.start)
const end = encodeURIComponent(timeRange.end)
const days = timeRange.days || 1
const path = days >= 7 ? 'daily-aggregated-range' : 'hourly-aggregated-range'
return {
weatherUrl: `${API_BASE}/weather/${path}?start=${start}&end=${end}`,
rainUrl: null, // TODO: Regen-Aggregation fuer Range implementieren
needsCurrent: true,
}
}
switch (timeRange) {
case '24h':
return {
weatherUrl: `${API_BASE}/weather/history?hours=24&limit=5000`,
rainUrl: null,
needsCurrent: false, // Hauptdaten SIND die aktuellen 24h-Daten
}
case '7d':
return {
weatherUrl: `${API_BASE}/weather/daily-with-minmax?days=7`,
rainUrl: `${API_BASE}/weather/rain-daily?days=7`,
needsCurrent: true,
}
case '30d':
return {
weatherUrl: `${API_BASE}/weather/daily-with-minmax?days=30`,
rainUrl: `${API_BASE}/weather/rain-daily?days=30`,
needsCurrent: true,
}
case '365d':
return {
weatherUrl: `${API_BASE}/weather/daily-aggregated?days=365`,
rainUrl: `${API_BASE}/weather/rain-weekly?days=365`,
needsCurrent: true,
}
default:
return {
weatherUrl: `${API_BASE}/weather/history?hours=24`,
rainUrl: null,
needsCurrent: false,
}
}
}
function App() {
const [weatherData, setWeatherData] = useState([])
const [currentWeatherData, setCurrentWeatherData] = useState([]) // Immer die aktuellen 24h-Werte
const [currentWeatherData, setCurrentWeatherData] = useState([])
const [rainData, setRainData] = useState([])
const [loading, setLoading] = useState(true)
const [error, setError] = useState(null)
const [lastUpdate, setLastUpdate] = useState(null)
const [timeRange, setTimeRange] = useState('24h') // '24h', '7d', '30d', '365d', oder {type: 'custom', start, end, days}
const [timeRange, setTimeRange] = useState('24h')
const [showTable, setShowTable] = useState(false)
// Handler für Zeitbereich-Änderungen
// Erster-Lade-Flag: nur beim allerersten Fetch zeigen wir den Spinner.
// Bei spaeteren Re-Fetches (Auto-Refresh, Time-Range-Wechsel) bleiben die
// alten Daten sichtbar, bis die neuen da sind — flackert weniger.
const isInitialLoadRef = useRef(true)
const handleTimeRangeChange = (range, customParams) => {
if (range === 'custom' && customParams) {
const start = new Date(customParams.start)
const end = new Date(customParams.end)
const days = Math.ceil((end - start) / (1000 * 60 * 60 * 24))
const end = new Date(customParams.end)
const days = Math.ceil((end - start) / (1000 * 60 * 60 * 24))
setTimeRange({ type: 'custom', start: customParams.start, end: customParams.end, days })
} else {
setTimeRange(range)
@@ -25,109 +93,72 @@ function App() {
}
useEffect(() => {
const fetchData = async () => {
try {
setLoading(true)
// Prüfe ob eingebettete Daten vorhanden sind (statischer Build)
if (window.__WEATHER_DATA__ && timeRange === '24h') {
setWeatherData(window.__WEATHER_DATA__)
setCurrentWeatherData(window.__WEATHER_DATA__)
setRainData([])
setLastUpdate(new Date())
setLoading(false)
return
}
// API-URLs basierend auf Zeitraum
let weatherUrl, rainUrl
const baseUrl = import.meta.env.DEV ? 'http://localhost:8000' : '/api'
// Benutzerdefinierter Zeitbereich
if (typeof timeRange === 'object' && timeRange.type === 'custom') {
const start = encodeURIComponent(timeRange.start)
const end = encodeURIComponent(timeRange.end)
const days = timeRange.days || 1
if (days >= 7) {
// >= 7 Tage: Tagesaggregation mit Min/Max verwenden
weatherUrl = `${baseUrl}/weather/daily-aggregated-range?start=${start}&end=${end}`
rainUrl = null // TODO: Regen-Aggregation für Range implementieren
} else {
// < 7 Tage: Stundenaggregation verwenden
weatherUrl = `${baseUrl}/weather/hourly-aggregated-range?start=${start}&end=${end}`
rainUrl = null
}
} else {
// Vordefinierte Zeitbereiche
switch (timeRange) {
case '24h':
weatherUrl = `${baseUrl}/weather/history?hours=24&limit=5000`
rainUrl = null
break
case '7d':
weatherUrl = `${baseUrl}/weather/daily-with-minmax?days=7`
rainUrl = `${baseUrl}/weather/rain-daily?days=7`
break
case '30d':
weatherUrl = `${baseUrl}/weather/daily-with-minmax?days=30`
rainUrl = `${baseUrl}/weather/rain-daily?days=30`
break
case '365d':
weatherUrl = `${baseUrl}/weather/daily-aggregated?days=365`
rainUrl = `${baseUrl}/weather/rain-weekly?days=365`
break
default:
weatherUrl = `${baseUrl}/weather/history?hours=24`
rainUrl = null
}
}
// Wetterdaten laden
const weatherResponse = await fetch(weatherUrl)
if (!weatherResponse.ok) {
throw new Error('API-Fehler: ' + weatherResponse.status)
}
const weatherDataResult = await weatherResponse.json()
setWeatherData(weatherDataResult)
// Immer die aktuellen 24h-Daten für "Aktuell"-Anzeige laden
if (timeRange !== '24h') {
const currentUrl = `${baseUrl}/weather/history?hours=24&limit=5000`
const currentResponse = await fetch(currentUrl)
if (currentResponse.ok) {
const currentDataResult = await currentResponse.json()
setCurrentWeatherData(currentDataResult)
}
} else {
setCurrentWeatherData(weatherDataResult)
}
// Regendaten laden (falls separater Endpunkt)
if (rainUrl) {
const rainResponse = await fetch(rainUrl)
if (rainResponse.ok) {
const rainDataResult = await rainResponse.json()
setRainData(rainDataResult)
}
} else {
setRainData([])
}
setLastUpdate(new Date())
setLoading(false)
} catch (err) {
setError(err.message)
setLoading(false)
}
// Statische Daten: kein Fetch noetig
if (window.__WEATHER_DATA__ && timeRange === '24h') {
setWeatherData(window.__WEATHER_DATA__)
setCurrentWeatherData(window.__WEATHER_DATA__)
setRainData([])
setLastUpdate(new Date())
setLoading(false)
return
}
const controller = new AbortController()
const fetchData = async () => {
if (isInitialLoadRef.current) setLoading(true)
const { weatherUrl, rainUrl, needsCurrent } = buildUrls(timeRange)
// Alle drei Requests parallel starten (statt sequentiell wie vorher).
// allSettled, damit ein Fehler bei rain/current die Hauptdaten nicht blockiert.
const requests = [
fetchJson(weatherUrl, controller.signal), // [0] weather - Pflicht
needsCurrent ? fetchJson(CURRENT_URL, controller.signal) : null, // [1] current - optional
rainUrl ? fetchJson(rainUrl, controller.signal) : null, // [2] rain - optional
]
const results = await Promise.allSettled(requests.map(p => p ?? Promise.resolve(null)))
// AbortError ignorieren — passiert, wenn timeRange waehrend des Requests
// gewechselt hat. Der nachfolgende Effekt-Lauf macht den richtigen Fetch.
const aborted = results.some(
r => r.status === 'rejected' && r.reason?.name === 'AbortError'
)
if (aborted) return
// Hauptdaten-Fehler ist fatal; ohne die zeigen wir nichts an.
if (results[0].status === 'rejected') {
setError(results[0].reason?.message || 'Unbekannter Fehler')
setLoading(false)
isInitialLoadRef.current = false
return
}
const weatherResult = results[0].value
const currentResult = results[1].status === 'fulfilled' ? results[1].value : null
const rainResult = results[2].status === 'fulfilled' ? results[2].value : null
setError(null)
setWeatherData(weatherResult)
// Wenn 24h gewaehlt ist, sind weather und current dieselben Daten
setCurrentWeatherData(needsCurrent ? (currentResult ?? []) : weatherResult)
setRainData(rainResult ?? [])
setLastUpdate(new Date())
setLoading(false)
isInitialLoadRef.current = false
}
fetchData()
// Automatisches Update alle 5 Minuten (nur für 24h und ohne statische Daten)
// Auto-Refresh nur bei 24h, nur wenn keine statischen Daten
let interval = null
if (!window.__WEATHER_DATA__ && timeRange === '24h') {
const interval = setInterval(fetchData, 5 * 60 * 1000)
return () => clearInterval(interval)
interval = setInterval(fetchData, 5 * 60 * 1000)
}
return () => {
controller.abort()
if (interval) clearInterval(interval)
}
}, [timeRange])
@@ -151,19 +182,19 @@ function App() {
// Aktuelle Zeit formatieren
const now = new Date()
const dateStr = now.toLocaleDateString('de-DE', {
weekday: 'long',
year: 'numeric',
month: 'long',
day: 'numeric'
const dateStr = now.toLocaleDateString('de-DE', {
weekday: 'long',
year: 'numeric',
month: 'long',
day: 'numeric'
})
const timeStr = now.toLocaleTimeString('de-DE', {
hour: '2-digit',
minute: '2-digit'
const timeStr = now.toLocaleTimeString('de-DE', {
hour: '2-digit',
minute: '2-digit'
})
// TODO: Sonnenauf-/untergang und Mondphase berechnen
// Aktuell Platzhalter - benötigt Bibliothek wie 'suncalc'
// Aktuell Platzhalter - benoetigt Bibliothek wie 'suncalc'
const sunrise = "06:45"
const sunset = "18:30"
const moonPhase = "abnehmend 50%"
@@ -179,10 +210,10 @@ function App() {
Sonnen-Aufgang: {sunrise} - Untergang: {sunset} &nbsp;&nbsp; Mond-Phase: {moonPhase}
</div>
</header>
<main className="app-main">
<WeatherDashboard
data={weatherData}
<WeatherDashboard
data={weatherData}
currentData={currentWeatherData}
rainData={rainData}
timeRange={timeRange}

View File

@@ -1,107 +0,0 @@
import { useState, useEffect } from 'react'
import WeatherDashboard from './components/WeatherDashboard'
import './App.css'
function App() {
  // Last 24h of datapoints driving the dashboard.
  const [weatherData, setWeatherData] = useState([])
  const [loading, setLoading] = useState(true)
  const [error, setError] = useState(null)
  const [lastUpdate, setLastUpdate] = useState(null)

  // Fetch the last 24 hours from the API and update all state slices.
  const fetchWeatherData = async () => {
    try {
      const apiUrl = import.meta.env.VITE_API_URL || '/api'
      const response = await fetch(`${apiUrl}/weather/history?hours=24`)
      if (!response.ok) {
        throw new Error('Fehler beim Laden der Daten')
      }
      const data = await response.json()
      setWeatherData(data)
      setLastUpdate(new Date())
      setError(null)
    } catch (err) {
      setError(err.message)
      console.error('Fehler beim Laden der Wetterdaten:', err)
    } finally {
      setLoading(false)
    }
  }

  useEffect(() => {
    fetchWeatherData()
    // Compute the delay until the next 5-minute step plus one minute, so the
    // refresh lands shortly after the station has pushed fresh data.
    const scheduleNextRefresh = () => {
      const now = new Date()
      const minutes = now.getMinutes()
      const seconds = now.getSeconds()
      const milliseconds = now.getMilliseconds()
      // Next 5-minute boundary
      const nextFiveMinStep = Math.ceil(minutes / 5) * 5
      // Plus one minute of slack
      const targetMinute = (nextFiveMinStep + 1) % 60
      let targetTime = new Date(now)
      targetTime.setMinutes(targetMinute, 0, 0)
      // If the target already passed (minute wrap), move one hour ahead
      if (targetTime <= now) {
        targetTime.setHours(targetTime.getHours() + 1)
      }
      const timeUntilRefresh = targetTime - now
      console.log(`Nächster Refresh: ${targetTime.toLocaleTimeString('de-DE')} (in ${Math.round(timeUntilRefresh / 1000)}s)`)
      // NOTE(review): each re-armed timeout is NOT tracked — the cleanup below
      // only clears the FIRST timeout, so timers re-armed after the first fire
      // survive unmount. Confirm whether that leak is acceptable here.
      return setTimeout(() => {
        fetchWeatherData()
        scheduleNextRefresh()
      }, timeUntilRefresh)
    }
    const timeout = scheduleNextRefresh()
    return () => clearTimeout(timeout)
  }, [])

  if (loading) {
    return (
      <div className="loading-container">
        <div className="loading-spinner"></div>
        <p>Lade Wetterdaten...</p>
      </div>
    )
  }

  if (error) {
    return (
      <div className="error-container">
        <h2>Fehler beim Laden der Daten</h2>
        <p>{error}</p>
        <button onClick={fetchWeatherData}>Erneut versuchen</button>
      </div>
    )
  }

  return (
    <div className="app">
      <header className="app-header">
        <h1>🌤️ Wetterstation</h1>
        {lastUpdate && (
          <p className="last-update">
            Letzte Aktualisierung: {lastUpdate.toLocaleTimeString('de-DE')}
          </p>
        )}
      </header>
      <main className="app-main">
        <WeatherDashboard data={weatherData} />
      </main>
    </div>
  )
}
export default App

View File

@@ -650,7 +650,6 @@ const WeatherDashboard = ({ data, currentData = [], rainData = [], timeRange = '
const isCustomRange = typeof timeRange === 'object' && timeRange.type === 'custom'
const customDays = isCustomRange ? (timeRange.days || 1) : 0
const hideGusts = (timeRange === '365d') || (isCustomRange && customDays >= 365)
console.log("Gust: ", hideGusts)
const windSpeedSeries = {
name: 'Windgeschwindigkeit',
data: sortedData