mirror of https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-08-25 06:52:23 +00:00

Commit: Wallbox power is subtracted from the load forecast
@@ -122,6 +122,17 @@ class PVAkku:
         return geladene_menge, verluste_wh

+    def aktueller_energieinhalt(self):
+        """
+        Returns the current remaining energy, taking efficiency into account.
+        Note: only the discharge efficiency is applied to the stored energy.
+        """
+        # Compute the remaining energy, taking the discharge efficiency into account
+        nutzbare_energie = self.soc_wh * self.entlade_effizienz
+        return nutzbare_energie
+
+
     # def energie_laden(self, wh, hour):
     #     if hour is not None and self.charge_array[hour] == 0:
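For context, a minimal sketch of what the new method yields; the soc_wh and entlade_effizienz values below are invented, not taken from this commit:

# Hypothetical values for illustration only
soc_wh = 5000.0            # stored energy in Wh
entlade_effizienz = 0.88   # discharge efficiency

# Usable remaining energy, as computed by aktueller_energieinhalt()
nutzbare_energie = soc_wh * entlade_effizienz  # 4400.0 Wh
print(nutzbare_energie)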
modules/class_load_corrector.py (new file, 235 lines)
@@ -0,0 +1,235 @@
import json, sys, os
from datetime import datetime, timedelta, timezone
from pprint import pprint

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import mariadb
from sklearn.metrics import mean_squared_error, r2_score

# from sklearn.model_selection import train_test_split, GridSearchCV
# from sklearn.ensemble import GradientBoostingRegressor
# from xgboost import XGBRegressor
# from statsmodels.tsa.statespace.sarimax import SARIMAX
# from tensorflow.keras.models import Sequential
# from tensorflow.keras.layers import Dense, LSTM
# from tensorflow.keras.optimizers import Adam
# from sklearn.preprocessing import MinMaxScaler
# from sqlalchemy import create_engine

# Add the parent directory to sys.path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from config import *
from modules.class_load import *

class LoadPredictionAdjuster:
    def __init__(self, measured_data, predicted_data, load_forecast):
        self.measured_data = measured_data
        self.predicted_data = predicted_data
        self.load_forecast = load_forecast
        self.merged_data = self._merge_data()
        self.train_data = None
        self.test_data = None
        self.weekday_diff = None
        self.weekend_diff = None

    def _remove_outliers(self, data, threshold=2):
        # Compute the z-score of the 'Last' data
        data['Z-Score'] = np.abs((data['Last'] - data['Last'].mean()) / data['Last'].std())
        # Filter the data by the threshold
        filtered_data = data[data['Z-Score'] < threshold]
        return filtered_data.drop(columns=['Z-Score'])
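As a quick illustration of the filter above, a self-contained sketch with made-up load values (the column name 'Last' matches the class; the numbers are invented):

import numpy as np
import pandas as pd

# Made-up hourly load values in W, with one obvious outlier
data = pd.DataFrame({'Last': [480.0, 510.0, 495.0, 505.0, 3000.0, 490.0]})

z = np.abs((data['Last'] - data['Last'].mean()) / data['Last'].std())
print(data[z < 2])  # the 3000.0 W spike is dropped, the rest survives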
    def _merge_data(self):
        merged_data = pd.merge(self.measured_data, self.predicted_data, on='time', how='inner')
        merged_data['Hour'] = merged_data['time'].dt.hour
        merged_data['DayOfWeek'] = merged_data['time'].dt.dayofweek
        return merged_data

    def calculate_weighted_mean(self, train_period_weeks=9, test_period_weeks=1):
        self.merged_data = self._remove_outliers(self.merged_data)
        train_end_date = self.merged_data['time'].max() - pd.Timedelta(weeks=test_period_weeks)
        train_start_date = train_end_date - pd.Timedelta(weeks=train_period_weeks)

        test_start_date = train_end_date + pd.Timedelta(hours=1)
        test_end_date = test_start_date + pd.Timedelta(weeks=test_period_weeks) - pd.Timedelta(hours=1)

        self.train_data = self.merged_data[(self.merged_data['time'] >= train_start_date) & (self.merged_data['time'] <= train_end_date)]
        self.test_data = self.merged_data[(self.merged_data['time'] >= test_start_date) & (self.merged_data['time'] <= test_end_date)]

        self.train_data['Difference'] = self.train_data['Last'] - self.train_data['Last Pred']

        weekdays_train_data = self.train_data[self.train_data['DayOfWeek'] < 5]
        weekends_train_data = self.train_data[self.train_data['DayOfWeek'] >= 5]

        self.weekday_diff = weekdays_train_data.groupby('Hour').apply(self._weighted_mean_diff).dropna()
        self.weekend_diff = weekends_train_data.groupby('Hour').apply(self._weighted_mean_diff).dropna()

    def _weighted_mean_diff(self, data):
        train_end_date = self.train_data['time'].max()
        weights = 1 / (train_end_date - data['time']).dt.days.replace(0, np.nan)
        weighted_mean = (data['Difference'] * weights).sum() / weights.sum()
        return weighted_mean
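The correction per hour-of-day bucket is an age-weighted mean of (measured - predicted): a row d days before the training end gets weight 1/d, so recent days dominate. A tiny worked sketch with invented numbers:

import numpy as np

# Invented (age_in_days, measured - predicted) pairs for one hour bucket
diffs = [(1, 120.0), (2, 80.0), (4, 60.0)]

weights = np.array([1 / d for d, _ in diffs])          # [1.0, 0.5, 0.25]
values = np.array([v for _, v in diffs])
correction = (values * weights).sum() / weights.sum()  # (120 + 40 + 15) / 1.75 = 100.0
print(correction)  # the most recent day contributes the most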
    def adjust_predictions(self):
        self.train_data['Adjusted Pred'] = self.train_data.apply(self._adjust_row, axis=1)
        self.test_data['Adjusted Pred'] = self.test_data.apply(self._adjust_row, axis=1)

    def _adjust_row(self, row):
        if row['DayOfWeek'] < 5:
            return row['Last Pred'] + self.weekday_diff.get(row['Hour'], 0)
        else:
            return row['Last Pred'] + self.weekend_diff.get(row['Hour'], 0)

    def plot_results(self):
        self._plot_data(self.train_data, 'Training')
        self._plot_data(self.test_data, 'Testing')

    def _plot_data(self, data, data_type):
        plt.figure(figsize=(14, 7))
        plt.plot(data['time'], data['Last'], label=f'Actual Last - {data_type}', color='blue')
        plt.plot(data['time'], data['Last Pred'], label=f'Predicted Last - {data_type}', color='red', linestyle='--')
        plt.plot(data['time'], data['Adjusted Pred'], label=f'Adjusted Predicted Last - {data_type}', color='green', linestyle=':')
        plt.xlabel('Time')
        plt.ylabel('Load')
        plt.title(f'Actual vs Predicted vs Adjusted Predicted Load ({data_type} Data)')
        plt.legend()
        plt.grid(True)
        plt.show()

    def evaluate_model(self):
        mse = mean_squared_error(self.test_data['Last'], self.test_data['Adjusted Pred'])
        r2 = r2_score(self.test_data['Last'], self.test_data['Adjusted Pred'])
        print(f'Mean Squared Error: {mse}')
        print(f'R-squared: {r2}')

    def predict_next_hours(self, hours_ahead):
        last_date = self.merged_data['time'].max()
        future_dates = [last_date + pd.Timedelta(hours=i) for i in range(1, hours_ahead + 1)]
        future_df = pd.DataFrame({'time': future_dates})
        future_df['Hour'] = future_df['time'].dt.hour
        future_df['DayOfWeek'] = future_df['time'].dt.dayofweek
        future_df['Last Pred'] = future_df['time'].apply(self._forecast_next_hours)
        future_df['Adjusted Pred'] = future_df.apply(self._adjust_row, axis=1)
        return future_df

    def _forecast_next_hours(self, timestamp):
        date_str = timestamp.strftime('%Y-%m-%d')
        hour = timestamp.hour
        daily_forecast = self.load_forecast.get_daily_stats(date_str)
        return daily_forecast[0][hour] if hour < len(daily_forecast[0]) else np.nan


class LastEstimator:
    def __init__(self):
        self.conn_params = db_config
        self.conn = mariadb.connect(**self.conn_params)

    def fetch_data(self, start_date, end_date):
        queries = {
            "Stromzaehler": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS Stromzaehler FROM sensor_stromzaehler WHERE topic = 'stromzaehler leistung' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
            "PV": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS PV FROM data WHERE topic = 'solarallpower' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
            "Batterie_Strom_PIP": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS Batterie_Strom_PIP FROM pip WHERE topic = 'battery_current' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
            "Batterie_Volt_PIP": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS Batterie_Volt_PIP FROM pip WHERE topic = 'battery_voltage' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
            "Stromzaehler_Raus": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS Stromzaehler_Raus FROM sensor_stromzaehler WHERE topic = 'stromzaehler leistung raus' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
            "Wallbox": f"SELECT DATE_FORMAT(timestamp, '%Y-%m-%d %H:00:00') as timestamp, AVG(data) AS Wallbox_Leistung FROM wallbox WHERE topic = 'power_total' AND timestamp BETWEEN '{start_date}' AND '{end_date}' GROUP BY 1 ORDER BY timestamp ASC",
        }

        dataframes = {}
        for key, query in queries.items():
            dataframes[key] = pd.read_sql(query, self.conn)

        return dataframes

    def calculate_last(self, dataframes):
        # Batterie_Leistung = Batterie_Strom_PIP * Batterie_Volt_PIP
        dataframes["Batterie_Leistung"] = dataframes["Batterie_Strom_PIP"].merge(dataframes["Batterie_Volt_PIP"], on="timestamp", how="outer")
        dataframes["Batterie_Leistung"]["Batterie_Leistung"] = dataframes["Batterie_Leistung"]["Batterie_Strom_PIP"] * dataframes["Batterie_Leistung"]["Batterie_Volt_PIP"]

        # Stromzaehler_Saldo = Stromzaehler - Stromzaehler_Raus
        dataframes["Stromzaehler_Saldo"] = dataframes["Stromzaehler"].merge(dataframes["Stromzaehler_Raus"], on="timestamp", how="outer")
        dataframes["Stromzaehler_Saldo"]["Stromzaehler_Saldo"] = dataframes["Stromzaehler_Saldo"]["Stromzaehler"] - dataframes["Stromzaehler_Saldo"]["Stromzaehler_Raus"]

        # Netzleistung = Stromzaehler_Saldo - Batterie_Leistung
        dataframes["Netzleistung"] = dataframes["Stromzaehler_Saldo"].merge(dataframes["Batterie_Leistung"], on="timestamp", how="outer")
        dataframes["Netzleistung"]["Netzleistung"] = dataframes["Netzleistung"]["Stromzaehler_Saldo"] - dataframes["Netzleistung"]["Batterie_Leistung"]

        # Add the wallbox power
        dataframes["Netzleistung"] = dataframes["Netzleistung"].merge(dataframes["Wallbox"], on="timestamp", how="left")
        dataframes["Netzleistung"]["Wallbox_Leistung"] = dataframes["Netzleistung"]["Wallbox_Leistung"].fillna(0)  # fill missing values with 0

        # Last = Netzleistung + PV
        # Compute the final load; the wallbox power is subtracted from it
        dataframes["Last"] = dataframes["Netzleistung"].merge(dataframes["PV"], on="timestamp", how="outer")
        dataframes["Last"]["Last_ohneWallbox"] = dataframes["Last"]["Netzleistung"] + dataframes["Last"]["PV"]
        dataframes["Last"]["Last"] = dataframes["Last"]["Netzleistung"] + dataframes["Last"]["PV"] - dataframes["Last"]["Wallbox_Leistung"]
        return dataframes["Last"].dropna()
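This is where the commit's headline change happens: the wallbox power is subtracted so EV charging does not inflate the household load used for the forecast. A toy check with invented single-hour values, following the sign conventions of the dataframes above:

# Invented sample values for a single hour, all in W
stromzaehler_saldo = 1500.0   # net meter power (import - export)
batterie_leistung = -500.0    # negative: the battery is discharging into the house
pv = 2000.0                   # PV production
wallbox_leistung = 3000.0     # EV charging

netzleistung = stromzaehler_saldo - batterie_leistung   # 2000.0
last = netzleistung + pv - wallbox_leistung             # 1000.0 W household load
print(last)  # without the subtraction, the load would read 4000.0 W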
    def get_last(self, start_date, end_date):
        dataframes = self.fetch_data(start_date, end_date)
        last_df = self.calculate_last(dataframes)
        return last_df


if __name__ == '__main__':
    estimator = LastEstimator()
    start_date = "2024-06-01"
    end_date = "2024-08-01"
    last_df = estimator.get_last(start_date, end_date)

    selected_columns = last_df[['timestamp', 'Last']]
    selected_columns['time'] = pd.to_datetime(selected_columns['timestamp']).dt.floor('H')
    selected_columns['Last'] = pd.to_numeric(selected_columns['Last'], errors='coerce')

    # Drop rows with NaN values
    cleaned_data = selected_columns.dropna()
    print(cleaned_data)

    # Create an instance of LoadForecast
    lf = LoadForecast(filepath=r'.\load_profiles.npz', year_energy=6000*1000)

    # Collect the daily forecasts in a list
    forecast_list = []

    # Loop through each day in the date range
    for single_date in pd.date_range(cleaned_data['time'].min().date(), cleaned_data['time'].max().date()):
        date_str = single_date.strftime('%Y-%m-%d')
        daily_forecast = lf.get_daily_stats(date_str)
        mean_values = daily_forecast[0]  # extract the mean values
        hours = [single_date + pd.Timedelta(hours=i) for i in range(24)]
        daily_forecast_df = pd.DataFrame({'time': hours, 'Last Pred': mean_values})
        forecast_list.append(daily_forecast_df)

    # Concatenate all daily forecasts into a single DataFrame
    forecast_df = pd.concat(forecast_list, ignore_index=True)

    # Create an instance of the LoadPredictionAdjuster class
    adjuster = LoadPredictionAdjuster(cleaned_data, forecast_df, lf)

    # Calculate the weighted mean differences
    adjuster.calculate_weighted_mean()

    # Adjust the predictions
    adjuster.adjust_predictions()

    # Plot the results
    adjuster.plot_results()

    # Evaluate the model
    adjuster.evaluate_model()

    # Predict the next 48 hours
    future_predictions = adjuster.predict_next_hours(48)
    print(future_predictions)

@@ -159,10 +159,15 @@ class optimization_problem:
         individual.extra_data = (o["Gesamtbilanz_Euro"], o["Gesamt_Verluste"], eauto_roi)

+        restenergie_akku = ems.akku.aktueller_energieinhalt()
+        restwert_akku = restenergie_akku * parameter["preis_euro_pro_wh_akku"]
+        # print(restenergie_akku)
+        # print(parameter["preis_euro_pro_wh_akku"])
+        # print(restwert_akku)
+        # print()
         strafe = 0.0
         strafe = max(0, (parameter['eauto_min_soc'] - ems.eauto.ladezustand_in_prozent()) * self.strafe)
-        gesamtbilanz += strafe
+        gesamtbilanz += strafe - restwert_akku
         #gesamtbilanz += o["Gesamt_Verluste"]/10000.0

         return (gesamtbilanz,)
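The fitness now credits the battery's residual energy value, so the optimizer no longer treats charge left in the battery at the end of the horizon as worthless. A minimal sketch with invented numbers (preis_euro_pro_wh_akku is a config parameter; the values here are made up):

# Invented example values
restenergie_akku = 4400.0          # usable Wh left in the battery
preis_euro_pro_wh_akku = 0.0002    # assumed value of stored energy, EUR/Wh
gesamtbilanz = 3.50                # cost balance so far, EUR
strafe = 0.0                       # penalty for missing the EV SoC target

restwert_akku = restenergie_akku * preis_euro_pro_wh_akku  # 0.88 EUR
gesamtbilanz += strafe - restwert_akku                     # 2.62 EUR
print(gesamtbilanz)  # keeping charge in the battery lowers the objective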
@@ -1,23 +1,22 @@
 from flask import Flask, jsonify, request
 import numpy as np
-from datetime import datetime
+from datetime import datetime, timedelta
 from pprint import pprint
 import json, sys, os
 import requests, hashlib
-from dateutil import parser, tz
+from dateutil import parser
 import pandas as pd


 class ForecastData:
-    def __init__(self, date_time, dc_power, ac_power, windspeed_10m=None, temperature=None,ac_power_measurement=None):
+    def __init__(self, date_time, dc_power, ac_power, windspeed_10m=None, temperature=None, ac_power_measurement=None):
         self.date_time = date_time
         self.dc_power = dc_power
         self.ac_power = ac_power
         self.windspeed_10m = windspeed_10m
         self.temperature = temperature
-        self.ac_power_measurement = None
-
-        # Getter for the ForecastData attributes
+        self.ac_power_measurement = ac_power_measurement

     def get_date_time(self):
         return self.date_time

@@ -28,7 +27,7 @@ class ForecastData:
         return self.ac_power_measurement

     def get_ac_power(self):
-        if self.ac_power_measurement != None:
+        if self.ac_power_measurement is not None:
             return self.ac_power_measurement
         else:
             return self.ac_power
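get_ac_power prefers a real measurement over the forecast whenever one has been attached. A tiny standalone sketch of that preference (the _Demo class and all values are invented for illustration):

# Values invented for illustration
class _Demo:
    def __init__(self, ac_power, ac_power_measurement=None):
        self.ac_power = ac_power
        self.ac_power_measurement = ac_power_measurement

    def get_ac_power(self):
        # Same preference logic as ForecastData.get_ac_power
        if self.ac_power_measurement is not None:
            return self.ac_power_measurement
        return self.ac_power

print(_Demo(1500.0).get_ac_power())          # 1500.0: forecast used
print(_Demo(1500.0, 1321.0).get_ac_power())  # 1321.0: measurement wins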
@@ -40,73 +39,63 @@ class ForecastData:
         return self.temperature

 class PVForecast:
-    def __init__(self, filepath=None, url=None, cache_dir='cache', prediction_hours = 48):
+    def __init__(self, filepath=None, url=None, cache_dir='cache', prediction_hours=48):
         self.meta = {}
         self.forecast_data = []
         self.cache_dir = cache_dir
         self.prediction_hours = prediction_hours
         self.current_measurement = None

         if not os.path.exists(self.cache_dir):
             os.makedirs(self.cache_dir)
         if filepath:
             self.load_data_from_file(filepath)
         elif url:
             self.load_data_with_caching(url)

         # Check after loading the data
         if len(self.forecast_data) < self.prediction_hours:
             raise ValueError(f"The forecast must cover at least {self.prediction_hours} hours, but only {len(self.forecast_data)} hours were predicted.")

     def update_ac_power_measurement(self, date_time=None, ac_power_measurement=None):
         """Updates an AC power measurement or adds one."""
         found = False
-        target_timezone = tz.gettz('Europe/Berlin')
-        input_date_hour = date_time.astimezone(target_timezone).replace(minute=0, second=0, microsecond=0)
+        input_date_hour = date_time.replace(minute=0, second=0, microsecond=0)

         for forecast in self.forecast_data:
-            forecast_date_hour = datetime.strptime(forecast.date_time, "%Y-%m-%dT%H:%M:%S.%f%z").astimezone(target_timezone).replace(minute=0, second=0, microsecond=0)
-            #print(forecast_date_hour," ",input_date_hour)
+            forecast_date_hour = parser.parse(forecast.date_time).replace(minute=0, second=0, microsecond=0)
             if forecast_date_hour == input_date_hour:
                 forecast.ac_power_measurement = ac_power_measurement
                 found = True
                 break

         # if not found:
         #     # Create a new ForecastData object if no matching timestamp was found
         #     # Decide here how the other values should be set when no forecast exists
         #     new_forecast = ForecastData(date_time, dc_power=None, ac_power=None, dc_power_measurement=dc_power_measurement)
         #     self.forecast_data.append(new_forecast)
         #     # Sort the list so it stays in chronological order
         #     self.forecast_data.sort(key=lambda x: datetime.strptime(x.date_time, "%Y-%m-%dT%H:%M:%S.%f%z").replace(minute=0, second=0, microsecond=0))

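The matching above truncates both timestamps to the full hour before comparing. A small sketch of that bucketing, using dateutil.parser on ISO strings (all values invented):

from dateutil import parser

# Invented forecast timestamp and measurement timestamp
forecast_ts = parser.parse("2024-06-01T14:37:12.000000+02:00")
measured_ts = parser.parse("2024-06-01T14:05:00.000000+02:00")

# Truncate both to the full hour, as update_ac_power_measurement does
same_hour = (forecast_ts.replace(minute=0, second=0, microsecond=0)
             == measured_ts.replace(minute=0, second=0, microsecond=0))
print(same_hour)  # True: both fall into the 14:00 bucket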
     def process_data(self, data):
         self.meta = data.get('meta', {})
         all_values = data.get('values', [])

         # Sum the DC and AC power over all planes for each timestamp
         for i in range(len(all_values[0])):  # assumes all lists have the same length
             sum_dc_power = sum(values[i]['dcPower'] for values in all_values)
             sum_ac_power = sum(values[i]['power'] for values in all_values)

             # Build a ForecastData object from the summed values
             original_datetime = all_values[0][i].get('datetime')
             #print(original_datetime," ",sum_dc_power," ",all_values[0][i]['dcPower'])
+            dt = datetime.strptime(original_datetime, "%Y-%m-%dT%H:%M:%S.%f%z")
+            dt = dt.replace(tzinfo=None)
             #iso_datetime = parser.parse(original_datetime).isoformat()  # convert to ISO format
             # Optional: subtract 2 hours to test the time adjustment
             #adjusted_datetime = parser.parse(original_datetime) - timedelta(hours=2)
             #print(f"Adjusted timestamp: {adjusted_datetime.isoformat()}")

             forecast = ForecastData(
-                date_time=all_values[0][i].get('datetime'),
+                date_time=dt,  # use the adjusted timestamp
                 dc_power=sum_dc_power,
                 ac_power=sum_ac_power,
                 # Optional: further values such as windspeed and temperature, if needed
                 windspeed_10m=all_values[0][i].get('windspeed_10m'),
                 temperature=all_values[0][i].get('temperature')
             )

             self.forecast_data.append(forecast)

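process_data sums the per-plane arrays of the API response position by position. A toy sketch with a two-plane, two-hour payload (the field names dcPower, power, and datetime follow the loop above; the numbers are invented):

# Invented miniature payload: two PV planes, two timestamps each
all_values = [
    [{'datetime': '2024-06-01T10:00:00.000000+02:00', 'dcPower': 1200.0, 'power': 1100.0},
     {'datetime': '2024-06-01T11:00:00.000000+02:00', 'dcPower': 1500.0, 'power': 1400.0}],
    [{'datetime': '2024-06-01T10:00:00.000000+02:00', 'dcPower': 800.0,  'power': 700.0},
     {'datetime': '2024-06-01T11:00:00.000000+02:00', 'dcPower': 900.0,  'power': 850.0}],
]

for i in range(len(all_values[0])):
    sum_dc = sum(values[i]['dcPower'] for values in all_values)
    sum_ac = sum(values[i]['power'] for values in all_values)
    print(all_values[0][i]['datetime'], sum_dc, sum_ac)
# 10:00 -> 2000.0 DC / 1800.0 AC, 11:00 -> 2400.0 DC / 2250.0 AC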
     def load_data_from_file(self, filepath):
         with open(filepath, 'r') as file:
@@ -124,9 +113,9 @@ class PVForecast:
             self.load_data_from_url(url)

     def load_data_with_caching(self, url):
         date = datetime.now().strftime("%Y-%m-%d")

-        cache_file = os.path.join(self.cache_dir, self.generate_cache_filename(url,date))
+        cache_file = os.path.join(self.cache_dir, self.generate_cache_filename(url, date))
         if os.path.exists(cache_file):
             with open(cache_file, 'r') as file:
                 data = json.load(file)
@@ -143,28 +132,16 @@ class PVForecast:
             return
         self.process_data(data)

-    def generate_cache_filename(self, url,date):
-        # Generate a SHA-256 hash of the URL as the filename
+    def generate_cache_filename(self, url, date):
         cache_key = hashlib.sha256(f"{url}{date}".encode('utf-8')).hexdigest()
         #cache_path = os.path.join(self.cache_dir, cache_key)
         return f"cache_{cache_key}.json"

     def get_forecast_data(self):
         return self.forecast_data
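Because the current date is hashed together with the URL, the cache key rolls over once per day, so a fresh response is fetched the first time each day. A minimal sketch (the URL is shortened and invented):

import hashlib

url = "https://api.akkudoktor.net/forecast?lat=50.0&lon=7.0"  # shortened, illustrative
for date in ("2024-06-01", "2024-06-02"):
    cache_key = hashlib.sha256(f"{url}{date}".encode('utf-8')).hexdigest()
    print(f"cache_{cache_key}.json")
# Two different filenames: the cached response expires daily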

     # def get_forecast_for_date(self, input_date_str):
     #     input_date = datetime.strptime(input_date_str, "%Y-%m-%d")
     #     daily_forecast_obj = [data for data in self.forecast_data if datetime.strptime(data.get_date_time(), "%Y-%m-%dT%H:%M:%S.%f%z").date() == input_date.date()]
     #     daily_forecast = []
     #     for d in daily_forecast_obj:
     #         daily_forecast.append(d.get_ac_power())
     #
     #     return np.array(daily_forecast)

     def get_temperature_forecast_for_date(self, input_date_str):
         input_date = datetime.strptime(input_date_str, "%Y-%m-%d")
-        daily_forecast_obj = [data for data in self.forecast_data if datetime.strptime(data.get_date_time(), "%Y-%m-%dT%H:%M:%S.%f%z").date() == input_date.date()]
+        daily_forecast_obj = [data for data in self.forecast_data if parser.parse(data.get_date_time()).date() == input_date.date()]
         daily_forecast = []
         for d in daily_forecast_obj:
             daily_forecast.append(d.get_temperature())
@@ -177,10 +154,10 @@ class PVForecast:
         date_range_forecast = []

         for data in self.forecast_data:
-            data_date = datetime.strptime(data.get_date_time(), "%Y-%m-%dT%H:%M:%S.%f%z").date()
-            #print(data.get_date_time())
+            data_date = data.get_date_time().date()  #parser.parse(data.get_date_time()).date()
             if start_date <= data_date <= end_date:
                 date_range_forecast.append(data)
                 print(data.get_date_time(), " ", data.get_ac_power())

         ac_power_forecast = np.array([data.get_ac_power() for data in date_range_forecast])

@@ -192,28 +169,36 @@ class PVForecast:
         date_range_forecast = []

         for data in self.forecast_data:
-            data_date = datetime.strptime(data.get_date_time(), "%Y-%m-%dT%H:%M:%S.%f%z").date()
+            data_date = data.get_date_time().date()
             if start_date <= data_date <= end_date:
                 date_range_forecast.append(data)

-        forecast_data = date_range_forecast
-        temperature_forecast = [data.get_temperature() for data in forecast_data]
+        temperature_forecast = [data.get_temperature() for data in date_range_forecast]
         return np.array(temperature_forecast)[:self.prediction_hours]

     def get_forecast_dataframe(self):
         # Convert the forecast data into a pandas DataFrame
         data = [{
             'date_time': f.get_date_time(),
             'dc_power': f.get_dc_power(),
             'ac_power': f.get_ac_power(),
             'windspeed_10m': f.get_windspeed_10m(),
             'temperature': f.get_temperature()
         } for f in self.forecast_data]

         # Build the DataFrame
         df = pd.DataFrame(data)
         return df

     def print_ac_power_and_measurement(self):
         """Prints the DC power, AC power, and the AC measurement for each hour."""
         for forecast in self.forecast_data:
             date_time = forecast.date_time
-            print(f"Time: {date_time}, DC: {forecast.dc_power}, AC: {forecast.ac_power}, Measured: {forecast.ac_power_measurement} AC GET: {forecast.get_ac_power()}")
+            print(f"Time: {date_time}, DC: {forecast.dc_power}, AC: {forecast.ac_power}, Measured: {forecast.ac_power_measurement}, AC GET: {forecast.get_ac_power()}")

 # Example usage of the class
 if __name__ == '__main__':
     date_now = datetime.now()
-    forecast = PVForecast(prediction_hours = 24, url="https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747&power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m")
+    forecast = PVForecast(prediction_hours=24, url="https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747&power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m")
     forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
     forecast.print_ac_power_and_measurement()