Moved load_profiles db to the data directory

Removed commented-out code
Fixed bug in visualize.py (empty extra data)
Removed dead cp call
This commit is contained in:
Andreas 2024-10-05 09:11:41 +02:00 committed by Andreas
parent ec1dc87104
commit caf907a2e5
5 changed files with 28 additions and 100 deletions

View File

@ -116,7 +116,8 @@ def flask_gesamtlast():
measured_data["time"] = measured_data["time"].dt.tz_localize(None) measured_data["time"] = measured_data["time"].dt.tz_localize(None)
# Instantiate LoadForecast and generate forecast data # Instantiate LoadForecast and generate forecast data
lf = LoadForecast(filepath=r"load_profiles.npz", year_energy=year_energy) file_path = os.path.join("data", "load_profiles.npz")
lf = LoadForecast(filepath=file_path, year_energy=year_energy)
forecast_list = [] forecast_list = []
# Generate daily forecasts for the date range based on measured data # Generate daily forecasts for the date range based on measured data
@ -153,68 +154,6 @@ def flask_gesamtlast():
return jsonify(last.tolist()) return jsonify(last.tolist())
# @app.route('/gesamtlast', methods=['GET'])
# def flask_gesamtlast():
# if request.method == 'GET':
# year_energy = float(request.args.get("year_energy")) # Get annual energy value from query parameters
# prediction_hours = int(request.args.get("hours", 48)) # Default to 48 hours if not specified
# date_now = datetime.now() # Get the current date and time
# end_date = (date_now + timedelta(hours=prediction_hours)).strftime('%Y-%m-%d %H:%M:%S') # Calculate end date based on prediction hours
# ###############
# # Load Forecast
# ###############
# # Instantiate LastEstimator to retrieve measured data
# estimator = LastEstimator()
# start_date = (date_now - timedelta(days=60)).strftime('%Y-%m-%d') # Start date: last 60 days
# end_date = date_now.strftime('%Y-%m-%d') # Current date
# last_df = estimator.get_last(start_date, end_date) # Get last load data
# selected_columns = last_df[['timestamp', 'Last']] # Select relevant columns
# selected_columns['time'] = pd.to_datetime(selected_columns['timestamp']).dt.floor('H') # Floor timestamps to the nearest hour
# selected_columns['Last'] = pd.to_numeric(selected_columns['Last'], errors='coerce') # Convert 'Last' to numeric, coerce errors
# cleaned_data = selected_columns.dropna() # Clean data by dropping NaN values
# # Instantiate LoadForecast
# lf = LoadForecast(filepath=r'load_profiles.npz', year_energy=year_energy)
# # Generate forecast data
# forecast_list = [] # List to hold daily forecasts
# for single_date in pd.date_range(cleaned_data['time'].min().date(), cleaned_data['time'].max().date()): # Iterate over date range
# date_str = single_date.strftime('%Y-%m-%d') # Format date
# daily_forecast = lf.get_daily_stats(date_str) # Get daily stats from LoadForecast
# mean_values = daily_forecast[0] # Extract mean values
# hours = [single_date + pd.Timedelta(hours=i) for i in range(24)] # Generate hours for the day
# daily_forecast_df = pd.DataFrame({'time': hours, 'Last Pred': mean_values}) # Create DataFrame for daily forecast
# forecast_list.append(daily_forecast_df) # Append to the list
# forecast_df = pd.concat(forecast_list, ignore_index=True) # Concatenate all daily forecasts
# # Create LoadPredictionAdjuster instance
# adjuster = LoadPredictionAdjuster(cleaned_data, forecast_df, lf)
# adjuster.calculate_weighted_mean() # Calculate weighted mean for adjustments
# adjuster.adjust_predictions() # Adjust predictions based on measured data
# # Predict the next hours
# future_predictions = adjuster.predict_next_hours(prediction_hours) # Predict future load
# leistung_haushalt = future_predictions['Adjusted Pred'].values # Extract household power predictions
# gesamtlast = Gesamtlast(prediction_hours=prediction_hours) # Create Gesamtlast instance
# gesamtlast.hinzufuegen("Haushalt", leistung_haushalt) # Add household load to total load calculation
# # ###############
# # # WP (Heat Pump)
# # ##############
# # leistung_wp = wp.simulate_24h(temperature_forecast) # Simulate heat pump load for 24 hours
# # gesamtlast.hinzufuegen("Heatpump", leistung_wp) # Add heat pump load to total load calculation
# last = gesamtlast.gesamtlast_berechnen() # Calculate total load
# print(last) # Output total load
# return jsonify(last.tolist()) # Return total load as JSON
@app.route("/gesamtlast_simple", methods=["GET"]) @app.route("/gesamtlast_simple", methods=["GET"])
def flask_gesamtlast_simple(): def flask_gesamtlast_simple():
if request.method == "GET": if request.method == "GET":
@ -228,8 +167,10 @@ def flask_gesamtlast_simple():
############### ###############
# Load Forecast # Load Forecast
############### ###############
file_path = os.path.join("data", "load_profiles.npz")
lf = LoadForecast( lf = LoadForecast(
filepath=r"load_profiles.npz", year_energy=year_energy filepath=file_path, year_energy=year_energy
) # Instantiate LoadForecast with specified parameters ) # Instantiate LoadForecast with specified parameters
leistung_haushalt = lf.get_stats_for_date_range(date_now, date)[ leistung_haushalt = lf.get_stats_for_date_range(date_now, date)[
0 0

View File

@ -101,7 +101,7 @@ class LoadForecast:
# Example usage of the class # Example usage of the class
if __name__ == "__main__": if __name__ == "__main__":
filepath = r"..\load_profiles.npz" # Adjust the path to the .npz file filepath = r"..\data\load_profiles.npz" # Adjust the path to the .npz file
lf = LoadForecast(filepath=filepath, year_energy=2000) lf = LoadForecast(filepath=filepath, year_energy=2000)
specific_date_prices = lf.get_daily_stats("2024-02-16") # Adjust date as needed specific_date_prices = lf.get_daily_stats("2024-02-16") # Adjust date as needed
specific_hour_stats = lf.get_hourly_stats( specific_hour_stats = lf.get_hourly_stats(

View File

@ -3,17 +3,6 @@ import numpy as np
import pandas as pd import pandas as pd
from sklearn.metrics import mean_squared_error, r2_score from sklearn.metrics import mean_squared_error, r2_score
# from sklearn.model_selection import train_test_split, GridSearchCV
# from sklearn.ensemble import GradientBoostingRegressor
# from xgboost import XGBRegressor
# from statsmodels.tsa.statespace.sarimax import SARIMAX
# from tensorflow.keras.models import Sequential
# from tensorflow.keras.layers import Dense, LSTM
# from tensorflow.keras.optimizers import Adam
# from sklearn.preprocessing import MinMaxScaler
# from sqlalchemy import create_engine
class LoadPredictionAdjuster: class LoadPredictionAdjuster:
def __init__(self, measured_data, predicted_data, load_forecast): def __init__(self, measured_data, predicted_data, load_forecast):
self.measured_data = measured_data self.measured_data = measured_data

View File

@ -338,8 +338,6 @@ class optimization_problem:
extra_data=extra_data, extra_data=extra_data,
) )
os.system("cp visualisierungsergebnisse.pdf ~/")
# Return final results as a dictionary # Return final results as a dictionary
return { return {
"discharge_hours_bin": discharge_hours_bin, "discharge_hours_bin": discharge_hours_bin,

View File

@ -110,7 +110,7 @@ def visualisiere_ergebnisse(
plt.figure(figsize=(14, 10)) plt.figure(figsize=(14, 10))
if ist_dst_wechsel(datetime.now()): if ist_dst_wechsel(datetime.datetime.now()):
hours = np.arange(start_hour, prediction_hours - 1) hours = np.arange(start_hour, prediction_hours - 1)
else: else:
hours = np.arange(start_hour, prediction_hours) hours = np.arange(start_hour, prediction_hours)
@ -262,7 +262,7 @@ def visualisiere_ergebnisse(
if n < 0.01 if n < 0.01
] ]
) )
if filtered_losses.size != 0:
best_loss = min(filtered_losses) best_loss = min(filtered_losses)
worst_loss = max(filtered_losses) worst_loss = max(filtered_losses)
best_balance = min(filtered_balance) best_balance = min(filtered_balance)