Measure elapsed optimization time in verbose mode; add LRU cache and vectorization.

This commit is contained in:
Andreas 2024-12-20 16:34:51 +01:00 committed by Andreas
parent 9214d190e8
commit 83bfb1878b
2 changed files with 58 additions and 35 deletions

View File

@ -1,6 +1,7 @@
import random
from tabnanny import verbose
from typing import Any, Optional, Tuple
import time
import numpy as np
from deap import algorithms, base, creator, tools
from pydantic import BaseModel, Field, field_validator, model_validator
@ -354,10 +355,11 @@ class optimization_problem:
worst_case: bool,
) -> Tuple[float]:
"""Evaluate the fitness of an individual solution based on the simulation results."""
# try:
o = self.evaluate_inner(individual, ems, start_hour)
# except Exception as e:
# return (100000.0,) # Return a high penalty in case of an exception
try:
o = self.evaluate_inner(individual, ems, start_hour)
except Exception as e:
return (100000.0,) # Return a high penalty in case of an exception
gesamtbilanz = o["Gesamtbilanz_Euro"] * (-1.0 if worst_case else 1.0)
@ -509,8 +511,13 @@ class optimization_problem:
"evaluate",
lambda ind: self.evaluate(ind, ems, parameters, start_hour, worst_case),
)
start_solution, extra_data = self.optimize(parameters.start_solution, ngen=ngen)
if self.verbose == True:
start_time = time.time()
start_solution, extra_data = self.optimize(parameters.start_solution, ngen=ngen)
if self.verbose == True:
elapsed_time = time.time() - start_time
print(f"Time evaluate inner: {elapsed_time:.4f} sec.")
# Perform final evaluation on the best solution
o = self.evaluate_inner(start_solution, ems, start_hour)
discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python
import numpy as np
import pickle
from functools import lru_cache
# from scipy.interpolate import RegularGridInterpolator
from pathlib import Path
@ -10,44 +11,59 @@ class self_consumption_probability_interpolator:
def __init__(self, filepath: str | Path):
    """Load a pickled interpolator from *filepath*.

    Args:
        filepath: Path to a pickle file holding the interpolator object
            (presumably a scipy ``RegularGridInterpolator`` — TODO confirm
            against the code that writes this file).

    Raises:
        FileNotFoundError: If *filepath* does not exist.
        pickle.UnpicklingError: If the file is not a valid pickle.
    """
    self.filepath = filepath
    # Starts as None so callers can detect a failed/absent load.
    self.interpolator = None
    # NOTE(review): pickle.load executes arbitrary code from the file;
    # only load files from a trusted source.
    with open(self.filepath, "rb") as file:
        self.interpolator = pickle.load(file)
def calculate_self_consumption(self, load_1h_power: float, pv_power: float) -> float:
"""Calculate the PV self-consumption rate using RegularGridInterpolator.
Args:
- last_1h_power: 1h power levels (W).
- pv_power: Current PV power output (W).
Returns:
- Self-consumption rate as a float.
"""
# Generate the range of partial loads (0 to last_1h_power)
partial_loads = np.arange(0, 3500, 50)
# Get probabilities for all partial loads
@lru_cache(maxsize=128)
def generate_points(self, load_1h_power: float, pv_power: float):
"""Generate the grid points for interpolation."""
partial_loads = np.arange(0, pv_power + 50, 50)
points = np.array([np.full_like(partial_loads, load_1h_power), partial_loads]).T
if self.interpolator == None:
return -1.0
return points, partial_loads
def calculate_self_consumption(self, load_1h_power: float, pv_power: float) -> float:
points, partial_loads = self.generate_points(load_1h_power, pv_power)
probabilities = self.interpolator(points)
probabilities = probabilities / probabilities.sum()
# for i, w in enumerate(partial_loads):
# print(w, ": ", probabilities[i])
# print(probabilities.sum())
# Ensure probabilities are within [0, 1]
probabilities = np.clip(probabilities, 0, 1)
return probabilities.sum()
# Mask: Only include probabilities where the load is <= PV power
mask = partial_loads <= pv_power
# def calculate_self_consumption(self, load_1h_power: float, pv_power: float) -> float:
# """Calculate the PV self-consumption rate using RegularGridInterpolator.
# Calculate the cumulative probability for covered loads
self_consumption_rate = np.sum(probabilities[mask]) / np.sum(probabilities)
# Args:
# - last_1h_power: 1h power levels (W).
# - pv_power: Current PV power output (W).
return self_consumption_rate
# Returns:
# - Self-consumption rate as a float.
# """
# # Generate the range of partial loads (0 to last_1h_power)
# partial_loads = np.arange(0, pv_power + 50, 50)
# # Get probabilities for all partial loads
# points = np.array([np.full_like(partial_loads, load_1h_power), partial_loads]).T
# if self.interpolator == None:
# return -1.0
# probabilities = self.interpolator(points)
# self_consumption_rate = probabilities.sum()
# # probabilities = probabilities / (np.sum(probabilities)) # / (pv_power / 3450))
# # # for i, w in enumerate(partial_loads):
# # # print(w, ": ", probabilities[i])
# # print(probabilities.sum())
# # # Ensure probabilities are within [0, 1]
# # probabilities = np.clip(probabilities, 0, 1)
# # # Mask: Only include probabilities where the load is <= PV power
# # mask = partial_loads <= pv_power
# # # Calculate the cumulative probability for covered loads
# # self_consumption_rate = np.sum(probabilities[mask]) / np.sum(probabilities)
# # print(self_consumption_rate)
# # sys.exit()
# return self_consumption_rate
# Test the function