ci(ruff): add bandit checks (#575)

Added bandit checks to continuous integration.

Updated sources to pass bandit checks:
- replaced asserts with explicit exception raising (ValueError)
- added timeouts to requests
- added checks for process command execution
- changed the default IP address for EOS and EOSdash to 127.0.0.1 for security reasons

Added a rudimentary check for outdated config files.

BREAKING CHANGE: Default IP address for EOS and EOSdash changed to 127.0.0.1

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
Bobby Noelte
2025-06-03 08:30:37 +02:00
committed by GitHub
parent aa39ff475c
commit 3421b2303b
34 changed files with 163 additions and 86 deletions

View File

@@ -104,12 +104,13 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
- add the file cache again.
"""
source = "https://api.akkudoktor.net"
assert self.start_datetime # mypy fix
if not self.start_datetime:
raise ValueError(f"Start DateTime not set: {self.start_datetime}")
# Try to take data from 5 weeks back for prediction
date = to_datetime(self.start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD")
last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
url = f"{source}/prices?start={date}&end={last_date}&tz={self.config.general.timezone}"
response = requests.get(url)
response = requests.get(url, timeout=10)
logger.debug(f"Response from {url}: {response}")
response.raise_for_status() # Raise an error for bad responses
akkudoktor_data = self._validate_data(response.content)
@@ -148,7 +149,8 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
"""
# Get Akkudoktor electricity price data
akkudoktor_data = self._request_forecast(force_update=force_update) # type: ignore
assert self.start_datetime # mypy fix
if not self.start_datetime:
raise ValueError(f"Start DateTime not set: {self.start_datetime}")
# Assumption that all lists are the same length and are ordered chronologically
# in ascending order and have the same timestamps.
@@ -178,7 +180,10 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
)
amount_datasets = len(self.records)
assert highest_orig_datetime # mypy fix
if not highest_orig_datetime: # mypy fix
error_msg = f"Highest original datetime not available: {highest_orig_datetime}"
logger.error(error_msg)
raise ValueError(error_msg)
# some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
needed_hours = int(

View File

@@ -14,7 +14,7 @@ class SelfConsumptionProbabilityInterpolator:
self.filepath = filepath
# Load the RegularGridInterpolator
with open(self.filepath, "rb") as file:
self.interpolator: RegularGridInterpolator = pickle.load(file)
self.interpolator: RegularGridInterpolator = pickle.load(file) # noqa: S301
@lru_cache(maxsize=128)
def generate_points(

View File

@@ -291,7 +291,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
Raises:
ValueError: If the API response does not include expected `meta` data.
"""
response = requests.get(self._url())
response = requests.get(self._url(), timeout=10)
response.raise_for_status() # Raise an error for bad responses
logger.debug(f"Response from {self._url()}: {response}")
akkudoktor_data = self._validate_data(response.content)
@@ -332,7 +332,8 @@ class PVForecastAkkudoktor(PVForecastProvider):
logger.error(f"Akkudoktor schema change: {error_msg}")
raise ValueError(error_msg)
assert self.start_datetime # mypy fix
if not self.start_datetime:
raise ValueError(f"Start DateTime not set: {self.start_datetime}")
# Iterate over forecast data points
for forecast_values in zip(*akkudoktor_data.values):

View File

@@ -100,7 +100,8 @@ class WeatherBrightSky(WeatherProvider):
date = to_datetime(self.start_datetime, as_string=True)
last_date = to_datetime(self.end_datetime, as_string=True)
response = requests.get(
f"{source}/weather?lat={self.config.general.latitude}&lon={self.config.general.longitude}&date={date}&last_date={last_date}&tz={self.config.general.timezone}"
f"{source}/weather?lat={self.config.general.latitude}&lon={self.config.general.longitude}&date={date}&last_date={last_date}&tz={self.config.general.timezone}",
timeout=10,
)
response.raise_for_status() # Raise an error for bad responses
logger.debug(f"Response from {source}: {response}")
@@ -222,7 +223,7 @@ class WeatherBrightSky(WeatherProvider):
# Add Preciptable Water (PWAT) with a PVLib method.
key = WeatherDataRecord.key_from_description("Temperature (°C)")
assert key
assert key # noqa: S101
temperature = self.key_to_array(
key=key,
start_datetime=self.start_datetime,
@@ -235,7 +236,7 @@ class WeatherBrightSky(WeatherProvider):
logger.debug(debug_msg)
return
key = WeatherDataRecord.key_from_description("Relative Humidity (%)")
assert key
assert key # noqa: S101
humidity = self.key_to_array(
key=key,
start_datetime=self.start_datetime,

View File

@@ -93,7 +93,7 @@ class WeatherClearOutside(WeatherProvider):
source = "https://clearoutside.com/forecast"
latitude = round(self.config.general.latitude, 2)
longitude = round(self.config.general.longitude, 2)
response = requests.get(f"{source}/{latitude}/{longitude}?desktop=true")
response = requests.get(f"{source}/{latitude}/{longitude}?desktop=true", timeout=10)
response.raise_for_status() # Raise an error for bad responses
logger.debug(f"Response from {source}: {response}")
# We are working on fresh data (no cache), report update time