MEOS/main.py
2025-07-18 17:11:50 +01:00

136 lines
4.4 KiB
Python

import numpy as np
import yaml
from Utilities.Time import get_start_time
from Utilities.LoadProfile import get_load_profiles
from Utilities.BESS import (
initialise_SoC,
initial_site_assignment,
discharge_bess,
predict_swap_time,
)
import matplotlib.pyplot as pl
import pandas as pd
from concurrent.futures import ThreadPoolExecutor
# --- Simulation configuration --------------------------------------------
# Read the main config file. Use context managers so file handles are
# closed promptly instead of leaking until garbage collection.
with open("YAMLs/config.yml") as f:
    c = yaml.safe_load(f)
# Read BESS (battery energy storage system) unit definitions.
with open(c["paths"]["bess"]) as f:
    bess_data = yaml.safe_load(f)

## simulation time setup
# simulation start time (epoch seconds)
c["sim_start_time"] = get_start_time()
# time step: minutes -> seconds
dt = c["sim_time"]["time_step_minutes"] * 60
# total simulated duration: days -> seconds
duration = c["sim_time"]["duration_days"] * 24 * 60 * 60
c["sim_end_time"] = c["sim_start_time"] + duration
# one timestamp per time step, inclusive of the end time
timestamps = np.arange(c["sim_start_time"], c["sim_end_time"] + 1, dt)
# batch processing window: hours -> seconds
c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] * 60 * 60
# load per-site information
with open(c["paths"]["site_info"]) as f:
    c["site_info"] = yaml.safe_load(f)
def generate_and_cache_profiles(c, dt):
    """Generate load profiles for all sites and cache them.

    Thin wrapper around ``get_load_profiles`` so it can be submitted to a
    thread-pool executor with just the config dict and time step.

    Args:
        c: simulation config dict; must contain ``sim_start_time`` and
           ``sim_time.batch_process_seconds``.
        dt: simulation time step in seconds.

    Returns:
        Whatever ``get_load_profiles`` returns (a per-site profile table).
    """
    start_time = c["sim_start_time"]
    window_seconds = c["sim_time"]["batch_process_seconds"]
    return get_load_profiles(c, dt, start_time, window_seconds)
# initialise BESS state of charge and assign each unit to a site
bess_data = initialise_SoC(bess_data)
bess_data = initial_site_assignment(c, bess_data)

unit_names = [unit["name"] for unit in bess_data["units"]]

# full SoC history: one column per BESS unit, rows appended per time step
bess_soc_since_start = pd.DataFrame(columns=unit_names)

# Empty per-cycle template frame. Each unit's trace is reset to this at the
# start of a new charging cycle; the traces feed swap-time prediction.
init_df = pd.DataFrame(columns=["Timestamp", "SoC"])
bess_soc_for_cycle = {name: init_df for name in unit_names}

# first batch of load profiles, covering the initial processing window
cumulative_load_profiles = get_load_profiles(
    c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"]
)

# flag: a background profile-generation task is currently in flight
is_running_in_async = False
# loop through
# Main simulation loop: one iteration per simulation time step. A thread
# pool lets load-profile generation run in the background while the
# per-step BESS bookkeeping below proceeds.
with ThreadPoolExecutor() as executor:
    for i in range(0, len(timestamps)):
        # While the cached profiles do not yet cover every simulation
        # timestamp, keep one background generation task in flight.
        # NOTE(review): the original comment claimed generation starts
        # "200 seconds before data required", but the trigger here is
        # purely length-based — confirm the intended pre-fetch policy.
        if len(cumulative_load_profiles) <= len(timestamps):
            if is_running_in_async is False:
                # Submit asynchronous profile generation; the result is
                # collected further down once future.done() is True.
                future = executor.submit(generate_and_cache_profiles, c, dt)
                is_running_in_async = True
        else:
            # Profiles cover the whole horizon — stop requesting more.
            is_running_in_async = False
        # Discharge the BESS at each site by that site's demand for this
        # time step.
        for site in c["site_info"]["sites"]:
            site_name = site["name"]
            # site demand (power) at the current time step
            discharge_power = cumulative_load_profiles[site_name].iloc[i]
            bess_data = discharge_bess(bess_data, site_name, dt, discharge_power)
        # snapshot of every unit's SoC after this step's discharge
        temp_soc = [unit["SoC"] for unit in bess_data["units"]]
        # Append the snapshot (indexed by timestamp) to the full history.
        # NOTE(review): pd.concat inside the loop is quadratic overall;
        # consider accumulating rows and concatenating once after the loop.
        bess_soc_since_start = pd.concat(
            [
                bess_soc_since_start,
                pd.DataFrame(
                    [temp_soc],
                    columns=bess_soc_since_start.columns,
                    index=[timestamps[i]],
                ),
            ],
            axis=0,
        )
        # Record each unit's SoC for the current discharge cycle only.
        for unit in bess_data["units"]:
            unit_name = unit["name"]
            # A fully drained unit starts a new cycle: reset its trace to
            # the empty template before recording this step's sample.
            if unit["SoC"] == 0:
                bess_soc_for_cycle[unit_name] = init_df
            bess_soc_for_cycle[unit_name] = pd.concat(
                [
                    bess_soc_for_cycle[unit_name],
                    pd.DataFrame(
                        [[timestamps[i], unit["SoC"]]],
                        columns=["Timestamp", "SoC"],
                    ),
                ],
                axis=0,
            )
        # Predict swap times from each unit's SoC trajectory this cycle.
        swap_times = predict_swap_time(bess_soc_for_cycle)
        # If the background generation task has finished, fold its output
        # into the cumulative profile cache and clear the in-flight flag so
        # a new batch can be requested on a later iteration.
        if is_running_in_async:
            if future.done():
                load_profiles = future.result()
                cumulative_load_profiles = pd.concat(
                    [
                        cumulative_load_profiles,
                        load_profiles,
                    ],
                    axis=0,
                )
                print(len(cumulative_load_profiles), "load profiles generated")
                is_running_in_async = False
# Plot results. Each pl.show() blocks until its window is closed.
pl.plot(cumulative_load_profiles)
pl.show()
pl.plot(bess_soc_since_start)
# Bug fix: the SoC history figure was drawn but never displayed because the
# final pl.show() call was missing.
pl.show()