diff --git a/Utilities/BESS.py b/Utilities/BESS.py
index a10c421..05b4dbd 100644
--- a/Utilities/BESS.py
+++ b/Utilities/BESS.py
@@ -26,5 +26,19 @@ def discharge_bess(bess, site_name, dt, discharge_power):
         if unit["site"] == site_name:
             new_soc = unit["SoC"] - (dt * discharge_energy) / unit["capacity_kWh"]
             new_soc = 0 if new_soc < 0 else new_soc
-            bess["units"][index]["SoC"] = new_soc
+        else:
+            new_soc = unit["SoC"]
+
+        # update SoC
+        bess["units"][index]["SoC"] = new_soc
     return bess
+
+
+def predict_swap_time(bess_soc_for_cycle):
+    """Predict the swap time for each BESS unit based on its SoC history."""
+    swap_times = {}
+    for unit_name, df in bess_soc_for_cycle.items():
+        # Find the timestamp when SoC reaches 0
+        swap_time = df[df["SoC"] == 0]["Timestamp"].min()
+        swap_times[unit_name] = swap_time
+    return swap_times
diff --git a/YAMLs/BESS.yml b/YAMLs/BESS.yml
index 2df1cea..3796a0d 100644
--- a/YAMLs/BESS.yml
+++ b/YAMLs/BESS.yml
@@ -14,3 +14,9 @@ units:
   - name: MBESS 5
     capacity_kWh: 2096
     c-rate: 0.5
+  - name: MBESS 6
+    capacity_kWh: 2096
+    c-rate: 0.5
+buffer:
+  unit: percentage of buffer
+  min: 0.1
diff --git a/main.py b/main.py
index d7ec2e0..ddbc585 100644
--- a/main.py
+++ b/main.py
@@ -2,11 +2,17 @@ import numpy as np
 import yaml
 from Utilities.Time import get_start_time
 from Utilities.LoadProfile import get_load_profiles
-from Utilities.BESS import initialise_SoC, initial_site_assignment, discharge_bess
+from Utilities.BESS import (
+    initialise_SoC,
+    initial_site_assignment,
+    discharge_bess,
+    predict_swap_time,
+)
 import matplotlib.pyplot as pl
 import pandas as pd
 from concurrent.futures import ThreadPoolExecutor
+
 # read config file
 c = yaml.safe_load(open("YAMLs/config.yml"))
@@ -41,7 +47,13 @@ def generate_and_cache_profiles(c, dt):
 bess_data = initialise_SoC(bess_data)
 bess_data = initial_site_assignment(c, bess_data)
 # bess SoC dataframe
-bess_soc = pd.DataFrame(columns=[unit["name"] for unit in bess_data["units"]])
+bess_soc_since_start = pd.DataFrame(
+    columns=[unit["name"] for unit in bess_data["units"]]
+)
+# bess SoC dictionary, meant to track SoC progress over each cycle.
+# resets after each charging cycle. This is for predicting swap times.
+init_df = pd.DataFrame(columns=["Timestamp", "SoC"])
+bess_soc_for_cycle = {unit["name"]: init_df for unit in bess_data["units"]}
 # get initial load profiles
 cumulative_load_profiles = get_load_profiles(
@@ -71,16 +83,38 @@ with ThreadPoolExecutor() as executor:
         temp_soc = [unit["SoC"] for unit in bess_data["units"]]
         # append SoC to dataframe
-        bess_soc = pd.concat(
+        bess_soc_since_start = pd.concat(
             [
-                bess_soc,
+                bess_soc_since_start,
                 pd.DataFrame(
-                    [temp_soc], columns=bess_soc.columns, index=[timestamps[i]]
+                    [temp_soc],
+                    columns=bess_soc_since_start.columns,
+                    index=[timestamps[i]],
                 ),
             ],
             axis=0,
         )
+        # assign SoC for cycle
+        for unit in bess_data["units"]:
+            unit_name = unit["name"]
+            # reset df if SoC is 0. Start a new cycle
+            if unit["SoC"] == 0:
+                bess_soc_for_cycle[unit_name] = init_df
+            bess_soc_for_cycle[unit_name] = pd.concat(
+                [
+                    bess_soc_for_cycle[unit_name],
+                    pd.DataFrame(
+                        [[timestamps[i], unit["SoC"]]],
+                        columns=["Timestamp", "SoC"],
+                    ),
+                ],
+                axis=0,
+            )
+
+        # predict swap times
+        swap_times = predict_swap_time(bess_soc_for_cycle)
+
         # add to cumulative load profiles
         # check if future exists and is done
         if is_running_in_async:
@@ -96,6 +130,6 @@ with ThreadPoolExecutor() as executor:
 print(len(cumulative_load_profiles), "load profiles generated")
 is_running_in_async = False
-pl.plot(bess_soc.index, bess_soc.values, label="BESS SoC", alpha=0.5)
+pl.plot(cumulative_load_profiles)
 pl.show()
-pl.xlabel("Time (s since epoch)")
+pl.plot(bess_soc_since_start)
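
Note: as a quick illustration of how the new predict_swap_time helper is expected to behave on a per-cycle SoC history, here is a minimal standalone sketch. The unit name, timestamps, and SoC values below are invented for the example; only the selection expression is taken from the patch.

import pandas as pd

# Illustrative per-cycle SoC history for a single unit (values invented for this example).
bess_soc_for_cycle = {
    "MBESS 6": pd.DataFrame(
        {
            "Timestamp": [0, 900, 1800, 2700],
            "SoC": [0.6, 0.3, 0.0, 0.0],
        }
    )
}

# Same selection the patched helper performs: the earliest timestamp at which SoC hits 0.
for unit_name, df in bess_soc_for_cycle.items():
    swap_time = df[df["SoC"] == 0]["Timestamp"].min()
    print(unit_name, swap_time)  # MBESS 6 1800

If a unit never reaches 0 SoC within the cycle, the selection is empty and .min() returns NaN, so callers of predict_swap_time may need to handle that case.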