wip BESS discharging function
This commit is contained in:
parent 8125583313
commit 62cf35c727
Utilities/BESS.py (new file)
@@ -0,0 +1,30 @@
def initialise_SoC(bess):
    """Initialise the state of charge (SoC) for the BESS."""
    for i in range(0, len(bess["units"])):  # initially fully charged
        bess["units"][i]["SoC"] = 1
    return bess


def initial_site_assignment(c, bess):
    """Initialise the site assignment for each BESS unit."""
    k = 0
    while k < len(bess["units"]):
        if k < len(c["site_info"]["sites"]):
            # assign one unit per site, in order
            bess["units"][k]["site"] = c["site_info"]["sites"][k]["name"]
        else:
            # more units than sites: leave the remainder unassigned
            bess["units"][k]["site"] = "Unassigned"
        k += 1
    return bess


def discharge_bess(bess, site_name, dt, discharge_power):
    """Discharge the BESS units assigned to a specific site."""
    # convert discharge power over the time step to discharge energy (kW to kWh)
    discharge_energy = discharge_power * dt / 3600

    for index, unit in enumerate(bess["units"]):
        if unit["site"] == site_name:
            # discharge_energy already accounts for dt, so divide by capacity only
            new_soc = unit["SoC"] - discharge_energy / unit["capacity_kWh"]
            new_soc = 0 if new_soc < 0 else new_soc
            bess["units"][index]["SoC"] = new_soc
    return bess
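To make the SoC arithmetic concrete, a small worked example (the 2096 kWh capacity comes from YAMLs/BESS.yml below; the 1-minute time step and 500 kW discharge power are illustrative assumptions, not values from this commit):

# illustrative check of the SoC decrement applied by discharge_bess
dt = 60                    # assumed time step, seconds
discharge_power = 500      # assumed site load, kW
capacity_kWh = 2096        # from YAMLs/BESS.yml

discharge_energy = discharge_power * dt / 3600   # 500 * 60 / 3600 = 8.33 kWh
delta_soc = discharge_energy / capacity_kWh      # 8.33 / 2096 = 0.004, about 0.4% SoC per step

Note that the c-rate field in YAMLs/BESS.yml is not read by discharge_bess yet; if it were applied as a power cap, 0.5 C on a 2096 kWh unit would correspond to roughly 1048 kW.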
Utilities/LoadProfile.py
@@ -126,7 +126,7 @@ def get_load_profiles(c, dt, batch_start_time, batch_process_duration):
        1 - c["noise"]["range"], 1 + c["noise"]["range"], len(timestamps)
    )

-    # make every 2 seconds the same
+    # make every 2 minutes the same
    for i in range(0, len(noise), 2):
        noise[i : i + 2] = noise[i]

@@ -147,7 +147,7 @@ def get_load_profiles(c, dt, batch_start_time, batch_process_duration):
    )

    # baseline operating hour power is 40% higher than out-of-hours power
-    gain = 1.4
+    gain = 5
    assumed_operating_baseline_power = avg_out_of_hours_power * gain
    baseline_energy = avg_out_of_hours_power * (
        batch_process_duration_hours - no_of_operating_hours

@@ -167,7 +167,7 @@ def get_load_profiles(c, dt, batch_start_time, batch_process_duration):
    peak_profile = generate_peak_profile(idx_peak, c, site)

    # assign base load profile
-    load_profile[idx_operating_hours > 0] = avg_operating_hour_power
+    load_profile[idx_operating_hours > 0] = assumed_operating_baseline_power
    load_profile[idx_operating_hours == 0] = avg_out_of_hours_power

    # smoothen out sharp edges
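For reference, the noise treatment in the first hunk reduces to this standalone sketch (the 1-minute timestamp spacing and 0.1 noise range are assumptions for illustration; the real values come from the config):

import numpy as np

# assumed: 1-minute timestamps over one day, multiplicative noise in [0.9, 1.1]
timestamps = np.arange(0, 24 * 60 * 60, 60)
noise = np.random.uniform(1 - 0.1, 1 + 0.1, len(timestamps))

# hold each pair of consecutive samples at the same value,
# so the noise only changes every 2 minutes
for i in range(0, len(noise), 2):
    noise[i : i + 2] = noise[i]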
YAMLs/BESS.yml (new file)
@@ -0,0 +1,16 @@
units:
  - name: MBESS 1
    capacity_kWh: 2096
    c-rate: 0.5
  - name: MBESS 2
    capacity_kWh: 2096
    c-rate: 0.5
  - name: MBESS 3
    capacity_kWh: 2096
    c-rate: 0.5
  - name: MBESS 4
    capacity_kWh: 2096
    c-rate: 0.5
  - name: MBESS 5
    capacity_kWh: 2096
    c-rate: 0.5
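A quick sketch of how this file is consumed, mirroring the yaml.safe_load call added to main.py below (the inline comments show what the YAML above parses to):

import yaml

# parse the BESS definition the same way main.py does
with open("YAMLs/BESS.yml") as f:
    bess_data = yaml.safe_load(f)

# bess_data is a dict with a "units" list of per-unit dicts
print(len(bess_data["units"]))                # 5
print(bess_data["units"][0]["name"])          # MBESS 1
print(bess_data["units"][0]["capacity_kWh"])  # 2096
print(bess_data["units"][0]["c-rate"])        # 0.5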
YAMLs/config.yml
@@ -8,3 +8,4 @@ noise:
paths:
  site_info: YAMLs/site_info.yaml
+  bess: YAMLs/BESS.yml

@@ -35,5 +35,5 @@ peak_duration:
  min: 1
  max: 4
out_of_hours_consumption:
-  min: 0.05
+  min: 0.01
-  max: 0.15
+  max: 0.08
main.py
@@ -1,12 +1,18 @@
+import numpy as np
import yaml
from Utilities.Time import get_start_time
from Utilities.LoadProfile import get_load_profiles
+from Utilities.BESS import initialise_SoC, initial_site_assignment, discharge_bess
import matplotlib.pyplot as pl
import pandas as pd
+from concurrent.futures import ThreadPoolExecutor

# read config file
c = yaml.safe_load(open("YAMLs/config.yml"))

+# read BESS data
+bess_data = yaml.safe_load(open(c["paths"]["bess"]))

## simulation time setup
# get current time
c["sim_start_time"] = get_start_time()
@@ -15,6 +21,7 @@ dt = c["sim_time"]["time_step_minutes"] * 60
# compute end time based on duration in days
duration = c["sim_time"]["duration_days"] * 24 * 60 * 60
c["sim_end_time"] = c["sim_start_time"] + duration
+timestamps = np.arange(c["sim_start_time"], c["sim_end_time"] + 1, dt)

# batch process hours in seconds
c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] * 60 * 60
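As a worked example of the timestamp array added above (duration_days and time_step_minutes are not shown in this commit, so the values here are assumptions for illustration):

import numpy as np

sim_start_time = 0
dt = 1 * 60                  # assumed 1-minute time step, in seconds
duration = 1 * 24 * 60 * 60  # assumed 1-day simulation, in seconds
sim_end_time = sim_start_time + duration

# the "+ 1" makes the range inclusive of sim_end_time
timestamps = np.arange(sim_start_time, sim_end_time + 1, dt)
print(len(timestamps))       # 1441 steps (24 * 60 + 1)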
@@ -22,17 +29,73 @@ c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] * 60 * 60
# load site info
c["site_info"] = yaml.safe_load(open(c["paths"]["site_info"]))

-cumulative_load_profiles = pd.DataFrame()
-
-# loop through timesteps
-for i in range(
-    c["sim_start_time"], c["sim_end_time"], c["sim_time"]["batch_process_seconds"]
-):
-
-    # generate load profiles
-    load_profiles = get_load_profiles(
+
+def generate_and_cache_profiles(c, dt):
+    """Generates load profiles for all sites and caches them."""
+    return get_load_profiles(
        c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"]
    )

-    # add to cumulative load profiles
-    cumulative_load_profiles = pd.concat([cumulative_load_profiles, load_profiles], axis=1
+
+# initialise BESS
+bess_data = initialise_SoC(bess_data)
+bess_data = initial_site_assignment(c, bess_data)
+# bess SoC dataframe
+bess_soc = pd.DataFrame(columns=[unit["name"] for unit in bess_data["units"]])
+
+# get initial load profiles
+cumulative_load_profiles = get_load_profiles(
+    c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"]
+)
+
+# flag: a load-profile generation job is running in the background
+is_running_in_async = False
+
+# loop through timesteps
+with ThreadPoolExecutor() as executor:
+    for i in range(0, len(timestamps)):
+        # keep generating load profiles while the cache does not yet cover all timestamps
+        if len(cumulative_load_profiles) <= len(timestamps):
+            if is_running_in_async is False:
+                # generate load profiles in a background thread
+                future = executor.submit(generate_and_cache_profiles, c, dt)
+                is_running_in_async = True
+        else:
+            is_running_in_async = False
+
+        # discharge BESS for each site
+        for site in c["site_info"]["sites"]:
+            site_name = site["name"]
+            discharge_power = cumulative_load_profiles[site_name].iloc[i]
+            bess_data = discharge_bess(bess_data, site_name, dt, discharge_power)
+        temp_soc = [unit["SoC"] for unit in bess_data["units"]]
+
+        # append SoC to dataframe
+        bess_soc = pd.concat(
+            [
+                bess_soc,
+                pd.DataFrame(
+                    [temp_soc], columns=bess_soc.columns, index=[timestamps[i]]
+                ),
+            ],
+            axis=0,
+        )
+
+        # if the background job has finished, add its profiles to the cumulative cache
+        if is_running_in_async:
+            if future.done():
+                load_profiles = future.result()
+                cumulative_load_profiles = pd.concat(
+                    [
+                        cumulative_load_profiles,
+                        load_profiles,
+                    ],
+                    axis=0,
+                )
+                print(len(cumulative_load_profiles), "load profiles generated")
+                is_running_in_async = False
+
+pl.plot(bess_soc.index, bess_soc.values, label="BESS SoC", alpha=0.5)
+pl.xlabel("Time (s since epoch)")
+pl.show()
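The prefetching pattern in the loop above can be reduced to a minimal, self-contained sketch (the producer function and timings below are illustrative stand-ins, not part of this commit): submit background work once, keep stepping the main loop, and fold the result in only when future.done() reports completion.

import time
from concurrent.futures import ThreadPoolExecutor

def slow_producer():
    """Stand-in for generate_and_cache_profiles: takes a while to return data."""
    time.sleep(0.5)
    return list(range(10))

cache = []
job_running = False

with ThreadPoolExecutor() as executor:
    for step in range(20):
        # kick off background work once instead of blocking the loop
        if not job_running:
            future = executor.submit(slow_producer)
            job_running = True

        # ... per-step work happens here (discharging, logging, etc.) ...
        time.sleep(0.1)

        # poll the future; collect the result when it is ready
        if job_running and future.done():
            cache.extend(future.result())
            job_running = False

print(len(cache), "items cached")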