Compare commits

...

6 Commits

SHA1 Message Date
089078a9ea battery swap method wip 2025-07-28 18:18:09 +01:00
7c5d3f5b1d estimated time to swap calculation 2025-07-20 23:07:48 +01:00
d3dbd9c9f3 largely working dashboard 2025-07-20 17:05:13 +01:00
6d1199e37a wip sim dashboard 2025-07-20 16:15:11 +01:00
6c994e970c dashboard wip 2025-07-20 15:37:43 +01:00
35dd46e799 font setting for dashboard 2025-07-19 18:37:52 +01:00
23 changed files with 414 additions and 96 deletions

34
.streamlit/config.toml Normal file

@@ -0,0 +1,34 @@
[server]
enableStaticServing = true
[[theme.fontFaces]]
family = "Exo2"
url = "app/static/EXO2-VARIABLEFONT_WGHT.TTF"
style = "normal"
weight = 400
[[theme.fontFaces]]
family = "Exo2"
url = "app/static/EXO2-BOLD.TTF"
style = "bold"
weight = 700
[[theme.fontFaces]]
family = "Exo2"
url = "app/static/EXO2-ITALIC.TTF"
style = "italic"
weight = 400
[[theme.fontFaces]]
family = "Exo2"
url = "app/static/EXO2-BOLDITALIC.TTF"
style = "bold italic"
weight = 700
[theme]
base="dark"
primaryColor="#fcd913"
font="Exo2"
codeFont="Exo2"

28
.vscode/launch.json vendored Normal file

@@ -0,0 +1,28 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "🔍 Debug Streamlit",
"type": "debugpy",
"request": "launch",
// Tell VS Code to use `python -m streamlit run ...`
"module": "streamlit",
// Replace `app.py` (or dashboard.py) with your entry-point
"args": [
"run",
"dashboard.py",
// (optional but highly recommended) disable the auto-reloader
"--server.runOnSave=false"
],
// so you can interact with the app and see logs
"console": "integratedTerminal",
// only step into *your* code, not the Streamlit internals
"justMyCode": true
}
]
}
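
This launch configuration is equivalent to running python -m streamlit run dashboard.py --server.runOnSave=false from the repository root; using the debugpy "module" entry (rather than launching streamlit as an external program) lets breakpoints in dashboard.py and main.py bind normally.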

Utilities/BESS.py

@@ -31,13 +31,13 @@ def discharge_bess(bess, site_name, dt, discharge_power):
continue
if unit["site"] == site_name:
new_soc = unit["SoC"] - (dt * discharge_energy) / unit["capacity_kWh"]
new_soc = unit["SoC"] - discharge_energy / unit["capacity_kWh"]
new_soc = 0 if new_soc < 0 else new_soc
else:
# maintain SoC if not assigned to the site
new_soc = unit["SoC"]
continue
# update SoC
# update SoC and current load
bess["units"][index]["current_load_kW"] = discharge_power
bess["units"][index]["SoC"] = new_soc
return bess
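
The changed line above now divides the per-step energy by capacity only once. A minimal sketch of that arithmetic, assuming discharge_energy is computed earlier in discharge_bess as the kWh drawn during the step (the step_soc helper is illustrative, not part of the repo):

def step_soc(soc, discharge_power_kW, dt_s, capacity_kWh):
    # energy drawn from the unit over one time step, in kWh
    discharge_energy = discharge_power_kW * dt_s / 3600.0
    # subtract the fraction of capacity used; clamp at empty
    return max(soc - discharge_energy / capacity_kWh, 0.0)

# a 50 kW load over a 15-minute step on a 100 kWh unit drops SoC from 1.0 to 0.875
assert abs(step_soc(1.0, 50.0, 900, 100.0) - 0.875) < 1e-9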
@@ -72,7 +72,7 @@ def predict_swap_time(bess_soc_for_cycle):
return swap_times
def update_cycle_SoC(bess_data, bess_soc_for_cycle, timestamps):
def update_cycle_SoC(bess_data, bess_soc_for_cycle, timestamp):
init_df = pd.DataFrame(columns=["Timestamp", "SoC"])
# assign SoC for cycle
for unit in bess_data["units"]:
@@ -85,20 +85,60 @@ def update_cycle_SoC(bess_data, bess_soc_for_cycle, timestamps):
[
bess_soc_for_cycle[unit_name],
pd.DataFrame(
[[timestamps[i], unit["SoC"]]],
[[timestamp, unit["SoC"]]],
columns=["Timestamp", "SoC"],
),
],
axis=0,
)
return bess_soc_for_cycle
def arrange_swap(bess_data, c):
for unit in bess_data["units"]:
if unit["SoC"] < c["bess"]["buffer"]:
# look for an unassigned BESS unit with SoC at 100%
for candidate in bess_data["units"]:
if candidate["SoC"] == 1 and candidate["site"] == "Unassigned":
# assign the candidate to the site
candidate["site"] = unit["site"]
break
def arrange_swap(c, bess_data, bess_soc_for_cycle):
# identify BESS units that need swapping
units_needing_swap = [
unit for unit in bess_data["units"] if unit["SoC"] < bess_data["buffer"]["min"]
]
if not units_needing_swap:
return bess_data, bess_soc_for_cycle
# identify BESS units that are unassigned and fully charged
unassigned_fully_charged = [
unit
for unit in bess_data["units"]
if unit["SoC"] == 1 and unit["site"] == "Unassigned"
]
if not unassigned_fully_charged:
return bess_data, bess_soc_for_cycle
# assign unassigned fully charged units to units needing swap
for unit in units_needing_swap:
# take the first unassigned fully charged unit
new_unit = unassigned_fully_charged.pop(0)
# assign it to the site of the unit needing swap
new_unit["site"] = unit["site"]
# reset SoC to 1 (fully charged)
new_unit["SoC"] = 1
# set current load to existing load
new_unit["current_load_kW"] = unit["current_load_kW"]
# reset old unit
unit["site"] = "Unassigned" # mark the old unit as unassigned
unit["current_load_kW"] = 0 # reset current load
# update the BESS data
# search for the index of the unit needing swap and replace it with the new unit
index = next(
i for i, d in enumerate(bess_data["units"]) if d["name"] == unit["name"]
)
bess_data["units"][index] = new_unit
# search for index of new unit, and replace with old unit
new_index = next(
i for i, d in enumerate(bess_data["units"]) if d["name"] == new_unit["name"]
)
bess_data["units"][new_index] = unit
return bess_data, bess_soc_for_cycle
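
A minimal usage sketch for the new arrange_swap, with an illustrative bess_data dict (the unit names, site, and 0.2 buffer are assumptions, not the repo's YAML); c and bess_soc_for_cycle are only passed through by the code shown:

bess_data = {
    "buffer": {"min": 0.2},
    "units": [
        {"name": "MBESS-1", "site": "Site A", "SoC": 0.15, "current_load_kW": 40.0},
        {"name": "MBESS-2", "site": "Unassigned", "SoC": 1.0, "current_load_kW": 0.0},
    ],
}
bess_data, _ = arrange_swap(c={}, bess_data=bess_data, bess_soc_for_cycle={})
# MBESS-2 is now assigned to Site A at full charge carrying 40 kW;
# MBESS-1 is marked Unassigned with zero load, ready to recharge.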

67
Utilities/DataVis.py Normal file

@@ -0,0 +1,67 @@
import pandas as pd
def format_dataframe(
bess_soc_for_cycle, bess_data, load_profiles_since_start, swap_time, current_time
):
"""Formats the DataFrame for display in the dashboard."""
# Create a DataFrame for sites
# columns = ["Site Name", "MBESS Unit", "Current Load (kW)", "SoC (%)", "Predicted Swap Time"]
status_df = pd.DataFrame(
columns=[
"Site Name",
"MBESS Unit",
"Current Load (kW)",
"SoC (%)",
"Predicted Swap Time",
"Estimated Time To Swap",
"Cycle Discharge Profile",
"Load Profile Since Start",
]
)
for site in load_profiles_since_start.keys():
index = next(i for i, d in enumerate(bess_data["units"]) if d["site"] == site)
soc = bess_data["units"][index]["SoC"]
current_load = bess_data["units"][index]["current_load_kW"]
unit_name = bess_data["units"][index]["name"]
predicted_swap_time = swap_time.get(unit_name, "N/A")
# calculate estimated time to swap
if isinstance(predicted_swap_time, float):
estimated_time_to_swap = predicted_swap_time - current_time
estimated_time_to_swap = pd.to_timedelta(estimated_time_to_swap, unit="s")
else:
estimated_time_to_swap = "N/A"
# convert predicted_swap_time to a readable format
if isinstance(predicted_swap_time, float):
predicted_swap_time = pd.to_datetime(
predicted_swap_time, unit="s"
).strftime("%Y-%m-%d %H:%M:%S")
status_df = pd.concat(
[
status_df,
pd.DataFrame(
[
{
"Site Name": site,
"MBESS Unit": unit_name,
"Current Load (kW)": current_load,
"SoC (%)": soc * 100, # Convert to percentage
"Predicted Swap Time": predicted_swap_time,
"Estimated Time To Swap": estimated_time_to_swap,
"Cycle Discharge Profile": bess_soc_for_cycle[unit_name][
"SoC"
].tolist(),
"Load Profile Since Start": load_profiles_since_start[
site
].tolist(),
}
]
),
],
ignore_index=True,
)
return status_df
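
Note: the LineChartColumn cells used by the dashboard expect list-like values per row, which is why the cycle SoC series and each site's load profile are converted with .tolist() before being packed into a single DataFrame row here.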

111
dashboard.py Normal file

@@ -0,0 +1,111 @@
# dashboard.py
import streamlit as st
import matplotlib.pyplot as pl
import pandas as pd
import main
from main import (
start_sim,
stop_sim,
reset_sim,
)
import time
st.set_page_config(layout="wide")
# Header
st.logo("https://rooftop.my/logo.svg", size="large")
st.title("MEOS Control Dashboard")
st.subheader("Mobile Energy Operations Simulation (MEOS)")
st.text("Run MEOS Simulation and Monitor MBESS Status")
# some instructions
# --- SESSION STATE SETUP ---
if "running" not in st.session_state:
st.session_state.running = False
if "plot_area" not in st.session_state:
st.session_state.plot_area = st.empty()
# --- CONTROL BUTTONS ---
col1, col2, col3 = st.columns(3)
with col1:
if st.button("Start", use_container_width=True):
start_sim()
st.session_state.running = True
with col2:
if st.button("Stop", use_container_width=True):
stop_sim()
st.session_state.running = False
with col3:
if st.button("Reset", use_container_width=True):
reset_sim()
st.session_state.running = False
placeholder = st.empty()
def show_table():
df = main.status_df
if df is None or df.empty:
placeholder.text("Waiting for first simulation step…")
else:
placeholder.dataframe(
df,
column_config={
"Site Name": st.column_config.TextColumn("Site Name"),
"MBESS Unit": st.column_config.TextColumn(
"MBESS Unit", help="Name of the MBESS unit at the site"
),
"Current Load (kW)": st.column_config.NumberColumn(
"Current Load (kW)", help="Current BESS discharge load in kW"
),
"SoC (%)": st.column_config.ProgressColumn(
"State of Charge",
help="State of Charge of the BESS unit",
format="%.1f%%",
min_value=0,
max_value=100,
),
"Predicted Swap Time": st.column_config.TextColumn(
"Predicted Swap Time", help="Predicted time for BESS swap"
),
"Cycle Discharge Profile": st.column_config.LineChartColumn(
"Cycle Discharge Profile",
help="Cycle discharge profile of the BESS unit",
),
"Load Profile Since Start": st.column_config.LineChartColumn(
"Load Profile Since Start",
help="Load profile since the start of the simulation",
),
},
use_container_width=True,
)
if st.session_state.running:
# display simulation start time
st.metric(
"Simulation Start Time",
value=pd.to_datetime(main.c["sim_start_time"], unit="s").strftime(
"%Y-%m-%d %H:%M:%S"
),
)
st.metric(
"Current Time",
value=pd.to_datetime(
main.c["sim_start_time"] + main.sim_i * main.dt, unit="s"
).strftime("%Y-%m-%d %H:%M:%S"),
)
st.metric(
"Time Elapsed in DD:HH:MM:SS",
value=str(pd.to_timedelta(main.sim_i * main.dt, unit="s")),
)
# display BESS data, SoC, Load Consumption
show_table()
time.sleep(1)
st.rerun()
else:
show_table()
st.info("Simulation not running")
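
Refresh pattern: Start spawns the background simulation thread in main.py; while st.session_state.running is true, the script sleeps one second and calls st.rerun(), so each rerun re-reads main.status_df as the thread updates it. Stop and Reset only flip the module-level running flag (and, for Reset, re-initialise state) that the loop checks.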

158
main.py

@@ -3,16 +3,20 @@ import yaml
from Utilities.Time import get_start_time
from Utilities.LoadProfile import get_load_profiles
from Utilities.BESS import (
arrange_swap,
initialise_SoC,
initial_site_assignment,
discharge_bess,
predict_swap_time,
update_cycle_SoC,
)
from Utilities.DataVis import format_dataframe
import matplotlib.pyplot as pl
import pandas as pd
from concurrent.futures import ThreadPoolExecutor
import threading ### <<< CONTROL ADDED >>>
import time ### <<< CONTROL ADDED >>>
# read config file
c = yaml.safe_load(open("YAMLs/config.yml"))
@@ -21,105 +25,139 @@ c = yaml.safe_load(open("YAMLs/config.yml"))
bess_data = yaml.safe_load(open(c["paths"]["bess"]))
## simulation time setup
# get current time
c["sim_start_time"] = get_start_time()
# get time step in minutes, then convert to seconds
dt = c["sim_time"]["time_step_minutes"] * 60
# compute end time based on duration in days
duration = c["sim_time"]["duration_days"] * 24 * 60 * 60
c["sim_end_time"] = c["sim_start_time"] + duration
timestamps = np.arange(c["sim_start_time"], c["sim_end_time"] + 1, dt)
# batch process hours in seconds
c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] * 60 * 60
load_profiles_since_start = None
status_df = None
# load site info
c["site_info"] = yaml.safe_load(open(c["paths"]["site_info"]))
def generate_and_cache_profiles(c, dt):
"""Generates load profiles for all sites and caches them."""
return get_load_profiles(
c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"]
)
### <<< CONTROL ADDED >>> Initialize simulation state globals
sim_i = 0
running = False
is_running_in_async = False
sim_lock = threading.Lock()
# initialise BESS
bess_data = initialise_SoC(bess_data)
bess_data = initial_site_assignment(c, bess_data)
# bess SoC dataframe
def _init_state():
global bess_data, bess_soc_since_start, bess_soc_for_cycle, cumulative_load_profiles, load_profiles_since_start, init_df
bd = initialise_SoC(bess_data.copy())
bd = initial_site_assignment(c, bd)
bess_data = bd
bess_soc_since_start = pd.DataFrame(
columns=[unit["name"] for unit in bess_data["units"]]
)
# bess SoC dictionary, meant to track SoC progress over each cycle.
# resets after each charging cycle. This is for predicting swap times.
init_df = pd.DataFrame(columns=["Timestamp", "SoC"])
bess_soc_for_cycle = {unit["name"]: init_df for unit in bess_data["units"]}
# get initial load profiles
cumulative_load_profiles = get_load_profiles(
c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"]
)
# async function is running
is_running_in_async = False
# loop through
# do initial setup
_init_state()
def simulation_loop():
"""Runs the loop, stepping through timestamps until stopped or finished."""
global sim_i, running, is_running_in_async, cumulative_load_profiles, bess_data, bess_soc_for_cycle, load_profiles_since_start, status_df
with ThreadPoolExecutor() as executor:
for i in range(0, len(timestamps)):
# start generating load profiles 200 seconds before data required
while True:
with sim_lock:
if not running or sim_i >= len(timestamps):
break
i = sim_i
sim_i += 1
# pre-fetch next batch if needed
if len(cumulative_load_profiles) <= len(timestamps):
if is_running_in_async is False:
# generate load profiles
future = executor.submit(generate_and_cache_profiles, c, dt)
if not is_running_in_async:
future = executor.submit(
get_load_profiles,
c,
dt,
c["sim_start_time"],
c["sim_time"]["batch_process_seconds"],
)
is_running_in_async = True
else:
is_running_in_async = False
# check if any BESS units are below threshold (buffer as defined in config)
# discharge BESS for each site
for site in c["site_info"]["sites"]:
site_name = site["name"]
discharge_power = cumulative_load_profiles[site_name].iloc[i]
bess_data = discharge_bess(bess_data, site_name, dt, discharge_power)
temp_soc = [unit["SoC"] for unit in bess_data["units"]]
name = site["name"]
p = cumulative_load_profiles[name].iloc[i]
bess_data = discharge_bess(bess_data, name, dt, p)
# append SoC to dataframe
bess_soc_since_start = pd.concat(
[
bess_soc_since_start,
pd.DataFrame(
[temp_soc],
columns=bess_soc_since_start.columns,
index=[timestamps[i]],
),
],
axis=0,
# record SoC
temp_soc = [u["SoC"] for u in bess_data["units"]]
bess_soc_since_start.loc[timestamps[i]] = temp_soc
# update cycle SoC and predict swaps
bess_soc_for_cycle = update_cycle_SoC(
bess_data, bess_soc_for_cycle, timestamps[i]
)
# update cycle SoC
# this is for predicting swap times
bess_soc_for_cycle = update_cycle_SoC(bess_data, bess_soc_for_cycle, timestamps)
# predict swap times
swap_times = predict_swap_time(bess_soc_for_cycle)
# add to cumulative load profiles
# check if future exists and is done
if is_running_in_async:
if future.done():
# trigger swap if needed
bess_data, bess_soc_for_cycle = arrange_swap(
c, bess_data, bess_soc_for_cycle
)
# integrate newly fetched profiles
if is_running_in_async and future.done():
load_profiles = future.result()
cumulative_load_profiles = pd.concat(
[
cumulative_load_profiles,
load_profiles,
],
axis=0,
[cumulative_load_profiles, load_profiles], axis=0
)
print(len(cumulative_load_profiles), "load profiles generated")
print(len(cumulative_load_profiles), "profiles generated")
is_running_in_async = False
pl.plot(cumulative_load_profiles)
pl.show()
pl.plot(bess_soc_since_start)
load_profiles_since_start = cumulative_load_profiles.iloc[: i + 1]
# format data for display
status_df = format_dataframe(
bess_soc_for_cycle,
bess_data,
load_profiles_since_start,
swap_times,
timestamps[i],
)
# small sleep to allow dashboard to refresh / release GIL
time.sleep(0.01)
### <<< CONTROL ADDED >>> Control functions
def start_sim():
"""Starts the simulation in a background thread."""
global running, sim_thread
if not running:
running = True
sim_thread = threading.Thread(target=simulation_loop, daemon=True)
sim_thread.start()
def stop_sim():
"""Stops the simulation loop."""
global running
running = False
def reset_sim():
"""Stops and re-initializes the simulation state."""
global running, sim_i
running = False
sim_i = 0
_init_state()
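
A minimal headless sketch of the control functions, assuming main is imported outside Streamlit and the YAML config and site files are in place (the five-second run is arbitrary):

import time
import main

main.start_sim()        # spawns the daemon simulation thread
time.sleep(5)           # let a few time steps run
main.stop_sim()         # the loop exits at its next running check
print(main.status_df)   # latest formatted snapshot, or None if no step completed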

BIN
static/EXO2-BLACK.TTF Normal file

Binary file not shown.

BIN
static/EXO2-BLACKITALIC.TTF Normal file

Binary file not shown.

BIN
static/EXO2-BOLD.TTF Normal file

Binary file not shown.

BIN
static/EXO2-BOLDITALIC.TTF Normal file

Binary file not shown.

BIN
static/EXO2-EXTRABOLD.TTF Normal file

Binary file not shown.

Binary file not shown.

BIN
static/EXO2-EXTRALIGHT.TTF Normal file

Binary file not shown.

Binary file not shown.

BIN
static/EXO2-LIGHT.TTF Normal file

Binary file not shown.

BIN
static/EXO2-LIGHTITALIC.TTF Normal file

Binary file not shown.

BIN
static/EXO2-MEDIUM.TTF Normal file

Binary file not shown.

Binary file not shown.

BIN
static/EXO2-SEMIBOLD.TTF Normal file

Binary file not shown.

Binary file not shown.

BIN
static/EXO2-THIN.TTF Normal file

Binary file not shown.

BIN
static/EXO2-THINITALIC.TTF Normal file

Binary file not shown.

Binary file not shown.