wip load profile engine

generating reasonably realistic load profiles
Lucas Tan 2025-07-15 16:56:39 +01:00
parent 80eed10898
commit 8125583313
5 changed files with 158 additions and 35 deletions

View File

@ -1,16 +1,40 @@
import numpy as np
from Utilities.Time import (
    generate_timestrings,
    index_peak_times,
    index_operating_hours,
    check_is_weekday,
)
from scipy.optimize import root_scalar
import pandas as pd
import matplotlib.pyplot as pl
def get_no_of_peaks(peak_bounds, is_weekday, peak_duration, peak_energy, site):
    # calculate the theoretical maximum number of peaks from the peak energy
    # budget (kWh), the site's maximum demand and the longest peak duration
    max_occurences = int(
        np.floor(peak_energy / site["maximum_demand_kW"] / (peak_duration["max"] / 60))
    )
    if is_weekday:
        peak_occurences = np.random.randint(
            peak_bounds["weekdays"]["min"], max_occurences, 1
        )
    else:
        peak_occurences = np.random.randint(
            peak_bounds["weekends"]["min"], max_occurences, 1
        )
    return peak_occurences
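
# Worked example with illustrative numbers (not from the shipped config): for
# peak_energy = 200 kWh, maximum_demand_kW = 100 and peak_duration["max"] = 30
# minutes, max_occurences = floor(200 / 100 / (30 / 60)) = 4, i.e. at most four
# full-length, full-power peaks fit in the peak energy budget, and the draw
# comes from [min, 4) since np.random.randint excludes the upper bound.
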
def generate_peak_info(c, dt, is_weekday, peak_energy, site):
    no_of_peaks = get_no_of_peaks(
        c["site_info"]["no_of_peaks"],
        is_weekday,
        c["site_info"]["peak_duration"],
        peak_energy,
        site,
    )
    operating_hours = generate_timestrings(
        c["site_info"]["operating hours"]["start"],
        c["site_info"]["operating hours"]["end"],
@ -26,6 +50,17 @@ def generate_peak_info(c, dt):
    return peak_times, peak_durations


def generate_peak_profile(idx_peak, c, site):
    # Generate a peak profile based on the peak indices and site information
    peak_profile = np.zeros(len(idx_peak))
    for i in range(1, np.max(idx_peak) + 1):
        peak_profile[idx_peak == i] = site["maximum_demand_kW"] * np.random.uniform(
            1 - c["noise"]["range"],
            1 + c["noise"]["range"],
        )
    return peak_profile
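
# Assumed idx_peak convention, inferred from how it is consumed here (not from
# the index_peak_times source): 0 marks non-peak samples and every sample of
# the i-th peak carries the label i, so e.g.
#   idx_peak = np.array([0, 1, 1, 0, 2, 2, 2, 0])
# produces two plateaus, each at maximum demand scaled by its own noise draw.
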
def generate_out_of_hours_consumption_ratio(c):
    # Generate a random ratio for out-of-hours consumption
    ratio = np.random.uniform(
@ -35,18 +70,22 @@ def generate_out_of_hours_consumption_ratio(c):
    return ratio


def recompute_load_profile(load_profile, offset, noise, peak_profile):
    # apply noise to the load profile, including max demand
    load_profile = load_profile * noise
    # apply offset to the load profile
    load_profile += offset
    # overwrite with the peak profile wherever a peak is active, so peaks stay
    # pinned at (noisy) maximum demand regardless of the calibrated offset
    for i in range(len(peak_profile)):
        if peak_profile[i] > 0:
            load_profile[i] = peak_profile[i]
    return load_profile


def get_load_profiles(c, dt, batch_start_time, batch_process_duration):
    # Generate a load profile for each site
    # c is the configuration dictionary
@ -56,14 +95,26 @@ def get_load_profile(c, dt, batch_start_time, batch_process_duration):
    # start with indexing all the peak occurrences
    # generate timeseries from start to end time
    hours2seconds = 3600
    # check day of the week
    is_weekday = check_is_weekday(batch_start_time)
    start_time = batch_start_time
    end_time = start_time + batch_process_duration
    batch_process_duration_hours = (
        batch_process_duration / hours2seconds
    )  # convert to hours
    timestamps = np.arange(start_time, end_time, dt)
    idx_operating_hours = index_operating_hours(
        timestamps, c["site_info"]["operating hours"]
    )
    no_of_operating_hours = (
        np.sum(idx_operating_hours)
        / len(idx_operating_hours)
        * batch_process_duration_hours
    )
    # initialise load profiles DataFrame
    load_profiles = pd.DataFrame(index=timestamps)

    # loop through each site in the configuration
    for site in c["site_info"]["sites"]:
@ -71,14 +122,13 @@ def get_load_profile(c, dt, batch_start_time, batch_process_duration):
        load_profile = np.zeros(len(timestamps))
        # generate noise to make the profile more realistic
        noise = np.random.uniform(
            1 - c["noise"]["range"], 1 + c["noise"]["range"], len(timestamps)
        )
        # hold each noise value for two consecutive samples so it varies more slowly
        for i in range(0, len(noise), 2):
            noise[i : i + 2] = noise[i]

        # Generate out-of-hours consumption ratio
        # The % of energy used outside of the operating hours
@ -91,20 +141,66 @@ def get_load_profile(c, dt, batch_start_time, batch_process_duration):
        )
        out_of_hours_consumption = site["daily_consumption_kWh"] * out_of_hours_ratio
        avg_operating_hour_power = operating_hour_consumption / no_of_operating_hours
        avg_out_of_hours_power = out_of_hours_consumption / (
            batch_process_duration_hours - no_of_operating_hours
        )

        # assume baseline operating-hour power runs 40% above out-of-hours power
        gain = 1.4
        assumed_operating_baseline_power = avg_out_of_hours_power * gain
        baseline_energy = avg_out_of_hours_power * (
            batch_process_duration_hours - no_of_operating_hours
        ) + (assumed_operating_baseline_power * no_of_operating_hours)
        peak_energy = site["daily_consumption_kWh"] - baseline_energy

        # Generate peak times and durations
        peak_times, peak_durations = generate_peak_info(
            c, dt, is_weekday, peak_energy, site
        )
        # Index the peak times onto the timestamp grid
        idx_peak = index_peak_times(timestamps, peak_times, peak_durations)
        # Generate peak profile
        peak_profile = generate_peak_profile(idx_peak, c, site)

        # assign base load profile
        load_profile[idx_operating_hours > 0] = avg_operating_hour_power
        load_profile[idx_operating_hours == 0] = avg_out_of_hours_power
        # smooth out sharp edges with a 40-sample moving average
        load_profile = np.convolve(load_profile, np.ones(40) / 40, mode="same")

        def objective(x):
            # Signed gap between simulated energy and the site's target daily
            # consumption (kWh); root_scalar drives this to zero. x is the
            # flat offset added to the profile.
            adjusted_profile = recompute_load_profile(
                load_profile, x, noise, peak_profile
            )
            # get energy consumption in kWh
            energy_consumption = np.sum(adjusted_profile) * dt / 3600
            target_consumption = site["daily_consumption_kWh"]
            delta = energy_consumption - target_consumption
            return delta

        # Use root_scalar to find the offset that hits the target consumption
        result = root_scalar(
            objective,
            bracket=[-site["maximum_demand_kW"] * 10, site["maximum_demand_kW"] * 10],
            method="bisect",
        )
        if result.converged:
            offset = result.root
        else:
            raise ValueError("Root finding did not converge")

        # Recompute the load profile with the calibrated offset
        load_profile = recompute_load_profile(load_profile, offset, noise, peak_profile)
        # Add the load profile to the DataFrame
        load_profiles[site["name"]] = load_profile
    return load_profiles
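
# Why method="bisect" is reliable here (a sketch, assuming the noise factors
# are strictly positive, which holds for any noise range below 1): outside the
# fixed peak samples the adjusted profile is load_profile * noise + x, so the
# energy objective is continuous and strictly increasing in x, crossing zero
# exactly once inside any sign-changing bracket. A stripped-down demo of the
# same calibration idea:
if __name__ == "__main__":
    base = np.full(60, 5.0)  # flat 5 kW profile sampled at dt = 60 s
    target_kWh = 6.0

    def energy_gap(x):
        # signed difference between simulated and target energy, in kWh
        return np.sum(base + x) * 60 / 3600 - target_kWh

    demo = root_scalar(energy_gap, bracket=[-50.0, 50.0], method="bisect")
    print(demo.root)  # ~1.0: the flat hour at 5 kW needs 1 kW more to hit 6 kWh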

View File

@ -74,3 +74,14 @@ def index_operating_hours(timestamps, operating_hours):
        operating_indices[i] = 1  # mark as operating hour
    return operating_indices
def check_is_weekday(batch_start_time):
    """Checks if the batch start time is on a weekday."""
    # batch_start_time is in seconds since the epoch
    # (relies on `from datetime import datetime` at the top of this module)
    start_time = datetime.fromtimestamp(batch_start_time)
    if start_time.weekday() >= 5:  # Saturday or Sunday
        is_weekday = False
    else:
        is_weekday = True
    return is_weekday
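
# Quick usage check (a sketch; the timestamps are illustrative):
if __name__ == "__main__":
    from datetime import datetime

    # 2025-07-15, this commit's date, is a Tuesday (weekday() == 1)
    print(check_is_weekday(datetime(2025, 7, 15, 16, 56).timestamp()))  # True
    # 2025-07-19 is a Saturday (weekday() == 5)
    print(check_is_weekday(datetime(2025, 7, 19).timestamp()))  # False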

View File

@ -4,7 +4,7 @@ sim_time:
  duration_days: 60
noise:
  range: 0.3
paths:
  site_info: YAMLs/site_info.yaml
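
# range is the half-width of the multiplicative noise band applied in
# Utilities/LoadProfile.py: 0.3 draws factors uniformly from [0.7, 1.3]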

View File

@ -26,8 +26,10 @@ operating hours:
end: "19:00" end: "19:00"
time zone: Asia/Kuala_Lumpur time zone: Asia/Kuala_Lumpur
no_of_peaks: no_of_peaks:
min: 30 weekdays:
max: 100 min: 5
weekends:
min: 1
peak_duration: peak_duration:
unit: minutes unit: minutes
min: 1 min: 1
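
# Note: the fixed upper bound (max) was dropped from no_of_peaks; the ceiling
# is now computed at runtime in get_no_of_peaks from the peak energy budget,
# the site's maximum_demand_kW and the max peak_duration.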

main.py
View File

@ -1,6 +1,8 @@
import yaml
from Utilities.Time import get_start_time
from Utilities.LoadProfile import get_load_profiles
import matplotlib.pyplot as pl
import pandas as pd

# read config file
c = yaml.safe_load(open("YAMLs/config.yml"))
@ -20,5 +22,17 @@ c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] *
# load site info
c["site_info"] = yaml.safe_load(open(c["paths"]["site_info"]))

cumulative_load_profiles = pd.DataFrame()
# loop through batch windows, stepping one batch_process_seconds at a time
for i in range(
    c["sim_start_time"], c["sim_end_time"], c["sim_time"]["batch_process_seconds"]
):
    # generate load profiles for the batch starting at i
    load_profiles = get_load_profiles(
        c, dt, i, c["sim_time"]["batch_process_seconds"]
    )
    # append this batch, stacking along the time axis
    cumulative_load_profiles = pd.concat(
        [cumulative_load_profiles, load_profiles], axis=0
    )
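
# a sketch (not in the commit itself) of how the newly added matplotlib import
# could be used to eyeball the result, one trace per site
cumulative_load_profiles.plot(ax=pl.gca())
pl.xlabel("unix time (s)")
pl.ylabel("power (kW)")
pl.show()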