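"""Synthetic site load-profile generation.

Builds a per-site electrical load profile over a batch window: a noisy baseline
split between operating-hours and out-of-hours power, randomly placed demand
peaks at the site's maximum demand, and a constant offset solved for by
bisection so that the total energy matches the site's configured daily
consumption. Results are returned as a pandas DataFrame indexed by timestamp.
"""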
import matplotlib.pyplot as pl
import numpy as np
import pandas as pd
from scipy.optimize import root_scalar

from Utilities.Time import (
    generate_timestrings,
    index_peak_times,
    index_operating_hours,
    check_is_weekday,
)


def get_no_of_peaks(peak_bounds, is_weekday, peak_duration, peak_energy, site):
    # Theoretical maximum number of peaks: the energy available for peaks (kWh)
    # divided by the energy of one maximum-length peak at the site's maximum demand.
    max_occurrences = np.floor(
        peak_energy / site["maximum_demand_kW"] / (peak_duration["max"] / 60)
    )

    # Draw the number of peaks between the configured minimum for the day type
    # and the theoretical maximum.
    bounds = peak_bounds["weekdays"] if is_weekday else peak_bounds["weekends"]
    peak_occurrences = np.random.randint(bounds["min"], int(max_occurrences))

    return peak_occurrences


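# Illustrative sanity check for get_no_of_peaks (the figures below are made up,
# not taken from any real configuration): with peak_energy = 500 kWh,
# maximum_demand_kW = 200 and peak_duration["max"] = 30 minutes, the cap is
# floor(500 / 200 / 0.5) = 5, so np.random.randint draws a count in [min, 5).

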
def generate_peak_info(c, dt, is_weekday, peak_energy, site):
    # Decide how many peaks the day should contain.
    no_of_peaks = get_no_of_peaks(
        c["site_info"]["no_of_peaks"],
        is_weekday,
        c["site_info"]["peak_duration"],
        peak_energy,
        site,
    )

    # Candidate peak start times within the configured operating hours.
    operating_hours = generate_timestrings(
        c["site_info"]["operating hours"]["start"],
        c["site_info"]["operating hours"]["end"],
        dt,
    )

    # Pick distinct start times and a random duration (minutes) for each peak.
    peak_times = np.random.choice(operating_hours, no_of_peaks, replace=False)
    peak_durations = np.random.randint(
        c["site_info"]["peak_duration"]["min"],
        c["site_info"]["peak_duration"]["max"],
        no_of_peaks,
    )

    return peak_times, peak_durations


def generate_peak_profile(idx_peak, c, site):
    # Generate a peak profile based on the peak indices and site information
    peak_profile = np.zeros(len(idx_peak))
    for i in range(1, np.max(idx_peak) + 1):
        peak_profile[idx_peak == i] = site["maximum_demand_kW"] * np.random.uniform(
            1 - c["noise"]["range"],
            1 + c["noise"]["range"],
        )
    return peak_profile


def generate_out_of_hours_consumption_ratio(c):
    # Generate a random ratio for out-of-hours consumption
    ratio = np.random.uniform(
        c["site_info"]["out_of_hours_consumption"]["min"],
        c["site_info"]["out_of_hours_consumption"]["max"],
    )
    return ratio


def recompute_load_profile(load_profile, offset, noise, peak_profile):
    # apply noise to the load profile, including max demand
    load_profile = load_profile * noise

    # apply offset to the load profile
    load_profile += offset

    # overwrite with the peak profile wherever a peak is defined
    peak_mask = peak_profile > 0
    load_profile[peak_mask] = peak_profile[peak_mask]

    return load_profile


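# Quick numerical illustration of recompute_load_profile (values chosen purely for
# illustration): load_profile = [2.0, 2.0], noise = [1.1, 0.9], offset = 0.5 and
# peak_profile = [0.0, 5.0] gives [2.0 * 1.1 + 0.5, 5.0] = [2.7, 5.0], i.e. the
# second sample is replaced by its peak value after noise and offset are applied.

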
def get_load_profiles(c, dt, batch_start_time, batch_process_duration):
    """Generate a load profile for each site in the configuration.

    c                       -- configuration dictionary
    dt                      -- time step in seconds
    batch_start_time        -- start time of the batch process, seconds since the epoch
    batch_process_duration  -- duration of the batch process in seconds
    """
    # Build the timestamp grid from start to end time and index which samples
    # fall inside operating hours; peak occurrences are indexed per site below.
    hours2seconds = 3600

    # check day of the week
    is_weekday = check_is_weekday(batch_start_time)

    start_time = batch_start_time
    end_time = start_time + batch_process_duration
    batch_process_duration_hours = (
        batch_process_duration / hours2seconds
    )  # convert to hours

    timestamps = np.arange(start_time, end_time, dt)
    idx_operating_hours = index_operating_hours(
        timestamps, c["site_info"]["operating hours"]
    )

    # Number of hours of the batch window that fall inside operating hours.
    no_of_operating_hours = (
        np.sum(idx_operating_hours)
        / len(idx_operating_hours)
        * batch_process_duration_hours
    )

    # initialise load profiles DataFrame
    load_profiles = pd.DataFrame(index=timestamps)

    # loop through each site in the configuration
    for site in c["site_info"]["sites"]:
        # Initialise the load profile
        load_profile = np.zeros(len(timestamps))

        # generate noise to make the profile more realistic
        noise = np.random.uniform(
            1 - c["noise"]["range"], 1 + c["noise"]["range"], len(timestamps)
        )

        # hold the noise constant over pairs of consecutive samples
        # (every 2 minutes at a 1-minute timestep)
        for i in range(0, len(noise), 2):
            noise[i : i + 2] = noise[i]

        # Generate the out-of-hours consumption ratio:
        # the share of daily energy used outside of the operating hours.
        out_of_hours_ratio = generate_out_of_hours_consumption_ratio(c)

        # Split the daily consumption between operating hours and out-of-hours,
        # then convert each share to an average power.
        operating_hour_consumption = site["daily_consumption_kWh"] * (
            1 - out_of_hours_ratio
        )
        out_of_hours_consumption = site["daily_consumption_kWh"] * out_of_hours_ratio

        avg_operating_hour_power = operating_hour_consumption / no_of_operating_hours
        avg_out_of_hours_power = out_of_hours_consumption / (
            batch_process_duration_hours - no_of_operating_hours
        )

        # assume the baseline operating-hour power is five times the out-of-hours power
        gain = 5
        assumed_operating_baseline_power = avg_out_of_hours_power * gain
        baseline_energy = avg_out_of_hours_power * (
            batch_process_duration_hours - no_of_operating_hours
        ) + (assumed_operating_baseline_power * no_of_operating_hours)

        # whatever is left of the daily consumption is delivered through peaks
        peak_energy = site["daily_consumption_kWh"] - baseline_energy

        # Generate peak times and durations
        peak_times, peak_durations = generate_peak_info(
            c, dt, is_weekday, peak_energy, site
        )

        # Index which timestamps fall inside a peak
        idx_peak = index_peak_times(timestamps, peak_times, peak_durations)

        # Generate peak profile
        peak_profile = generate_peak_profile(idx_peak, c, site)

        # assign base load profile
        load_profile[idx_operating_hours > 0] = assumed_operating_baseline_power
        load_profile[idx_operating_hours == 0] = avg_out_of_hours_power

        # smoothen out sharp edges with a 40-sample moving average
        load_profile = np.convolve(load_profile, np.ones(40) / 40, mode="same")

        def objective(x):
            # Residual for the root finder: the difference between the energy of
            # the profile with offset x applied and the site's daily consumption
            # target. The root of this function is the offset that makes them match.
            adjusted_profile = recompute_load_profile(
                load_profile, x, noise, peak_profile
            )
            # get energy consumption in kWh
            energy_consumption = np.sum(adjusted_profile) * dt / 3600
            target_consumption = site["daily_consumption_kWh"]
            delta = energy_consumption - target_consumption
            return delta

        # Use root_scalar (bisection) to find the offset that matches the target
        # energy; the bracket of +/- 10x the site's maximum demand is assumed to
        # be wide enough to contain the root.
        result = root_scalar(
            objective,
            bracket=[-site["maximum_demand_kW"] * 10, site["maximum_demand_kW"] * 10],
            method="bisect",
        )

        if result.converged:
            offset = result.root
        else:
            raise ValueError("Root finding did not converge")

        # Recompute the load profile with the optimal offset
        load_profile = recompute_load_profile(load_profile, offset, noise, peak_profile)

        # Add the load profile to the DataFrame
        load_profiles[site["name"]] = load_profile

    return load_profiles
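

# The block below is an illustrative usage sketch only. The shape of the config
# dictionary is inferred from how `c` is accessed above; every value in it (site
# size, operating hours, peak bounds, noise range) is a made-up placeholder, and
# the time-string format expected by the Utilities.Time helpers is also assumed.
if __name__ == "__main__":
    example_config = {
        "noise": {"range": 0.05},
        "site_info": {
            "operating hours": {"start": "08:00", "end": "18:00"},
            "out_of_hours_consumption": {"min": 0.05, "max": 0.15},
            "peak_duration": {"min": 10, "max": 30},  # minutes
            "no_of_peaks": {"weekdays": {"min": 1}, "weekends": {"min": 1}},
            "sites": [
                {
                    "name": "example_site",
                    "daily_consumption_kWh": 5000,
                    "maximum_demand_kW": 250,
                },
            ],
        },
    }

    # One day at a one-minute timestep, starting from an arbitrary epoch timestamp.
    example_profiles = get_load_profiles(
        example_config,
        dt=60,
        batch_start_time=1_700_000_000,
        batch_process_duration=24 * 3600,
    )
    print(example_profiles.describe())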