trying to get peak simulation to work
parent e69c04cda2
commit bc2ef9fb59
@@ -1,5 +1,5 @@
 import numpy as np
-from Utilities.Time import generate_timestrings
+from Utilities.Time import generate_timestrings, index_peak_times


 def get_no_of_peaks(peak_bounds):
@@ -15,10 +15,31 @@ def generate_peak_info(c, dt):
         dt,
     )
     peak_times = np.random.choice(operating_hours, no_of_peaks, replace=False)
-    print(f"Peak times: {peak_times}")
-    return peak_times
+    peak_durations = np.random.randint(
+        c["site_info"]["peak_duration"]["min"],
+        c["site_info"]["peak_duration"]["max"],
+        no_of_peaks,
+    )
+    return peak_times, peak_durations


-def get_load_profile(c, dt):
-    return generate_peak_info(c, dt)
+def get_load_profile(c, dt, batch_start_time, batch_process_duration):
+    # Generate load profile for each site
+    # c is the configuration dictionary
+    # dt is the time step in seconds
+    # batch_start_time is the start time for the batch process in seconds since the epoch
+    # batch_process_duration is the duration of the batch process in seconds
+
+    # start with indexing all the peak occurrences
+    # generate timeseries from start to end time
+    start_time = batch_start_time
+    end_time = start_time + batch_process_duration
+    timestamps = np.arange(start_time, end_time, dt)
+
+    # Generate peak times and durations
+    peak_times, peak_durations = generate_peak_info(c, dt)
+
+    # Index the peak times onto the timeseries
+    peak_indices = index_peak_times(timestamps, peak_times, peak_durations)
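For orientation, here is a minimal sketch of the timestamp grid that the reworked get_load_profile builds before handing it to index_peak_times; the start time, duration, and step below are made-up values rather than anything read from the config:

import numpy as np

# Hypothetical batch window: 2 hours starting at an arbitrary epoch second, 15-minute steps
batch_start_time = 1_700_000_000
batch_process_duration = 2 * 60 * 60
dt = 15 * 60

timestamps = np.arange(batch_start_time, batch_start_time + batch_process_duration, dt)
print(len(timestamps))  # 8 samples; np.arange is half-open, so the end time itself is excluded
print(timestamps[:3])   # [1700000000 1700000900 1700001800]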
Utilities/Time.py
@@ -1,5 +1,7 @@
 import time
 from datetime import datetime, timedelta
+import numpy as np
+import matplotlib.pyplot as pl


 def get_current_time():
@@ -19,3 +21,34 @@ def generate_timestrings(start_time_str, end_time_str, dt):
         current_time += timedelta(seconds=dt)
     timestrings = [dt.strftime("%H:%M") for dt in timestamps]
     return timestrings
+
+
+def index_peak_times(timestamps, peak_times, peak_durations):
+    """Converts peak times from HH:MM format to seconds since epoch."""
+    # start_time is the start time of the batch process in seconds since the epoch
+    # peak_times is a list of strings in HH:MM format
+    dt = timestamps[1] - timestamps[0]  # time step in seconds
+    peak_indices = np.zeros(len(timestamps), dtype=int)
+    start_datetime = datetime.fromtimestamp(timestamps[0])
+    processed_times = []
+    peak_occurence_no = 1
+    for time_str, duration in zip(peak_times, peak_durations):
+        # convert HH:MM to a datetime object
+        time_obj = datetime.strptime(time_str, "%H:%M")
+        full_datetime = start_datetime.replace(
+            hour=time_obj.hour, minute=time_obj.minute, second=0, microsecond=0
+        )
+        peak_start = int(full_datetime.timestamp())
+        peak_end = peak_start + duration * 60  # duration in minutes to seconds
+        peak_timestamps = np.arange(peak_start, peak_end, dt, dtype=int)
+
+        common, indices_ts, _ = np.intersect1d(
+            timestamps, peak_timestamps, return_indices=True
+        )
+        peak_indices[indices_ts] = peak_occurence_no
+        peak_occurence_no += 1
+
+    pl.plot(peak_indices, label="Peak Indices")
+    pl.show()
+
+    return processed_times
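The core of index_peak_times is the np.intersect1d(..., return_indices=True) call, which maps each peak window onto positions in the batch timeseries. Below is a self-contained sketch of that idea, using made-up timestamps and returning the peak_indices array directly (the committed function still returns processed_times, which is never filled in):

import numpy as np

def mark_peaks(timestamps, peak_starts, peak_durations_s, dt):
    # timestamps and peak_starts are in epoch seconds, durations in seconds
    peak_indices = np.zeros(len(timestamps), dtype=int)
    for n, (start, dur) in enumerate(zip(peak_starts, peak_durations_s), start=1):
        window = np.arange(start, start + dur, dt, dtype=int)
        _, idx_ts, _ = np.intersect1d(timestamps, window, return_indices=True)
        peak_indices[idx_ts] = n  # label every sample belonging to the n-th peak
    return peak_indices

# Made-up example: a 1-hour grid at 5-minute steps with one 10-minute peak starting at minute 30
dt = 300
grid = np.arange(0, 3600, dt)
print(mark_peaks(grid, peak_starts=[1800], peak_durations_s=[600], dt=dt))
# -> [0 0 0 0 0 0 1 1 0 0 0 0]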
@@ -30,5 +30,5 @@ no_of_peaks:
   max: 100
 peak_duration:
   unit: minutes
-  min: 0.5
-  max: 2
+  min: 1
+  max: 4
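Worth noting when reading these bounds: they feed np.random.randint in generate_peak_info, and randint's upper bound is exclusive, so with min: 1 and max: 4 the drawn durations are 1, 2, or 3 minutes, never 4. A quick check:

import numpy as np
print(sorted(set(np.random.randint(1, 4, size=1000))))  # -> [1, 2, 3]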
main.py
@@ -14,8 +14,11 @@ dt = c["sim_time"]["time_step_minutes"] * 60
 duration = c["sim_time"]["duration_days"] * 24 * 60 * 60
 c["sim_end_time"] = c["sim_start_time"] + duration

+# batch process hours in seconds
+c["sim_time"]["batch_process_seconds"] = c["sim_time"]["batch_process_hours"] * 60 * 60
+
 # load site info
 c["site_info"] = yaml.safe_load(open(c["paths"]["site_info"]))

 # generate load profiles
-get_load_profile(c, dt)
+get_load_profile(c, dt, c["sim_start_time"], c["sim_time"]["batch_process_seconds"])
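Since everything downstream works in seconds, the unit conversions in main.py are worth a quick worked example; the config values here are hypothetical, not taken from the actual YAML:

# Hypothetical config values, only to illustrate the second-based units used throughout main.py
time_step_minutes = 15
duration_days = 1
batch_process_hours = 8

dt = time_step_minutes * 60                              # 900 s per simulation step
duration = duration_days * 24 * 60 * 60                  # 86400 s of simulated time
batch_process_seconds = batch_process_hours * 60 * 60    # 28800 s per batch process
print(dt, duration, batch_process_seconds)               # 900 86400 28800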