simulation_handler.run_simulation
This module runs the port simulation, generating ships, trucks, and trains, creating resources, and executing the simulation until the specified time.
1""" 2This module runs the port simulation, generating ships, trucks, and trains, 3creating resources, and executing the simulation until the specified time. 4""" 5 6import gc 7import psutil 8import time 9import os 10import shutil 11 12import simpy 13import pandas as pd 14 15from simulation_handler.helpers import clear_logs, clear_env 16from simulation_handler.preprocess import generate_ships, generate_trucks, generate_trains, get_piplines_import 17from simulation_handler.generators import ship_generator, truck_generator, train_generator, data_logger 18from simulation_handler.helpers import clean_data, create_terminal_data_cache, create_terminal_tuple_cache 19from simulation_classes.port import create_resources 20from simulation_classes.channel import Channel 21from simulation_analysis.resource_utilization import bottleneckAnalysis 22from simulation_analysis.whatif_scenarios import * 23from simulation_analysis.results import plot_channel, gen_logs_and_plots 24import constants 25 26 27def print_memory_usage(): 28 """Print the current memory usage of the process.""" 29 process = psutil.Process(os.getpid()) 30 memory_info = process.memory_info() 31 print(f"Memory usage: {memory_info.rss / 1024**2:.2f} MB (RSS), {memory_info.vms / 1024**2:.2f} MB (VMS)") 32 33def run_simulation(seed): 34 """ 35 Run the port simulation with the given seed. 36 This function initializes the simulation environment, generates ships, trucks, and trains, 37 creates resources, and runs the simulation until the specified time. 38 Args: 39 seed (int): Random seed for reproducibility. 
40 """ 41 print("Processing seed: ", seed) 42 start_time = time.time() 43 run_id = f"Results_{seed}_{int(constants.NUM_MONTHS)}_months_{constants.ARRIVAL_INCREASE_FACTOR}" 44 clear_logs(run_id) 45 ship_logs = [] 46 47 print("Preprocessing data...") 48 terminal_data_df = clean_data(constants.directory) 49 terminal_data = create_terminal_data_cache(terminal_data_df, run_id, seed) 50 terminal_tuple_cache = create_terminal_tuple_cache(terminal_data_df, run_id, seed) 51 52 df = pd.DataFrame(terminal_data.items(), columns=['Key', 'Amount Allocated']) 53 df[['Terminal type', 'Terminal Number', 'Resource Allocated']] = pd.DataFrame(df['Key'].tolist(), index=df.index) 54 df = df.drop(columns=['Key']) 55 df = df[['Terminal type', 'Terminal Number', 'Resource Allocated', 'Amount Allocated']] 56 df.to_csv(f'.{run_id}/logs/terminal_data_cache.csv', index=False) 57 58 num_container_terminals = terminal_data_df[(terminal_data_df['Cargo'] == 'Container')]['Terminal'].nunique() 59 num_liquid_terminals = terminal_data_df[(terminal_data_df['Cargo'] == 'Liquid')]['Terminal'].nunique() 60 num_drybulk_terminals = terminal_data_df[(terminal_data_df['Cargo'] == 'DryBulk')]['Terminal'].nunique() 61 num_terminals_list = [num_container_terminals, num_liquid_terminals, num_drybulk_terminals] 62 63 print("Generating ships...") 64 generate_ships(run_id, num_terminals_list, seed) 65 print("Generating trucks...") 66 generate_trucks(run_id, num_terminals_list, terminal_data_df, terminal_tuple_cache, seed) 67 print("Generating trains...") 68 generate_trains(run_id, num_terminals_list, terminal_data, terminal_data_df, terminal_tuple_cache, seed) 69 70 liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink = get_piplines_import(num_terminals_list, terminal_data) 71 72 plot_channel(run_id) 73 74 print("Creating resources...") 75 env = simpy.Environment() 76 events = [] 77 train_events = {} 78 channel_events = [] 79 channel_logs = [] 80 chassis_bays_utilization = {} 81 for terminal_type 
in ["Container", "Liquid", "DryBulk"]: 82 chassis_bays_utilization[terminal_type] = {} 83 for terminal_id in range(1, num_terminals_list[["Container", "Liquid", "DryBulk"].index(terminal_type)] + 1): 84 chassis_bays_utilization[terminal_type][terminal_id] = [] 85 86 terminal_resouces = create_resources(terminal_data, run_id, terminal_data_df, num_terminals_list, env, seed) 87 88 if constants.MODEL_HURRICANE: 89 model_hurricane(env, terminal_resouces, num_terminals_list, terminal_data, run_id, seed) 90 91 ship_data_df = pd.read_csv(f".{run_id}/logs/ship_data.csv") 92 ship_data = ship_data_df.to_dict(orient="index") 93 SIMULATION_TIME = constants.SIMULATION_TIME 94 NUM_CHANNEL_SECTIONS = constants.NUM_CHANNEL_SECTIONS 95 CHANNEL_SAFETWOWAY = constants.CHANNEL_SAFETWOWAY 96 port_berths_container_terminals, port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals, \ 97 port_berth_drybulk_terminals, port_silos_drybulk_terminals, port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals, \ 98 port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid, truck_gates_dk, train_loading_racks_ctr, train_loading_racks_liquid, \ 99 train_loading_racks_dk, day_pilots, night_pilots, tugboats, channel_scheduer = terminal_resouces 100 101 # Creating channel... 102 SHIPS_IN_ANCHORAGE = [0,0,0] 103 SHIPS_IN_CHANNEL = [] 104 SHIPS_IN_CHANNEL.append(0) 105 if constants.MODEL_FOG: 106 turnoffTime = {"switch": "channel_closed", "closed_between": constants.FOG_CLOSURES} 107 else: 108 turnoffTime = {"switch": "channel_open"} 109 110 channel = Channel(ship_logs, env, NUM_CHANNEL_SECTIONS, SIMULATION_TIME, CHANNEL_SAFETWOWAY, channel_events, channel_logs, day_pilots, night_pilots, tugboats, turnoffTime, channel_scheduer, seed) 111 112 # Starting simulation... 
113 ship_proc = env.process(ship_generator(run_id, env, chassis_bays_utilization, port_berths_container_terminals, port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals, 114 port_berth_drybulk_terminals, port_silos_drybulk_terminals, channel, day_pilots, night_pilots, tugboats, events, ship_logs, channel_events, channel_logs, SHIPS_IN_ANCHORAGE, SHIPS_IN_CHANNEL, ship_data, terminal_data, liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink)) 115 truck_proc = env.process(truck_generator(run_id, liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink, chassis_bays_utilization, env, terminal_tuple_cache, port_tanks_liquid_terminals, port_yard_container_terminals, port_silos_drybulk_terminals, port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals, port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid, truck_gates_dk, events, seed, terminal_data)) 116 train_proc = env.process(train_generator(run_id, liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink, env, terminal_tuple_cache, train_loading_racks_ctr, train_loading_racks_liquid, train_loading_racks_dk, train_events, port_tanks_liquid_terminals, port_yard_container_terminals, port_silos_drybulk_terminals, seed)) 117 118 pilots_tugs_data = pd.DataFrame(columns=['Time', 'Day Pilots', 'Night Pilots', 'Tugboats']) 119 data_taker_proc = env.process(data_logger(run_id, env, pilots_tugs_data, day_pilots, night_pilots, tugboats)) 120 121 env.run(until=SIMULATION_TIME) 122 123 clear_env(env, ship_proc, truck_proc, train_proc, data_taker_proc) 124 gen_logs_and_plots(run_id, ship_logs, events, chassis_bays_utilization, num_terminals_list, train_events, channel_logs, channel_events, channel, animate=False) 125 bottleneckAnalysis(run_id) 126 127 # Free memory 128 # print_memory_usage() 129 130 del env, ship_proc, truck_proc, terminal_resouces, terminal_data, events, channel_events, channel_logs 131 del 
port_berths_container_terminals, port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals 132 del port_berth_drybulk_terminals, port_silos_drybulk_terminals, port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals 133 del port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid, truck_gates_dk, day_pilots, night_pilots, tugboats 134 gc.collect() 135 136 # remove truck pickle file from "/.Results*" output folder (saves hard drive space) 137 if os.path.exists(f".{run_id}/logs/truck_data.pkl"): 138 os.remove(f".{run_id}/logs/truck_data.pkl") 139 else: 140 print("Truck pkl file does not exist") 141 pass 142 143 # revome the availablity folder 144 dir_path = f".{run_id}/logs/availability" 145 146 if os.path.exists(dir_path): 147 shutil.rmtree(dir_path) 148 else: 149 print("Availability folder does not exist") 150 151 # close all open files and plots 152 plt.close('all') 153
def
print_memory_usage():
def print_memory_usage():
    """Print the current memory usage of the process."""
    # Resolve this process's memory counters via psutil.
    usage = psutil.Process(os.getpid()).memory_info()
    rss_mb = usage.rss / 1024**2
    vms_mb = usage.vms / 1024**2
    print(f"Memory usage: {rss_mb:.2f} MB (RSS), {vms_mb:.2f} MB (VMS)")
Print the current memory usage of the process.
def
run_simulation(seed):
def run_simulation(seed):
    """
    Run the port simulation with the given seed.

    Initializes the simulation environment, generates ships, trucks, and
    trains, creates resources, runs the simulation until
    ``constants.SIMULATION_TIME``, then writes logs/plots and removes
    intermediate artifacts from the per-run output folder.

    Args:
        seed (int): Random seed for reproducibility.
    """
    print("Processing seed: ", seed)
    start_time = time.time()
    run_id = f"Results_{seed}_{int(constants.NUM_MONTHS)}_months_{constants.ARRIVAL_INCREASE_FACTOR}"
    clear_logs(run_id)
    ship_logs = []

    print("Preprocessing data...")
    terminal_data_df = clean_data(constants.directory)
    terminal_data = create_terminal_data_cache(terminal_data_df, run_id, seed)
    terminal_tuple_cache = create_terminal_tuple_cache(terminal_data_df, run_id, seed)

    # Persist the terminal resource-allocation cache as a CSV for post-run inspection.
    df = pd.DataFrame(terminal_data.items(), columns=['Key', 'Amount Allocated'])
    df[['Terminal type', 'Terminal Number', 'Resource Allocated']] = pd.DataFrame(df['Key'].tolist(), index=df.index)
    df = df.drop(columns=['Key'])
    df = df[['Terminal type', 'Terminal Number', 'Resource Allocated', 'Amount Allocated']]
    df.to_csv(f'.{run_id}/logs/terminal_data_cache.csv', index=False)

    # Terminal counts in the fixed order [Container, Liquid, DryBulk].
    num_terminals_list = [
        terminal_data_df[terminal_data_df['Cargo'] == cargo]['Terminal'].nunique()
        for cargo in ("Container", "Liquid", "DryBulk")
    ]

    print("Generating ships...")
    generate_ships(run_id, num_terminals_list, seed)
    print("Generating trucks...")
    generate_trucks(run_id, num_terminals_list, terminal_data_df, terminal_tuple_cache, seed)
    print("Generating trains...")
    generate_trains(run_id, num_terminals_list, terminal_data, terminal_data_df, terminal_tuple_cache, seed)

    liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink = get_piplines_import(num_terminals_list, terminal_data)

    plot_channel(run_id)

    print("Creating resources...")
    env = simpy.Environment()
    events = []
    train_events = {}
    channel_events = []
    channel_logs = []
    # Per terminal type -> terminal id -> list of utilization samples.
    chassis_bays_utilization = {
        terminal_type: {terminal_id: [] for terminal_id in range(1, count + 1)}
        for terminal_type, count in zip(["Container", "Liquid", "DryBulk"], num_terminals_list)
    }

    terminal_resources = create_resources(terminal_data, run_id, terminal_data_df, num_terminals_list, env, seed)

    if constants.MODEL_HURRICANE:
        model_hurricane(env, terminal_resources, num_terminals_list, terminal_data, run_id, seed)

    ship_data_df = pd.read_csv(f".{run_id}/logs/ship_data.csv")
    ship_data = ship_data_df.to_dict(orient="index")
    SIMULATION_TIME = constants.SIMULATION_TIME
    NUM_CHANNEL_SECTIONS = constants.NUM_CHANNEL_SECTIONS
    CHANNEL_SAFETWOWAY = constants.CHANNEL_SAFETWOWAY
    port_berths_container_terminals, port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals, \
        port_berth_drybulk_terminals, port_silos_drybulk_terminals, port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals, \
        port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid, truck_gates_dk, train_loading_racks_ctr, train_loading_racks_liquid, \
        train_loading_racks_dk, day_pilots, night_pilots, tugboats, channel_scheduler = terminal_resources

    # Creating channel...
    SHIPS_IN_ANCHORAGE = [0, 0, 0]
    # Single-element list so generator processes can mutate the shared count.
    SHIPS_IN_CHANNEL = [0]
    if constants.MODEL_FOG:
        turnoffTime = {"switch": "channel_closed", "closed_between": constants.FOG_CLOSURES}
    else:
        turnoffTime = {"switch": "channel_open"}

    channel = Channel(ship_logs, env, NUM_CHANNEL_SECTIONS, SIMULATION_TIME, CHANNEL_SAFETWOWAY, channel_events, channel_logs,
                      day_pilots, night_pilots, tugboats, turnoffTime, channel_scheduler, seed)

    # Starting simulation...
    ship_proc = env.process(ship_generator(run_id, env, chassis_bays_utilization, port_berths_container_terminals,
                                           port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals,
                                           port_berth_drybulk_terminals, port_silos_drybulk_terminals, channel, day_pilots,
                                           night_pilots, tugboats, events, ship_logs, channel_events, channel_logs,
                                           SHIPS_IN_ANCHORAGE, SHIPS_IN_CHANNEL, ship_data, terminal_data,
                                           liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink))
    truck_proc = env.process(truck_generator(run_id, liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink,
                                             chassis_bays_utilization, env, terminal_tuple_cache, port_tanks_liquid_terminals,
                                             port_yard_container_terminals, port_silos_drybulk_terminals,
                                             port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals,
                                             port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid,
                                             truck_gates_dk, events, seed, terminal_data))
    train_proc = env.process(train_generator(run_id, liq_terminals_with_pipeline_source, liq_terminals_with_pipeline_sink,
                                             env, terminal_tuple_cache, train_loading_racks_ctr, train_loading_racks_liquid,
                                             train_loading_racks_dk, train_events, port_tanks_liquid_terminals,
                                             port_yard_container_terminals, port_silos_drybulk_terminals, seed))

    pilots_tugs_data = pd.DataFrame(columns=['Time', 'Day Pilots', 'Night Pilots', 'Tugboats'])
    data_taker_proc = env.process(data_logger(run_id, env, pilots_tugs_data, day_pilots, night_pilots, tugboats))

    env.run(until=SIMULATION_TIME)

    clear_env(env, ship_proc, truck_proc, train_proc, data_taker_proc)
    gen_logs_and_plots(run_id, ship_logs, events, chassis_bays_utilization, num_terminals_list, train_events,
                       channel_logs, channel_events, channel, animate=False)
    bottleneckAnalysis(run_id)

    # Free memory before the next seed is processed.
    # print_memory_usage()
    del env, ship_proc, truck_proc, terminal_resources, terminal_data, events, channel_events, channel_logs
    del port_berths_container_terminals, port_yard_container_terminals, port_berth_liquid_terminals, port_tanks_liquid_terminals
    del port_berth_drybulk_terminals, port_silos_drybulk_terminals, port_loading_bays_liquid_terminals, port_drybulk_bays_drybulk_terminals
    del port_chassis_container_terminals, truck_gates_ctr, truck_gates_liquid, truck_gates_dk, day_pilots, night_pilots, tugboats
    gc.collect()

    # Remove truck pickle file from the ".Results*" output folder (saves hard drive space).
    if os.path.exists(f".{run_id}/logs/truck_data.pkl"):
        os.remove(f".{run_id}/logs/truck_data.pkl")
    else:
        print("Truck pkl file does not exist")

    # Remove the availability folder.
    dir_path = f".{run_id}/logs/availability"
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    else:
        print("Availability folder does not exist")

    # Close all open files and plots.
    plt.close('all')
    # Report wall-clock duration (start_time was previously captured but unused).
    print(f"Seed {seed} completed in {time.time() - start_time:.1f} s")
Run the port simulation with the given seed. This function initializes the simulation environment, generates ships, trucks, and trains, creates resources, and runs the simulation until the specified time.
Arguments:
- seed (int): Random seed for reproducibility.