Skip to content

Commit

Permalink
PEP8 (1)
Browse files Browse the repository at this point in the history
  • Loading branch information
aPovidlo committed May 21, 2024
1 parent a649ea9 commit 6f97988
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 13 deletions.
6 changes: 5 additions & 1 deletion rl_core/experiments/ts_experiment_dqn.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,11 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e
)

dataloader_train, dataloader_test, train_list, test_list = define_data_for_experiment()
env = TimeSeriesPipelineEnvironment(max_number_of_nodes=number_of_nodes_in_pipeline, render_mode='none', metadata_dim=126)
env = TimeSeriesPipelineEnvironment(
max_number_of_nodes=number_of_nodes_in_pipeline,
render_mode='none',
metadata_dim=126
)
state_dim, action_dim = env.state_dim, env.action_dim

agent = DQN(
Expand Down
13 changes: 5 additions & 8 deletions rl_core/experiments/ts_validation_dqn.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@


def print_params(experiment_name, number_of_nodes_in_pipeline=8):
log_dir = f'{project_root()}/MetaFEDOT/rl_core/agent/tensorboard_logs/dqn/{number_of_nodes_in_pipeline}/{experiment_name}'
log_dir = f'{project_root()}/MetaFEDOT/rl_core/agent/tensorboard_logs/dqn/' \
f'{number_of_nodes_in_pipeline}/{experiment_name}'

with io.open(f'{log_dir}/params.log', 'r', encoding='utf-8') as file:
lines = file.readlines()
Expand Down Expand Up @@ -57,8 +58,6 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e
total_metrics = []

period = 20
period_of_cleaning = 15
period_of_heatmap = 100

for episode in range(1, n_episodes + 1):
print(f'-- Starting {episode} episode --')
Expand Down Expand Up @@ -119,15 +118,16 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e
print(f'-- Finishing {episode} episode --\n')

# -- Saving Agent ---
name = f'{env.metadata["name"]}_{number_of_nodes_in_pipeline}_{state_dim}_{agent.metadata["name"]}_{agent.hidden_dim}_{n_episodes}'
name = f'{env.metadata["name"]}_{number_of_nodes_in_pipeline}_{state_dim}_{agent.metadata["name"]}' \
f'_{agent.hidden_dim}_{n_episodes}'
path = f'{log_dir}/weight'

if not os.path.exists(path):
os.makedirs(path)

agent.save(f'{path}/{name}')

print(f'-- Validation Starts --\n')
print('-- Validation Starts --\n')
for test_dataset in test_list:
train_data, test_data, meta_data = dataloader_test.get_data(test_dataset)

Expand Down Expand Up @@ -192,6 +192,3 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e

for h, g, e, e_m, e_d in params:
run_experiment(2000, 8, h, g, e, e_m, e_d)

# Parallel(n_jobs=-2)(
# delayed(run_experiment)(2000, 8, h, g, e, e_m, e_d) for m, h, g, e, e_m, e_d in params)
4 changes: 2 additions & 2 deletions rl_core/experiments/ts_validation_dt.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e

agent.save(f'{path}/{name}')

print(f'-- Validation Starts --\n')
print('-- Validation Starts --\n')
for test_dataset in test_list:
train_data, test_data, meta_data = dataloader_test.get_data(test_dataset)

Expand Down Expand Up @@ -191,4 +191,4 @@ def run_experiment(n_episodes, number_of_nodes_in_pipeline, hidden_dim, gamma, e
]

for h, g, e, e_m, e_d in params:
run_experiment(2000, 8, h, g, e, e_m, e_d)
run_experiment(2000, 8, h, g, e, e_m, e_d)
6 changes: 4 additions & 2 deletions rl_core/pipeline_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,8 @@

path_to_base = os.path.join(
str(project_root()),
f'MetaFEDOT\\data\\knowledge_base_time_series_0\\datasets\\{dataset}\\model\\0_pipeline_saved\\0_pipeline_saved.json'
f'MetaFEDOT\\data\\knowledge_base_time_series_0\\datasets\\{dataset}\\'
f'model\\0_pipeline_saved\\0_pipeline_saved.json'
)

try:
Expand All @@ -121,7 +122,8 @@
try:
path_to_topo = os.path.join(
str(project_root()),
f'MetaFEDOT\\data\\topo_ws_selection_evo\\{dataset.split("_")[1]}\\model\\0_pipeline_saved\\0_pipeline_saved.json'
f'MetaFEDOT\\data\\topo_ws_selection_evo\\{dataset.split("_")[1]}\\'
f'model\\0_pipeline_saved\\0_pipeline_saved.json'
)

topo_pipeline_ = Pipeline().load(source=path_to_topo)
Expand Down

0 comments on commit 6f97988

Please sign in to comment.