[#60] Add some tests for training step 3
One of these tests is failing because LinkStepSaveModelMetadata is erroring out
when it's not skipped.
riley-harper committed Oct 18, 2023
1 parent d26daa4 commit 2f3144e
Showing 1 changed file with 25 additions and 0 deletions.
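
The skip behavior that the new tests assert on comes from a guard in LinkStepSaveModelMetadata. As a rough illustration only, a minimal sketch of that kind of guard is below; the class shape, constructor, and config access are assumptions, and only the feature_importances flag and the skip message come from the tests in this commit.

    class LinkStepSaveModelMetadata:
        """Hypothetical sketch of training step 3's skip guard (not hlink's actual code)."""

        def __init__(self, config):
            # Assumed: `config` is the parsed config dict with a "training" section.
            self.config = config

        def run(self):
            training_section = self.config.get("training", {})
            if not training_section.get("feature_importances", False):
                # The new tests assert that this message appears on stdout.
                print("Skipping the save model metadata training step")
                return
            # Otherwise compute and save model metadata (feature importances).
            # Per the commit message, this branch is currently erroring out.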
25 changes: 25 additions & 0 deletions hlink/tests/training_test.py
@@ -85,6 +85,7 @@ def test_all_steps(

# training_conf["training"]["use_potential_matches_features"] = True
training_conf["training"]["score_with_model"] = True
training_conf["training"]["feature_importances"] = True
training_conf["spark_tmp_dir"] = spark_test_tmp_dir_path

training.link_run.trained_models["trained_model"] = None
@@ -110,6 +111,8 @@ def test_all_steps(
assert row.prediction == 0
assert row.state_distance_imp.round(0) == 1909

training.run_step(3)


def test_step_2_bucketizer(spark, main, conf):
"""Test a bucketized feature using spark pipeline function"""
@@ -236,3 +239,25 @@ def test_step_2_interaction(spark, main, conf):
assert prepped_data.query("var1 == 3")["interacted_vars012"].iloc[0][0] == 18

main.do_drop_all("")


def test_step_3_skipped_on_no_feature_importances(training_conf, training, capsys):
"""Step 3 is skipped when there is no training.feature_importances attribute
in the config."""
assert "feature_importances" not in training_conf

training.run_step(3)

output = capsys.readouterr().out
assert "Skipping the save model metadata training step" in output


def test_step_3_skipped_on_false_feature_importances(training_conf, training, capsys):
"""Step 3 is skipped when training.feature_importances is set to false in
the config."""
training_conf["feature_importances"] = False

training.run_step(3)

output = capsys.readouterr().out
assert "Skipping the save model metadata training step" in output
