
Commit

Update tests
arpitjasa-db committed Feb 2, 2024
1 parent 9af651c commit ecf542b
Showing 7 changed files with 138 additions and 124 deletions.
2 changes: 1 addition & 1 deletion databricks_template_schema.json
@@ -238,5 +238,5 @@
}
}
},
"success_message" : "\n Your MLOps Stack has been created in the '{{.input_root_dir}}{{if not (eq .input_setup_cicd_and_project `CICD_Only`) }}/{{.input_project_name}}{{end}}' directory!\n\nPlease refer to the README.md for further instructions on getting started."
"success_message" : "\n*** Your MLOps Stack has been created in the '{{.input_root_dir}}{{if not (eq .input_setup_cicd_and_project `CICD_Only`) }}/{{.input_project_name}}{{end}}' directory! ***\n\nPlease refer to the README.md for further instructions on getting started."
}
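For context, the success_message above uses a Go-template conditional so that the project subdirectory is appended to the reported path only when project files are actually generated (i.e., when the setup mode is not `CICD_Only`). The following is a minimal Python sketch of how that conditional resolves; the real rendering is done by the template engine, and the input values below are hypothetical examples.

# Illustrative only: mirrors the path conditional in success_message,
# '{{.input_root_dir}}{{if not (eq .input_setup_cicd_and_project `CICD_Only`)}}/{{.input_project_name}}{{end}}'
def rendered_output_dir(input_root_dir, input_project_name, input_setup_cicd_and_project):
    path = input_root_dir
    if input_setup_cicd_and_project != "CICD_Only":
        # A project is generated, so the message points inside the project subdirectory.
        path += "/" + input_project_name
    return path

assert rendered_output_dir("my-root", "my-project", "CICD_and_Project") == "my-root/my-project"
assert rendered_output_dir("my-root", "my-project", "CICD_Only") == "my-root"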
1 change: 1 addition & 0 deletions tests/example-project-configs/aws/aws-github.json
@@ -1,4 +1,5 @@
{
"input_setup_cicd_and_project": "CICD_and_Project",
"input_root_dir": "example-aws-github-project",
"input_project_name": "example-aws-github-project",
"input_cloud": "aws",
1 change: 1 addition & 0 deletions tests/example-project-configs/azure/azure-devops.json
@@ -1,4 +1,5 @@
{
"input_setup_cicd_and_project": "CICD_and_Project",
"input_root_dir": "test-azure-devops-project",
"input_project_name": "test-azure-devops-project",
"input_cloud": "azure",
1 change: 1 addition & 0 deletions tests/example-project-configs/azure/azure-github.json
@@ -1,4 +1,5 @@
{
"input_setup_cicd_and_project": "CICD_and_Project",
"input_root_dir": "example-azure-github-project",
"input_project_name": "example-azure-github-project",
"input_cloud": "azure",
91 changes: 61 additions & 30 deletions tests/test_create_project.py
@@ -160,6 +160,7 @@ def test_generate_project_with_default_values(
databricks_cli,
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
@@ -193,28 +194,54 @@
assert f"{param}={value}" in test_file_contents


def prepareContext(
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
):
context = {
"input_setup_cicd_and_project": setup_cicd_and_project,
"input_project_name": TEST_PROJECT_NAME,
"input_root_dir": TEST_PROJECT_NAME,
"input_cloud": cloud,
"input_cicd_platform": cicd_platform,
}
if include_feature_store != "":
context["input_include_feature_store"] = include_feature_store
if include_mlflow_recipes != "":
context["input_include_mlflow_recipes"] = include_mlflow_recipes
if include_models_in_unity_catalog != "":
context[
"input_include_models_in_unity_catalog"
] = include_models_in_unity_catalog
return context


@parametrize_by_project_generation_params
def test_generate_project_check_delta_output(
tmpdir,
databricks_cli,
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
):
"""
Asserts the behavior of Delta Table-related artifacts when generating MLOps Stacks.
"""
context = {
"input_project_name": TEST_PROJECT_NAME,
"input_root_dir": TEST_PROJECT_NAME,
"input_cloud": cloud,
"input_cicd_platform": cicd_platform,
"input_include_feature_store": include_feature_store,
"input_include_mlflow_recipes": include_mlflow_recipes,
"input_include_models_in_unity_catalog": include_models_in_unity_catalog,
}
context = prepareContext(
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
)
generate(tmpdir, databricks_cli, context=context)
delta_notebook_path = (
tmpdir
@@ -224,7 +251,11 @@ def test_generate_project_check_delta_output(
/ "notebooks"
/ "Train.py"
)
if include_mlflow_recipes == "no" and include_feature_store == "no":
if (
setup_cicd_and_project != "CICD_Only"
and include_mlflow_recipes == "no"
and include_feature_store == "no"
):
assert os.path.isfile(delta_notebook_path)
else:
assert not os.path.isfile(delta_notebook_path)
@@ -236,22 +267,22 @@ def test_generate_project_check_feature_store_output(
databricks_cli,
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
):
"""
Asserts the behavior of feature store-related artifacts when generating MLOps Stacks.
"""
context = {
"input_project_name": TEST_PROJECT_NAME,
"input_root_dir": TEST_PROJECT_NAME,
"input_cloud": cloud,
"input_cicd_platform": cicd_platform,
"input_include_feature_store": include_feature_store,
"input_include_mlflow_recipes": include_mlflow_recipes,
"input_include_models_in_unity_catalog": include_models_in_unity_catalog,
}
context = prepareContext(
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
)
generate(tmpdir, databricks_cli, context=context)
fs_notebook_path = (
tmpdir
@@ -261,7 +292,7 @@ def test_generate_project_check_feature_store_output(
/ "notebooks"
/ "GenerateAndWriteFeatures.py"
)
if include_feature_store == "yes":
if setup_cicd_and_project != "CICD_Only" and include_feature_store == "yes":
assert os.path.isfile(fs_notebook_path)
else:
assert not os.path.isfile(fs_notebook_path)
@@ -273,22 +304,22 @@ def test_generate_project_check_recipe_output(
databricks_cli,
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
):
"""
Asserts the behavior of MLflow Recipes-related artifacts when generating MLOps Stacks.
"""
context = {
"input_project_name": TEST_PROJECT_NAME,
"input_root_dir": TEST_PROJECT_NAME,
"input_cloud": cloud,
"input_cicd_platform": cicd_platform,
"input_include_feature_store": include_feature_store,
"input_include_mlflow_recipes": include_mlflow_recipes,
"input_include_models_in_unity_catalog": include_models_in_unity_catalog,
}
context = prepareContext(
cloud,
cicd_platform,
setup_cicd_and_project,
include_feature_store,
include_mlflow_recipes,
include_models_in_unity_catalog,
)
generate(tmpdir, databricks_cli, context=context)
recipe_notebook_path = (
tmpdir
@@ -298,7 +329,7 @@ def test_generate_project_check_recipe_output(
/ "notebooks"
/ "TrainWithMLflowRecipes.py"
)
if include_mlflow_recipes == "yes":
if setup_cicd_and_project != "CICD_Only" and include_mlflow_recipes == "yes":
assert os.path.isfile(recipe_notebook_path)
else:
assert not os.path.isfile(recipe_notebook_path)
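The prepareContext helper added above replaces the inline context dictionaries in the three artifact tests and omits the optional include_* keys when they are passed as empty strings. A minimal usage sketch follows; the argument values are hypothetical, and TEST_PROJECT_NAME is assumed to be "my-mlops-project" as suggested by the paths in test_github_actions.py below.

# Hypothetical call; assumes TEST_PROJECT_NAME == "my-mlops-project".
context = prepareContext(
    cloud="aws",
    cicd_platform="github_actions",
    setup_cicd_and_project="CICD_and_Project",
    include_feature_store="yes",
    include_mlflow_recipes="no",
    include_models_in_unity_catalog="",  # empty string -> key omitted from the context
)
# context == {
#     "input_setup_cicd_and_project": "CICD_and_Project",
#     "input_project_name": "my-mlops-project",
#     "input_root_dir": "my-mlops-project",
#     "input_cloud": "aws",
#     "input_cicd_platform": "github_actions",
#     "input_include_feature_store": "yes",
#     "input_include_mlflow_recipes": "no",
# }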
55 changes: 21 additions & 34 deletions tests/test_github_actions.py
@@ -1,20 +1,35 @@
import subprocess
import pytest
from functools import wraps
from utils import (
databricks_cli,
generated_project_dir,
parametrize_by_cloud,
parametrize_by_project_generation_params,
)


def parametrize_by_params(fn):
@wraps(fn)
@pytest.mark.parametrize(
"setup_cicd_and_project,include_feature_store,include_mlflow_recipes,include_models_in_unity_catalog",
[
("CICD_and_Project", "no", "no", "no"),
("CICD_and_Project", "no", "no", "yes"),
("CICD_and_Project", "no", "yes", "no"),
("CICD_and_Project", "yes", "no", "no"),
("CICD_and_Project", "yes", "no", "yes"),
],
)
def wrapper(*args, **kwargs):
return fn(*args, **kwargs)

return wrapper


@pytest.mark.parametrize(
"cicd_platform", ["github_actions", "github_actions_for_github_enterprise_servers"]
)
@pytest.mark.parametrize(
"include_feature_store, include_mlflow_recipes, include_models_in_unity_catalog",
[("yes", "no", "no"), ("no", "yes", "no"), ("no", "no", "yes"), ("no", "no", "no")],
)
@parametrize_by_params
@parametrize_by_cloud
def test_generated_yaml_format(cicd_platform, generated_project_dir):
# Note: actionlint only works when the directory is a git project. Thus we begin by initializing
@@ -36,11 +51,7 @@ def test_generated_yaml_format(cicd_platform, generated_project_dir):
@pytest.mark.parametrize(
"cicd_platform", ["github_actions", "github_actions_for_github_enterprise_servers"]
)
@pytest.mark.parametrize("include_feature_store", ["no"])
@pytest.mark.parametrize(
"include_mlflow_recipes, include_models_in_unity_catalog",
[("yes", "no"), ("no", "yes"), ("no", "no")],
)
@parametrize_by_params
@parametrize_by_cloud
def test_run_unit_tests_workflow(cicd_platform, generated_project_dir):
"""Test that the GitHub workflow for running unit tests in the materialized project passes"""
@@ -56,27 +67,3 @@ def test_run_unit_tests_workflow(cicd_platform, generated_project_dir):
executable="/bin/bash",
cwd=(generated_project_dir / "my-mlops-project"),
)


@pytest.mark.large
@pytest.mark.parametrize(
"cicd_platform", ["github_actions", "github_actions_for_github_enterprise_servers"]
)
@pytest.mark.parametrize("include_feature_store", ["yes"])
@pytest.mark.parametrize("include_mlflow_recipes", ["no"])
@pytest.mark.parametrize("include_models_in_unity_catalog", ["no"])
@parametrize_by_cloud
def test_run_unit_tests_feature_store_workflow(cicd_platform, generated_project_dir):
"""Test that the GitHub workflow for running unit tests passes for feature store"""
# We only test the unit test workflow, as it's the only one that doesn't require
# Databricks REST API
subprocess.run(
"""
git init
act -s GITHUB_TOKEN workflow_dispatch --workflows .github/workflows/my-mlops-project-run-tests.yml -j "unit_tests"
""",
shell=True,
check=True,
executable="/bin/bash",
cwd=(generated_project_dir / "my-mlops-project"),
)
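The parametrize_by_params decorator introduced above folds the previous per-test @pytest.mark.parametrize stacks into one shared list of (setup_cicd_and_project, include_feature_store, include_mlflow_recipes, include_models_in_unity_catalog) combinations, which also lets the dropped feature-store workflow test collapse into test_run_unit_tests_workflow. Below is a simplified, self-contained sketch of the same decorator pattern; the parameter names and values are hypothetical, not taken from the repository.

import pytest
from functools import wraps

def parametrize_by_flags(fn):
    # Same pattern as parametrize_by_params: attach a shared parametrization to any
    # test while @wraps preserves its signature for pytest fixture/param resolution.
    @wraps(fn)
    @pytest.mark.parametrize("flag_a,flag_b", [("yes", "no"), ("no", "yes")])
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapper

@parametrize_by_flags
def test_flags(flag_a, flag_b):
    # pytest collects one test per (flag_a, flag_b) pair declared in the decorator.
    assert {flag_a, flag_b} == {"yes", "no"}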
(diff for the remaining changed file did not load and is not shown)
