[pre-commit.ci] pre-commit autoupdate #555

Merged (4 commits) on Jan 21, 2025
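
This autoupdate bumps the Black hook from rev 22.10.0 to 24.10.0. The remaining hunks are consistent with re-running the updated hook: Black's 2024 stable style adds a blank line after class docstrings, wraps multi-line conditional expressions and multiple context managers in parentheses, and collapses ellipsis-only stub bodies onto the def line. The new E704 entry in .flake8 keeps flake8 compatible with that last change.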

.flake8 (2 changes: 1 addition & 1 deletion)
@@ -1,3 +1,3 @@
 [flake8]
 max-line-length = 99
-ignore = W503
+ignore = W503,E704
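
The new E704 code ("statement on same line as def") is triggered by the one-line stub bodies Black 24 now produces (see the tests/test_func_adl_dataset.py hunk at the end of this diff), so it joins W503 in the ignore list. A minimal sketch of the pattern, using an illustrative class name rather than anything from this repository:

    from abc import ABC, abstractmethod


    class TypeSpecSketch(ABC):
        "illustrative typespec-style class"

        # Black 24 collapses an ellipsis-only body onto the def line;
        # flake8 reports E704 here unless the code is ignored.
        @abstractmethod
        def fork_it_over(self) -> int: ...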

.pre-commit-config.yaml (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ repos:
     hooks:
       - id: flake8
   - repo: https://github.com/psf/black
-    rev: 22.10.0
+    rev: 24.10.0
     hooks:
       - id: black

servicex/app/transforms.py (2 changes: 2 additions & 0 deletions)
@@ -200,6 +200,7 @@ class TimeFrame(str, Enum):
     r"""
     Time Frame levels: 'day', 'week' & 'month'
     """
+
     day = ("day",)
     week = ("week",)
     month = ("month",)
@@ -209,6 +210,7 @@ class LogLevel(str, Enum):
     r"""
     Level of the log messages: INFO & ERROR
     """
+
     info = ("INFO",)
     error = ("ERROR",)

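Black's 2024 stable style inserts a blank line between a class docstring and the first statement that follows it, which is all these two hunks (and the servicex/models.py and func_adl_dataset.py hunks below) change. A short illustrative sketch, not taken from the repository:

    from enum import Enum


    class TimeFrameSketch(str, Enum):
        r"""
        Stand-in for the TimeFrame enum above.
        """

        # Black 24 enforces the blank line above this comment;
        # Black 22 left the members directly under the docstring.
        day = "day"
        week = "week"
        month = "month"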

servicex/databinder_models.py (8 changes: 5 additions & 3 deletions)
@@ -162,9 +162,11 @@ def hash(self):
                 [
                     self.dataset_identifier.hash,
                     self.NFiles,
-                    self.Query
-                    if (not self.Query or isinstance(self.Query, str))
-                    else self.Query.generate_selection_string(),
+                    (
+                        self.Query
+                        if (not self.Query or isinstance(self.Query, str))
+                        else self.Query.generate_selection_string()
+                    ),
                     self.Codegen,
                 ]
             ).encode("utf-8")
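
Black 24 parenthesizes a conditional (ternary) expression when it has to span multiple lines, instead of letting the if/else continuation lines hang inside the surrounding list. A self-contained sketch of the same hashing pattern, with hypothetical names (_Query, spec_hash) standing in for the real model fields:

    import hashlib


    class _Query:
        """Hypothetical query object exposing the method the model calls."""

        def generate_selection_string(self) -> str:
            return "Select(...)"


    def spec_hash(query, codegen: str) -> str:
        # The parenthesized conditional mirrors the formatting in the hunk above.
        return hashlib.sha256(
            str(
                [
                    (
                        query
                        if (not query or isinstance(query, str))
                        else query.generate_selection_string()
                    ),
                    codegen,
                ]
            ).encode("utf-8")
        ).hexdigest()


    print(spec_hash(_Query(), "uproot-raw"))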

servicex/func_adl/func_adl_dataset.py (1 change: 1 addition & 0 deletions)
@@ -51,6 +51,7 @@ class FuncADLQuery(QueryStringGenerator, EventDataset[T], ABC):
     r"""
     ServiceX Dataset class that uses func_adl query syntax.
     """
+
     # These are methods that are translated locally
     _execute_locally = ["ResultPandasDF", "ResultAwkwardArray"]
     default_codegen = None

servicex/models.py (3 changes: 3 additions & 0 deletions)
@@ -136,6 +136,7 @@ class TransformStatus(DocStringBaseModel):
     r"""
     Status object returned by servicex
     """
+
     model_config = {"use_attribute_docstrings": True}

     request_id: str
@@ -185,6 +186,7 @@ class ResultFile(DocStringBaseModel):
     r"""
     Record reporting the properties of a transformed file result
     """
+
     model_config = {"use_attribute_docstrings": True}

     filename: str
@@ -197,6 +199,7 @@ class TransformedResults(DocStringBaseModel):
     Returned for a submission. Gives you everything you need to know about a completed
     transform.
     """
+
     model_config = {"use_attribute_docstrings": True}

     hash: str

tests/test_databinder.py (157 changes: 94 additions & 63 deletions)
@@ -302,12 +302,15 @@ def test_submit_mapping(transformed_result, codegen_list):
             }
         ],
     }
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         results = deliver(spec, config_path="tests/example_config.yaml")
         assert list(results["sampleA"]) == ["1.parquet"]
@@ -327,12 +330,15 @@ def test_submit_mapping_signed_urls(transformed_result_signed_url, codegen_list)
             }
         ],
     }
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_signed_urls",
-        return_value=[transformed_result_signed_url],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_signed_urls",
+            return_value=[transformed_result_signed_url],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         results = deliver(spec, config_path="tests/example_config.yaml")
         assert list(results["sampleA"]) == [
@@ -354,12 +360,15 @@ def test_submit_mapping_failure(transformed_result, codegen_list):
             }
         ]
     }
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[ServiceXException("dummy")],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[ServiceXException("dummy")],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         results = deliver(spec, config_path="tests/example_config.yaml")
         assert len(results) == 1
@@ -383,12 +392,15 @@ def test_submit_mapping_failure_signed_urls(codegen_list):
             }
         ],
     }
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_signed_urls",
-        return_value=[ServiceXException("dummy")],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_signed_urls",
+            return_value=[ServiceXException("dummy")],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         results = deliver(
             spec, config_path="tests/example_config.yaml", return_exceptions=False
@@ -591,9 +603,10 @@ def test_yaml_include(tmp_path):
     from servicex.servicex_client import _load_ServiceXSpec

     # Create two files, one has definitions for the other and is included by it
-    with open(tmp_path / "definitions.yaml", "w") as f1, open(
-        path2 := (tmp_path / "parent.yaml"), "w"
-    ) as f2:
+    with (
+        open(tmp_path / "definitions.yaml", "w") as f1,
+        open(path2 := (tmp_path / "parent.yaml"), "w") as f2,
+    ):
         f1.write(
             """
 - &DEF_query !PythonFunction |
@@ -638,12 +651,15 @@ def test_funcadl_query(transformed_result, codegen_list):
             ]
         }
     )
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         deliver(spec, config_path="tests/example_config.yaml")

@@ -667,12 +683,15 @@ def test_query_with_codegen_override(transformed_result, codegen_list):
             ],
         }
     )
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         with pytest.raises(NameError) as excinfo:
             deliver(spec, config_path="tests/example_config.yaml")
@@ -694,12 +713,15 @@ def test_query_with_codegen_override(transformed_result, codegen_list):
             ]
         }
     )
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         with pytest.raises(NameError) as excinfo:
             deliver(spec, config_path="tests/example_config.yaml")
@@ -747,12 +769,15 @@ def run_query(input_filenames=None):
             ]
         }
     )
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         deliver(spec, config_path="tests/example_config.yaml")

@@ -772,12 +797,15 @@ def test_uproot_raw_query(transformed_result, codegen_list):
             ]
         }
     )
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         deliver(spec, config_path="tests/example_config.yaml")

@@ -799,12 +827,15 @@ def test_uproot_raw_query_parquet(transformed_result, codegen_list):
         }
     )
     print(spec)
-    with patch(
-        "servicex.dataset_group.DatasetGroup.as_files",
-        return_value=[transformed_result],
-    ), patch(
-        "servicex.servicex_client.ServiceXClient.get_code_generators",
-        return_value=codegen_list,
+    with (
+        patch(
+            "servicex.dataset_group.DatasetGroup.as_files",
+            return_value=[transformed_result],
+        ),
+        patch(
+            "servicex.servicex_client.ServiceXClient.get_code_generators",
+            return_value=codegen_list,
+        ),
     ):
         deliver(spec, config_path="tests/example_config.yaml")

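Every hunk in this file is the same mechanical change: Black 24 wraps multiple context managers in a single parenthesized with statement (syntax documented from Python 3.10) instead of the old comma-chained form. A runnable sketch with illustrative patch targets, not the ones used in the tests:

    import os
    from unittest.mock import patch

    with (
        patch("os.getcwd", return_value="/tmp"),
        patch("os.cpu_count", return_value=1),
    ):
        # Both patches are active inside the single with block.
        print(os.getcwd(), os.cpu_count())  # -> /tmp 1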

tests/test_func_adl_dataset.py (3 changes: 1 addition & 2 deletions)
@@ -74,8 +74,7 @@ def test_type():
     class my_type_info:
         "typespec for possible event type"

-        def fork_it_over(self) -> int:
-            ...
+        def fork_it_over(self) -> int: ...

     datasource = FuncADLQuery[my_type_info](item_type=my_type_info)

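The one-line stub body above is the Black 24 formatting that the new E704 ignore in .flake8 accommodates; without it, flake8 would flag this reformatted line.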