From d546e83527d5a9c2730c0f996ca34cea8fb628ca Mon Sep 17 00:00:00 2001 From: Kirils Mensikovs Date: Thu, 5 Sep 2019 17:34:56 +0200 Subject: [PATCH 01/72] Tavern GRPC support --- MANIFEST.in | 1 + example/grpc/__init__.py | 0 example/grpc/common.yaml | 7 + example/grpc/docker-compose.yaml | 10 ++ example/grpc/helloworld_pb2.py | 134 +++++++++++++++ example/grpc/helloworld_pb2_grpc.py | 45 +++++ example/grpc/server.Dockerfile | 9 + example/grpc/server.py | 35 ++++ example/grpc/test_grpc.tavern.yaml | 22 +++ setup.cfg | 5 + tavern/_plugins/grpc/__init__.py | 0 tavern/_plugins/grpc/client.py | 237 +++++++++++++++++++++++++++ tavern/_plugins/grpc/request.py | 62 +++++++ tavern/_plugins/grpc/response.py | 116 +++++++++++++ tavern/_plugins/grpc/schema.yaml | 45 +++++ tavern/_plugins/grpc/tavernhook.py | 40 +++++ tavern/core.py | 5 + tavern/plugins.py | 6 +- tavern/schemas/extensions.py | 25 +++ tavern/schemas/tests.schema.yaml | 47 ++++++ tavern/testutils/pytesthook/hooks.py | 3 + tavern/testutils/pytesthook/util.py | 7 +- tavern/util/exceptions.py | 5 + tests/unit/conftest.py | 2 +- tests/unit/test_call_run.py | 7 +- tox-integration.ini | 3 +- 26 files changed, 871 insertions(+), 7 deletions(-) create mode 100644 example/grpc/__init__.py create mode 100644 example/grpc/common.yaml create mode 100644 example/grpc/docker-compose.yaml create mode 100644 example/grpc/helloworld_pb2.py create mode 100644 example/grpc/helloworld_pb2_grpc.py create mode 100644 example/grpc/server.Dockerfile create mode 100644 example/grpc/server.py create mode 100644 example/grpc/test_grpc.tavern.yaml create mode 100644 tavern/_plugins/grpc/__init__.py create mode 100644 tavern/_plugins/grpc/client.py create mode 100644 tavern/_plugins/grpc/request.py create mode 100644 tavern/_plugins/grpc/response.py create mode 100644 tavern/_plugins/grpc/schema.yaml create mode 100644 tavern/_plugins/grpc/tavernhook.py diff --git a/MANIFEST.in b/MANIFEST.in index d709b9073..e1376b0e7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ include tavern/schemas/tests.schema.yaml include tavern/_plugins/mqtt/schema.yaml +include tavern/_plugins/grpc/schema.yaml include LICENSE diff --git a/example/grpc/__init__.py b/example/grpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/example/grpc/common.yaml b/example/grpc/common.yaml new file mode 100644 index 000000000..f5b0df6ca --- /dev/null +++ b/example/grpc/common.yaml @@ -0,0 +1,7 @@ +--- +name: test includes +description: used for testing against local server + +variables: + grpc_host: localhost + grpc_port: 50051 diff --git a/example/grpc/docker-compose.yaml b/example/grpc/docker-compose.yaml new file mode 100644 index 000000000..980aa7c43 --- /dev/null +++ b/example/grpc/docker-compose.yaml @@ -0,0 +1,10 @@ +--- +version: '2' + +services: + server: + build: + context: . + dockerfile: server.Dockerfile + ports: + - "50051:50051" \ No newline at end of file diff --git a/example/grpc/helloworld_pb2.py b/example/grpc/helloworld_pb2.py new file mode 100644 index 000000000..1bb6ce053 --- /dev/null +++ b/example/grpc/helloworld_pb2.py @@ -0,0 +1,134 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: helloworld.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='helloworld.proto', + package='helloworld', + syntax='proto3', + serialized_pb=_b('\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3') +) + + + + +_HELLOREQUEST = _descriptor.Descriptor( + name='HelloRequest', + full_name='helloworld.HelloRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='helloworld.HelloRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=32, + serialized_end=60, +) + + +_HELLOREPLY = _descriptor.Descriptor( + name='HelloReply', + full_name='helloworld.HelloReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message', full_name='helloworld.HelloReply.message', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=62, + serialized_end=91, +) + +DESCRIPTOR.message_types_by_name['HelloRequest'] = _HELLOREQUEST +DESCRIPTOR.message_types_by_name['HelloReply'] = _HELLOREPLY +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +HelloRequest = _reflection.GeneratedProtocolMessageType('HelloRequest', (_message.Message,), dict( + DESCRIPTOR = _HELLOREQUEST, + __module__ = 'helloworld_pb2' + # @@protoc_insertion_point(class_scope:helloworld.HelloRequest) + )) +_sym_db.RegisterMessage(HelloRequest) + +HelloReply = _reflection.GeneratedProtocolMessageType('HelloReply', (_message.Message,), dict( + DESCRIPTOR = _HELLOREPLY, + __module__ = 'helloworld_pb2' + # @@protoc_insertion_point(class_scope:helloworld.HelloReply) + )) +_sym_db.RegisterMessage(HelloReply) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW')) + +_GREETER = _descriptor.ServiceDescriptor( + name='Greeter', + full_name='helloworld.Greeter', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=93, + serialized_end=166, + methods=[ + _descriptor.MethodDescriptor( + name='SayHello', + 
full_name='helloworld.Greeter.SayHello', + index=0, + containing_service=None, + input_type=_HELLOREQUEST, + output_type=_HELLOREPLY, + options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_GREETER) + +DESCRIPTOR.services_by_name['Greeter'] = _GREETER + +# @@protoc_insertion_point(module_scope) \ No newline at end of file diff --git a/example/grpc/helloworld_pb2_grpc.py b/example/grpc/helloworld_pb2_grpc.py new file mode 100644 index 000000000..b55ac8e7a --- /dev/null +++ b/example/grpc/helloworld_pb2_grpc.py @@ -0,0 +1,45 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import helloworld_pb2 as helloworld__pb2 + + +class GreeterStub(object): + """The greeting service definition. + """ + + def __init__(self, channel): + """Constructor. + Args: + channel: A grpc.Channel. + """ + self.SayHello = channel.unary_unary( + '/helloworld.Greeter/SayHello', + request_serializer=helloworld__pb2.HelloRequest.SerializeToString, + response_deserializer=helloworld__pb2.HelloReply.FromString, + ) + + +class GreeterServicer(object): + """The greeting service definition. + """ + + def SayHello(self, request, context): + """Sends a greeting + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_GreeterServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SayHello': grpc.unary_unary_rpc_method_handler( + servicer.SayHello, + request_deserializer=helloworld__pb2.HelloRequest.FromString, + response_serializer=helloworld__pb2.HelloReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'helloworld.Greeter', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) \ No newline at end of file diff --git a/example/grpc/server.Dockerfile b/example/grpc/server.Dockerfile new file mode 100644 index 000000000..d6271641e --- /dev/null +++ b/example/grpc/server.Dockerfile @@ -0,0 +1,9 @@ +FROM python:3.5-slim-jessie + +RUN pip install grpcio grpcio-tools grpcio-reflection + +COPY server.py / +COPY helloworld_pb2.py / +COPY helloworld_pb2_grpc.py / + +CMD ["python3", "/server.py"] diff --git a/example/grpc/server.py b/example/grpc/server.py new file mode 100644 index 000000000..6bad7eac2 --- /dev/null +++ b/example/grpc/server.py @@ -0,0 +1,35 @@ +from concurrent import futures +import logging +import threading + +import grpc +from grpc_reflection.v1alpha import reflection + +import helloworld_pb2 +import helloworld_pb2_grpc + + +class Greeter(helloworld_pb2_grpc.GreeterServicer): + + def SayHello(self, request, context): + return helloworld_pb2.HelloReply(message='Hello, %s!' 
% request.name) + + +def serve(): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) + SERVICE_NAMES = ( + helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(SERVICE_NAMES, server) + server.add_insecure_port('[::]:50051') + logging.info("Starting...") + server.start() + + event = threading.Event() + event.wait() + +if __name__ == '__main__': + logging.basicConfig() + serve() \ No newline at end of file diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml new file mode 100644 index 000000000..f47a9cc2a --- /dev/null +++ b/example/grpc/test_grpc.tavern.yaml @@ -0,0 +1,22 @@ +--- + +test_name: Test grpc message echo + +includes: + - !include common.yaml + +grpc: &grpc_spec + connect: + host: "{grpc_host}" + port: !int "{grpc_port}" + timeout: 3 + +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + body: + message: "Hello, John!" diff --git a/setup.cfg b/setup.cfg index 9b30daa04..88a69b6d8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,6 +43,9 @@ install_requires = backports.functools_lru_cache paho-mqtt==1.3.1 jmespath + grpcio + grpcio-reflection + grpcio-status pytest>=3.6.0,<4.6.0 [options.packages.find] @@ -59,6 +62,8 @@ tavern_http = requests = tavern._plugins.rest.tavernhook:TavernRestPlugin tavern_mqtt = paho-mqtt = tavern._plugins.mqtt.tavernhook +tavern_grpc = + grpc = tavern._plugins.grpc.tavernhook [bdist_wheel] universal = 1 diff --git a/tavern/_plugins/grpc/__init__.py b/tavern/_plugins/grpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py new file mode 100644 index 000000000..d844a7334 --- /dev/null +++ b/tavern/_plugins/grpc/client.py @@ -0,0 +1,237 @@ +from distutils.spawn import find_executable +from importlib import import_module +import os +import logging +import subprocess +import sys +import pkgutil +import warnings + +from future.utils import raise_from + +import grpc + +from grpc_reflection.v1alpha import reflection_pb2 +from grpc_reflection.v1alpha import reflection_pb2_grpc + +from google.protobuf import descriptor_pb2 +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import json_format + +from grpc_status import rpc_status + +from tavern.util.dict_util import check_expected_keys +from tavern.util import exceptions + +logger = logging.getLogger(__name__) + +with warnings.catch_warnings(): + warnings.simplefilter("ignore") + warnings.warn("deprecated", DeprecationWarning) + +# Find the Protocol Compiler. +if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): + protoc = os.environ["PROTOC"] +else: + protoc = find_executable("protoc") + + +def _generate_proto_import(source, output): + """Invokes the Protocol Compiler to generate a _pb2.py from the given + .proto file. Does nothing if the output already exists and is newer than + the input.""" + + if protoc is None: + sys.stderr.write( + "protoc is not installed nor found in ../src. 
Please compile it " + "or install the binary package.\n" + ) + sys.exit(-1) + + if not os.path.exists(source): + sys.stderr.write("Can't find required file: %s\n" % source) + sys.exit(-1) + + if not os.path.exists(output): + os.makedirs(output) + + logger.info("Generating %s...", output) + protos = [ + os.path.join(source, child) + for child in os.listdir(source) + if child.rsplit(".", 1)[-1] == "proto" + ] + + protoc_command = [protoc, "-I" + source, "--python_out=" + output] + protoc_command.extend(protos) + + if subprocess.call(protoc_command) != 0: + sys.exit(-1) + + +def _import_grpc_module(output): + if os.path.exists(output): + sys.path.append(output) + for (_, name, _) in pkgutil.iter_modules([output]): + import_module("." + name, package=output) + + +class GRPCClient(object): + def __init__(self, **kwargs): + logger.debug("Initialising GRPC client with %s", kwargs) + expected_blocks = { + "connect": {"host", "port", "options", "compression", "timeout", "tls"}, + "proto": {"source", "module"}, + "metadata": {}, + } + # check main block first + check_expected_keys(expected_blocks.keys(), kwargs) + + _connect_args = kwargs.pop("connect", {}) + check_expected_keys(expected_blocks["connect"], _connect_args) + + metadata = kwargs.pop("metadata", {}) + self._metadata = [(key, value) for key, value in metadata.items()] + + _proto_args = kwargs.pop("proto", {}) + check_expected_keys(expected_blocks["proto"], _proto_args) + + host = "localhost" + port = "50051" + + if "host" in _connect_args: + host_arg = _connect_args["host"] + host_port = host_arg.split(":") + + if len(host_port) == 2: + host = host_port[0] + port = host_port[1] + elif len(host_port) == 1: + host = host_arg + + port = _connect_args.get("port", port) + self.default_host = "{}:{}".format(host, port) + self.timeout = int(_connect_args.get("timeout", 5)) + self.tls = bool(_connect_args.get("tls", False)) + + self.channels = {} + self.sym_db = _symbol_database.Default() + + proto_module = _proto_args.get("module", "proto") + if "source" in _proto_args: + proto_source = _proto_args["source"] + _generate_proto_import(proto_source, proto_module) + + _import_grpc_module(proto_module) + + def _register_file_descriptor(self, service_proto): + for i in range(len(service_proto.file_descriptor_proto)): + file_descriptor_proto = service_proto.file_descriptor_proto[ + len(service_proto.file_descriptor_proto) - i - 1 + ] + proto = descriptor_pb2.FileDescriptorProto() + proto.ParseFromString(file_descriptor_proto) + self.sym_db.pool.Add(proto) + + def _get_reflection_info(self, channel, service_name=None, file_by_filename=None): + logger.debug("Geting GRPC protobuf for service %s", service_name) + ref_request = reflection_pb2.ServerReflectionRequest( + file_containing_symbol=service_name, file_by_filename=file_by_filename + ) + reflection_stub = reflection_pb2_grpc.ServerReflectionStub(channel) + ref_response = reflection_stub.ServerReflectionInfo( + iter([ref_request]), metadata=self._metadata + ) + for response in ref_response: + self._register_file_descriptor(response.file_descriptor_response) + + def _get_grpc_service(self, channel, service, method): + full_service_name = "{}.{}".format(service, method) + try: + grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) + input_type = self.sym_db.GetPrototype(grpc_service.input_type) + output_type = self.sym_db.GetPrototype(grpc_service.output_type) + except KeyError: + return None, None + + service_url = "/{}/{}".format(service, method) + grpc_method = channel.unary_unary( + 
service_url, + request_serializer=input_type.SerializeToString, + response_deserializer=output_type.FromString, + ) + + return grpc_method, input_type + + def _make_call_request(self, host, full_service): + full_service = full_service.replace("/", ".") + service_method = full_service.rsplit(".", 1) + if len(service_method) != 2: + raise exceptions.GRPCRequestException("Could not find method name") + + service = service_method[0] + method = service_method[1] + logger.debug( + "Make call for host %s service %s method %s", host, service, method + ) + + if host not in self.channels: + if self.tls: + credentials = grpc.ssl_channel_credentials() + self.channels[host] = grpc.secure_channel( + host, + credentials, + options=[("grpc.max_receive_message_length", 10 * 1024 * 1024)], + ) + else: + self.channels[host] = grpc.insecure_channel( + host, + options=[("grpc.max_receive_message_length", 10 * 1024 * 1024)], + ) + + channel = self.channels[host] + + grpc_method, input_type = self._get_grpc_service(channel, service, method) + if grpc_method is not None and input_type is not None: + return grpc_method, input_type + + try: + self._get_reflection_info(channel, service_name=service) + except grpc.RpcError as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + logger.error("Call failure: %s", rpc_error) + status = rpc_status.from_call(rpc_error) + logger.warning( + "Unable get %s service reflection information code %s detail %s", + service, + status.code, + status.details, + ) + raise_from(exceptions.GRPCRequestException, rpc_error) + + return self._get_grpc_service(channel, service, method) + + def __enter__(self): + logger.debug("Connecting to GRPC") + + def call(self, service, host=None, body=None, timeout=None): + if host is None: + host = self.default_host + if timeout is None: + timeout = self.timeout + + grpc_call, grpc_request = self._make_call_request(host, service) + if grpc_call is None or grpc_request is None: + raise exceptions.GRPCRequestException( + "Service {} was not found on host {}".format(service, host) + ) + + request = grpc_request() + if body is not None: + request = json_format.ParseDict(body, request) + + logger.debug("Send request %s", request) + + return grpc_call.future(request, metadata=self._metadata, timeout=timeout) + + def __exit__(self, *args): + logger.debug("Disconnecting from GRPC") diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py new file mode 100644 index 000000000..c74e4e080 --- /dev/null +++ b/tavern/_plugins/grpc/request.py @@ -0,0 +1,62 @@ +import logging +import json +import functools + +from future.utils import raise_from +from box import Box + +from tavern.util import exceptions +from tavern.util.dict_util import format_keys, check_expected_keys + +from tavern.request.base import BaseRequest + +logger = logging.getLogger(__name__) + + +def get_grpc_args(rspec, test_block_config): + """Format GRPC request args + """ + + fspec = format_keys(rspec, test_block_config["variables"]) + + if "json" in rspec: + if "body" in rspec: + raise exceptions.BadSchemaError( + "Can only specify one of 'body' or 'json' in GRPC request" + ) + + fspec["body"] = json.dumps(fspec.pop("json")) + + return fspec + + +class GRPCRequest(BaseRequest): + """Wrapper for a single GRPC request on a client + + Similar to RestRequest, publishes a single message. 
+ """ + + def __init__(self, client, rspec, test_block_config): + expected = {"host", "service", "body"} + + check_expected_keys(expected, rspec) + + grpc_args = get_grpc_args(rspec, test_block_config) + + self._prepared = functools.partial(client.call, **grpc_args) + + # Need to do this here because get_publish_args will modify the original + # input, which we might want to use to format. No error handling because + # all the error handling is done in the previous call + self._original_publish_args = format_keys(rspec, test_block_config["variables"]) + + def run(self): + try: + return self._prepared() + except ValueError as e: + logger.exception("Error executing request") + raise_from(exceptions.GRPCRequestException, e) + + @property + def request_vars(self): + return Box(self._original_publish_args) diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py new file mode 100644 index 000000000..b32870ace --- /dev/null +++ b/tavern/_plugins/grpc/response.py @@ -0,0 +1,116 @@ +import json +import logging + +from grpc import StatusCode +from google.protobuf import json_format + +from tavern.response.base import BaseResponse +from tavern.util.exceptions import TestFailError + +try: + LoadException = json.decoder.JSONDecodeError +except AttributeError: + # python 2 raises ValueError on json loads() error instead + LoadException = ValueError + +logger = logging.getLogger(__name__) + + +class GRPCResponse(BaseResponse): + def __init__(self, client, name, expected, test_block_config): + super(GRPCResponse, self).__init__(name, expected, test_block_config) + + self._client = client + + self.received_messages = [] + + def __str__(self): + if self.response: + return self.response.payload + else: + return "" + + def _validate_block(self, blockname, block): + """Validate a block of the response + + Args: + blockname (str): which part of the response is being checked + block (dict): The actual part being checked + """ + try: + expected_block = self.expected[blockname] or {} + except KeyError: + expected_block = {} + + if isinstance(expected_block, dict): + if expected_block.pop("$ext", None): + logger.warning( + "$ext function found in block %s - this has been moved to verify_response_with block - see documentation", + blockname, + ) + + logger.debug("Validating response %s against %s", blockname, expected_block) + + # 'strict' could be a list, in which case we only want to enable strict + # key checking for that specific bit of the response + test_strictness = self.test_block_config["strict"] + if isinstance(test_strictness, list): + block_strictness = blockname in test_strictness + else: + block_strictness = test_strictness + + self.recurse_check_key_match(expected_block, block, blockname, block_strictness) + + def verify(self, response): + # Get any keys to save + saved = {} + verify_status = [StatusCode.OK.name] + if "status" in self.expected: + status = self.expected["status"] + if isinstance(status, list): + verify_status = [name.upper() for name in status] + else: + verify_status = [status.upper()] + + if response.code().name not in verify_status: + self._adderr( + "expected status %s, but the actual response '%s'", + verify_status, + response.code().name, + ) + + if "details" in self.expected: + verify_details = self.expected["details"] + if verify_details != response.details(): + self._adderr( + "expected details '%s', but the actual response '%s'", + verify_details, + response.details(), + ) + + if "body" in self.expected: + result = response.result() + + json_result = 
json_format.MessageToDict( + result, + including_default_value_fields=True, + preserving_proto_field_name=True, + ) + + self._validate_block("body", json_result) + self._maybe_run_validate_functions(json_result) + + saved.update( + self.maybe_get_save_values_from_save_block("body", json_result) + ) + saved.update( + self.maybe_get_save_values_from_ext(json_result, self.expected) + ) + + if self.errors: + raise TestFailError( + "Test '{:s}' failed:\n{:s}".format(self.name, self._str_errors()), + failures=self.errors, + ) + + return saved diff --git a/tavern/_plugins/grpc/schema.yaml b/tavern/_plugins/grpc/schema.yaml new file mode 100644 index 000000000..a9a7b4a73 --- /dev/null +++ b/tavern/_plugins/grpc/schema.yaml @@ -0,0 +1,45 @@ +--- +name: GRPC schemas +desc: pykwalify schemas for 'grpc' plugin block, grpc_request, and grpc_response + +initialisation: + grpc: + required: false + type: map + mapping: + connect: + required: true + type: map + mapping: + host: + required: false + type: any + port: + required: false + type: any + func: int_variable + keepalive: + required: false + type: float + timeout: + required: false + type: float + tls: + required: false + type: any + func: bool_variable + + metadata: + required: false + type: any + + proto: + required: false + type: map + mapping: + source: + required: false + type: str + module: + required: false + type: str \ No newline at end of file diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py new file mode 100644 index 000000000..c481ba3be --- /dev/null +++ b/tavern/_plugins/grpc/tavernhook.py @@ -0,0 +1,40 @@ +import logging + +from os.path import join, abspath, dirname + +import yaml + +from tavern.util.dict_util import format_keys + +from .request import GRPCRequest +from .response import GRPCResponse +from .client import GRPCClient + +logger = logging.getLogger(__name__) + + +session_type = GRPCClient + +request_type = GRPCRequest +request_block_name = "grpc_request" + + +def get_expected_from_request(stage, test_block_config, session): + # grpc response is not required + grpc_expected = stage.get("grpc_response") + if grpc_expected: + # format so we can subscribe to the right topic + f_expected = format_keys(grpc_expected, test_block_config["variables"]) + expected = f_expected + else: + expected = {} + + return expected + + +verifier_type = GRPCResponse +response_block_name = "grpc_response" + +schema_path = join(abspath(dirname(__file__)), "schema.yaml") +with open(schema_path, "r") as schema_file: + schema = yaml.load(schema_file, Loader=yaml.SafeLoader) diff --git a/tavern/core.py b/tavern/core.py index 0c5ebf529..d81151ad9 100644 --- a/tavern/core.py +++ b/tavern/core.py @@ -250,6 +250,7 @@ def _run_pytest( tavern_global_cfg, tavern_mqtt_backend=None, tavern_http_backend=None, + tavern_grpc_backend=None, tavern_strict=None, pytest_args=None, **kwargs @@ -263,6 +264,8 @@ def _run_pytest( specified, uses tavern-mqtt tavern_http_backend (str, optional): name of HTTP plugin to use. If not specified, use tavern-http + tavern_grpc_backend (str, optional): name of GRPC plugin to use. If not + specified, use tavern-grpc tavern_strict (bool, optional): Strictness of checking for responses. 
See documentation for details pytest_args (list, optional): List of extra arguments to pass directly @@ -287,6 +290,8 @@ def _run_pytest( pytest_args += ["--tavern-mqtt-backend", tavern_mqtt_backend] if tavern_http_backend: pytest_args += ["--tavern-http-backend", tavern_http_backend] + if tavern_grpc_backend: + pytest_args += ["--tavern-grpc-backend", tavern_grpc_backend] if tavern_strict: pytest_args += ["--tavern-strict", tavern_strict] diff --git a/tavern/plugins.py b/tavern/plugins.py index dab92cbfe..bec825aaf 100644 --- a/tavern/plugins.py +++ b/tavern/plugins.py @@ -45,13 +45,13 @@ def is_valid_reqresp_plugin(ext): "session_type", # RestRequest, MQTTRequest "request_type", - # request, mqtt_publish + # request, mqtt_publish, grpc_request "request_block_name", # Some function that returns a dict "get_expected_from_request", # MQTTResponse, RestResponse "verifier_type", - # response, mqtt_response + # response, mqtt_response, grpc_request "response_block_name", # dictionary with pykwalify schema "schema", @@ -100,7 +100,7 @@ def _load_plugins(self, test_block_config): plugins = [] - for backend in ["http", "mqtt"]: + for backend in ["http", "mqtt", "grpc"]: namespace = "tavern_{}".format(backend) def enabled(ext): diff --git a/tavern/schemas/extensions.py b/tavern/schemas/extensions.py index 83ffa9db1..1cc2f92d1 100644 --- a/tavern/schemas/extensions.py +++ b/tavern/schemas/extensions.py @@ -5,6 +5,7 @@ from future.utils import raise_from from pykwalify.types import is_int, is_float, is_bool +from grpc import StatusCode from tavern.util import exceptions from tavern.util.exceptions import BadSchemaError @@ -219,6 +220,30 @@ def validate_status_code_is_int_or_list_of_ints(value, rule_obj, path): return True +def validate_grpc_status_is_valid_or_list_of_names(value, rule_obj, path): + """ Validate GRPC statuses https://github.com/grpc/grpc/blob/master/doc/statuscodes.md + """ + # pylint: disable=unused-argument + err_msg = "status has to be an valid grpc status name (got {})".format(value) + + if not isinstance(value, list) and not is_grpc_status(value): + raise BadSchemaError(err_msg) + + if isinstance(value, list): + if not all(is_grpc_status(i) for i in value): + raise BadSchemaError(err_msg) + + return True + + +def is_grpc_status(value): + value = value.upper() + for status in StatusCode: + if status.name == value: + return True + return False + + def check_usefixtures(value, rule_obj, path): # pylint: disable=unused-argument err_msg = "'usefixtures' has to be a list with at least one item" diff --git a/tavern/schemas/tests.schema.yaml b/tavern/schemas/tests.schema.yaml index e064002ad..e96fff0c4 100644 --- a/tavern/schemas/tests.schema.yaml +++ b/tavern/schemas/tests.schema.yaml @@ -116,6 +116,53 @@ schema;stage: json: type: any + grpc_request: + type: map + required: false + mapping: + host: + type: str + required: false + service: + type: str + required: true + body: + include: any_json_with_ext + retain: + type: any + func: bool_variable + required: false + + grpc_response: + type: map + required: false + mapping: + status: + type: any + func: validate_grpc_status_is_valid_or_list_of_names + details: + type: any + required: false + body: + type: any + required: false + json: + include: any_json_with_ext + required: false + timeout: + type: any + func: float_variable + required: false + verify_response_with: + func: validate_extensions + type: any + + save: + include: any_json_with_ext + mapping: + json: + type: any + request: type: map required: false diff --git 
a/tavern/testutils/pytesthook/hooks.py b/tavern/testutils/pytesthook/hooks.py index d45f477c1..6f5d55dd5 100644 --- a/tavern/testutils/pytesthook/hooks.py +++ b/tavern/testutils/pytesthook/hooks.py @@ -21,6 +21,9 @@ def pytest_addoption(parser): parser.addini( "tavern-mqtt-backend", help="Which mqtt backend to use", default="paho-mqtt" ) + parser.addini( + "tavern-grpc-backend", help="Which grpc backend to use", default="grpc" + ) parser.addini( "tavern-strict", help="Default response matching strictness", diff --git a/tavern/testutils/pytesthook/util.py b/tavern/testutils/pytesthook/util.py index 8216e6da3..6b5f5ade7 100644 --- a/tavern/testutils/pytesthook/util.py +++ b/tavern/testutils/pytesthook/util.py @@ -35,6 +35,11 @@ def add_parser_options(parser_addoption, with_defaults=True): help="Which mqtt backend to use", default="paho-mqtt" if with_defaults else None, ) + parser_addoption( + "--tavern-grpc-backend", + help="Which grpc backend to use", + default="grpc" if with_defaults else None, + ) parser_addoption( "--tavern-strict", help="Default response matching strictness", @@ -101,7 +106,7 @@ def _load_global_backends(pytest_config): """Load which backend should be used""" backend_settings = {} - backends = ["http", "mqtt"] + backends = ["http", "mqtt", "grpc"] for b in backends: # similar logic to above - use ini, then cmdline if present ini_opt = pytest_config.getini("tavern-{}-backend".format(b)) diff --git a/tavern/util/exceptions.py b/tavern/util/exceptions.py index 7f265e235..16c69c1c7 100644 --- a/tavern/util/exceptions.py +++ b/tavern/util/exceptions.py @@ -64,6 +64,11 @@ class RestRequestException(TavernException): """ +class GRPCRequestException(TavernException): + """Error making requests in GRPCRequest() + """ + + class MQTTRequestException(TavernException): """Error making requests in MQTTRequest() """ diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 6c0d7bd84..4223a7eb9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -12,7 +12,7 @@ def fix_example_includes(): "callback_url": "www.yahoo.co.uk", "request_topic": "/abc", }, - "backends": {"mqtt": "paho-mqtt", "http": "requests"}, + "backends": {"mqtt": "paho-mqtt", "http": "requests", "grpc": "grpc"}, "strict": True, "tavern_internal": {"pytest_hook_caller": Mock()}, } diff --git a/tests/unit/test_call_run.py b/tests/unit/test_call_run.py index 3b0197990..37ac6eac4 100644 --- a/tests/unit/test_call_run.py +++ b/tests/unit/test_call_run.py @@ -25,7 +25,12 @@ def test_run_with_cfg(self): @pytest.mark.parametrize( "expected_kwarg", - ("tavern_mqtt_backend", "tavern_http_backend", "tavern_strict"), + ( + "tavern_mqtt_backend", + "tavern_http_backend", + "tavern_grpc_backend", + "tavern_strict", + ), ) def test_doesnt_warn_about_expected_kwargs(self, expected_kwarg): kw = {expected_kwarg: 123} diff --git a/tox-integration.ini b/tox-integration.ini index 901ca3eb6..725de9ae9 100644 --- a/tox-integration.ini +++ b/tox-integration.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py34,py35,py36,py37,pypy,pypy3}-{generic,cookies,mqtt,advanced,components,noextra,hooks} +envlist = {py27,py34,py35,py36,py37,pypy,pypy3}-{generic,cookies,mqtt,grpc,advanced,components,noextra,hooks} skip_missing_interpreters = true [testenv] @@ -9,6 +9,7 @@ setenv = SECOND_URL_PART = again PYTHONPATH = . 
changedir = + grpc: example/grpc mqtt: example/mqtt cookies: example/cookies advanced: example/advanced From 5da3facf72054e8c1c477df72f4b40b1d9a4af26 Mon Sep 17 00:00:00 2001 From: Kirils Mensikovs Date: Thu, 5 Sep 2019 21:23:53 +0200 Subject: [PATCH 02/72] fix pylint issue --- tavern/_plugins/grpc/tavernhook.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py index c481ba3be..9682b4e9d 100644 --- a/tavern/_plugins/grpc/tavernhook.py +++ b/tavern/_plugins/grpc/tavernhook.py @@ -20,6 +20,7 @@ def get_expected_from_request(stage, test_block_config, session): + # pylint: disable=unused-argument # grpc response is not required grpc_expected = stage.get("grpc_response") if grpc_expected: From eafcaac24e8786fabfaf2859293667e2de337d1b Mon Sep 17 00:00:00 2001 From: Kirils Mensikovs Date: Wed, 11 Sep 2019 14:25:05 +0200 Subject: [PATCH 03/72] Fix import module for GRPC and make errors more informative --- tavern/_plugins/grpc/client.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index d844a7334..7ceb98fbb 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -70,10 +70,16 @@ def _generate_proto_import(source, output): def _import_grpc_module(output): + output_path = [] if os.path.exists(output): - sys.path.append(output) - for (_, name, _) in pkgutil.iter_modules([output]): - import_module("." + name, package=output) + output_path.append(output) + else: + mod = __import__(output, fromlist=[""]) + output_path.extend(mod.__path__) + + sys.path.extend(output_path) + for (_, name, _) in pkgutil.iter_modules(output_path): + import_module("." + name, package=output) class GRPCClient(object): @@ -200,12 +206,15 @@ def _make_call_request(self, host, full_service): except grpc.RpcError as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. 
logger.error("Call failure: %s", rpc_error) status = rpc_status.from_call(rpc_error) - logger.warning( - "Unable get %s service reflection information code %s detail %s", - service, - status.code, - status.details, - ) + if status is None: + logger.warning("Error occurred %s", rpc_error) + else: + logger.warning( + "Unable get %s service reflection information code %s detail %s", + service, + status.code, + status.details, + ) raise_from(exceptions.GRPCRequestException, rpc_error) return self._get_grpc_service(channel, service, method) From 703dca6d759199d162ec5aa40e282ff55e60ba6e Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 19 Dec 2020 13:36:46 +0000 Subject: [PATCH 04/72] Fix python2/3 things which now no longer apply --- tavern/_plugins/grpc/client.py | 23 ++++++++--------------- tavern/_plugins/grpc/request.py | 12 +++++------- tavern/_plugins/grpc/response.py | 2 +- tavern/_plugins/grpc/tavernhook.py | 5 ++--- tavern/_plugins/mqtt/response.py | 4 +--- tavern/_plugins/rest/request.py | 4 +--- tavern/_plugins/rest/response.py | 4 +--- tavern/response/base.py | 4 +--- tavern/schemas/extensions.py | 1 + tavern/util/dict_util.py | 6 ++---- tests/unit/conftest.py | 1 + tests/unit/test_helpers.py | 2 +- tests/unit/test_pytest_hooks.py | 4 ++-- tests/unit/test_utilities.py | 2 +- 14 files changed, 28 insertions(+), 46 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 7ceb98fbb..c0f3889b2 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,27 +1,20 @@ from distutils.spawn import find_executable from importlib import import_module -import os import logging +import os +import pkgutil import subprocess import sys -import pkgutil import warnings -from future.utils import raise_from - -import grpc - -from grpc_reflection.v1alpha import reflection_pb2 -from grpc_reflection.v1alpha import reflection_pb2_grpc - -from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor_pb2, json_format from google.protobuf import symbol_database as _symbol_database -from google.protobuf import json_format - +import grpc +from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc from grpc_status import rpc_status -from tavern.util.dict_util import check_expected_keys from tavern.util import exceptions +from tavern.util.dict_util import check_expected_keys logger = logging.getLogger(__name__) @@ -97,7 +90,7 @@ def __init__(self, **kwargs): check_expected_keys(expected_blocks["connect"], _connect_args) metadata = kwargs.pop("metadata", {}) - self._metadata = [(key, value) for key, value in metadata.items()] + self._metadata = metadata.items() _proto_args = kwargs.pop("proto", {}) check_expected_keys(expected_blocks["proto"], _proto_args) @@ -215,7 +208,7 @@ def _make_call_request(self, host, full_service): status.code, status.details, ) - raise_from(exceptions.GRPCRequestException, rpc_error) + raise exceptions.GRPCRequestException from rpc_error return self._get_grpc_service(channel, service, method) diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index c74e4e080..bbdaf33d3 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -1,14 +1,12 @@ -import logging -import json import functools +import json +import logging -from future.utils import raise_from from box import Box -from tavern.util import exceptions -from tavern.util.dict_util import format_keys, check_expected_keys - from tavern.request.base import BaseRequest +from 
tavern.util import exceptions +from tavern.util.dict_util import check_expected_keys, format_keys logger = logging.getLogger(__name__) @@ -55,7 +53,7 @@ def run(self): return self._prepared() except ValueError as e: logger.exception("Error executing request") - raise_from(exceptions.GRPCRequestException, e) + raise exceptions.GRPCRequestException from e @property def request_vars(self): diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index b32870ace..b684b39c0 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,8 +1,8 @@ import json import logging -from grpc import StatusCode from google.protobuf import json_format +from grpc import StatusCode from tavern.response.base import BaseResponse from tavern.util.exceptions import TestFailError diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py index 9682b4e9d..e7f4c4f2e 100644 --- a/tavern/_plugins/grpc/tavernhook.py +++ b/tavern/_plugins/grpc/tavernhook.py @@ -1,14 +1,13 @@ import logging - -from os.path import join, abspath, dirname +from os.path import abspath, dirname, join import yaml from tavern.util.dict_util import format_keys +from .client import GRPCClient from .request import GRPCRequest from .response import GRPCResponse -from .client import GRPCClient logger = logging.getLogger(__name__) diff --git a/tavern/_plugins/mqtt/response.py b/tavern/_plugins/mqtt/response.py index a5ef4d963..5188c556d 100644 --- a/tavern/_plugins/mqtt/response.py +++ b/tavern/_plugins/mqtt/response.py @@ -37,9 +37,7 @@ def _get_payload_vals(self): json_payload = True if payload.pop("$ext", None): - raise exceptions.InvalidExtBlockException( - "json", - ) + raise exceptions.InvalidExtBlockException("json",) elif "payload" in self.expected: payload = self.expected["payload"] json_payload = False diff --git a/tavern/_plugins/rest/request.py b/tavern/_plugins/rest/request.py index 14ef5236b..99ab5a5a1 100644 --- a/tavern/_plugins/rest/request.py +++ b/tavern/_plugins/rest/request.py @@ -405,9 +405,7 @@ def __init__(self, session, rspec, test_block_config): request_args = get_request_args(rspec, test_block_config) update_from_ext( - request_args, - RestRequest.optional_in_file, - test_block_config, + request_args, RestRequest.optional_in_file, test_block_config, ) # Used further down, but pop it asap to avoid unwanted side effects diff --git a/tavern/_plugins/rest/response.py b/tavern/_plugins/rest/response.py index 6580d3624..fecf24185 100644 --- a/tavern/_plugins/rest/response.py +++ b/tavern/_plugins/rest/response.py @@ -210,9 +210,7 @@ def _validate_block(self, blockname, block): if isinstance(expected_block, dict): if expected_block.pop("$ext", None): - raise exceptions.InvalidExtBlockException( - blockname, - ) + raise exceptions.InvalidExtBlockException(blockname,) if blockname == "headers": # Special case for headers. 
These need to be checked in a case diff --git a/tavern/response/base.py b/tavern/response/base.py index 6a0d2144b..da8f7423d 100644 --- a/tavern/response/base.py +++ b/tavern/response/base.py @@ -118,9 +118,7 @@ def check_deprecated_validate(name): if isinstance(block, dict): check_ext_functions(block.get("$ext", None)) if nfuncs != len(self.validate_functions): - raise exceptions.InvalidExtBlockException( - name, - ) + raise exceptions.InvalidExtBlockException(name,) # Could put in an isinstance check here check_deprecated_validate("json") diff --git a/tavern/schemas/extensions.py b/tavern/schemas/extensions.py index 0813d65eb..19ddcd070 100644 --- a/tavern/schemas/extensions.py +++ b/tavern/schemas/extensions.py @@ -2,6 +2,7 @@ import re from pykwalify.types import is_bool, is_float, is_int + from grpc import StatusCode from tavern.util import exceptions diff --git a/tavern/util/dict_util.py b/tavern/util/dict_util.py index 324f6bf0a..8f98f8829 100644 --- a/tavern/util/dict_util.py +++ b/tavern/util/dict_util.py @@ -417,10 +417,8 @@ def _format_err(which): expected_val.compiled, full_err() ) else: - msg = ( - "Type of returned data was different than expected ({})".format( - full_err() - ) + msg = "Type of returned data was different than expected ({})".format( + full_err() ) raise exceptions.KeyMismatchError(msg) from e diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index bac1a987e..ed6bc50ec 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,4 +1,5 @@ from unittest.mock import Mock + import pytest from tavern.util.strict_util import StrictLevel diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 00bd28fad..b85771d0e 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -1,10 +1,10 @@ +import sys import tempfile from textwrap import dedent from unittest.mock import Mock, patch import _pytest import pytest -import sys import yaml from tavern.core import run diff --git a/tests/unit/test_pytest_hooks.py b/tests/unit/test_pytest_hooks.py index f22ec4091..1ea6e5490 100644 --- a/tests/unit/test_pytest_hooks.py +++ b/tests/unit/test_pytest_hooks.py @@ -1,10 +1,10 @@ from dataclasses import dataclass from unittest.mock import Mock -import py -import pytest from faker import Faker +import py from py._path.local import LocalPath +import pytest from tavern.testutils.pytesthook.file import YamlFile, _get_parametrized_items diff --git a/tests/unit/test_utilities.py b/tests/unit/test_utilities.py index 9320e452c..8c917b065 100644 --- a/tests/unit/test_utilities.py +++ b/tests/unit/test_utilities.py @@ -4,8 +4,8 @@ import os import tempfile from textwrap import dedent - from unittest.mock import Mock, patch + import pytest import yaml From b9a011202a03ae3015d618244c79d9439269a0a8 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 19 Dec 2020 13:40:04 +0000 Subject: [PATCH 05/72] Fix format --- tavern/_plugins/grpc/request.py | 3 +-- tavern/_plugins/mqtt/response.py | 4 +++- tavern/_plugins/rest/request.py | 4 +++- tavern/_plugins/rest/response.py | 4 +++- tavern/response/base.py | 4 +++- tavern/schemas/extensions.py | 3 +-- tavern/util/dict_util.py | 6 ++++-- tavern/util/exceptions.py | 3 +-- 8 files changed, 19 insertions(+), 12 deletions(-) diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index bbdaf33d3..b28f45fc6 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -12,8 +12,7 @@ def get_grpc_args(rspec, test_block_config): - """Format GRPC request 
args - """ + """Format GRPC request args""" fspec = format_keys(rspec, test_block_config["variables"]) diff --git a/tavern/_plugins/mqtt/response.py b/tavern/_plugins/mqtt/response.py index 5188c556d..a5ef4d963 100644 --- a/tavern/_plugins/mqtt/response.py +++ b/tavern/_plugins/mqtt/response.py @@ -37,7 +37,9 @@ def _get_payload_vals(self): json_payload = True if payload.pop("$ext", None): - raise exceptions.InvalidExtBlockException("json",) + raise exceptions.InvalidExtBlockException( + "json", + ) elif "payload" in self.expected: payload = self.expected["payload"] json_payload = False diff --git a/tavern/_plugins/rest/request.py b/tavern/_plugins/rest/request.py index 99ab5a5a1..14ef5236b 100644 --- a/tavern/_plugins/rest/request.py +++ b/tavern/_plugins/rest/request.py @@ -405,7 +405,9 @@ def __init__(self, session, rspec, test_block_config): request_args = get_request_args(rspec, test_block_config) update_from_ext( - request_args, RestRequest.optional_in_file, test_block_config, + request_args, + RestRequest.optional_in_file, + test_block_config, ) # Used further down, but pop it asap to avoid unwanted side effects diff --git a/tavern/_plugins/rest/response.py b/tavern/_plugins/rest/response.py index fecf24185..6580d3624 100644 --- a/tavern/_plugins/rest/response.py +++ b/tavern/_plugins/rest/response.py @@ -210,7 +210,9 @@ def _validate_block(self, blockname, block): if isinstance(expected_block, dict): if expected_block.pop("$ext", None): - raise exceptions.InvalidExtBlockException(blockname,) + raise exceptions.InvalidExtBlockException( + blockname, + ) if blockname == "headers": # Special case for headers. These need to be checked in a case diff --git a/tavern/response/base.py b/tavern/response/base.py index da8f7423d..6a0d2144b 100644 --- a/tavern/response/base.py +++ b/tavern/response/base.py @@ -118,7 +118,9 @@ def check_deprecated_validate(name): if isinstance(block, dict): check_ext_functions(block.get("$ext", None)) if nfuncs != len(self.validate_functions): - raise exceptions.InvalidExtBlockException(name,) + raise exceptions.InvalidExtBlockException( + name, + ) # Could put in an isinstance check here check_deprecated_validate("json") diff --git a/tavern/schemas/extensions.py b/tavern/schemas/extensions.py index 19ddcd070..1fa866fda 100644 --- a/tavern/schemas/extensions.py +++ b/tavern/schemas/extensions.py @@ -122,8 +122,7 @@ def validate_status_code_is_int_or_list_of_ints(value, rule_obj, path): def validate_grpc_status_is_valid_or_list_of_names(value, rule_obj, path): - """ Validate GRPC statuses https://github.com/grpc/grpc/blob/master/doc/statuscodes.md - """ + """Validate GRPC statuses https://github.com/grpc/grpc/blob/master/doc/statuscodes.md""" # pylint: disable=unused-argument err_msg = "status has to be an valid grpc status name (got {})".format(value) diff --git a/tavern/util/dict_util.py b/tavern/util/dict_util.py index 8f98f8829..324f6bf0a 100644 --- a/tavern/util/dict_util.py +++ b/tavern/util/dict_util.py @@ -417,8 +417,10 @@ def _format_err(which): expected_val.compiled, full_err() ) else: - msg = "Type of returned data was different than expected ({})".format( - full_err() + msg = ( + "Type of returned data was different than expected ({})".format( + full_err() + ) ) raise exceptions.KeyMismatchError(msg) from e diff --git a/tavern/util/exceptions.py b/tavern/util/exceptions.py index 0ed71ec19..b5c57c642 100644 --- a/tavern/util/exceptions.py +++ b/tavern/util/exceptions.py @@ -55,8 +55,7 @@ class RestRequestException(TavernException): class 
GRPCRequestException(TavernException): - """Error making requests in GRPCRequest() - """ + """Error making requests in GRPCRequest()""" class MQTTRequestException(TavernException): From 3f8cba8be2e75db8f3a938254a9fb2f906c798f6 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 12:58:23 +0000 Subject: [PATCH 06/72] disable duplicate code warning --- .pylintrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pylintrc b/.pylintrc index 10a0e00d8..330e90598 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,5 +1,5 @@ [MASTER] -disable=missing-docstring,bad-continuation,fixme,invalid-name,line-too-long,too-few-public-methods,no-else-return,too-many-branches,locally-disabled,useless-object-inheritance,no-else-raise,abstract-method,import-outside-toplevel,cyclic-import +disable=missing-docstring,bad-continuation,fixme,invalid-name,line-too-long,too-few-public-methods,no-else-return,too-many-branches,locally-disabled,useless-object-inheritance,no-else-raise,abstract-method,import-outside-toplevel,cyclic-import,duplicate-code ignore=tests [REPORTS] From 552d6a04632b54b1bfc4d4bb63b43f028071c7d0 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 13:09:13 +0000 Subject: [PATCH 07/72] Fix type errors --- tavern/_plugins/grpc/client.py | 2 +- tavern/_plugins/grpc/response.py | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index c0f3889b2..14c62d092 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -24,7 +24,7 @@ # Find the Protocol Compiler. if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): - protoc = os.environ["PROTOC"] + protoc = os.environ.get("PROTOC") else: protoc = find_executable("protoc") diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index b684b39c0..6bd4d3b91 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,4 +1,3 @@ -import json import logging from google.protobuf import json_format @@ -7,12 +6,6 @@ from tavern.response.base import BaseResponse from tavern.util.exceptions import TestFailError -try: - LoadException = json.decoder.JSONDecodeError -except AttributeError: - # python 2 raises ValueError on json loads() error instead - LoadException = ValueError - logger = logging.getLogger(__name__) From 2281aa84daa27f008da2aae244c5eaecad9348c8 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 13:49:52 +0000 Subject: [PATCH 08/72] Fix some issues causing tests to fail --- example/{grpc => grpc_}/__init__.py | 0 example/{grpc => grpc_}/common.yaml | 0 example/{grpc => grpc_}/docker-compose.yaml | 0 example/{grpc => grpc_}/helloworld_pb2.py | 0 .../{grpc => grpc_}/helloworld_pb2_grpc.py | 0 example/{grpc => grpc_}/server.Dockerfile | 0 example/{grpc => grpc_}/server.py | 12 +++---- example/{grpc => grpc_}/test_grpc.tavern.yaml | 0 tavern/_plugins/grpc/client.py | 24 +++++++------- tavern/_plugins/grpc/response.py | 31 ------------------- tavern/_plugins/rest/response.py | 30 ------------------ tavern/response/base.py | 31 +++++++++++++++++++ tavern/schemas/extensions.py | 3 +- tavern/util/exceptions.py | 4 +++ tavern/util/strict_util.py | 2 +- tox-integration.ini | 2 +- 16 files changed, 57 insertions(+), 82 deletions(-) rename example/{grpc => grpc_}/__init__.py (100%) rename example/{grpc => grpc_}/common.yaml (100%) rename example/{grpc => grpc_}/docker-compose.yaml (100%) rename example/{grpc => 
grpc_}/helloworld_pb2.py (100%) rename example/{grpc => grpc_}/helloworld_pb2_grpc.py (100%) rename example/{grpc => grpc_}/server.Dockerfile (100%) rename example/{grpc => grpc_}/server.py (76%) rename example/{grpc => grpc_}/test_grpc.tavern.yaml (100%) diff --git a/example/grpc/__init__.py b/example/grpc_/__init__.py similarity index 100% rename from example/grpc/__init__.py rename to example/grpc_/__init__.py diff --git a/example/grpc/common.yaml b/example/grpc_/common.yaml similarity index 100% rename from example/grpc/common.yaml rename to example/grpc_/common.yaml diff --git a/example/grpc/docker-compose.yaml b/example/grpc_/docker-compose.yaml similarity index 100% rename from example/grpc/docker-compose.yaml rename to example/grpc_/docker-compose.yaml diff --git a/example/grpc/helloworld_pb2.py b/example/grpc_/helloworld_pb2.py similarity index 100% rename from example/grpc/helloworld_pb2.py rename to example/grpc_/helloworld_pb2.py diff --git a/example/grpc/helloworld_pb2_grpc.py b/example/grpc_/helloworld_pb2_grpc.py similarity index 100% rename from example/grpc/helloworld_pb2_grpc.py rename to example/grpc_/helloworld_pb2_grpc.py diff --git a/example/grpc/server.Dockerfile b/example/grpc_/server.Dockerfile similarity index 100% rename from example/grpc/server.Dockerfile rename to example/grpc_/server.Dockerfile diff --git a/example/grpc/server.py b/example/grpc_/server.py similarity index 76% rename from example/grpc/server.py rename to example/grpc_/server.py index 6bad7eac2..8cee60938 100644 --- a/example/grpc/server.py +++ b/example/grpc_/server.py @@ -10,26 +10,26 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer): - def SayHello(self, request, context): - return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name) + return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name) def serve(): server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) SERVICE_NAMES = ( - helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, + helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name, reflection.SERVICE_NAME, ) reflection.enable_server_reflection(SERVICE_NAMES, server) - server.add_insecure_port('[::]:50051') + server.add_insecure_port("[::]:50051") logging.info("Starting...") server.start() event = threading.Event() event.wait() -if __name__ == '__main__': + +if __name__ == "__main__": logging.basicConfig() - serve() \ No newline at end of file + serve() diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc_/test_grpc.tavern.yaml similarity index 100% rename from example/grpc/test_grpc.tavern.yaml rename to example/grpc_/test_grpc.tavern.yaml diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 14c62d092..a1d4ef37f 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -35,15 +35,13 @@ def _generate_proto_import(source, output): the input.""" if protoc is None: - sys.stderr.write( + raise exceptions.ProtoGenError( "protoc is not installed nor found in ../src. 
Please compile it " "or install the binary package.\n" ) - sys.exit(-1) if not os.path.exists(source): - sys.stderr.write("Can't find required file: %s\n" % source) - sys.exit(-1) + raise exceptions.ProtoGenError("Can't find required file: %s\n" % source) if not os.path.exists(output): os.makedirs(output) @@ -58,8 +56,11 @@ def _generate_proto_import(source, output): protoc_command = [protoc, "-I" + source, "--python_out=" + output] protoc_command.extend(protos) - if subprocess.call(protoc_command) != 0: - sys.exit(-1) + call = subprocess.run(protoc_command, capture_output=True) + try: + call.check_returncode() + except subprocess.CalledProcessError as e: + raise exceptions.ProtoGenError(call.stderr) from e def _import_grpc_module(output): @@ -116,12 +117,13 @@ def __init__(self, **kwargs): self.channels = {} self.sym_db = _symbol_database.Default() - proto_module = _proto_args.get("module", "proto") - if "source" in _proto_args: - proto_source = _proto_args["source"] - _generate_proto_import(proto_source, proto_module) + if _proto_args: + proto_module = _proto_args.get("module", "proto") + if "source" in _proto_args: + proto_source = _proto_args["source"] + _generate_proto_import(proto_source, proto_module) - _import_grpc_module(proto_module) + _import_grpc_module(proto_module) def _register_file_descriptor(self, service_proto): for i in range(len(service_proto.file_descriptor_proto)): diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 6bd4d3b91..0e4aceb90 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -23,37 +23,6 @@ def __str__(self): else: return "" - def _validate_block(self, blockname, block): - """Validate a block of the response - - Args: - blockname (str): which part of the response is being checked - block (dict): The actual part being checked - """ - try: - expected_block = self.expected[blockname] or {} - except KeyError: - expected_block = {} - - if isinstance(expected_block, dict): - if expected_block.pop("$ext", None): - logger.warning( - "$ext function found in block %s - this has been moved to verify_response_with block - see documentation", - blockname, - ) - - logger.debug("Validating response %s against %s", blockname, expected_block) - - # 'strict' could be a list, in which case we only want to enable strict - # key checking for that specific bit of the response - test_strictness = self.test_block_config["strict"] - if isinstance(test_strictness, list): - block_strictness = blockname in test_strictness - else: - block_strictness = test_strictness - - self.recurse_check_key_match(expected_block, block, blockname, block_strictness) - def verify(self, response): # Get any keys to save saved = {} diff --git a/tavern/_plugins/rest/response.py b/tavern/_plugins/rest/response.py index 6580d3624..3ddfb05cb 100644 --- a/tavern/_plugins/rest/response.py +++ b/tavern/_plugins/rest/response.py @@ -195,33 +195,3 @@ def verify(self, response): ) return saved - - def _validate_block(self, blockname, block): - """Validate a block of the response - - Args: - blockname (str): which part of the response is being checked - block (dict): The actual part being checked - """ - try: - expected_block = self.expected[blockname] or {} - except KeyError: - expected_block = {} - - if isinstance(expected_block, dict): - if expected_block.pop("$ext", None): - raise exceptions.InvalidExtBlockException( - blockname, - ) - - if blockname == "headers": - # Special case for headers. 
These need to be checked in a case - # insensitive manner - block = {i.lower(): j for i, j in block.items()} - expected_block = {i.lower(): j for i, j in expected_block.items()} - - logger.debug("Validating response %s against %s", blockname, expected_block) - - test_strictness = self.test_block_config["strict"] - block_strictness = test_strictness.setting_for(blockname).is_on() - self.recurse_check_key_match(expected_block, block, blockname, block_strictness) diff --git a/tavern/response/base.py b/tavern/response/base.py index 6a0d2144b..1ab4e4e9e 100644 --- a/tavern/response/base.py +++ b/tavern/response/base.py @@ -230,3 +230,34 @@ def maybe_get_save_values_from_save_block(self, key, to_check): logger.debug("Saved %s for '%s' from response", saved, key) return saved + + def _validate_block(self, blockname, block): + """Validate a block of the response + + Args: + blockname (str): which part of the response is being checked + block (dict): The actual part being checked + """ + try: + expected_block = self.expected[blockname] or {} + except KeyError: + expected_block = {} + + if isinstance(expected_block, dict): + if expected_block.pop("$ext", None): + raise exceptions.InvalidExtBlockException( + blockname, + ) + + if blockname == "headers": + # Special case for headers. These need to be checked in a case + # insensitive manner + block = {i.lower(): j for i, j in block.items()} + expected_block = {i.lower(): j for i, j in expected_block.items()} + + logger.debug("Validating response %s against %s", blockname, expected_block) + + test_strictness = self.test_block_config["strict"] + strict_setting = blockname if blockname != "body" else "json" + block_strictness = test_strictness.setting_for(strict_setting).is_on() + self.recurse_check_key_match(expected_block, block, blockname, block_strictness) diff --git a/tavern/schemas/extensions.py b/tavern/schemas/extensions.py index 1fa866fda..5bf44103c 100644 --- a/tavern/schemas/extensions.py +++ b/tavern/schemas/extensions.py @@ -1,9 +1,8 @@ import os import re -from pykwalify.types import is_bool, is_float, is_int - from grpc import StatusCode +from pykwalify.types import is_bool, is_float, is_int from tavern.util import exceptions from tavern.util.exceptions import BadSchemaError diff --git a/tavern/util/exceptions.py b/tavern/util/exceptions.py index b5c57c642..1cde6fb8e 100644 --- a/tavern/util/exceptions.py +++ b/tavern/util/exceptions.py @@ -127,3 +127,7 @@ def __init__(self, block): class InvalidRetryException(TavernException): """Invalid spec for max_retries""" + + +class ProtoGenError(TavernException): + """Error generating protobuf imports""" diff --git a/tavern/util/strict_util.py b/tavern/util/strict_util.py index 323c5486d..5cf00ae7b 100644 --- a/tavern/util/strict_util.py +++ b/tavern/util/strict_util.py @@ -86,7 +86,7 @@ def setting_for(self, section): return getattr(self, section) except AttributeError as e: raise exceptions.InvalidConfigurationException( - "No setting for '{}'".format(section) + "No strictness setting for '{}'".format(section) ) from e @classmethod diff --git a/tox-integration.ini b/tox-integration.ini index e73468614..ceeb957ca 100644 --- a/tox-integration.ini +++ b/tox-integration.ini @@ -9,7 +9,7 @@ setenv = SECOND_URL_PART = again PYTHONPATH = . 
changedir = - grpc: example/grpc + grpc: example/grpc_ mqtt: example/mqtt cookies: example/cookies advanced: example/advanced From e9ce41086e80a6329b3555cb8bf90f59b94a4af5 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 13:57:23 +0000 Subject: [PATCH 09/72] Disable some useless checks in pylint --- .isort.cfg | 2 +- .pylintrc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.isort.cfg b/.isort.cfg index 30be553a2..cbccf5804 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -12,4 +12,4 @@ sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TAVERN,LOCALFOLDER force_sort_within_sections=true # seed-isort-config -known_third_party = _pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box +known_third_party = _pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box,grpc diff --git a/.pylintrc b/.pylintrc index 330e90598..4e9521919 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,5 +1,5 @@ [MASTER] -disable=missing-docstring,bad-continuation,fixme,invalid-name,line-too-long,too-few-public-methods,no-else-return,too-many-branches,locally-disabled,useless-object-inheritance,no-else-raise,abstract-method,import-outside-toplevel,cyclic-import,duplicate-code +disable=missing-docstring,bad-continuation,fixme,invalid-name,line-too-long,too-few-public-methods,no-else-return,too-many-branches,locally-disabled,useless-object-inheritance,no-else-raise,abstract-method,import-outside-toplevel,cyclic-import,duplicate-code,subprocess-run-check ignore=tests [REPORTS] From 12469cf1f1c7dedbf31c5d1def0faef933deaed2 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 14:09:19 +0000 Subject: [PATCH 10/72] try to fix CI --- .isort.cfg | 2 +- setup.cfg | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.isort.cfg b/.isort.cfg index cbccf5804..da7d95c14 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -12,4 +12,4 @@ sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TAVERN,LOCALFOLDER force_sort_within_sections=true # seed-isort-config -known_third_party = _pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box,grpc +known_third_party = _pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box,grpc,google diff --git a/setup.cfg b/setup.cfg index cb3bf6293..03ed9f6a3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,6 +43,7 @@ install_requires = grpcio grpcio-reflection grpcio-status + google [options.packages.find] exclude = From e98bb05f152d7ba29e736d3a5c78770ee01c7a21 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 14:11:43 +0000 Subject: [PATCH 11/72] Add more extenral packages --- .isort.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.isort.cfg b/.isort.cfg index da7d95c14..611103382 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -12,4 +12,4 @@ sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TAVERN,LOCALFOLDER force_sort_within_sections=true # seed-isort-config -known_third_party = 
_pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box,grpc,google +known_third_party = _pytest,attr,contextlib2,coreapi,faker,flask,future,itsdangerous,jmespath,jwt,mock,paho,py,pykwalify,pytest,recommonmark,requests,setuptools,sphinx_rtd_theme,stevedore,yaml,box,grpc,google,grpc_reflection,grpc_status From 01241be04bb2e2cdcdd0c51e5bce93fb9e52b2d7 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 24 Dec 2020 14:16:07 +0000 Subject: [PATCH 12/72] Use other client --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 03ed9f6a3..277ae109c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,7 +43,7 @@ install_requires = grpcio grpcio-reflection grpcio-status - google + google-api-python-client [options.packages.find] exclude = From b2ce79160b46cddc7d6ef2f8da23eeac104793f8 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 27 Mar 2021 13:34:31 +0000 Subject: [PATCH 13/72] Lint --- tavern/_plugins/grpc/response.py | 2 +- tavern/util/exceptions.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 0e4aceb90..d8a4e8718 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -11,7 +11,7 @@ class GRPCResponse(BaseResponse): def __init__(self, client, name, expected, test_block_config): - super(GRPCResponse, self).__init__(name, expected, test_block_config) + super().__init__(name, expected, test_block_config) self._client = client diff --git a/tavern/util/exceptions.py b/tavern/util/exceptions.py index 77ff99f9d..297a731ba 100644 --- a/tavern/util/exceptions.py +++ b/tavern/util/exceptions.py @@ -131,7 +131,7 @@ class InvalidRetryException(TavernException): class ProtoGenError(TavernException): """Error generating protobuf imports""" - - + + class RegexAccessError(TavernException): """Error accessing a key via regex""" From 285d48c1d7d21a262a33c69c7fea42186594c905 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Tue, 21 Feb 2023 15:57:19 +0000 Subject: [PATCH 14/72] Merge master --- .bumpversion.cfg | 9 - .coveragerc | 8 - .dockerignore | 6 +- .flake8 | 4 - .github/workflows/main.yml | 136 +-- .gitignore | 9 + .isort.cfg | 12 - .pre-commit-config.yaml | 23 +- .prettierignore | 1 + .pylintrc | 14 - CHANGELOG.md | 392 ++++--- CONTRIBUTING.md | 60 ++ LICENSE | 2 +- MANIFEST.in | 4 +- README.md | 193 ++++ README.rst | 205 ---- constraints.txt | 319 ++++++ docs/source/basics.md | 152 ++- docs/source/conf.py | 89 +- docs/source/cookbook.md | 8 +- docs/source/debugging.md | 81 +- docs/source/examples.md | 10 +- docs/source/http.md | 5 +- docs/source/index.md | 2 +- docs/source/mqtt.md | 19 +- docs/source/plugins.md | 6 +- docs/source/requirements.txt | 2 +- example/advanced/Dockerfile | 9 +- example/advanced/server.py | 4 +- example/advanced/test_server.tavern.yaml | 3 +- example/advanced/testing_utils.py | 7 +- example/components/Dockerfile | 8 +- example/components/server.py | 10 +- example/cookies/Dockerfile | 8 +- example/cookies/server.py | 2 +- example/generate_from_openapi/Pipfile.lock | 129 +-- example/{grpc_ => grpc}/__init__.py | 0 example/{grpc_ => grpc}/common.yaml | 0 example/{grpc_ => grpc}/docker-compose.yaml | 0 example/{grpc_ => grpc}/helloworld_pb2.py | 0 .../{grpc_ => grpc}/helloworld_pb2_grpc.py | 0 example/{grpc_ => grpc}/server.Dockerfile | 0 example/{grpc_ => 
grpc}/server.py | 12 +- example/{grpc_ => grpc}/test_grpc.tavern.yaml | 0 example/hooks/Dockerfile | 8 +- example/hooks/docker-compose.yaml | 2 +- example/hooks/test_server.tavern.yaml | 4 +- example/mqtt/conftest.py | 33 +- example/mqtt/listener.Dockerfile | 4 +- example/mqtt/listener.py | 93 +- example/mqtt/server.Dockerfile | 4 +- example/mqtt/server.py | 77 +- example/mqtt/test_mqtt.tavern.yaml | 447 +++++++- example/mqtt/test_mqtt_failures.tavern.yaml | 48 + example/mqtt/testing_utils.py | 7 +- example/remember/server.py | 58 - mypy.ini | 3 - pyproject.toml | 199 ++++ pytest.ini | 25 - requirements.txt | 999 +++++++++++++++++- coverage.sh => scripts/coverage.sh | 6 +- scripts/release.sh | 12 + scripts/smoke.bash | 21 + scripts/update-changelog.bash | 30 + setup.cfg | 66 -- setup.py | 28 - smoke.bash | 15 - tavern/__init__.py | 2 +- tavern/{_plugins => _core}/__init__.py | 0 tavern/{util => _core}/dict_util.py | 88 +- tavern/{util => _core}/exceptions.py | 31 +- tavern/{util => _core}/extfunctions.py | 115 +- tavern/{util => _core}/formatted_str.py | 0 tavern/{util => _core}/general.py | 23 +- tavern/{testutils => _core}/jmesutils.py | 33 +- tavern/{util => _core}/loader.py | 62 +- tavern/{ => _core}/plugins.py | 176 +-- .../pytesthook => _core/pytest}/__init__.py | 0 tavern/_core/pytest/config.py | 48 + .../pytesthook => _core/pytest}/error.py | 91 +- .../pytesthook => _core/pytest}/file.py | 141 ++- .../pytesthook => _core/pytest}/hooks.py | 24 +- .../pytesthook => _core/pytest}/item.py | 112 +- .../pytesthook => _core/pytest}/newhooks.py | 24 +- .../pytesthook => _core/pytest}/util.py | 82 +- tavern/{util => _core}/report.py | 23 +- tavern/_core/run.py | 321 ++++++ .../grpc => _core/schema}/__init__.py | 0 .../{schemas => _core/schema}/extensions.py | 159 +-- tavern/{schemas => _core/schema}/files.py | 35 +- tavern/_core/schema/jsonschema.py | 195 ++++ tavern/_core/schema/tests.jsonschema.yaml | 427 ++++++++ .../schema}/tests.schema.yaml | 26 +- tavern/{util => _core}/stage_lines.py | 6 +- tavern/_core/strict_util.py | 164 +++ tavern/_core/strtobool.py | 10 + .../{util/retry.py => _core/testhelpers.py} | 40 +- tavern/_plugins/grpc/client.py | 65 +- tavern/_plugins/grpc/request.py | 6 +- tavern/_plugins/grpc/response.py | 37 +- tavern/_plugins/grpc/schema.yaml | 2 +- tavern/_plugins/grpc/tavernhook.py | 3 +- tavern/_plugins/mqtt/__init__.py | 0 tavern/_plugins/mqtt/client.py | 310 ++++-- tavern/_plugins/mqtt/jsonschema.yaml | 169 +++ tavern/_plugins/mqtt/request.py | 41 +- tavern/_plugins/mqtt/response.py | 410 ++++--- tavern/_plugins/mqtt/schema.yaml | 46 +- tavern/_plugins/mqtt/tavernhook.py | 29 +- tavern/_plugins/rest/__init__.py | 0 tavern/_plugins/rest/request.py | 262 +++-- tavern/_plugins/rest/response.py | 69 +- tavern/_plugins/rest/tavernhook.py | 22 +- tavern/core.py | 289 +---- tavern/entry.py | 4 +- tavern/{testutils => }/helpers.py | 122 ++- tavern/{request/base.py => request.py} | 8 +- tavern/request/__init__.py | 3 - tavern/{response/base.py => response.py} | 211 ++-- tavern/response/__init__.py | 3 - tavern/schemas/__init__.py | 0 tavern/testutils/__init__.py | 0 tavern/util/__init__.py | 0 tavern/util/delay.py | 24 - tavern/util/jmespath_util.py | 36 - tavern/util/strict_util.py | 98 -- tests/conftest.py | 22 + tests/integration/Dockerfile | 10 +- tests/integration/OK.json.gz | Bin 0 -> 33 bytes tests/integration/conftest.py | 2 +- tests/integration/docker-compose.yaml | 2 +- tests/integration/ext_functions.py | 8 + tests/integration/server.py | 177 +++- 
tests/integration/test_allure.tavern.yaml | 15 + tests/integration/test_auth_key.tavern.yaml | 2 - tests/integration/test_certs.tavern.yaml | 2 - .../test_cookie_remember.tavern.yaml | 25 +- tests/integration/test_data_key.tavern.yaml | 6 - .../test_env_var_format.tavern.yaml | 1 - tests/integration/test_error.tavern.yaml | 22 +- .../test_external_functions.tavern.yaml | 28 +- tests/integration/test_files.tavern.yaml | 27 +- tests/integration/test_fixtures.tavern.yaml | 9 +- .../test_follow_redirects.tavern.yaml | 16 +- .../test_header_comparisons.tavern.yaml | 2 - tests/integration/test_helpers.tavern.yaml | 57 + tests/integration/test_jmes.tavern.yaml | 13 +- tests/integration/test_minimal.tavern.yaml | 21 + .../integration/test_parametrize.tavern.yaml | 410 +++++-- tests/integration/test_regex.tavern.yaml | 63 +- .../test_response_types.tavern.yaml | 8 +- tests/integration/test_retry.tavern.yaml | 16 +- .../test_save_dict_value.tavern.yaml | 1 - .../test_selective_tests.tavern.yaml | 4 +- .../test_skipped_tests.tavern.yaml | 8 - .../integration/test_status_codes.tavern.yaml | 4 - tests/integration/test_stream.tavern.yaml | 1 - .../test_strict_key_checks.tavern.yaml | 214 +++- tests/integration/test_timeout.tavern.yaml | 119 +-- tests/integration/test_typetokens.tavern.yaml | 70 +- .../test_validate_pykwalify.tavern.yaml | 6 +- tests/logging.yaml | 48 +- tests/unit/conftest.py | 42 +- tests/unit/response/test_mqtt_response.py | 228 +++- tests/unit/response/test_rest.py | 8 +- tests/unit/test_call_run.py | 2 +- tests/unit/test_core.py | 54 +- tests/unit/test_helpers.py | 95 +- tests/unit/test_mqtt.py | 75 +- tests/unit/test_pytest_hooks.py | 97 +- tests/unit/test_request.py | 89 +- tests/unit/test_schema.py | 8 +- tests/unit/test_strict_util.py | 35 + tests/unit/test_utilities.py | 22 +- tox-integration.ini | 18 +- tox.ini | 42 +- 176 files changed, 7992 insertions(+), 3141 deletions(-) delete mode 100644 .bumpversion.cfg delete mode 100644 .coveragerc delete mode 100644 .flake8 delete mode 100644 .isort.cfg create mode 100644 .prettierignore delete mode 100644 .pylintrc create mode 100644 CONTRIBUTING.md create mode 100644 README.md delete mode 100644 README.rst create mode 100644 constraints.txt rename example/{grpc_ => grpc}/__init__.py (100%) rename example/{grpc_ => grpc}/common.yaml (100%) rename example/{grpc_ => grpc}/docker-compose.yaml (100%) rename example/{grpc_ => grpc}/helloworld_pb2.py (100%) rename example/{grpc_ => grpc}/helloworld_pb2_grpc.py (100%) rename example/{grpc_ => grpc}/server.Dockerfile (100%) rename example/{grpc_ => grpc}/server.py (76%) rename example/{grpc_ => grpc}/test_grpc.tavern.yaml (100%) delete mode 100644 example/remember/server.py delete mode 100644 mypy.ini create mode 100644 pyproject.toml delete mode 100644 pytest.ini rename coverage.sh => scripts/coverage.sh (57%) mode change 100644 => 100755 create mode 100755 scripts/release.sh create mode 100755 scripts/smoke.bash create mode 100755 scripts/update-changelog.bash delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100755 smoke.bash rename tavern/{_plugins => _core}/__init__.py (100%) rename tavern/{util => _core}/dict_util.py (87%) rename tavern/{util => _core}/exceptions.py (83%) rename tavern/{util => _core}/extfunctions.py (55%) rename tavern/{util => _core}/formatted_str.py (100%) rename tavern/{util => _core}/general.py (59%) rename tavern/{testutils => _core}/jmesutils.py (68%) rename tavern/{util => _core}/loader.py (91%) rename tavern/{ => _core}/plugins.py (59%) 
rename tavern/{testutils/pytesthook => _core/pytest}/__init__.py (100%) create mode 100644 tavern/_core/pytest/config.py rename tavern/{testutils/pytesthook => _core/pytest}/error.py (72%) rename tavern/{testutils/pytesthook => _core/pytest}/file.py (65%) rename tavern/{testutils/pytesthook => _core/pytest}/hooks.py (65%) rename tavern/{testutils/pytesthook => _core/pytest}/item.py (72%) rename tavern/{testutils/pytesthook => _core/pytest}/newhooks.py (75%) rename tavern/{testutils/pytesthook => _core/pytest}/util.py (70%) rename tavern/{util => _core}/report.py (70%) create mode 100644 tavern/_core/run.py rename tavern/{_plugins/grpc => _core/schema}/__init__.py (100%) rename tavern/{schemas => _core/schema}/extensions.py (77%) rename tavern/{schemas => _core/schema}/files.py (83%) create mode 100644 tavern/_core/schema/jsonschema.py create mode 100644 tavern/_core/schema/tests.jsonschema.yaml rename tavern/{schemas => _core/schema}/tests.schema.yaml (95%) rename tavern/{util => _core}/stage_lines.py (88%) create mode 100644 tavern/_core/strict_util.py create mode 100644 tavern/_core/strtobool.py rename tavern/{util/retry.py => _core/testhelpers.py} (76%) delete mode 100644 tavern/_plugins/mqtt/__init__.py create mode 100644 tavern/_plugins/mqtt/jsonschema.yaml delete mode 100644 tavern/_plugins/rest/__init__.py rename tavern/{testutils => }/helpers.py (55%) rename tavern/{request/base.py => request.py} (84%) delete mode 100644 tavern/request/__init__.py rename tavern/{response/base.py => response.py} (53%) delete mode 100644 tavern/response/__init__.py delete mode 100644 tavern/schemas/__init__.py delete mode 100644 tavern/testutils/__init__.py delete mode 100644 tavern/util/__init__.py delete mode 100644 tavern/util/delay.py delete mode 100644 tavern/util/jmespath_util.py delete mode 100644 tavern/util/strict_util.py create mode 100644 tests/integration/OK.json.gz create mode 100644 tests/integration/test_allure.tavern.yaml rename example/remember/test_server.tavern.yaml => tests/integration/test_cookie_remember.tavern.yaml (63%) create mode 100644 tests/integration/test_helpers.tavern.yaml create mode 100644 tests/integration/test_minimal.tavern.yaml create mode 100644 tests/unit/test_strict_util.py diff --git a/.bumpversion.cfg b/.bumpversion.cfg deleted file mode 100644 index 864ef8883..000000000 --- a/.bumpversion.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[bumpversion] -current_version = 1.14.0 -tag_name = {new_version} -tag = True -commit = True - -[bumpversion:file:tavern/__init__.py] - -[bumpversion:file:docs/source/conf.py] diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index e4bec5066..000000000 --- a/.coveragerc +++ /dev/null @@ -1,8 +0,0 @@ -[run] -branch = False - -[paths] -tavern = - tavern/ - .tox/py38-generic/lib/python3.8/site-packages/tavern/ - .tox/py38-mqtt/lib/python3.8/site-packages/tavern diff --git a/.dockerignore b/.dockerignore index 5de45d828..3ea096cf6 100644 --- a/.dockerignore +++ b/.dockerignore @@ -91,7 +91,6 @@ ENV/ tags .pytest_cache -.bumpversion.cfg dist docker docs @@ -101,8 +100,8 @@ example .gitignore LICENSE # MANIFEST.in -.pylintrc -README.rst +README.md +README.md requirements.txt # setup.cfg # setup.py @@ -112,7 +111,6 @@ tests .tox tox.ini tox-integration.ini -.travis.yml venv .idea diff --git a/.flake8 b/.flake8 deleted file mode 100644 index d5c486c83..000000000 --- a/.flake8 +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -ignore = E501,W503,C901,W504 -exclude = .git,__pycache__,docs/source/conf.py,build,dist -max-complexity = 10 diff 
--git a/.github/workflows/main.yml b/.github/workflows/main.yml index cca948d0f..f66ea1ecf 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -3,54 +3,56 @@ name: basic test on: push: tags: - - ".*" + - ".*" branches: - - master + - master pull_request: branches: - - master + - master + - feature-2.0 jobs: + simple-checks: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml', 'requirements.in') }} + restore-keys: | + ${{ runner.os }}-pip- + + - uses: pre-commit/action@v3.0.0 + test: runs-on: ubuntu-latest + needs: simple-checks strategy: fail-fast: false matrix: include: - # 'Basic' tests and checks - # Pylint, formatting/sorting, and quickest unit/integration tests - - TOXENV: py38black - python: "3.8" - TOXCFG: tox.ini - - TOXENV: py38 - python: "3.8" - TOXCFG: tox.ini - - TOXENV: py38lint - python: "3.8" - TOXCFG: tox.ini - - TOXENV: py38mypy - python: "3.8" - TOXCFG: tox.ini - - TOXENV: py38isort - python: "3.8" - TOXCFG: tox.ini - - - TOXENV: py38-generic - python: "3.8" - TOXCFG: tox-integration.ini - - TOXENV: pypy3-noextra - python: pypy3 - TOXCFG: tox-integration.ini - - TOXENV: py38-mqtt - python: "3.8" - TOXCFG: tox-integration.ini - - TOXENV: py38-hooks - python: "3.8" - TOXCFG: tox-integration.ini - - TOXENV: py38-advanced - python: "3.8" - TOXCFG: tox-integration.ini + # 'Basic' tests and checks + - TOXENV: py3mypy + TOXCFG: tox.ini + - TOXENV: py3 + TOXCFG: tox.ini + + - TOXENV: py3-generic + TOXCFG: tox-integration.ini + - TOXENV: py3-mqtt + TOXCFG: tox-integration.ini + - TOXENV: py3-hooks + TOXCFG: tox-integration.ini + - TOXENV: py3-advanced + TOXCFG: tox-integration.ini services: docker: @@ -61,34 +63,34 @@ jobs: TOXCFG: ${{ matrix.TOXCFG }} steps: - - uses: actions/checkout@v2 - - - uses: actions/cache@v2 - env: - cache-name: cache-${{ matrix.TOXENV }} - with: - path: .tox - key: ${{ runner.os }}-tox-${{ env.cache-name }}-${{ hashFiles('tox.ini', 'tox-integration.ini') }} - restore-keys: | - ${{ runner.os }}-tox-${{ env.cache-name }}- - ${{ runner.os }}-tox- - - - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'setup.cfg', 'setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} - - - name: install deps - run: | - pip install tox-travis - - - name: tox - run: | - tox -c ${TOXCFG} + - uses: actions/checkout@v3 + + - uses: actions/cache@v3 + env: + cache-name: cache-${{ matrix.TOXENV }} + with: + path: .tox + key: ${{ runner.os }}-tox-${{ env.cache-name }}-${{ hashFiles('tox.ini', 'tox-integration.ini', 'pyproject.toml', 'requirements.in') }} + + - uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml', 'requirements.in') }} + restore-keys: | + ${{ runner.os }}-pip- + + - uses: jpribyl/action-docker-layer-caching@v0.1.1 + continue-on-error: true + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: install deps + run: | + pip install tox-travis -c constraints.txt + + - name: tox + run: | + tox -c ${TOXCFG} diff --git a/.gitignore b/.gitignore index 8f23cd848..09a8a5b9f 100644 --- a/.gitignore +++ b/.gitignore @@ -116,3 +116,12 @@ out/ .idea allure/ + +.ijwb/ 
+.pants.d/ +.pids/ +bazel-bin +bazel-out +bazel-tavern +bazel-testlogs + diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index c9047e984..000000000 --- a/.isort.cfg +++ /dev/null @@ -1,12 +0,0 @@ -[isort] -multi_line_output=3 -include_trailing_comma=true -indent=4 -atomic=true -combine_star=true -use_parentheses=true -line_length=88 -skip_glob=tests/**,docs/**,example/** -known_tavern=tavern -sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TAVERN,LOCALFOLDER -force_sort_within_sections=true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cce2504b6..c8f311706 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,19 +1,18 @@ repos: - repo: https://github.com/ambv/black - rev: stable + rev: 23.1.0 hooks: - id: black - language_version: python3.7 - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + files: "(tavern|tests)" + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "v0.0.246" hooks: - - id: pyflakes - language_version: python3.7 - - repo: https://github.com/pre-commit/mirrors-isort - rev: v4.3.21 + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 hooks: - - id: isort - args: - - --settings-path=.isort.cfg + - id: prettier + types_or: [yaml] -exclude: (docs/|example/|tests/|setup.py) +exclude: (docs/|example/) diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..f21ce954e --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +tests/integration/test_cookies.tavern.yaml diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 4e9521919..000000000 --- a/.pylintrc +++ /dev/null @@ -1,14 +0,0 @@ -[MASTER] -disable=missing-docstring,bad-continuation,fixme,invalid-name,line-too-long,too-few-public-methods,no-else-return,too-many-branches,locally-disabled,useless-object-inheritance,no-else-raise,abstract-method,import-outside-toplevel,cyclic-import,duplicate-code,subprocess-run-check -ignore=tests - -[REPORTS] -reports=no - -[SIMILARITIES] -min-similarity-lines=6 -ignore-imports=yes - -[TYPECHECK] -ignored-classes=RememberComposer -ignored-modules=distutils diff --git a/CHANGELOG.md b/CHANGELOG.md index c1e164a88..f6e7e427a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,98 +1,98 @@ -## Changelog +# Changelog -## 0.1.0 - Initial release +## 0.1.2 Allow sending/validation of JSON lists (2017-11-21) -### 0.1.2 - Allow sending/validation of JSON lists +## 0.1.3 Fix global configuration loading via pytest command line (2017-12-05) -### 0.1.3 - Fix global configuration loading via pytest command line +## 0.1.4 Fix global configuration if it wasn't actually passed (2017-12-06) -### 0.1.4 - Fix global configuration if it wasn't actually passed +## 0.1.5 Fix temporary file wrapping on windows (2017-12-06) -### 0.1.5 - Fix temporary file wrapping on windows +# 0.2.0 Add python 2 support (2017-12-12) -## 0.2.0 - Add python 2 support +## 0.2.1 Add option to install 'pytest' extra (2017-12-12) -### 0.2.1 - Add option to install 'pytest' extra +## 0.2.2 Support for 'verify' arg to requests (2018-01-23) -### 0.2.2 - Support for 'verify' arg to requests +## 0.2.3 quote nested json in query parameters (2018-01-23) -### 0.2.3 - quote nested json in query parameters +## 0.2.4 Fix log format interpolation for py2 (2018-01-25) -### 0.2.4 - Fix log format interpolation for py2 +## 0.2.5 Fix empty yaml files hard-failing (2018-01-25) -### 0.2.5 - Fix empty yaml files hard-failing +# 0.3.0 Use a persistent requests Session 
to allow cookies to be propagated forward through tests (2018-02-15) -## 0.3.0 - Use a persistent requests Session to allow cookies to be propagated forward through tests +# 0.4.0 MQTT support (2018-02-22) -## 0.4.0 - MQTT support +# 0.5.0 Add special 'tavern' key for formatting magic variables, and don't strictly enforce some HTTP verbs not having a body (2018-02-23) -## 0.5.0 - Add special 'tavern' key for formatting magic variables, and don't strictly enforce some HTTP verbs not having a body +## 0.5.1 Add regex validation function and verify tests at run time, not discovery time (2018-02-26) -### 0.5.1 - Add regex validation function and verify tests at run time, not discovery time +## 0.5.2 Add MQTT TLS support and fixes to formatting nested arrays/dicts (2018-03-05) -### 0.5.2 - Add MQTT TLS support and fixes to formatting nested arrays/dicts +## 0.5.3 Update README (2018-03-05) -### 0.5.3 - Update README +## 0.5.4 Add 'meta' key to request block (2018-03-05) -### 0.5.4 - Add 'meta' key to request block currently the only key in 'meta' is clear_session_cookies which wipes the session cookies before the request is made -## 0.6.0 - Allow multiple global config options on the command line and in pytest config file +# 0.6.0 Allow multiple global config options on the command line and in pytest config file (2018-03-07) -### 0.6.1 - Fix implementation of 'auth' keyword +## 0.6.1 Fix implementation of 'auth' keyword (2018-03-09) -## 0.7.0 - Add new 'anything' constructor for matching any value returned which should now also work with nested values. Also add special constructors for int/float types +# 0.7.0 Add new 'anything' constructor for matching any value returned which should now also work with nested values. Also add special constructors for int/float types (2018-03-09) -### 0.7.1 - fix delay_after/before to accept float arguments +## 0.7.1 fix delay_after/before to accept float arguments (2018-03-12) -### 0.7.2 - Fix warning on incorrect status codes +## 0.7.2 Fix warning on incorrect status codes (2018-03-20) -### 0.7.3 - Improve error handling in parser errors +## 0.7.3 Improve error handling in parser errors (2018-03-21) -### 0.7.4 - Fix python 2 +## 0.7.4 Fix python 2 (2018-03-21) -### 0.7.5 - Fix pytest-pspec error +## 0.7.5 Fix pytest-pspec error (2018-03-21) -### 0.7.6 - Move dict utilities around +## 0.7.6 Move dict utilities around (2018-03-21) -### 0.7.7 - Improve validation on the type of block returned +## 0.7.7 Improve validation on the type of block returned (2018-03-23) -## 0.8.0 - Fix matching magic variables and add new type sentinels for matching +# 0.8.0 Fix matching magic variables and add new type sentinels for matching (2018-04-03) -### 0.8.1 - Fix formatting env vars into included variables +## 0.8.1 Fix formatting env vars into included variables (2018-04-03) -### 0.8.2 - Cleanup of type conversion code and better list item validation +## 0.8.2 Cleanup of type conversion code and better list item validation (2018-04-05) -## 0.9.0 - Add file upload capability +# 0.9.0 Add file upload capability (2018-04-10) -### 0.9.1 - Fix logging library warning +## 0.9.1 Fix logging library warning (2018-04-11) -### 0.9.10 - Add new tag to match floating point numbers approximately in responses +## 0.9.2 Minor improvement to error messages (2018-04-13) -### 0.9.2 - Minor improvement to error messages +## 0.9.3 Improve error reporting from dictionary mismatches and allow regex checks in headers (2018-05-04) -### 0.9.3 - Improve error reporting from dictionary mismatches and 
allow regex checks in headers +## 0.9.4 Fixes to type conversion tags, and add a new 'anybool' type sentinel to match either True or False (2018-05-15) -### 0.9.4 - Fixes to type conversion tags, and add a new 'anybool' type sentinel to match either True or False +## 0.9.5 Fix type conversion tokens and add more robust integration tests for them (2018-05-16) -### 0.9.5 - Fix type conversion tokens and add more robust integration tests for them +## 0.9.6 Add bool conversion type token as well (2018-05-16) -### 0.9.6 - Add bool conversion type token as well +## 0.9.7 Fix error in formatting MQTT variables (2018-05-24) -### 0.9.7 - Fix error in formatting MQTT variables +## 0.9.8 Fix tavern overriding content type header when sending a file with extra headers (2018-05-25) -### 0.9.8 - Fix tavern overriding content type header when sending a file with extra headers +## 0.9.9 Allow nesting of variables in included files that can be access using dot notation (2018-05-29) -### 0.9.9 - Allow nesting of variables in included files that can be access using dot notation +# 0.9.10 Add new tag to match floating point numbers approximately in responses (2018-05-29) -## 0.10.0 - Add basic plugin system +# 0.10.0 Add basic plugin system (2018-05-29) -### 0.10.1 - Slightly improve docstrings for use with pytest-pspec +## 0.10.1 Slightly improve docstrings for use with pytest-pspec (2018-06-11) -### 0.10.2 - Fix python 2 type token issue +## 0.10.2 Fix python 2 type token issue (2018-06-13) + +# 0.11.0 Marking, strict key checking, and multiple status codes (2018-06-18) -## 0.11.0 - Marking, strict key checking, and multiple status codes - Add ability to use custom QoS for subscribing in MQTT - Add pytest marks to tests @@ -101,101 +101,118 @@ currently the only key in 'meta' is clear_session_cookies which wipes the sessio - Improve 'doc' of test for pytest-pspec - Add internal xfail for testing Tavern -## 0.12.0 Add parametrize mark and make run() use pytest.main in the background +# 0.12.0 Add parametrize mark and make run() use pytest.main in the background (2018-06-20) + See https://github.com/taverntesting/tavern/issues/127#issuecomment-398409023 calling run() directly will now cause a pytest isntance to be run in the background. 
This is to avoid having to maintain code and documentation for two separate entry points -### 0.12.1 Flesh out the 'run' function a bit more so it can mostly be used to pass in all config values without having to have a Pytest config file +## 0.12.1 Flesh out the 'run' function a bit more so it can mostly be used to pass in all config values without having to have a Pytest config file (2018-06-20) + +## 0.12.2 Fix Pylint (2018-06-20) + +## 0.12.3 Fix extra expected keys beign ignroed in responses sometimes (2018-06-20) + +## 0.12.4 Fix case matching with headers (2018-06-20) + +# 0.13.0 Add new flag to enable 'fancy' formatting on errors (2018-06-21) + +## 0.13.1 Fix python 2 error (2018-06-21) + +## 0.13.2 Bug fixes to logging and parametrization (2018-06-22) -### 0.12.2 Fix Pylint +## 0.13.3 Fix new traceback errors when anystr/anybool/etc was used (2018-06-22) -### 0.12.3 Fix extra expected keys beign ignroed in responses sometimes +## 0.13.4 Fix to formatting empty bodies in response with new traceback (2018-06-22) -### 0.12.4 Fix case matching with headers +## 0.13.5 Fix for Python 2 regex function (2018-06-25) -## 0.13.0 Add new flag to enable 'fancy' formatting on errors +# 0.14.0 Allow sending of raw data in the 'data' key for a HTTP request (2018-06-27) -### 0.13.1 Fix python 2 error +## 0.14.1 CI fix (2018-06-27) -### 0.13.2 Bug fixes to logging and parametrization +## 0.14.2 CI fix (2018-06-27) -### 0.13.3 Fix new traceback errors when anystr/anybool/etc was used +## 0.14.3 Fix header value comparisons (2018-07-04) -### 0.13.4 Fix to formatting empty bodies in response with new traceback +## 0.14.4 Pylint fix (2018-07-04) -### 0.13.5 Fix for Python 2 regex function +## 0.14.5 Add support for the 'stream' requests flag (2018-07-06) -## 0.14.0 Allow sending of raw data in the 'data' key for a HTTP request +# 0.15.0 Add basic pytest fixture support (2018-07-10) -### 0.14.3 Fix header value comparisons +## 0.15.1 Fix boolean conversion with anybool tag (2018-07-11) -### 0.14.4 Pylint fix +## 0.15.2 Travis deployment fix (2018-07-16) -### 0.14.5 Add support for the 'stream' requests flag +# 0.16.0 Add 'raw' token to alow using curly braces in strings (2018-07-24) -## 0.15.0 Add basic pytest fixture support +## 0.16.1 fix delay_before/after bug (2018-07-26) -### 0.15.1 Fix boolean conversion with anybool tag +## 0.16.2 dummy bump tag for travis deploy (2018-07-26) -### 0.15.2 Travis deployment fix +## 0.16.3 dummy bump tag for travis deploy (2018-07-26) -## 0.16.0 Add 'raw' token to alow using curly braces in strings +## 0.16.4 dummy bump tag for travis deploy (2018-07-26) -### 0.16.1 fix delay before/after bug +## 0.16.5 Fixes to requirements for development and working from local pypi indexes (2018-08-02) -### 0.16.5 Fixes to requirements for development and working from local pypi indexes +# 0.17.0 Add support for putting stages in included files which can be referred to by an id - see 57f2a10e58a88325c185258d2c83b07a532aa93a for details (2018-08-04) -## 0.17.0 Add support for putting stages in included files which can be referred to by an id +## 0.17.1 Dummy tag to attempt to make travis dpeloy, again (2018-08-07) -See 57f2a10e58a88325c185258d2c83b07a532aa93a for details +## 0.17.2 Stop wrapping responses/schemas in files for verification (2018-08-07) -### 0.17.2 Stop wrapping responses/schemas in files for verification +# 0.18.0 Add 'timeout' parameter for http requests (2018-08-24) -## 0.18.0 Add 'timeout' parameter for http requests +## show Bump version: 0.17.2 → 0.18.0 
(2018-08-24) -### 0.18.1 Upload the content type along with the file if we can guess it +## 0.18.1 Add content type/encoding to uploaded files (2018-09-05) -### 0.18.2 Fix formatting environment variables in command line global config files +## 0.18.2 Fix formatting environment variables in command line global config files (2018-09-21) -### 0.18.3 Fix 'anything' token in included test stages +## 0.18.3 Fix 'anything' token in included test stages (2018-09-28) -## 0.19.0 add retries to stages +# 0.19.0 Add retries to stages (2018-10-07) -### 0.19.1 fix typo in jmes utils +## 0.19.1 Fix typo in JMES utils (2018-10-14) -## 0.20.0 allow compatibility with pytest 4 +# 0.20.0 Allow compatibility with pytest 4 (2018-11-15) -## 0.21.0 add parametrisation of multiple keys without creating combinations +# 0.21.0 Add parametrisation of multiple keys without creating combinations (2018-12-09) -### 0.21.1 improve reporting of actual vs expected types in errors +## 0.21.1 Improve reporting of actual vs expected types in errors (2018-12-09) -## 0.22.0 fix selection of tests when using run() function interface +# 0.22.0 Fix selection of tests when using run() function interface (2018-12-28) -### 0.22.1 allow referenced stages to be included from global configuration files -## 0.23.0 Fix 'only' keyword +This used pytests's -k flag when we actually wanted to change collection of tests, not collecting all tests then selecting by name -## 0.24.0 Fix typetoken validation and correctly unsubscribe from MQTT topics after a stage +## 0.22.1 Allow referenced stages to be included from global configuration files (2018-12-28) -## 0.25.0 Allow specifying custom SSL certificates in HTTP requests +# 0.23.0 Fix 'only' keyword (2019-02-02) -### 0.25.1 Fix fancy traceback when comments in yaml files contain special characters +# 0.24.0 Fix typetoken validation and correctly unsubscribe from MQTT topics after a stage (2019-02-16) -## 0.26.0 Add more advanced cookie behaviour +# 0.25.0 Allow specifying custom SSL certificates in HTTP requests (2019-02-21) -### 0.26.1 Fix matching 'anything' type token in MQTT +## 0.25.1 Fix fancy traceback when comments in yaml files contain special characters (2019-03-16) -### 0.26.2 Fix loading global config via run function +# 0.26.0 Add more advanced cookie behaviour (2019-03-17) -### 0.26.3 Fix raw token formatting +## 0.26.1 Fix matching 'anything' type token in MQTT (2019-03-17) -### 0.26.4 Allow loading of json files using include directive +## 0.26.2 Fix loading global config via run function (2019-03-19) -### 0.26.5 Lock pytest version to stop internal error +## 0.26.3 Fix raw token formatting (2019-04-11) + +## 0.26.4 Allow loading of json files using include directive (2019-06-01) + +## 0.26.5 Lock pytest version to stop internal error (2019-06-01) + +# 0.27.0 0.27.0 release (2019-08-10) -## 0.27.0 - Fix various typos in documentation - Allow sending form data and files in a single request @@ -205,112 +222,185 @@ See 57f2a10e58a88325c185258d2c83b07a532aa93a for details - Allow sending single values as JSON body as according to RFC 7159 - Change 'save' selector to use JMESpath -## 0.28.0 - -Add a couple of initial hooks - -The initial 2 hooks should allow a user to do something before every test and -after every stage - -## 0.29.0 - -Allow saving in MQTT tests and move calling external verification functions into their own block - -## 0.30.0 +# 0.28.0 Add a couple of initial hooks (2019-08-26) -Allow formatting of cookie names and allow overriding cookie values in a request -### 
0.30.1 +The initial 2 hooks should allow a user to do something before every test and after every stage -Fix MQTT subscription race condition +# 0.29.0 Allow saving in MQTT tests and move calling external verification functions into their own block (2019-08-28) -### 0.30.2 +# 0.30.0 Allow formatting of cookie names and allow overriding cookie values in a request (2019-08-30) -Fix parsing auth header +## 0.30.1 Fix MQTT subscription race condition (2019-09-07) -### 0.30.3 +## 0.30.2 Fix parsing auth header (2019-09-07) -Fix marker serialisation for pytest-xdist +## 0.30.3 Fix marker serialisation for pytest-xdist (2019-09-07) -## 0.31.0 +# 0.31.0 - Add isort (2019-11-22) -- Add isort - Fix pytest warnings from None check - Add warning when trying to coerce a non-stirnginto a string in string formatting - Fix jmespath not working when the expected response was falsy - Fix compatability with pytest-rerunfailures - Add options to specify custom content type and encoding for files -## 0.32.0 +# 0.32.0 Add option to control which files to search for rather than having it hardcoded (2019-11-22) + +# 0.33.0 Add extra type tokens for matching lsits and dicts (2019-11-25) + +# 0.34.0 Add new magic tag that includes something as json rather than a string (2019-12-08) + +# 1.0.0 1.0 Release (2020-04-05) + +## 1.0.1 Enable formatting of file body key in requests (2020-05-01) + +## 1.0.2 Fix incorrect logic checking request codes (2020-05-01) + +# 1.1.0 Add new global option to enable merging of keys from external functions (2020-05-01) + +## 1.1.1 Travis fix (2020-05-23) + +## travis-force Attempt to force travis to commit (2020-05-23) + +## 1.1.2 fforce new verison to make travis actually commit (2020-05-23) + +## 1.1.3 travis (2020-05-23) + +## 1.1.4 Bump version: 1.1.3 → 1.1.4 (2020-05-23) + +## 1.1.5 travis (2020-05-23) + +# 1.2.0 allow passing max_retries as a format variable (2020-05-25) + +## 1.2.1 travis (2020-05-25) + +## 1.2.2 travis (2020-05-25) + +## 1.2.3 lock pytest to below 6 temporarily (2020-08-01) + +## 1.2.4 Be more relaxed in locking dependency versions (2020-08-08) + +# 1.3.0 Allow autouse fixtures in Tavern tests (2020-08-08) + +# 1.4.0 Support pytest 6 (2020-08-15) + +## 1.4.1 Fix reading utf8 encoded test files (2020-08-22) + +# 1.5.0 Allow using environment variables when formatting test marks (2020-08-26) + +## 1.5.1 Fix strictness for a stage 'leaking' into the subsequent stages (2020-08-26) + +# 1.6.0 Allow specifying just the stage 'id' in case of a stage ref without also needing a name (2020-08-26) + +# 1.7.0 Add TAVERN_INCLUDE_PATH to allow including files from other file locations (2020-10-09) + +# 1.8.0 Move parametrize functions out of main class as they are specific behaviour (2020-10-09) + + +Add filterwarning to schema + +# 1.10.0 Format filenames (#612) (2020-11-07) + + + +# 1.11.0 523 add request hook (#615) (2020-11-07) + + + +# 1.9.0 219 response function calls (#614) (2020-11-06) + + +Also log the result from 'response' ext functions + +## 1.7.1 Bump max version of paho-mqtt (2020-11-07) + +## 1.11.1 Fix bumped version (2020-11-07) + +# 1.12.0 Allow ext functions in mqtt blocks (2020-12-11) + +## 1.12.1 Fix pytest deprecation warning (2020-12-11) + +## 1.12.2 lock pykwalify version to 1.7 because of breaking API change in 1.8 (2020-12-31) + +# 1.13.0 Add support for generating Allure test reports (2021-01-30) + +## 1.13.1 Fix using ext functions in query params (2021-01-30) + +## 1.13.2 Fix checking for cert_reqs file (2021-02-20) + +# 1.14.0 Add extra argument 
to regex helper to allow matching from a jmespath (2021-02-20) + +## 1.14.1 Fix mqtt tls options validation (2021-03-27) + +## 1.14.2 Stop pytest warning about a private import (2021-04-05) -Add option to control which files to search for rather than having it hardcoded +# 1.15.0 Update pytest and pykwalify (2021-06-06) -## 0.33.0 +# 1.16.0 Allow specifying a new strict option which will allow list items in any order (2021-06-20) -Add extra type tokens for matching arbitrary lists and dicts +## 1.16.1 Fix regression in nested strict key checking (2021-09-05) -## 0.34.0 +## 1.16.2 Fix some settings being lost after retrying a stage (2021-10-03) -Add new magic tag that includes something as json rather than a string +## 1.16.3 Fix --collect-only flag (2021-10-17) -## 1.0.0 +## 1.16.4 Change a couple of instances of logging where 'info' might log sensitive data and add note to docs (2021-10-31) -- 'body' key changes to 'json' in HTTP response +## 1.16.5 Fix 'x is not None' vs 'not x' causing strict matching error (2021-10-31) -- Python 2 dropped +# 1.17.0 Allow parametrising HTTP method (2021-10-31) -- Changes to the way strictness works +## 1.17.1 Allow bools in parameterized values (2021-12-12) -- remove 'null' checking on body matching anything +## 1.17.2 Fix hardcoded list of strictness choices on command line (2021-12-12) -- 'run' entry point slightly reworked +# 1.18.0 Infer content-type and content-encoding from file_body key (2021-12-12) -- New error traceback is the default +# 1.19.0 Allow parametrizing more types of values (2022-01-09) -- External function blocks changes +# 1.20.0 Add pytest_tavern_beta_after_every_test_run (2022-02-25) -- Save value path changes to jmespath +# 1.21.0 Allow usage of pytest 7 (2022-04-17) -- Add key to allow uploading the raw content of a file as a request body +# 1.22.0 Allow usage of pyyaml 6 (2022-04-23) -- Add new token which can match regex values in parts of responses +## 1.22.1 Fix allure formatting stage name (2022-05-02) -- Strict key checking should now work with MQTT json payloads +# 1.23.0 Update pyjwt for CVE-2022-29217 (2022-06-05) -- Fix bug where saved variables were not cleared between tests +## 1.23.1 Fix docstring of fake pytest object to be a string (2022-06-05) -See https://github.com/taverntesting/tavern/issues/495 for details +## 1.23.2 Fix newer versions of requests complaining about headers not being strings (2022-06-12) -### 1.0.1 - Enable formatting of file body key in requests +## 1.23.3 Allow specifying 'unexpected' messages in MQTT to fail a test (2022-06-26) -### 1.0.2 - Fix incorrect logic checking request codes +## 1.23.4 Update stevedore version (2022-10-23) -## 1.1.0 - Add new global option to enable merging of keys from external functions +## 1.23.5 Fix missing dependency in newer pytest versions (2022-11-07) -## 1.2.0 - allow passing max_retries as a format variable +# 1.24.0 Fix using 'py' library (2022-11-08) -### 1.2.3 - lock pytest to below 6 temporarily -### 1.2.4 - Be more relaxed in locking dependency versions +This locks pytest to <=7.2 to avoid having to fix imports every time a new version comes out. 
-## 1.3.0 - Allow autouse fixtures in Tavern tests +## 1.24.1 Format variables in test error log before dumping as a YAML string (2022-11-22) -## 1.4.0 - Support pytest 6 +# 1.25.0 More changes to packaging (2022-12-13) -### 1.4.1 - Fix reading utf8 encoded test files -## 1.5.0 - Allow using environment variables when formatting test marks +This is technically not a operational change but I'm adding a new tag so it can br reverted in future -### 1.5.1 - Fix strictness for a stage 'leaking' into the subsequent stages +## 1.25.1 Remove tbump from dependencies so it can actually be uploaded to pypi (2022-12-13) -## 1.6.0 - Allow specifying just the stage 'id' in case of a stage ref without also needing a name +## 1.25.2 Only patch pyyaml when a test is actually being loaded to avoid side effect from Tavern just being in the python path (2022-12-15) -## 1.7.0 - Add TAVERN_INCLUDE_PATH to allow including files from other file locations +# 2.0.0 2.0.0 release (2023-01-12) -## 1.8.0 - Allow using filterwarnings +## 2.0.1 Bump some dependency versions (2023-01-16) -## 1.9.0 - Also log the result from 'response' ext functions +## 2.0.2 Fix saving in MQTT (2023-02-08) -## 1.10.0 - Fix formatting filenames +## 2.0.3 Add type annotations (internal change) (2023-02-10) -## 1.11.0 - Add hook that is called before requests +## 2.0.4 Fix using ext functions in MQTT publish (2023-02-16) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..b145d4ce9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,60 @@ +# Contributing + +All configuration for the project should be put into `pyproject.toml`. + +## Working locally + +1. Create a virtualenv using whatever method you like ( + eg, [virtualenvwrapper](https://virtualenvwrapper.readthedocs.io/)) + +1. Install dependencies from requirements.txt + +## Running tests locally + +To run a subset of the required tests, run the [smoke test script](/scripts/smoke.bash) + + ./scripts/smoke.bash + +If on Windows, you should be able to just run the 'tox' commands in that file. + +## Updating/adding a dependency + +1. Add or update the dependency in [pyproject.toml](/pyproject.toml) + +1. Update requirements files (BOTH of them) + + pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file requirements.txt --reuse-hashes --generate-hashes + pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file constraints.txt --strip-extras + +1. Run tests as above + +## Pre-commit + +Basic checks (formatting, import order) is done with pre-commit and is controlled by [the yaml file](/.pre-commit-config.yaml). + +After installing dependencies, Run + + # check it works + pre-commit run --all-files + pre-commit install + +Run every so often to update the pre-commit hooks + + pre-commit autoupdate + +### Fixing Python formatting issue + + black tavern/ tests/ + ruff --fix tavern/ tests/ + +## Creating a new release + +1. Setup `~/.pypirc` + +1. Install the correct version of tbump + + pip install tbump@https://github.com/michaelboulton/tbump/archive/714ba8957a3c84b625608ceca39811ebe56229dc.zip + +1. Tag and push to git with `tbump --tag-message ""` + +1. Upload to pypi with `flit publish` diff --git a/LICENSE b/LICENSE index 8d9955232..f869bffdb 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2017 Zoetrope Ltd. 
trading as Overlock +Copyright 2021 Michael Boulton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/MANIFEST.in b/MANIFEST.in index e1376b0e7..05b2580c1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include tavern/schemas/tests.schema.yaml -include tavern/_plugins/mqtt/schema.yaml +include tavern/_core/schema/tests.jsonschema.yaml +include tavern/_plugins/mqtt/jsonschema.yaml include tavern/_plugins/grpc/schema.yaml include LICENSE diff --git a/README.md b/README.md new file mode 100644 index 000000000..d82bb12a8 --- /dev/null +++ b/README.md @@ -0,0 +1,193 @@ +[![pypi](https://img.shields.io/pypi/v/tavern.svg)](https://pypi.org/project/tavern/) +[![docs](https://readthedocs.org/projects/pip/badge/?version=latest&style=flat)](https://tavern.readthedocs.io/en/latest/) +![workflow](https://github.com/taverntesting/tavern/actions/workflows/main.yml/badge.svg?branch=master) + +# Easier API testing + +Tavern is a pytest plugin, command-line tool and Python library for +automated testing of APIs, with a simple, concise and flexible +YAML-based syntax. It's very simple to get started, and highly +customisable for complex tests. Tavern supports testing RESTful APIs as +well as MQTT based APIs. + +The best way to use Tavern is with +[pytest](https://docs.pytest.org/en/latest/). Tavern comes with a +pytest plugin so that literally all you have to do is install pytest and +Tavern, write your tests in `.tavern.yaml` files and run pytest. This +means you get access to all of the pytest ecosystem and allows you to do +all sorts of things like regularly run your tests against a test server +and report failures or generate HTML reports. + +You can also integrate Tavern into your own test framework or continuous +integration setup using the Python library, or use the command line +tool, `tavern-ci` with bash scripts and cron jobs. + +To learn more, check out the [examples](https://taverntesting.github.io/examples) or the complete +[documentation](https://taverntesting.github.io/documentation). If you're interested in contributing +to the project take a look at the [GitHub +repo](https://github.com/taverntesting/tavern). + +## Quickstart + +First up run `pip install tavern`. + +Then, let's create a basic test, `test_minimal.tavern.yaml`: + +```yaml +--- +# Every test file has one or more tests... +test_name: Get some fake data from the JSON placeholder API + +# ...and each test has one or more stages (e.g. an HTTP request) +stages: + - name: Make sure we have the right ID + + # Define the request to be made... + request: + url: https://jsonplaceholder.typicode.com/posts/1 + method: GET + + # ...and the expected response code and body + response: + status_code: 200 + json: + id: 1 + userId: 1 + title: "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + body: "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" +``` + +This file can have any name, but if you intend to use Pytest with +Tavern, it will only pick up files called `test_*.tavern.yaml`. 
+ +This can then be run like so: + +```bash +$ pip install tavern[pytest] +$ py.test test_minimal.tavern.yaml -v +=================================== test session starts =================================== +platform linux -- Python 3.5.2, pytest-3.4.2, py-1.5.2, pluggy-0.6.0 -- /home/taverntester/.virtualenvs/tavernexample/bin/python3 +cachedir: .pytest_cache +rootdir: /home/taverntester/myproject, inifile: +plugins: tavern-0.7.2 +collected 1 item + +test_minimal.tavern.yaml::Get some fake data from the JSON placeholder API PASSED [100%] + +================================ 1 passed in 0.14 seconds ================================= +``` + +It is strongly advised that you use Tavern with Pytest - not only does +it have a lot of utility to control discovery and execution of tests, +there are a huge amount of plugins to improve your development +experience. If you absolutely can't use Pytest for some reason, use the +`tavern-ci` command line interface: + +```bash +$ pip install tavern +$ tavern-ci --stdout test_minimal.tavern.yaml +2017-11-08 16:17:00,152 [INFO]: (tavern.core:55) Running test : Get some fake data from the JSON placeholder API +2017-11-08 16:17:00,153 [INFO]: (tavern.core:69) Running stage : Make sure we have the right ID +2017-11-08 16:17:00,239 [INFO]: (tavern.core:73) Response: '' ({ + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "json": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" +}) +2017-11-08 16:17:00,239 [INFO]: (tavern.printer:9) PASSED: Make sure we have the right ID [200] +``` + +## Why not Postman, Insomnia or pyresttest etc? + +Tavern is a focused tool which does one thing well: automated testing of +APIs. + +**Postman** and **Insomnia** are excellent tools which cover a wide +range of use-cases for RESTful APIs, and indeed we use Tavern alongside +Postman. However, specifically with regards to automated testing, Tavern +has several advantages over Postman: + +- A full-featured Python environment for writing easily reusable custom validation functions +- Testing of MQTT based systems in tandem with RESTful APIS. +- Seamless integration with pytest to keep all your tests in one place +- A simpler, less verbose and clearer testing language + +Tavern does not do many of the things Postman and Insomnia do. For +example, Tavern does not have a GUI nor does it do API monitoring or +mock servers. On the other hand, Tavern is free and open-source and is a +more powerful tool for developers to automate tests. + +**pyresttest** is a similar tool to Tavern for testing RESTful APIs, but +is no longer actively developed. On top of MQTT testing, Tavern has +several other advantages over PyRestTest which overall add up to a +better developer experience: + +- Cleaner test syntax which is more intuitive, especially for + non-developers +- Validation function are more flexible and easier to use +- Better explanations of why a test failed + +## Hacking on Tavern + +If you want to add a feature to Tavern or just play around with it +locally, it's a good plan to first create a local development +environment ([this +page](http://docs.python-guide.org/en/latest/dev/virtualenvs/) has a +good primer for working with development environments with Python). +After you've created your development environment, just +`pip install tox` and run `tox` to run the unit tests. 
If you want +to run the integration tests, make sure you have +[docker](https://www.docker.com/) installed and run +`tox -c tox-integration.ini` (bear in mind this might take a while.) +It's that simple! + +If you want to develop things in tavern, enter your virtualenv and run +`pip install -r requirements.txt` to install the library, any requirements, +and other useful development options. + +Tavern uses [black](https://github.com/ambv/black) to keep all of the code +formatted consistently. There is a pre-commit hook to run black which can +be enabled by running `pre-commit install`. + +If you want to add a feature to get merged back into mainline Tavern: + +- Add the feature you want +- Add some tests for your feature: + - If you are adding some utility functionality such as improving verification + of responses, adding some unit tests might be best. These are in the + `tests/unit/` folder and are written using Pytest. + - If you are adding more advanced functionality like extra validation + functions, or some functionality that directly depends on the format of the + input YAML, it might also be useful to add some integration tests. At the + time of writing, this is done by adding an example flask endpoint in + `tests/integration/server.py` and a corresponding Tavern YAML test file in + the same directory. This will be cleaned up a bit once we have a proper + plugin system implemented. +- Open a [pull request](https://github.com/taverntesting/tavern/pulls). + +See [CONTRIBUTING.md](/CONTRIBUTING.md) for more details. + +## Acknowledgements + +Tavern makes use of several excellent open-source projects: + +- [pytest](https://docs.pytest.org/en/latest/), the testing + framework Tavern intergrates with +- [requests](http://docs.python-requests.org/en/master/), for HTTP + requests +- [YAML](http://yaml.org/) and + [pyyaml](https://github.com/yaml/pyyaml), for the test syntax +- [pykwalify](https://github.com/Grokzen/pykwalify), for YAML schema + validation +- [pyjwt](https://github.com/jpadilla/pyjwt), for decoding JSON Web + Tokens +- [colorlog](https://github.com/borntyping/python-colorlog), for + formatting terminal outputs +- [paho-mqtt](https://github.com/eclipse/paho.mqtt.python), for + sending MQTT messages + +## Maintenance + +Tavern is currently maintained by + +- @michaelboulton diff --git a/README.rst b/README.rst deleted file mode 100644 index dcaa0a872..000000000 --- a/README.rst +++ /dev/null @@ -1,205 +0,0 @@ -.. image:: https://travis-ci.org/taverntesting/tavern.svg?branch=master - :target: https://travis-ci.org/taverntesting/tavern - -.. image:: https://img.shields.io/pypi/v/tavern.svg - :target: https://pypi.org/project/tavern/ - -.. image:: https://readthedocs.org/projects/pip/badge/?version=latest&style=flat - :target: https://tavern.readthedocs.io/en/latest/ - -1.0 Release -=========== - -See here for details about **breaking** changes with the upcoming 1.0 release: https://github.com/taverntesting/tavern/issues/495 - -Easier API testing -================== - -Tavern is a pytest plugin, command-line tool and Python library for -automated testing of APIs, with a simple, concise and flexible -YAML-based syntax. It's very simple to get started, and highly -customisable for complex tests. Tavern supports testing RESTful APIs as -well as MQTT based APIs. - -The best way to use Tavern is with -`pytest `__. Tavern comes with a -pytest plugin so that literally all you have to do is install pytest and -Tavern, write your tests in ``.tavern.yaml`` files and run pytest. 
This -means you get access to all of the pytest ecosystem and allows you to do -all sorts of things like regularly run your tests against a test server -and report failures or generate HTML reports. - -You can also integrate Tavern into your own test framework or continuous -integration setup using the Python library, or use the command line -tool, ``tavern-ci`` with bash scripts and cron jobs. - -To learn more, check out the `examples `__ or the complete -`documentation `__. If you're interested in contributing -to the project take a look at the `GitHub -repo `__. - -Quickstart ----------- - -First up run ``pip install tavern``. - -Then, let's create a basic test, ``test_minimal.tavern.yaml``: - -.. code:: yaml - - --- - # Every test file has one or more tests... - test_name: Get some fake data from the JSON placeholder API - - # ...and each test has one or more stages (e.g. an HTTP request) - stages: - - name: Make sure we have the right ID - - # Define the request to be made... - request: - url: https://jsonplaceholder.typicode.com/posts/1 - method: GET - - # ...and the expected response code and body - response: - status_code: 200 - json: - id: 1 - -This file can have any name, but if you intend to use Pytest with -Tavern, it will only pick up files called ``test_*.tavern.yaml``. - -This can then be run like so: - -.. code:: bash - - $ pip install tavern[pytest] - $ py.test test_minimal.tavern.yaml -v - =================================== test session starts =================================== - platform linux -- Python 3.5.2, pytest-3.4.2, py-1.5.2, pluggy-0.6.0 -- /home/taverntester/.virtualenvs/tavernexample/bin/python3 - cachedir: .pytest_cache - rootdir: /home/taverntester/myproject, inifile: - plugins: tavern-0.7.2 - collected 1 item - - test_minimal.tavern.yaml::Get some fake data from the JSON placeholder API PASSED [100%] - - ================================ 1 passed in 0.14 seconds ================================= - -It is strongly advised that you use Tavern with Pytest - not only does -it have a lot of utility to control discovery and execution of tests, -there are a huge amount of plugins to improve your development -experience. If you absolutely can't use Pytest for some reason, use the -``tavern-ci`` command line interface: - -.. code:: bash - - $ pip install tavern - $ tavern-ci --stdout test_minimal.tavern.yaml - 2017-11-08 16:17:00,152 [INFO]: (tavern.core:55) Running test : Get some fake data from the JSON placeholder API - 2017-11-08 16:17:00,153 [INFO]: (tavern.core:69) Running stage : Make sure we have the right ID - 2017-11-08 16:17:00,239 [INFO]: (tavern.core:73) Response: '' ({ - "userId": 1, - "id": 1, - "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", - "json": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" - }) - 2017-11-08 16:17:00,239 [INFO]: (tavern.printer:9) PASSED: Make sure we have the right ID [200] - -Why not Postman, Insomnia or pyresttest etc? --------------------------------------------- - -Tavern is a focused tool which does one thing well: automated testing of -APIs. - -**Postman** and **Insomnia** are excellent tools which cover a wide -range of use-cases for RESTful APIs, and indeed we use Tavern alongside -Postman. 
However, specifically with regards to automated testing, Tavern -has several advantages over Postman: - -- A full-featured Python environment for writing easily reusable custom validation functions -- Testing of MQTT based systems in tandem with RESTful APIS. -- Seamless integration with pytest to keep all your tests in one place -- A simpler, less verbose and clearer testing language - -Tavern does not do many of the things Postman and Insomnia do. For -example, Tavern does not have a GUI nor does it do API monitoring or -mock servers. On the other hand, Tavern is free and open-source and is a -more powerful tool for developers to automate tests. - -**pyresttest** is a similar tool to Tavern for testing RESTful APIs, but -is no longer actively developed. On top of MQTT testing, Tavern has -several other advantages over PyRestTest which overall add up to a -better developer experience: - -- Cleaner test syntax which is more intuitive, especially for - non-developers -- Validation function are more flexible and easier to use -- Better explanations of why a test failed - -Hacking on Tavern ------------------ - -If you want to add a feature to Tavern or just play around with it -locally, it's a good plan to first create a local development -environment (`this -page `__ has a -good primer for working with development environments with Python). -After you've created your development environment, just -``pip install tox`` and run ``tox`` to run the unit tests. If you want -to run the integration tests, make sure you have -`docker `__ installed and run -``tox -c tox-integration.ini`` (bear in mind this might take a while.) -It's that simple! - -If you want to develop things in tavern, enter your virtualenv and run -``pip install -r requirements.txt`` to install the library, any requirements, -and other useful development options. - -Tavern uses [black](https://github.com/ambv/black) to keep all of the code -formatted consistently. There is a pre-commit hook to run black which can -be enabled by running ``pre-commit install``. - -If you want to add a feature to get merged back into mainline Tavern: - -- Add the feature you want -- Add some tests for your feature: - - If you are adding some utility functionality such as improving verification - of responses, adding some unit tests might be best. These are in the - ``tests/unit/`` folder and are written using Pytest. - - If you are adding more advanced functionality like extra validation - functions, or some functionality that directly depends on the format of the - input YAML, it might also be useful to add some integration tests. At the - time of writing, this is done by adding an example flask endpoint in - ``tests/integration/server.py`` and a corresponding Tavern YAML test file in - the same directory. This will be cleaned up a bit once we have a proper - plugin system implemented. -- Open a `pull request `__. 
- -Acknowledgements ----------------- - -Tavern makes use of several excellent open-source projects: - -- `pytest `__, the testing - framework Tavern intergrates with -- `requests `__, for HTTP - requests -- `YAML `__ and - `pyyaml `__, for the test syntax -- `pykwalify `__, for YAML schema - validation -- `pyjwt `__, for decoding JSON Web - Tokens -- `colorlog `__, for - formatting terminal outputs -- `paho-mqtt `__, for - sending MQTT messages - - -Maintenance ------------ - -Tavern is currently maintained by - -- @michaelboulton diff --git a/constraints.txt b/constraints.txt new file mode 100644 index 000000000..743af2450 --- /dev/null +++ b/constraints.txt @@ -0,0 +1,319 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --all-extras --output-file=constraints.txt --resolver=backtracking --strip-extras pyproject.toml +# +allure-pytest==2.12.0 + # via tavern (pyproject.toml) +allure-python-commons==2.12.0 + # via allure-pytest +attrs==22.1.0 + # via + # allure-python-commons + # jsonschema + # pytest +bcrypt==4.0.1 + # via paramiko +black==23.1.0 + # via tavern (pyproject.toml) +bleach==5.0.1 + # via readme-renderer +build==0.9.0 + # via pip-tools +bump2version==1.0.1 + # via tavern (pyproject.toml) +certifi==2022.12.7 + # via requests +cffi==1.15.1 + # via + # cryptography + # pynacl +cfgv==3.3.1 + # via pre-commit +charset-normalizer==2.1.1 + # via requests +click==8.1.3 + # via + # black + # flask + # pip-tools +colorlog==6.7.0 + # via tavern (pyproject.toml) +commonmark==0.9.1 + # via rich +coverage==7.0.0 + # via + # pytest-cov + # tavern (pyproject.toml) +cryptography==39.0.1 + # via + # paramiko + # secretstorage +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via docker-compose +docker==6.0.1 + # via docker-compose +docker-compose==1.29.2 + # via tavern (pyproject.toml) +dockerpty==0.4.1 + # via docker-compose +docopt==0.6.2 + # via + # docker-compose + # pykwalify +docutils==0.19 + # via + # flit + # readme-renderer +exceptiongroup==1.0.4 + # via pytest +execnet==1.9.0 + # via pytest-xdist +faker==15.3.4 + # via tavern (pyproject.toml) +filelock==3.8.2 + # via + # tox + # virtualenv +flask==2.2.2 + # via tavern (pyproject.toml) +flit==3.8.0 + # via tavern (pyproject.toml) +flit-core==3.8.0 + # via flit +fluent-logger==0.10.0 + # via tavern (pyproject.toml) +googleapis-common-protos==1.58.0 + # via grpcio-status +grpcio==1.51.1 + # via + # grpcio-reflection + # grpcio-status + # tavern (pyproject.toml) +grpcio-reflection==1.51.1 + # via tavern (pyproject.toml) +grpcio-status==1.51.1 + # via tavern (pyproject.toml) +identify==2.5.10 + # via pre-commit +idna==3.4 + # via requests +importlib-metadata==5.2.0 + # via + # flask + # keyring + # twine +iniconfig==1.1.1 + # via pytest +itsdangerous==2.1.2 + # via + # flask + # tavern (pyproject.toml) +jaraco-classes==3.2.3 + # via keyring +jeepney==0.8.0 + # via + # keyring + # secretstorage +jinja2==3.1.2 + # via flask +jmespath==1.0.1 + # via tavern (pyproject.toml) +jsonschema==3.2.0 + # via + # docker-compose + # tavern (pyproject.toml) +keyring==23.13.1 + # via twine +markupsafe==2.1.1 + # via + # jinja2 + # werkzeug +more-itertools==9.0.0 + # via jaraco-classes +msgpack==1.0.4 + # via fluent-logger +mypy==0.991 + # via tavern (pyproject.toml) +mypy-extensions==0.4.3 + # via + # black + # mypy + # tavern (pyproject.toml) +nodeenv==1.7.0 + # via pre-commit +packaging==22.0 + # via + # black + # build + # docker + # pytest + # tox +paho-mqtt==1.5.1 + # via tavern 
(pyproject.toml) +paramiko==2.12.0 + # via docker +pathspec==0.10.3 + # via black +pbr==5.11.0 + # via stevedore +pep517==0.13.0 + # via build +pip-tools==6.12.1 + # via tavern (pyproject.toml) +pkginfo==1.9.2 + # via twine +platformdirs==2.6.0 + # via + # black + # virtualenv +pluggy==1.0.0 + # via + # allure-python-commons + # pytest + # tox +pre-commit==2.20.0 + # via tavern (pyproject.toml) +protobuf==4.22.0 + # via + # googleapis-common-protos + # grpcio-reflection + # grpcio-status +py==1.11.0 + # via + # tavern (pyproject.toml) + # tox +pycparser==2.21 + # via cffi +pygments==2.13.0 + # via + # readme-renderer + # rich + # tavern (pyproject.toml) +pyjwt==2.6.0 + # via tavern (pyproject.toml) +pykwalify==1.8.0 + # via tavern (pyproject.toml) +pynacl==1.5.0 + # via paramiko +pyrsistent==0.19.2 + # via jsonschema +pytest==7.2.0 + # via + # allure-pytest + # pytest-cov + # pytest-xdist + # tavern (pyproject.toml) +pytest-cov==4.0.0 + # via tavern (pyproject.toml) +pytest-xdist==3.1.0 + # via tavern (pyproject.toml) +python-box==6.1.0 + # via tavern (pyproject.toml) +python-dateutil==2.8.2 + # via + # faker + # pykwalify +python-dotenv==0.21.0 + # via docker-compose +pyyaml==5.4.1 + # via + # docker-compose + # pre-commit + # tavern (pyproject.toml) +readme-renderer==37.3 + # via twine +requests==2.28.1 + # via + # docker + # docker-compose + # flit + # requests-toolbelt + # tavern (pyproject.toml) + # twine +requests-toolbelt==0.10.1 + # via twine +rfc3986==2.0.0 + # via twine +rich==12.6.0 + # via twine +ruamel-yaml==0.17.21 + # via pykwalify +ruamel-yaml-clib==0.2.7 + # via ruamel-yaml +ruff==0.0.246 + # via tavern (pyproject.toml) +secretstorage==3.3.3 + # via keyring +six==1.16.0 + # via + # allure-pytest + # allure-python-commons + # bleach + # dockerpty + # jsonschema + # paramiko + # python-dateutil + # tox + # websocket-client +stevedore==4.1.1 + # via tavern (pyproject.toml) +texttable==1.6.7 + # via docker-compose +toml==0.10.2 + # via pre-commit +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pep517 + # pytest + # tox +tomli-w==1.0.0 + # via flit +tox==3.28.0 + # via + # tavern (pyproject.toml) + # tox-travis +tox-travis==0.12 + # via tavern (pyproject.toml) +twine==4.0.2 + # via tavern (pyproject.toml) +types-pyyaml==6.0.12.2 + # via tavern (pyproject.toml) +typing-extensions==4.4.0 + # via + # black + # mypy +urllib3==1.26.13 + # via + # docker + # requests + # twine +virtualenv==20.17.1 + # via + # pre-commit + # tox +webencodings==0.5.1 + # via bleach +websocket-client==0.59.0 + # via + # docker + # docker-compose +werkzeug==2.2.3 + # via flask +wheel==0.38.4 + # via + # pip-tools + # tavern (pyproject.toml) +zipp==3.11.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/docs/source/basics.md b/docs/source/basics.md index 37d6e79c5..11852ede6 100644 --- a/docs/source/basics.md +++ b/docs/source/basics.md @@ -17,6 +17,9 @@ stages: status_code: 200 json: id: 1 + userId: 1 + title: "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + body: "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" save: json: returned_id: id @@ -128,16 +131,24 @@ This can be saved into the value `first_val` with this response block: response: save: json: - first_val: thing.nested[0] + first_val: "thing.nested[0]" ``` The query should be defined as a JMES 
query (see [JMESPath](http://jmespath.org/) for more information). In the above example, this essentially performs the operation `json["thing"]["nested"][0]`. This can be used to perform -powerful queries on response data, but note that only 'simple' values -like integers, strings, or float values can be saved. Trying to save a -'block' of data such as a JSON list or object is currently unsupported -and will cause the test to fail. +powerful queries on response data. + +This can be used to save blocks of data as well, for example: + +```yaml +response: + save: + json: + nested_thing: "thing" +``` + +This will save `{"nested": [1, 2, 3, 4]}` into the `nested_thing` variable. See the documentation for the `force_format_include` tag for how this can be used. **NOTE**: The behaviour of these queries used to be different and indexing into an array was done like `thing.nested.0`. This will be deprecated in the @@ -326,7 +337,7 @@ changed to use a different library to avoid this issue.** # valid jwt which is signed by the given key. response: verify_response_with: - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" key: CGQgaG7GYvTcpaQZqosLy4 @@ -344,7 +355,7 @@ body of the response against it. # which has to contain a user name and may contain a user number. response: verify_response_with: - function: tavern.testutils.helpers:validate_pykwalify + function: tavern.helpers:validate_pykwalify extra_kwargs: schema: type: seq @@ -404,6 +415,7 @@ This can be used as so: request: url: http://server.com/login headers: + x-my-header: abc123 $ext: function: utils:generate_bearer_token json: @@ -434,7 +446,14 @@ def return_hello(): ``` If `tavern-merge-ext-function-values` is set, this will send "hello" and "goodbye" in -the request. If not, it will just sent "hello". +the request. If not, it will just send "hello". + +Example `pytest.ini` setting `tavern-merge-ext-function-values` as an argument. +```python +# pytest.ini +[pytest] +addopts = --tavern-merge-ext-function-values +``` #### Saving data from a response @@ -499,7 +518,7 @@ For example, if our server saves the user ID in the 'sub' field of the JWT: status_code: 200 verify_response_with: # Make sure a token exists - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" options: @@ -509,7 +528,7 @@ For example, if our server saves the user ID in the 'sub' field of the JWT: # in the test configuration for use in future tests # Note the use of $ext again $ext: - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" options: @@ -607,8 +626,12 @@ With strict being turned off for the body, any of these in the test will pass: But not: -- `[3, 1]`, `[2, 1]` - items present, but out of order - `[2, 4]` - '4' not present in response from the server +- `[3, 1]`, `[2, 1]` - items present, but out of order + +To match the last case you can use the special setting `list_any_order`. This setting +can only be used in the 'json' key of a request, but will match list items in any order as +long as they are present in the response. 
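As a toy illustration of the two behaviours described above (this is not Tavern's implementation, just a sketch of the semantics): with strictness turned off the expected items must appear in the same relative order, while `list_any_order` only requires that each expected item is present somewhere in the response.

```python
def matches_in_order(expected, response):
    # Strictness off: expected items must appear as an in-order subsequence
    it = iter(response)
    return all(item in it for item in expected)


def matches_any_order(expected, response):
    # list_any_order: items may appear anywhere, in any order
    return all(item in response for item in expected)


response = [1, 2, 3]
assert matches_in_order([1, 3], response)       # passes with strictness off
assert not matches_in_order([3, 1], response)   # out of order, so it fails
assert matches_any_order([3, 1], response)      # passes with list_any_order
assert not matches_any_order([2, 4], response)  # 4 missing, fails either way
```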
### Changing the setting @@ -927,7 +950,7 @@ stages: json: test_user_login_token: token verify_response_with: - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" options: @@ -1481,7 +1504,7 @@ third block must start with 4 and the third block must start with 8, 9, "A", or ``` This is using the `!re_fullmatch` variant of the tag - this calls -[`re.fullmatch`](https://docs.python.org/3.7/library/re.html#re.fullmatch) under +[`re.fullmatch`](https://docs.python.org/3.8/library/re.html#re.fullmatch) under the hood, which means that the regex given needs to match the _entire_ part of the response that is being checked for it to pass. There is also `!re_search` which will pass if it matches _part_ of the thing being checked, or `!re_match` @@ -1510,7 +1533,7 @@ stages: hash: 456 save: $ext: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: "v(?P[\d\.]+)-[\w\d]+" in_jmespath: "meta.version" @@ -1577,7 +1600,7 @@ request: Sometimes there are situations where you need to directly include a block of JSON, such as a list, rather than just one value. To do this, there is a -`!force_format_include` tag which will include whatever variable is being +`!force_original_structure` tag which will include whatever variable is being referenced in the format block rather than coercing it to a string. For example, if we have an API that will return a list of users on a GET and @@ -1609,7 +1632,7 @@ could be done by url: "{host}/users" method: DELETE # 'all_users' list will be sent in the request as a list, not a string - json: !force_format_include "{all_users}" + json: !force_original_structure "{all_users}" response: status_code: 204 @@ -2073,8 +2096,89 @@ This will result in 6 tests: - cheap fresh orange - cheap unripe pear +Since 1.19.0 you can now also parametrize generic blocks of data instead of only strings. This can +also be mixed and matched with items which _are_ strings. If you do this, remember to use the +[force_format_include](#Including raw JSON data) tag so it doesn't come out as a string: + +```yaml +test_name: Test sending a list of list of keys where one is not a string + +marks: + - parametrize: + key: + - fruit + - colours + vals: + - [ apple, [red, green, pink] ] + - [ pear, [yellow, green] ] + +stages: + - name: Send fruit and colours + request: + url: "{host}/newfruit" + method: POST + json: + fruit: "{fruit}" + colours: !force_format_include "{colours}" + + # This sends: + # { + # "fruit": "apple", + # "colours": [ + # "red", + # "green", + # "pink" + # ] + # } +``` + +The type of the 'val' does not need to be the same for each version of the test, and even external +functions can be used to read values. For example this block will create 6 tests which sets the +`value_to_send` key to a string, a list, or a dictionary: + +```yaml +--- + +test_name: Test parametrizing random different data types in the same test + +marks: +- parametrize: + key: value_to_send + vals: + - a + - [b, c] + - more: stuff + - yet: [more, stuff] + - $ext: + function: ext_functions:return_string + - and: this + $ext: + function: ext_functions:return_dict + + # If 'return_dict' returns {"keys: ["a","b","c"]} this results in: + # { + # "and": "this", + # "keys": [ + # "a", + # "b", + # "c" + # ] + # } +``` + +As see in the last example, if the `$ext` function returns a dictionary then it will also be merged +with any existing data in the 'val'. 
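The external functions referenced above are plain Python callables; a hypothetical `ext_functions.py` matching that example might look like the sketch below (the no-argument signatures are an assumption based on the other external function examples in these docs).

```python
# ext_functions.py (hypothetical module matching the parametrize example above)


def return_string():
    # Used on its own, the parametrized value is simply this string
    return "a string generated at collection time"


def return_dict():
    # Used next to other keys such as 'and: this', this dictionary is merged
    # with the existing data in the 'val'
    return {"keys": ["a", "b", "c"]}
```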
In this case, the return value of the function _must_ be a +dictionary or an error will be raised. + +```yaml + # This would raise an error + #- and: this + # $ext: + # function: ext_functions:return_string +``` + **NOTE**: Due to implementation reasons it is currently impossible to -parametrize either the HTTP method or the MQTT QoS parameter. +parametrize the MQTT QoS parameter. #### usefixtures @@ -2215,6 +2319,20 @@ def pytest_tavern_beta_before_every_test_run(test_dict, variables): variables["extra_var"] = "abc123" ``` +### After every test run + +This hook is called _after_ execution of each test, regardless of the test +result. The hook can, for example, be used to perform cleanup after the test is run. + +Example usage: + +```python +import logging + +def pytest_tavern_beta_after_every_test_run(test_dict, variables): + logging.info("Ending test %s", test_dict["test_name"]) +``` + ### After every response This hook is called after every _response_ for each _stage_ - this includes HTTP diff --git a/docs/source/conf.py b/docs/source/conf.py index e3c02e559..1323bfba2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -36,33 +36,33 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.imgmath', - 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon', - 'sphinx.ext.githubpages', - 'recommonmark', - 'sphinx_markdown_tables', + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.imgmath", + "sphinx.ext.viewcode", + "sphinx.ext.napoleon", + "sphinx.ext.githubpages", + "recommonmark", + "sphinx_markdown_tables", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Tavern' -copyright = '2019, Michael Boulton' -author = 'Michael Boulton' +project = "Tavern" +copyright = "2019, Michael Boulton" +author = "Michael Boulton" html_logo = "_static/icon.png" html_favicon = "_static/favicon.png" @@ -71,9 +71,9 @@ # built documents. # # The short X.Y version. -version = '1.0' +version = "1.0" # The full version, including alpha/beta/rc tags. -release = '1.14.0' +release = "2.0.5" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -88,7 +88,7 @@ exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'monokai' +pygments_style = "monokai" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -99,7 +99,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] @@ -112,7 +112,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -120,9 +120,9 @@ # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', + "**": [ + "relations.html", # needs 'show_related': True theme option to display + "searchbox.html", ] } @@ -130,7 +130,7 @@ # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. -htmlhelp_basename = 'Taverndoc' +htmlhelp_basename = "Taverndoc" # -- Options for LaTeX output --------------------------------------------- @@ -139,15 +139,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -157,8 +154,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'Tavern.tex', 'Tavern Documentation', - 'Michael Boulton', 'manual'), + (master_doc, "Tavern.tex", "Tavern Documentation", "Michael Boulton", "manual"), ] @@ -166,10 +162,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'tavern', 'Tavern Documentation', - [author], 1) -] +man_pages = [(master_doc, "tavern", "Tavern Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- @@ -178,16 +171,28 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'Tavern', 'Tavern Documentation', - author, 'Tavern', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "Tavern", + "Tavern Documentation", + author, + "Tavern", + "One line description of project.", + "Miscellaneous", + ), ] -github_doc_root = '/' +github_doc_root = "/" + + def setup(app): - app.add_config_value('recommonmark_config', { - 'url_resolver': lambda url: github_doc_root + url, - 'enable_auto_toc_tree': True, - }, True) + app.add_config_value( + "recommonmark_config", + { + "url_resolver": lambda url: github_doc_root + url, + "enable_auto_toc_tree": True, + }, + True, + ) app.add_transform(AutoStructify) diff --git a/docs/source/cookbook.md b/docs/source/cookbook.md index e519f5859..a48d9d9bb 100644 --- a/docs/source/cookbook.md +++ b/docs/source/cookbook.md @@ -23,7 +23,7 @@ any Pytest plugins as mentioned above): ```dockerfile # tavern.Dockerfile -FROM python:3.7-alpine +FROM python:3.10-slim RUN pip3 install tavern ``` @@ -38,7 +38,7 @@ Or if you need a specific version (hopefully you shouldn't): ```dockerfile # tavern.Dockerfile -FROM python:3.7-alpine +FROM python:3.10-slim ARG TAVERNVER RUN pip3 install tavern==$TAVERNVER @@ -66,7 +66,7 @@ services: tavern: build: context: . - image: tavern.Dockerfile + dockerfile: tavern.Dockerfile env_file: # Any extra environment variables for testing # This will probably contain things like names of docker containers to run tests against @@ -93,7 +93,7 @@ services: application: build: context: . - image: application.Dockerfile + dockerfile: application.Dockerfile command: ... 
``` diff --git a/docs/source/debugging.md b/docs/source/debugging.md index baa463b95..2db576b4a 100644 --- a/docs/source/debugging.md +++ b/docs/source/debugging.md @@ -1,7 +1,5 @@ # Debugging a test -**This section assumes you're using pytest to run tests**. - When making a test it's not always going to work first time, and at the time of writing the error reporting is a bit messy because it shows the whole stack trace from pytest is printed out (which can be a few hundred lines, most of @@ -16,7 +14,8 @@ the logs in case something goes wrong. The easiest way to do this is with [dictConfig](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) from the Python logging library. It can also be useful to use [colorlog](https://pypi.org/project/colorlog/) to colourize the output so it's -easier to see the different log levels. An example logging configuration +easier to see the different log levels. An example logging configuration (note that this requires +the `colorlog` package to be installed): ```yaml # log_spec.yaml @@ -67,6 +66,11 @@ By default, recent versions of pytest will print out log messages in the logging, you probably want to disable this by also passing `-p no:logging` to the invocation of pytest. +**WARNING**: Tavern will try not to log any response data or request data at the `INFO` level or +above (unless it is in an error trace). Logging at the `DEBUG` level will log things like response +headers, return values from any external functions etc. If this contains sensitive data, either +log at the `INFO` level, or make sure that any data logged is obfuscated, or the logs are not public. + ### Setting pytest options Some pytest options can be used to make the test output easier to read. @@ -140,7 +144,7 @@ tavern/core.py:111: in run_test saved = v.verify(response) tavern/response/rest.py:147: in verify raise TestFailError("Test '{:s}' failed:\n{:s}".format(self.name, self._str_errors())) -E tavern.util.exceptions.TestFailError: Test 'login' failed: +E tavern._core.exceptions.TestFailError: Test 'login' failed: E - Key not present: a_key ---------------------------- Captured stderr call ----------------------------- 16:30:46 [INFO]: (tavern.core:70) Running test : Check trying to get a number that we didnt post before returns a 404 @@ -154,78 +158,13 @@ E - Key not present: a_key Traceback (most recent call last): File "/home/michael/code/tavern/tavern/tavern/response/base.py", line 87, in recurse_check_key_match actual_val = recurse_access_key(block, list(split_key)) - File "/home/michael/code/tavern/tavern/tavern/util/dict_util.py", line 77, in recurse_access_key + File "/home/michael/code/tavern/tavern/tavern/_core/dict_util.py", line 77, in recurse_access_key return recurse_access_key(current_val[current_key], keys) KeyError: 'a_key' 16:30:46 [ERROR]: (tavern.printer:21) FAILED: login [200] -16:30:46 [ERROR]: (tavern.printer:22) Expected: {'requests': {'save': {'$ext': {'extra_kwargs': {'jwt_key': 'token', 'key': 'CGQgaG7GYvTcpaQZqosLy4', 'options': {'verify_aud': True, 'verify_signature': True, 'verify_exp': True}, 'audience': 'testserver'}, 'function': 'tavern.testutils.helpers:validate_jwt'}, 'body': {'test_login_token': 'token'}}, 'status_code': 200, 'headers': {'content-type': 'application/json'}, 'body': {'a_key': 'missing', 'token': }}} +16:30:46 [ERROR]: (tavern.printer:22) Expected: {'requests': {'save': {'$ext': {'extra_kwargs': {'jwt_key': 'token', 'key': 'CGQgaG7GYvTcpaQZqosLy4', 'options': {'verify_aud': True, 'verify_signature': True, 
'verify_exp': True}, 'audience': 'testserver'}, 'function': 'tavern.helpers:validate_jwt'}, 'body': {'test_login_token': 'token'}}, 'status_code': 200, 'headers': {'content-type': 'application/json'}, 'body': {'a_key': 'missing', 'token': }}} ``` When tavern tries to access `a_key` in the response it gets a `KeyError` (shown in the logs), and the `TestFailError` in the stack trace gives a more human-readable explanation as to why the test failed. - -### New traceback option - -Though this does give a lot of information about exactly when and where a test -failed, it's not very easy to tell what input actually caused this error. Since -0.13.0, you can use the `tavern-beta-new-traceback` flag to give a much nicer -output showing the original source code for the stage, the formatted stages that -Tavern uses to send the request, and any format variables. This is used like any -other Pytest flag: - -1. Passed on on the command line using `--tavern-beta-new-traceback` -2. Put in the Pytest configuration file (pytest.ini, setup.cfg, ...) `tavern-beta-new-traceback = True` - -Rather than the Python traceback as shown above, we get an error output like this: - -``` -Format variables: - tavern.env_vars.TEST_HOST = 'http://localhost:5003' - first_part = 'nested' - second_part = 'again' - -Source test stage: - - name: Make requests using environment variables - request: - url: "{tavern.env_vars.TEST_HOST}/{first_part}/{second_part}" - method: GET - response: - status_code: 200 - json: - status: OKdfokd - -Formatted stage: - name: Make requests using environment variables - request: - method: GET - url: 'http://localhost:5003/nested/again' - response: - json: - status: OKdfokd - status_code: 200 - -Errors: -E tavern.util.exceptions.TestFailError: Test 'Make requests using environment variables' failed: - - Value mismatch in json: Key mismatch: (expected["status"] = 'OKdfokd', actual["status"] = 'OK') -``` - -- Format variables shows all the variables which are used for formatting in that - stage. Any variables which are missing will be highlighted in red. -- The source test stage is the raw source code for the stage from the input - file. This is before anything has been done to it - no formatting, no anchors, - no includes, etc. -- The formatted stage shows the stage at the point that Tavern will start to - perform the request - all variables will be formatted (if present), all YAML - anchors will be resolved, etc. -- The errors will show which exception caused this test to fail - -This output style will become the default in version 1.0. - -Note that this will only show when a test fails in a way that Tavern can handle, -and it will not be shown on things like IOErrors on input files or unhandled -errors. - -If a test fails in a way that does not raise a `TestFailError`, it might be a -bug in Tavern - if this happens, feel free to make an issue -[on the repo](https://github.com/taverntesting/tavern/issues). 
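To go with the logging guidance in the debugging docs above, here is a minimal `dictConfig` setup in plain Python (the handler, formatter and level choices are illustrative only; `colorlog` can be swapped in for the formatter if you want coloured output):

```python
import logging.config

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "plain": {
            "format": "%(asctime)s [%(levelname)s]: (%(name)s:%(lineno)d) %(message)s"
        },
    },
    "handlers": {
        "stderr": {"class": "logging.StreamHandler", "formatter": "plain"},
    },
    "loggers": {
        # DEBUG also logs response headers and external function return values,
        # so stick to INFO if that output might contain anything sensitive
        "tavern": {"handlers": ["stderr"], "level": "INFO"},
    },
}

logging.config.dictConfig(LOGGING)
```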
diff --git a/docs/source/examples.md b/docs/source/examples.md index 288b4036f..03f8705f0 100644 --- a/docs/source/examples.md +++ b/docs/source/examples.md @@ -16,6 +16,9 @@ stages: status_code: 200 json: id: 1 + userId: 1 + title: "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + body: "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" ``` Next, install Tavern if you have not already: @@ -158,10 +161,11 @@ The Python library allows you to include Tavern tests in deploy scripts written ```python from tavern.core import run +from pytest import ExitCode -success = run("test_server.tavern.yaml") +exit_code = run("test_server.tavern.yaml") -if not success: +if exit_code != ExitCode.OK: print("Error running tests") ``` @@ -193,7 +197,7 @@ stages: status_code: 200 json: $ext: - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" key: CGQgaG7GYvTcpaQZqosLy4 diff --git a/docs/source/http.md b/docs/source/http.md index 3824a1402..d73fa77ca 100644 --- a/docs/source/http.md +++ b/docs/source/http.md @@ -263,7 +263,7 @@ stages: status_code: 200 json: $ext: &verify_token - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" key: CGQgaG7GYvTcpaQZqosLy4 @@ -282,7 +282,8 @@ stages: request: url: "{host}/userinfo" method: GET - Authorization: "Bearer {test_login_token:s}" + headers: + Authorization: "Bearer {test_login_token:s}" response: status_code: 200 json: diff --git a/docs/source/index.md b/docs/source/index.md index 4bf7fa54e..584562e67 100644 --- a/docs/source/index.md +++ b/docs/source/index.md @@ -4,7 +4,7 @@ Tavern is an advanced pytest based API testing framework for HTTP, MQTT or other protocols. Note that Tavern **only** supports Python 3.4 and up. At the time of writing we -test against Python 3.4-3.7 and pypy3. Python 2 is now **unsupported**. +test against Python 3.8-3.10. Python 2 is now **unsupported**. ## Why Tavern diff --git a/docs/source/mqtt.md b/docs/source/mqtt.md index e686b417d..5c0496efd 100644 --- a/docs/source/mqtt.md +++ b/docs/source/mqtt.md @@ -114,7 +114,6 @@ The full list of keys for this block: serialize into JSON. - `qos`: QoS level for publishing. Defaults to 0 in paho-mqtt. - ### Options for receiving MQTT messages The `mqtt_response` key gives a topic and payload which should be received by @@ -157,6 +156,24 @@ HTTP JSON responses, The special 'anything' token can be used with the Other type tokens such as `!anyint` will _not_ work. +### Unexpected messages + +If you want to make sure that you do _not_ want to receive a message when a certain request (MQTT or +HTTP) is sent, use the 'unexpected' key like so: + +```yaml + mqtt_response: + topic: /device/123/status/response + payload: !anything + timeout: 3 + qos: 1 + unexpected: true +``` + +If this message is received during the test, it will fail it. Be careful when using this as if this +message just happened to be sent during the test and not as a result of anything during your test, +it will still make the test fail. + ## Mixing MQTT tests and HTTP tests If the architecture of your program combines MQTT and HTTP, Tavern can diff --git a/docs/source/plugins.md b/docs/source/plugins.md index 6588a4c10..6ad2fc8b3 100644 --- a/docs/source/plugins.md +++ b/docs/source/plugins.md @@ -28,8 +28,8 @@ plugin. 
The entry point needs to point to either a class or a module which defines a preset number of variables. -Something like this should be in your `setup.py` or `setup.cfg` to make sure -Tavern can pick it up at run time: +Something like this should be in your `setup.py`, `setup.cfg`, `poetry.toml`, +`pyproject.toml`, etc. to make sure Tavern can pick it up at run time: ``` # setup.cfg @@ -102,7 +102,7 @@ If you don't need any functionality provided by this, you can define empty `__enter__` and `__exit__` methods on your class like so: ```python -class MySession(object): +class MySession: def __enter__(self): pass diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index 4a3b26bc5..6144b1577 100644 --- a/docs/source/requirements.txt +++ b/docs/source/requirements.txt @@ -3,5 +3,5 @@ sphinx_rtd_theme recommonmark==0.5.0 commonmark==0.8.1 docutils==0.14 -pygments==2.3.1 +pygments==2.7.4 sphinx-markdown-tables==0.0.9 diff --git a/example/advanced/Dockerfile b/example/advanced/Dockerfile index 57a81fcbc..490cc8ddc 100644 --- a/example/advanced/Dockerfile +++ b/example/advanced/Dockerfile @@ -1,6 +1,11 @@ -FROM python:3.5-alpine +FROM python:3.10-alpine -RUN pip install flask pyjwt +RUN pip3 install pyjwt~=2.4.0 flask~=2.0.3 + +ENV FLASK_DEBUG=1 +ENV PYTHONUNBUFFERED=0 + +ENV FLASK_DEBUG=1 COPY server.py / diff --git a/example/advanced/server.py b/example/advanced/server.py index 015141fbb..c9b399a72 100644 --- a/example/advanced/server.py +++ b/example/advanced/server.py @@ -49,13 +49,13 @@ def login(): "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), } - token = jwt.encode(payload, SECRET, algorithm="HS256").decode("utf8") + token = jwt.encode(payload, SECRET, algorithm="HS256") return jsonify({"token": token}) def requires_jwt(endpoint): - """ Makes sure a jwt is in the request before accepting it """ + """Makes sure a jwt is in the request before accepting it""" @functools.wraps(endpoint) def check_auth_call(*args, **kwargs): diff --git a/example/advanced/test_server.tavern.yaml b/example/advanced/test_server.tavern.yaml index 274f72fa8..a21239b58 100644 --- a/example/advanced/test_server.tavern.yaml +++ b/example/advanced/test_server.tavern.yaml @@ -19,10 +19,11 @@ stages: response: status_code: 200 verify_response_with: &verify_token - function: tavern.testutils.helpers:validate_jwt + function: tavern.helpers:validate_jwt extra_kwargs: jwt_key: "token" key: CGQgaG7GYvTcpaQZqosLy4 + algorithms: [ HS256 ] options: verify_signature: true verify_aud: true diff --git a/example/advanced/testing_utils.py b/example/advanced/testing_utils.py index 47705beb1..fbbbdafd7 100644 --- a/example/advanced/testing_utils.py +++ b/example/advanced/testing_utils.py @@ -11,6 +11,7 @@ def assert_quick_response(response): """ assert response.elapsed < datetime.timedelta(seconds=0.1) + def create_bearer_token(): # Authorization: "bearer {test_login_token:s}" @@ -23,8 +24,6 @@ def create_bearer_token(): "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), } - token = jwt.encode(payload, SECRET, algorithm="HS256").decode("utf8") + token = jwt.encode(payload, SECRET, algorithm="HS256") - return { - "Authorization": "Bearer {}".format(token) - } + return {"Authorization": "Bearer {}".format(token)} diff --git a/example/components/Dockerfile b/example/components/Dockerfile index b71e40ec8..f07717262 100644 --- a/example/components/Dockerfile +++ b/example/components/Dockerfile @@ -1,6 +1,10 @@ -FROM python:3.5-alpine +FROM python:3.10-alpine + +RUN pip3 install 
pyjwt~=2.4.0 flask~=2.0.3 + +ENV FLASK_DEBUG=1 +ENV PYTHONUNBUFFERED=0 -RUN pip install flask pyjwt COPY server.py / diff --git a/example/components/server.py b/example/components/server.py index 7ea1c16e2..b7792c456 100644 --- a/example/components/server.py +++ b/example/components/server.py @@ -1,13 +1,11 @@ -import sqlite3 import datetime import functools -from flask import Flask, jsonify, request, g -import jwt +from flask import Flask, jsonify, request +import jwt app = Flask(__name__) - SECRET = "CGQgaG7GYvTcpaQZqosLy5" DATABASE = "/tmp/test_db" SERVERNAME = "testserver" @@ -26,13 +24,13 @@ def login(): "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), } - token = jwt.encode(payload, SECRET, algorithm="HS256").decode("utf8") + token = jwt.encode(payload, SECRET, algorithm="HS256") return jsonify({"token": token}) def requires_jwt(endpoint): - """ Makes sure a jwt is in the request before accepting it """ + """Makes sure a jwt is in the request before accepting it""" @functools.wraps(endpoint) def check_auth_call(*args, **kwargs): diff --git a/example/cookies/Dockerfile b/example/cookies/Dockerfile index 57a81fcbc..68b46a7f7 100644 --- a/example/cookies/Dockerfile +++ b/example/cookies/Dockerfile @@ -1,6 +1,10 @@ -FROM python:3.5-alpine +FROM python:3.10-alpine + +RUN pip3 install pyjwt~=2.4.0 flask~=2.0.3 + +ENV FLASK_DEBUG=1 +ENV PYTHONUNBUFFERED=0 -RUN pip install flask pyjwt COPY server.py / diff --git a/example/cookies/server.py b/example/cookies/server.py index d7f43c36c..e7629a062 100644 --- a/example/cookies/server.py +++ b/example/cookies/server.py @@ -47,7 +47,7 @@ def login(): def requires_jwt(endpoint): - """ Makes sure a jwt is in the request before accepting it """ + """Makes sure a jwt is in the request before accepting it""" @functools.wraps(endpoint) def check_auth_call(*args, **kwargs): diff --git a/example/generate_from_openapi/Pipfile.lock b/example/generate_from_openapi/Pipfile.lock index 0fef43c26..29ecaf368 100644 --- a/example/generate_from_openapi/Pipfile.lock +++ b/example/generate_from_openapi/Pipfile.lock @@ -5,7 +5,7 @@ }, "pipfile-spec": 6, "requires": { - "python_version": "3.6" + "python_version": "3.9" }, "sources": [ { @@ -18,17 +18,17 @@ "default": { "attrs": { "hashes": [ - "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", - "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", + "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" ], - "version": "==20.3.0" + "version": "==21.2.0" }, "certifi": { "hashes": [ - "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", - "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", + "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" ], - "version": "==2020.12.5" + "version": "==2021.5.30" }, "chardet": { "hashes": [ @@ -75,10 +75,10 @@ }, "jinja2": { "hashes": [ - "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", - "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" + "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4", + "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4" ], - "version": "==2.11.3" + "version": "==3.0.1" }, "jsonschema": { "hashes": [ @@ -116,60 +116,42 @@ }, "markupsafe": { "hashes": [ - 
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", - "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", - "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", - "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", - "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", - "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", - "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - 
"sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", - "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", - "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", - "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" + "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298", + "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64", + "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b", + "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567", + "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff", + "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74", + "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35", + "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26", + "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7", + "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75", + "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f", + "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135", + "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8", + "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a", + "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914", + "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18", + "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8", + "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2", + "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d", + "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b", + "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f", + "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb", + "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833", + "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415", + "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902", + "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9", + "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d", + "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066", + "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f", + "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5", + "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94", + "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509", + "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51", + "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872" ], - "version": "==1.1.1" + "version": "==2.0.1" }, "openapi-codec": { "hashes": [ @@ -197,11 +179,11 @@ }, "openapi-spec-validator": { "hashes": [ - "sha256:4083fc5aac3e9f751c2a82d4ec5cf3adad5f967d0faf31495d8b56a0b0f9705c", - "sha256:53ba3d884e98ff2062d5ada025aa590541dcd665b8f81067dc82dd61c0923759", - "sha256:e11df7c559339027bd04f2399bc82474983129a6a7a6a0421eaa95e2c844d686" + "sha256:0a7da925bad4576f4518f77302c0b1990adb2fbcbe7d63fb4ed0de894cad8bdd", + 
"sha256:3d70e6592754799f7e77a45b98c6a91706bdd309a425169d17d8e92173e198a2", + "sha256:ba28b06e63274f2bc6de995a07fb572c657e534425b5baf68d9f7911efe6929f" ], - "version": "==0.3.0" + "version": "==0.3.1" }, "pyrsistent": { "hashes": [ @@ -245,10 +227,10 @@ }, "six": { "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.15.0" + "version": "==1.16.0" }, "strict-rfc3339": { "hashes": [ @@ -265,10 +247,11 @@ }, "urllib3": { "hashes": [ - "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", - "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" + "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", + "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" ], - "version": "==1.26.4" + "index": "pypi", + "version": "==1.26.5" } }, "develop": {} diff --git a/example/grpc_/__init__.py b/example/grpc/__init__.py similarity index 100% rename from example/grpc_/__init__.py rename to example/grpc/__init__.py diff --git a/example/grpc_/common.yaml b/example/grpc/common.yaml similarity index 100% rename from example/grpc_/common.yaml rename to example/grpc/common.yaml diff --git a/example/grpc_/docker-compose.yaml b/example/grpc/docker-compose.yaml similarity index 100% rename from example/grpc_/docker-compose.yaml rename to example/grpc/docker-compose.yaml diff --git a/example/grpc_/helloworld_pb2.py b/example/grpc/helloworld_pb2.py similarity index 100% rename from example/grpc_/helloworld_pb2.py rename to example/grpc/helloworld_pb2.py diff --git a/example/grpc_/helloworld_pb2_grpc.py b/example/grpc/helloworld_pb2_grpc.py similarity index 100% rename from example/grpc_/helloworld_pb2_grpc.py rename to example/grpc/helloworld_pb2_grpc.py diff --git a/example/grpc_/server.Dockerfile b/example/grpc/server.Dockerfile similarity index 100% rename from example/grpc_/server.Dockerfile rename to example/grpc/server.Dockerfile diff --git a/example/grpc_/server.py b/example/grpc/server.py similarity index 76% rename from example/grpc_/server.py rename to example/grpc/server.py index 8cee60938..6bad7eac2 100644 --- a/example/grpc_/server.py +++ b/example/grpc/server.py @@ -10,26 +10,26 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer): + def SayHello(self, request, context): - return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name) + return helloworld_pb2.HelloReply(message='Hello, %s!' 
% request.name) def serve(): server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) SERVICE_NAMES = ( - helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name, + helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, reflection.SERVICE_NAME, ) reflection.enable_server_reflection(SERVICE_NAMES, server) - server.add_insecure_port("[::]:50051") + server.add_insecure_port('[::]:50051') logging.info("Starting...") server.start() event = threading.Event() event.wait() - -if __name__ == "__main__": +if __name__ == '__main__': logging.basicConfig() - serve() + serve() \ No newline at end of file diff --git a/example/grpc_/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml similarity index 100% rename from example/grpc_/test_grpc.tavern.yaml rename to example/grpc/test_grpc.tavern.yaml diff --git a/example/hooks/Dockerfile b/example/hooks/Dockerfile index 57a81fcbc..68b46a7f7 100644 --- a/example/hooks/Dockerfile +++ b/example/hooks/Dockerfile @@ -1,6 +1,10 @@ -FROM python:3.5-alpine +FROM python:3.10-alpine + +RUN pip3 install pyjwt~=2.4.0 flask~=2.0.3 + +ENV FLASK_DEBUG=1 +ENV PYTHONUNBUFFERED=0 -RUN pip install flask pyjwt COPY server.py / diff --git a/example/hooks/docker-compose.yaml b/example/hooks/docker-compose.yaml index 3d6369852..3b186a7b0 100644 --- a/example/hooks/docker-compose.yaml +++ b/example/hooks/docker-compose.yaml @@ -7,4 +7,4 @@ services: context: . dockerfile: Dockerfile ports: - - "5000:5000" + - "5005:5000" diff --git a/example/hooks/test_server.tavern.yaml b/example/hooks/test_server.tavern.yaml index 9a66d1923..c56fdd4b0 100644 --- a/example/hooks/test_server.tavern.yaml +++ b/example/hooks/test_server.tavern.yaml @@ -5,7 +5,7 @@ test_name: Make sure server doubles number properly stages: - name: Make sure number is returned correctly request: - url: http://localhost:5000/double + url: http://localhost:5005/double json: number: 5 method: POST @@ -18,7 +18,7 @@ stages: - name: Make sure number is returned correctly again request: - url: http://localhost:5000/double + url: http://localhost:5005/double json: number: 10 method: POST diff --git a/example/mqtt/conftest.py b/example/mqtt/conftest.py index 094197946..cabeddffa 100644 --- a/example/mqtt/conftest.py +++ b/example/mqtt/conftest.py @@ -1,9 +1,13 @@ +import datetime import logging import logging.config +import random +import pytest +import requests import yaml -from tavern.testutils.pytesthook.item import YamlItem +from tavern._core.pytest.item import YamlItem logging_initialised = False @@ -62,7 +66,7 @@ def setup_logging(): <<: *reduced_log """ - as_dict = yaml.load(log_cfg) + as_dict = yaml.load(log_cfg, Loader=yaml.SafeLoader) logging.config.dictConfig(as_dict) logging.info("Logging set up") @@ -82,3 +86,28 @@ def pytest_runtest_setup(item): setup_logging() return False + + +@pytest.fixture(scope="session", autouse=True) +def reset_devices(): + requests.post("http://localhost:5002/reset") + + +@pytest.fixture +def get_publish_topic(random_device_id): + return "/device/{}/echo".format(random_device_id) + + +@pytest.fixture +def get_response_topic_suffix(): + return "response" + + +@pytest.fixture(scope="function", autouse=True) +def random_device_id(): + return str(random.randint(100, 10000)) + + +@pytest.fixture(scope="function", autouse=True) +def random_device_id_2(): + return str(random.randint(100, 10000)) diff --git a/example/mqtt/listener.Dockerfile b/example/mqtt/listener.Dockerfile index 
36e5060f2..5e155884f 100644 --- a/example/mqtt/listener.Dockerfile +++ b/example/mqtt/listener.Dockerfile @@ -1,6 +1,6 @@ -FROM python:3.5-slim-jessie +FROM python:3.9-slim-buster -RUN pip install paho-mqtt fluent-logger pyyaml +RUN pip install 'paho-mqtt>=1.3.1,<=1.5.1' fluent-logger 'PyYAML>=5.3.1,<6' COPY listener.py / diff --git a/example/mqtt/listener.py b/example/mqtt/listener.py index b0fc80ef7..dc02d4ecc 100644 --- a/example/mqtt/listener.py +++ b/example/mqtt/listener.py @@ -3,10 +3,10 @@ import logging.config import os import sqlite3 +import time -import yaml import paho.mqtt.client as paho - +import yaml DATABASE = os.environ.get("DB_NAME") @@ -53,16 +53,30 @@ def setup_logging(): propagate: true """ - as_dict = yaml.load(log_cfg) + as_dict = yaml.load(log_cfg, Loader=yaml.SafeLoader) logging.config.dictConfig(as_dict) logging.info("Logging set up") +def assert_device_exists(device_id): + db = get_db() + with db: + row = db.execute( + "SELECT device_id from devices_table where device_id IS (?)", (device_id,) + ) + + try: + next(row) + except: + raise Exception("Device {} is not registered".format(device_id)) + + def handle_lights_topic(message): db = get_db() device_id = message.topic.split("/")[-2] + assert_device_exists(device_id) if message.payload.decode("utf8") == "on": logging.info("Lights have been turned on") @@ -80,17 +94,20 @@ def handle_lights_topic(message): ) -def handle_request_topic(client, message): - db = get_db() - +def handle_status_topic(client, message): device_id = message.topic.split("/")[-2] + assert_device_exists(device_id) + publish_device_status(client, device_id) + + +def publish_device_status(client, device_id): + db = get_db() logging.info("Checking lights status") with db: row = db.execute( "SELECT lights_on FROM devices_table WHERE device_id IS (?)", (device_id,) ) - try: status = int(next(row)[0]) except Exception: @@ -102,31 +119,50 @@ def handle_request_topic(client, message): ) +def handle_full_status_topic(client, message): + db = get_db() + + logging.info("all devices reporting status") + + with db: + device_ids = db.execute("SELECT device_id FROM devices_table") + + for device_id in device_ids: + publish_device_status(client, device_id[0]) + + def handle_ping_topic(client, message): device_id = message.topic.split("/")[-2] + assert_device_exists(device_id) client.publish("/device/{}/pong".format(device_id)) def handle_echo_topic(client, message): device_id = message.topic.split("/")[-2] + assert_device_exists(device_id) client.publish("/device/{}/echo/response".format(device_id), message.payload) def on_message_callback(client, userdata, message): - logging.info("Received message on %s", message.topic) - - if "lights" in message.topic: - handle_lights_topic(message) - elif "echo" in message.topic: - handle_echo_topic(client, message) - elif "ping" in message.topic: - handle_ping_topic(client, message) - elif "status" in message.topic: - handle_request_topic(client, message) - else: - logging.warning("Got unexpected MQTT topic '%s'", message.topic) + try: + logging.info("Received message on %s", message.topic) + + if "devices/status" in message.topic: + handle_full_status_topic(client, message) + elif "lights" in message.topic: + handle_lights_topic(message) + elif "echo" in message.topic: + handle_echo_topic(client, message) + elif "ping" in message.topic: + handle_ping_topic(client, message) + elif "status" in message.topic: + handle_status_topic(client, message) + else: + logging.warning("Got unexpected MQTT topic '%s'", message.topic) + 
except Exception as e: + logging.exception(e) def wait_for_messages(): @@ -138,27 +174,14 @@ def wait_for_messages(): topics = ["lights", "ping", "echo", "status"] for t in topics: - device_topic = "/device/{}/{}".format(123, t) + device_topic = "/device/+/{}".format(t) logging.debug("Subscribing to '%s'", device_topic) mqtt_client.subscribe(device_topic) + mqtt_client.subscribe("/devices/status") + mqtt_client.loop_forever() if __name__ == "__main__": - db = get_db() - - with db: - try: - db.execute( - "CREATE TABLE devices_table (device_id TEXT NOT NULL, lights_on INTEGER NOT NULL)" - ) - except: - pass - - try: - db.execute("INSERT INTO devices_table VALUES ('123', 0)") - except: - pass - wait_for_messages() diff --git a/example/mqtt/server.Dockerfile b/example/mqtt/server.Dockerfile index 253ea8517..f08178634 100644 --- a/example/mqtt/server.Dockerfile +++ b/example/mqtt/server.Dockerfile @@ -1,7 +1,7 @@ -FROM python:3.5-slim-jessie +FROM python:3.9-slim-buster RUN apt-get update && apt-get install build-essential --yes --no-install-recommends && apt-get clean -RUN pip install flask paho-mqtt fluent-logger pyyaml uwsgi gevent==1.2.2 +RUN pip install flask 'paho-mqtt>=1.3.1,<=1.5.1' fluent-logger 'PyYAML>=5.3.1,<6' uwsgi gevent==21.1.2 COPY server.py / diff --git a/example/mqtt/server.py b/example/mqtt/server.py index 5cdfb08d2..b9631ee32 100644 --- a/example/mqtt/server.py +++ b/example/mqtt/server.py @@ -1,19 +1,16 @@ -import sqlite3 import contextlib -import os import logging import logging.config -import yaml -from flask import Flask, jsonify, request, g - +import os +import sqlite3 import paho.mqtt.client as paho - +import yaml +from flask import Flask, g, jsonify, request app = Flask(__name__) application = app - DATABASE = os.environ.get("DB_NAME") @@ -74,7 +71,7 @@ def setup_logging(): propagate: true """ - as_dict = yaml.load(log_cfg) + as_dict = yaml.load(log_cfg, Loader=yaml.SafeLoader) logging.config.dictConfig(as_dict) logging.info("Logging set up") @@ -127,7 +124,12 @@ def get_device(): try: status = next(row)[1] except StopIteration: - return jsonify({"error": "bad device id"}), 400 + return ( + jsonify( + {"error": "could not find device with id {}".format(r["device_id"])} + ), + 400, + ) onoff = "on" if status else "off" @@ -136,12 +138,65 @@ def get_device(): return jsonify({"lights": onoff}) +@app.route("/create_device", methods=["PUT"]) +def create_device(): + r = request.get_json(force=True) + logging.error(r) + + try: + r["device_id"] + except (KeyError, TypeError): + return jsonify({"error": "missing key device_id"}), 400 + + db = get_cached_db() + with db: + row = db.execute( + "SELECT device_id from devices_table where device_id is :device_id", r + ) + + try: + next(row) + except StopIteration: + pass + else: + return jsonify({"error": "device already exists"}), 400 + + new_device = dict(lights_on=False, **r) + + logging.info("Creating new device: %s", new_device) + + with db: + db.execute( + "INSERT INTO devices_table (device_id, lights_on) VALUES (:device_id, :lights_on)", + new_device, + ) + + return jsonify({"status": "created device {device_id}".format(**r)}), 201 + + @app.route("/reset", methods=["POST"]) def reset_db(): db = get_cached_db() + return _reset_db(db) + +def _reset_db(db): with db: - db.execute("DELETE FROM devices_table") - db.execute("INSERT INTO devices_table VALUES ('123', 0)") + + def attempt(query): + try: + db.execute(query) + except: + pass + + attempt("DELETE FROM devices_table") + attempt( + "CREATE TABLE devices_table (device_id 
TEXT NOT NULL, lights_on INTEGER NOT NULL)" + ) return "", 204 + + +if __name__ == "__main__": + db = get_db() + _reset_db(db) diff --git a/example/mqtt/test_mqtt.tavern.yaml b/example/mqtt/test_mqtt.tavern.yaml index 7b7589328..956a18a9e 100644 --- a/example/mqtt/test_mqtt.tavern.yaml +++ b/example/mqtt/test_mqtt.tavern.yaml @@ -14,15 +14,25 @@ paho-mqtt: &mqtt_spec timeout: 3 client: transport: websockets - client_id: tavern-tester + client_id: tavern-tester-{random_device_id} stages: + - &setup_device_for_test + name: create device + request: + url: "{host}/create_device" + method: PUT + json: + device_id: "{random_device_id}" + response: + status_code: 201 + - name: Echo text mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo payload: hello world mqtt_response: - topic: /device/123/echo/response + topic: /device/{random_device_id}/echo/response payload: hello world timeout: 5 qos: 1 @@ -34,16 +44,48 @@ test_name: Test mqtt message echo json includes: - !include common.yaml + paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + + - name: Echo json + mqtt_publish: + topic: /device/{random_device_id}/echo + json: + message: hello world + mqtt_response: + topic: /device/{random_device_id}/echo/response + json: + message: hello world + timeout: 5 + qos: 1 + +--- + +test_name: Test mqtt message echo json formatted topic name + +marks: + - usefixtures: + - get_publish_topic + - get_response_topic_suffix + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + - name: Echo json mqtt_publish: - topic: /device/123/echo + topic: '{get_publish_topic}' json: message: hello world mqtt_response: - topic: /device/123/echo/response + topic: '/device/{random_device_id}/echo/{get_response_topic_suffix}' json: message: hello world timeout: 5 @@ -62,14 +104,16 @@ strict: - json:off stages: + - *setup_device_for_test + - name: Check that at least part of response is echoed mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo json: key_1: message1 key_2: message2 mqtt_response: - topic: /device/123/echo/response + topic: /device/{random_device_id}/echo/response json: key_1: message1 timeout: 5 @@ -90,14 +134,16 @@ strict: _xfail: run stages: + - *setup_device_for_test + - name: Check that at least part of response is echoed mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo json: key_1: message1 key_2: message2 mqtt_response: - topic: /device/123/echo/response + topic: /device/{random_device_id}/echo/response json: key_1: message1 timeout: 5 @@ -113,13 +159,15 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: Echo json mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo json: message: hello world mqtt_response: - topic: /device/123/echo/response + topic: /device/{random_device_id}/echo/response json: message: hello world timeout: 5 @@ -137,9 +185,11 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: Echo text with retain force on mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo payload: hello world retain: true qos: 1 @@ -168,11 +218,13 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: step 1 - ping/pong mqtt_publish: - topic: /device/123/ping + topic: /device/{random_device_id}/ping mqtt_response: - topic: /device/123/pong + topic: /device/{random_device_id}/pong timeout: 5 qos: 1 @@ -186,18 +238,20 @@ includes: paho-mqtt: 
*mqtt_spec stages: + - *setup_device_for_test + - name: step 1 - ping/pong mqtt_publish: - topic: /device/123/ping + topic: /device/{random_device_id}/ping payload: blaeruhg mqtt_response: - topic: /device/123/pong + topic: /device/{random_device_id}/pong timeout: 5 qos: 1 --- -test_name: Make sure posting publishes mqtt message +test_name: Make sure can handle multiple types of responses includes: - !include common.yaml @@ -205,11 +259,13 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: step 1 - post message trigger request: url: "{host}/send_mqtt_message" json: - device_id: 123 + device_id: "{random_device_id}" payload: "hello" method: POST headers: @@ -217,11 +273,11 @@ stages: response: status_code: 200 json: - topic: "/device/123" + topic: "/device/{random_device_id}" headers: content-type: application/json mqtt_response: - topic: /device/123 + topic: /device/{random_device_id} payload: "hello" timeout: 5 @@ -235,20 +291,13 @@ includes: paho-mqtt: *mqtt_spec stages: - - name: step 0 - reset devices - request: - url: "{host}/reset" - method: POST - headers: - content-type: application/json - response: - status_code: 204 + - *setup_device_for_test - name: step 1 - get device state with lights off request: url: "{host}/get_device_state" params: - device_id: 123 + device_id: "{random_device_id}" method: GET headers: content-type: application/json @@ -261,7 +310,7 @@ stages: - name: step 2 - publish an mqtt message saying that the lights are now on mqtt_publish: - topic: /device/123/lights + topic: /device/{random_device_id}/lights qos: 1 payload: "on" delay_after: 2 @@ -270,7 +319,7 @@ stages: request: url: "{host}/get_device_state" params: - device_id: 123 + device_id: "{random_device_id}" method: GET headers: content-type: application/json @@ -291,20 +340,13 @@ includes: paho-mqtt: *mqtt_spec stages: - - name: step 0 - reset devices - request: - url: "{host}/reset" - method: POST - headers: - content-type: application/json - response: - status_code: 204 + - *setup_device_for_test - name: Check lights status is off mqtt_publish: - topic: /device/123/status + topic: /device/{random_device_id}/status mqtt_response: - topic: /device/123/status/response + topic: /device/{random_device_id}/status/response json: lights: 0 timeout: 3 @@ -312,16 +354,16 @@ stages: - name: Turn lights on mqtt_publish: - topic: /device/123/lights + topic: /device/{random_device_id}/lights qos: 1 payload: "on" delay_after: 2 - name: Check lights status is on mqtt_publish: - topic: /device/123/status + topic: /device/{random_device_id}/status mqtt_response: - topic: /device/123/status/response + topic: /device/{random_device_id}/status/response json: lights: 1 timeout: 3 @@ -337,11 +379,13 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: Test checking for lights status with anyint mqtt_publish: - topic: /device/123/status + topic: /device/{random_device_id}/status mqtt_response: - topic: /device/123/status/response + topic: /device/{random_device_id}/status/response json: lights: !anyint timeout: 3 @@ -357,11 +401,13 @@ includes: paho-mqtt: *mqtt_spec stages: - - name: Test checking for lights status with anyint + - *setup_device_for_test + + - name: Test checking for lights status with anything with json mqtt_publish: - topic: /device/123/status + topic: /device/{random_device_id}/status mqtt_response: - topic: /device/123/status/response + topic: /device/{random_device_id}/status/response json: lights: !anything timeout: 3 @@ -377,11 +423,13 @@ 
includes: paho-mqtt: *mqtt_spec stages: - - name: Test checking for lights status with anyint + - *setup_device_for_test + + - name: Test checking for lights status with anything with payload mqtt_publish: - topic: /device/123/status + topic: /device/{random_device_id}/status mqtt_response: - topic: /device/123/status/response + topic: /device/{random_device_id}/status/response payload: !anything timeout: 3 qos: 1 @@ -396,13 +444,15 @@ includes: paho-mqtt: *mqtt_spec stages: + - *setup_device_for_test + - name: Echo json mqtt_publish: - topic: /device/123/echo + topic: /device/{random_device_id}/echo json: message: !raw "Hello {world}" mqtt_response: - topic: /device/123/echo/response + topic: /device/{random_device_id}/echo/response json: message: !raw "Hello {world}" timeout: 5 @@ -418,17 +468,300 @@ includes: paho-mqtt: client: transport: websockets - client_id: tavern-tester + client_id: tavern-tester-{random_device_id} connect: host: "{mqtt_host}" port: !int "{mqtt_port:d}" timeout: 3 stages: + - *setup_device_for_test + + - name: step 1 - ping/pong + mqtt_publish: + topic: /device/{random_device_id}/ping + mqtt_response: + topic: /device/{random_device_id}/pong + timeout: 5 + qos: 1 + +--- + +test_name: Test get both statuses + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: create device 2 + request: + url: "{host}/create_device" + method: PUT + json: + device_id: "{random_device_id_2}" + + - name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + - topic: /device/{random_device_id}/status/response + payload: !anything + timeout: 3 + qos: 1 + - topic: /device/{random_device_id_2}/status/response + payload: !anything + timeout: 3 + qos: 1 + +--- + +test_name: Test out of order responses + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: create device 2 + request: + url: "{host}/create_device" + method: PUT + json: + device_id: "{random_device_id_2}" + + - name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + - topic: /device/{random_device_id_2}/status/response + payload: !anything + timeout: 3 + qos: 1 + - topic: /device/{random_device_id}/status/response + payload: !anything + timeout: 3 + qos: 1 + +--- + +test_name: Save something and reuse it, one response + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + topic: /device/{random_device_id}/status/response + json: + lights: !anything + timeout: 3 + qos: 1 + save: + json: + lights_status: lights + + - name: Echo text + mqtt_publish: + topic: /device/{random_device_id}/echo + payload: "{lights_status}" + mqtt_response: + topic: /device/{random_device_id}/echo/response + payload: "{lights_status}" + timeout: 5 + qos: 1 + +--- + +test_name: Save something and reuse it, multiple responses, saved from both + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: Turn lights on for first device + mqtt_publish: + topic: /device/{random_device_id}/lights + qos: 1 + payload: "on" + delay_after: 2 + + - name: create device 2 + request: + url: "{host}/create_device" + method: PUT + json: + device_id: "{random_device_id_2}" + + - name: Get device statuses + mqtt_publish: + topic: /devices/status + mqtt_response: + - topic: /device/{random_device_id}/status/response + 
timeout: 3 + qos: 1 + json: + lights: 1 + save: + json: + device_1_lights: lights + - topic: /device/{random_device_id_2}/status/response + timeout: 3 + qos: 1 + json: + lights: 0 + save: + json: + device_2_lights: lights + + - name: Ensure can use saved values 1 + request: + url: "{host}/send_mqtt_message" + json: + device_id: "{random_device_id}" + payload: "{device_1_lights}" + method: POST + headers: + content-type: application/json + response: + status_code: 200 + json: + topic: "/device/{random_device_id}" + headers: + content-type: application/json + mqtt_response: + topic: /device/{random_device_id} + payload: "1" + timeout: 5 + + - name: Ensure can use saved values 2 + request: + url: "{host}/send_mqtt_message" + json: + device_id: "{random_device_id_2}" + payload: "{device_2_lights}" + method: POST + headers: + content-type: application/json + response: + status_code: 200 + json: + topic: "/device/{random_device_id_2}" + headers: + content-type: application/json + mqtt_response: + topic: /device/{random_device_id_2} + payload: "0" + timeout: 5 + +--- + +test_name: Save something from an ext function and reuse it, one response + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + topic: /device/{random_device_id}/status/response + json: + lights: !anything + timeout: 3 + qos: 1 + save: + $ext: + function: testing_utils:return_hello + + - name: Echo text + mqtt_publish: + topic: /device/{random_device_id}/echo + payload: "{hello}" + mqtt_response: + topic: /device/{random_device_id}/echo/response + payload: "there" + timeout: 5 + qos: 1 + +--- + +test_name: Save something from an ext function and reuse it, multiple response + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + - name: step 1 - ping/pong mqtt_publish: - topic: /device/123/ping + topic: /devices/status mqtt_response: - topic: /device/123/pong + - topic: /device/{random_device_id}/status/response + json: + lights: !anything + timeout: 3 + qos: 1 + save: + $ext: + function: testing_utils:return_hello + + - name: Echo text + mqtt_publish: + topic: /device/{random_device_id}/echo + payload: "{hello}" + mqtt_response: + topic: /device/{random_device_id}/echo/response + payload: "there" timeout: 5 qos: 1 + +--- + +test_name: Update an MQTT publish from an ext function + +includes: + - !include common.yaml + +paho-mqtt: *mqtt_spec + +stages: + - *setup_device_for_test + + - name: step 1 - ping/pong + mqtt_publish: + topic: /device/{random_device_id}/echo + json: + $ext: + function: testing_utils:return_hello + mqtt_response: + topic: /device/{random_device_id}/echo/response + timeout: 3 + qos: 1 + json: + hello: there diff --git a/example/mqtt/test_mqtt_failures.tavern.yaml b/example/mqtt/test_mqtt_failures.tavern.yaml index 7957ed02c..b54a9cfee 100644 --- a/example/mqtt/test_mqtt_failures.tavern.yaml +++ b/example/mqtt/test_mqtt_failures.tavern.yaml @@ -92,3 +92,51 @@ stages: topic: /device/123/status/response timeout: 3 qos: 1 + +--- + +test_name: Test unexpected message fails + +includes: +- !include common.yaml + +paho-mqtt: *mqtt_spec + +_xfail: run + +stages: +- name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + - topic: /device/456/status/response + payload: !anything + timeout: 2 + qos: 1 + - topic: /device/123/status/response + payload: !anything + timeout: 2 + qos: 1 + unexpected: true + +--- + 
+test_name: Test unexpected message fails even on its own + +includes: +- !include common.yaml + +paho-mqtt: *mqtt_spec + +_xfail: run + +stages: +- name: step 1 - ping/pong + mqtt_publish: + topic: /devices/status + mqtt_response: + - topic: /device/123/status/response + payload: !anything + timeout: 3 + qos: 1 + unexpected: true diff --git a/example/mqtt/testing_utils.py b/example/mqtt/testing_utils.py index 6d5e52053..f483ca942 100644 --- a/example/mqtt/testing_utils.py +++ b/example/mqtt/testing_utils.py @@ -1,4 +1,7 @@ def message_says_hello(msg): - """Make sure that the response was friendly - """ + """Make sure that the response was friendly""" assert msg.payload.get("message") == "hello world" + + +def return_hello(_=None): + return {"hello": "there"} diff --git a/example/remember/server.py b/example/remember/server.py deleted file mode 100644 index b931c3c38..000000000 --- a/example/remember/server.py +++ /dev/null @@ -1,58 +0,0 @@ -from flask import Flask, jsonify, request, make_response, session -from itsdangerous import URLSafeTimedSerializer -from datetime import timedelta, datetime -from hashlib import sha512 - -app = Flask(__name__) -app.config.update(SECRET_KEY="secret") - -users = {"mark": {"password": "password", "regular": "foo", "protected": "bar"}} - -serializer = URLSafeTimedSerializer( - secret_key="secret", - salt="cookie", - signer_kwargs=dict(key_derivation="hmac", digest_method=sha512), -) - - -@app.route("/login", methods=["POST"]) -def login(): - r = request.get_json() - username = r["username"] - password = r["password"] - - if password == users[username]["password"]: - session["user"] = username - response = make_response("", 200) - response.set_cookie( - "remember", - value=serializer.dumps(username), - expires=datetime.utcnow() + timedelta(days=30), - httponly=True, - ) - return response - - return "", 401 - - -@app.route("/regular", methods=["GET"]) -def regular(): - username = session.get("user") - - if not username: - remember = request.cookies.get("remember") - if remember: - username = serializer.loads(remember, max_age=3600) - - if username: - return jsonify(regular=users[username]["regular"]), 200 - - return "", 401 - - -@app.route("/protected", methods=["GET"]) -def protected(): - username = session.get("user") - if username: - return jsonify(protected=users[username]["protected"]), 200 - return "", 401 diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 350524757..000000000 --- a/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..1a95883f8 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,199 @@ +[build-system] +requires = ["flit-core >=3.2,<4"] +build-backend = "flit_core.buildapi" + +[project] + +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Framework :: Pytest", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Topic :: Utilities", + "Topic :: Software Development :: Testing", + "License :: OSI Approved :: MIT License", +] + +keywords = ["testing", "pytest"] + +name = "tavern" +description = "Simple testing of RESTful APIs" +version = "2.0.5" + +dependencies = [ + "PyYAML>=5.3.1,<7", + "jmespath>=1,<2", + "jsonschema>=3.2.0,<5", + "paho-mqtt>=1.3.1,<=1.6.1", + "pyjwt>=2.4.0,<3", + "pykwalify>=1.8.0,<2", + "pytest>=7,<8", + 
"python-box>=6,<7", + "requests>=2.22.0,<3", + "stevedore>=4,<5", + + "grpcio", + "grpcio-reflection", + "grpcio-status", +] + +requires-python = ">=3.8" + +[[project.authors]] +name = "Michael Boulton" + +[project.license] +file = "LICENSE" + +[project.readme] +file = "README.md" +content-type = "text/markdown" + +[project.urls] +Home = "https://taverntesting.github.io/" +Documentation = "https://tavern.readthedocs.io/en/latest/" +Source = "https://github.com/taverntesting/tavern" + +[project.optional-dependencies] +dev = [ + "Faker", + "allure-pytest", + "black==23.1.0", + "bump2version", + "colorlog", + "docker-compose", + "flask", + "fluent-logger", + "itsdangerous", + "mypy", + "mypy-extensions", + "coverage[toml]", + "types-PyYAML", + "flit >=3.2,<4", + "pip-tools", + "pre-commit", + "pygments", + "pytest-cov", + "ruff>=0.0.246", + "pytest-xdist", + "py", + "tox>=3,<4", + "tox-travis", + "twine", + "wheel", + # This has to be installed separately, otherwise you can't upload to pypi + # "tbump@https://github.com/michaelboulton/tbump/archive/714ba8957a3c84b625608ceca39811ebe56229dc.zip", +] + + +[project.scripts] + +tavern-ci = "tavern.entry:main" + +[project.entry-points.pytest11] + +tavern = "tavern._core.pytest" + +[project.entry-points.tavern_http] +requests = "tavern._plugins.rest.tavernhook:TavernRestPlugin" +[project.entry-points.tavern_mqtt] +paho-mqtt = "tavern._plugins.mqtt.tavernhook" +[project.entry-points.tavern_grpc] +grpc = "tavern._plugins.grpc.tavernhook" + +[tool.black] +target-version = ['py37'] + +[tool.mypy] +python_version = 3.8 +ignore_missing_imports = true + +[tool.coverage.run] +branch = false +omit = [ + "tests/*", + ".eggs/*", + "env/*", + "build/*", + "dist/*", +] +source = ["tavern"] + +[tool.coverage.paths] +tavern = [ + "tavern/", + ".tox/py39-generic/lib/python3.9/site-packages/tavern/", + ".tox/py39-mqtt/lib/python3.9/site-packages/tavern", +] + +[tool.pytest.ini_options] +testpaths = ["tavern", "tests/unit"] +addopts = [ + "--doctest-modules", + "-r", "xs", + "-vv", + "--strict-markers", + "-p", "no:logging", + "--tb=short", +] +norecursedirs = [ + ".git", + ".tox", + "example", +] + +[tool.ruff] +ignore = [ + "E501", # line length + "RUF005", # union types only valid from 3.10+ + "B905", # zip(..., strict=True) only valid from 3.10+ + "PLR0912", "PLR0915", "PLR0911", "PLR0913", # too many branches/variables/return values - sometimes this is just unavoidable + "PLR2004", # 'magic numbers' +] +select = ["E", "F", "B", "W", "I", "S", "C4", "ICN", "T20", "PLE", "RUF", "SIM105", "PL"] +# Look at: UP +target-version = "py38" + +[tool.ruff.per-file-ignores] +"tests/*" = ["S"] + +[tool.ruff.isort] +known-first-party = ["tavern"] + +[tool.tbump.version] +current = "2.0.5" + +regex = ''' + (?P\d+) + \. + (?P\d+) + \. + (?P\d+) + ((?P[a-zA-Z]+)(?P\d+))? 
+ ''' + +[tool.tbump.git] +message_template = "Bump to {new_version}" +tag_template = "{new_version}" + +[[tool.tbump.file]] +src = "tavern/__init__.py" + +[[tool.tbump.file]] +src = "pyproject.toml" + +[[tool.tbump.file]] +src = "docs/source/conf.py" + + +[[tool.tbump.before_commit]] +name = "Update changelog" +cmd = "./scripts/update-changelog.bash" + +# TODO: enable +# [[tool.tbump.after_push]] +# name = "publish" +# cmd = "./scripts/release.sh" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 8435e0042..000000000 --- a/pytest.ini +++ /dev/null @@ -1,25 +0,0 @@ -[coverage:run] -omit = - tests/* - .eggs/* - setup.py - env/* - build/* - dist/* - -source = - tavern - -[pytest] -testpaths=tavern tests/unit -addopts = - --doctest-modules - -r xs - -vv - --strict - -p no:logging - --tb=short -norecursedirs = - .git - .tox - example diff --git a/requirements.txt b/requirements.txt index 5d0252916..bba30f5c4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,987 @@ -# Install all requirements --e .[tests] +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --all-extras --generate-hashes --output-file=requirements.txt --resolver=backtracking pyproject.toml +# +allure-pytest==2.12.0 \ + --hash=sha256:1a10b2b78334443097d7be890a53c991e857e13d14781377c2f8d11eb4b5582c \ + --hash=sha256:85b73b1dbe9908ba4f84b80118a93e1049c02dd593209260d8c1c950cf286f6c + # via tavern (pyproject.toml) +allure-python-commons==2.12.0 \ + --hash=sha256:d5c362dd01167f086331822e9b1912d4e6fd6cbc2d1a006dd048e77e82a7ae73 \ + --hash=sha256:f968c69d4e656bcf274f2ef6578f24afcaab663f8814e238d0ab2b8bec2e0134 + # via allure-pytest +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via + # allure-python-commons + # jsonschema + # pytest +bcrypt==4.0.1 \ + --hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \ + --hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \ + --hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \ + --hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \ + --hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \ + --hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \ + --hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \ + --hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \ + --hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \ + --hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \ + --hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \ + --hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \ + --hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \ + --hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \ + --hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \ + --hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \ + --hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \ + --hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \ + --hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \ + 
--hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \ + --hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3 + # via paramiko +black==23.1.0 \ + --hash=sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd \ + --hash=sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555 \ + --hash=sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481 \ + --hash=sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468 \ + --hash=sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9 \ + --hash=sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a \ + --hash=sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958 \ + --hash=sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580 \ + --hash=sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26 \ + --hash=sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32 \ + --hash=sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8 \ + --hash=sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753 \ + --hash=sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b \ + --hash=sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074 \ + --hash=sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651 \ + --hash=sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24 \ + --hash=sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6 \ + --hash=sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad \ + --hash=sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac \ + --hash=sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221 \ + --hash=sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06 \ + --hash=sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27 \ + --hash=sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648 \ + --hash=sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739 \ + --hash=sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104 + # via tavern (pyproject.toml) +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +build==0.9.0 \ + --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ + --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 + # via pip-tools +bump2version==1.0.1 \ + --hash=sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410 \ + --hash=sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6 + # via tavern (pyproject.toml) +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 
\ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + 
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via + # cryptography + # pynacl +cfgv==3.3.1 \ + --hash=sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426 \ + --hash=sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736 + # via pre-commit +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.1.3 \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 + # via + # black + # flask + # pip-tools +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 + # via tavern (pyproject.toml) +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +coverage[toml]==7.0.0 \ + --hash=sha256:0a8b0e86bede874bf5da566b02194fbb12dd14ce3585cabd58452007f272ba81 \ + --hash=sha256:100546219af59d2ad82d4575de03a303eb27b75ea36ffbd1677371924d50bcbc \ + --hash=sha256:10b6246cae61896ab4c7568e498e492cbb73a2dfa4c3af79141c43cf806f929a \ + --hash=sha256:215f40ef86f1958a1151fa7fad2b4f2f99534c4e10a34a1e065eba3f19ef8868 \ + --hash=sha256:2331b7bd84a1be79bd17ca8e103ce38db8cbf7cb354dc56e651ba489cf849212 \ + --hash=sha256:30220518dd89c4878908d73f5f3d1269f86e9e045354436534587a18c7b9da85 \ + --hash=sha256:32b94ad926e933976627f040f96dd1d9b0ac91f8d27e868c30a28253b9b6ac2d \ + --hash=sha256:33efe89cd0efef016db19d8d05aa46631f76793de90a61b6717acb202b36fe60 \ + --hash=sha256:36b62f0220459e528ad5806cc7dede71aa716e067d2cb10cb4a09686b8791fba \ + --hash=sha256:3c0deee68e0dae1d6e3fe6943c76d7e66fbeb6519bd08e4e5366bcc28a8a9aca \ + --hash=sha256:3ec256a592b497f26054195f7d7148892aca8c4cdcc064a7cc66ef7a0455b811 \ + --hash=sha256:43ec1935c6d6caab4f3bc126d20bd709c0002a175d62208ebe745be37a826a41 \ + 
--hash=sha256:5885a4ceb6dde34271bb0adafa4a248a7f589c89821e9da3110c39f92f41e21b \ + --hash=sha256:59e71912c7fc78d08a567ee65656123878f49ca1b5672e660ea70bf8dfbebf8f \ + --hash=sha256:793dcd9d42035746fc7637df4336f7581df19d33c5c5253cf988c99d8e93a8ba \ + --hash=sha256:8593c9baf1f0f273afa22f5b45508b76adc7b8e94e17e7d98fbe1e3cd5812af2 \ + --hash=sha256:8938f3a10f45019b502020ba9567b97b6ecc8c76b664b421705c5406d4f92fe8 \ + --hash=sha256:8dbf83a4611c591b5de65069b6fd4dd3889200ed270cd2f7f5ac765d3842889f \ + --hash=sha256:8f1e6d9c70d45a960d3f3d781ea62b167fdf2e0e1f6bb282b96feea653adb923 \ + --hash=sha256:96b5b1f1079e48f56bfccf103bcf44d48b9eb5163f1ea523fad580f15d3fe5e0 \ + --hash=sha256:97c0b001ff15b8e8882995fc07ac0a08c8baf8b13c1145f3f12e0587bbb0e335 \ + --hash=sha256:9a175da2a7320e18fc3ee1d147639a2b3a8f037e508c96aa2da160294eb50e17 \ + --hash=sha256:9fadd15f9fcfd7b16d9cccce9f5e6ec6f9b8df860633ad9aa62c2b14c259560f \ + --hash=sha256:a290b7921c1c05787b953e5854d394e887df40696f21381cc33c4e2179bf50ac \ + --hash=sha256:a30b646fbdd5bc52f506e149fa4fbdef82432baf6b81774e61ec4e3b43b9cbde \ + --hash=sha256:a6fff0f08bc5ffd0d78db821971472b4adc2ee876b86f743e46d634fb8e3c22f \ + --hash=sha256:a7e1bb36b4e57a2d304322021b35d4e4a25fa0d501ba56e8e51efaebf4480556 \ + --hash=sha256:a8785791c2120af114ea7a06137f7778632e568a5aa2bbfc3b46c573b702af74 \ + --hash=sha256:ae088eb1cbdad8206931b1bf3f11dee644e038a9300be84d3e705e29356e5b1d \ + --hash=sha256:b18df11efa615b79b9ecc13035a712957ff6283f7b244e57684e1c092869f541 \ + --hash=sha256:b8f7cd942dda3795fc9eadf303cc53a422ac057e3b70c2ad6d4276ec6a83a541 \ + --hash=sha256:bc904aa96105d73357de03de76336b1e3db28e2b12067d36625fd9646ab043fd \ + --hash=sha256:bcaf18e46668057051a312c714a4548b81f7e8fb3454116ad97be7562d2a99e4 \ + --hash=sha256:bf437a04b9790d3c9cd5b48e9ce9aa84229040e3ae7d6c670a55118906113c5a \ + --hash=sha256:c1ba6e63b831112b9484ff5905370d89e43d4316bac76d403031f60d61597466 \ + --hash=sha256:c4b63888bef2928d0eca12cbce0760cfb696acb4fe226eb55178b6a2a039328a \ + --hash=sha256:c685fc17d6f4f1a3833e9dac27d0b931f7ccb52be6c30d269374203c7d0204a2 \ + --hash=sha256:cda63459eb20652b22e038729a8f5063862c189a3963cb042a764b753172f75e \ + --hash=sha256:d43d406a4d73aa7f855fa44fa77ff47e739b565b2af3844600cdc016d01e46b9 \ + --hash=sha256:d564142a03d3bc8913499a458e931b52ddfe952f69b6cd4b24d810fd2959044a \ + --hash=sha256:d6b4af31fb49a2ae8de1cd505fa66c403bfcc5066e845ac19d8904dcfc9d40da \ + --hash=sha256:db8141856dc9be0917413df7200f53accf1d84c8b156868e6af058a1ea8e903a \ + --hash=sha256:de06e7585abe88c6d38c1b73ce4c3cb4c1a79fbb0da0d0f8e8689ef5729ec60d \ + --hash=sha256:e06abac1a4aec1ff989131e43ca917fc7bd296f34bf0cfe86cbf74343b21566d \ + --hash=sha256:e645c73cbfc4577d93747d3f793115acf6f907a7eb9208fa807fdcf2da1964a4 \ + --hash=sha256:e907db8bdd0ad1253a33c20fdc5f0f6209d271114a9c6f1fcdf96617343f7ca0 \ + --hash=sha256:f2569682d6ea9628da8d6ba38579a48b1e53081226ec7a6c82b5024b3ce5009f \ + --hash=sha256:f6a4bf5bdee93f6817797beba7086292c2ebde6df0d5822e0c33f8b05415c339 \ + --hash=sha256:f9071e197faa24837b967bc9aa0b9ef961f805a75f1ee3ea1f3367f55cd46c3c \ + --hash=sha256:fb85b7a7a4b204bd59d6d0b0c8d87d9ffa820da225e691dfaffc3137dc05b5f6 \ + --hash=sha256:fee283cd36c3f14422d9c1b51da24ddbb5e1eed89ad2480f6a9f115df38b5df8 + # via + # pytest-cov + # tavern (pyproject.toml) +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 
\ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 + # via + # paramiko + # secretstorage +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e + # via virtualenv +distro==1.8.0 \ + --hash=sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8 \ + --hash=sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff + # via docker-compose +docker[ssh]==6.0.1 \ + --hash=sha256:896c4282e5c7af5c45e8b683b0b0c33932974fe6e50fc6906a0a83616ab3da97 \ + --hash=sha256:dbcb3bd2fa80dca0788ed908218bf43972772009b881ed1e20dfc29a65e49782 + # via docker-compose +docker-compose==1.29.2 \ + --hash=sha256:4c8cd9d21d237412793d18bd33110049ee9af8dab3fe2c213bbd0733959b09b7 \ + --hash=sha256:8d5589373b35c8d3b1c8c1182c6e4a4ff14bffa3dd0b605fcd08f73c94cef809 + # via tavern (pyproject.toml) +dockerpty==0.4.1 \ + --hash=sha256:69a9d69d573a0daa31bcd1c0774eeed5c15c295fe719c61aca550ed1393156ce + # via docker-compose +docopt==0.6.2 \ + --hash=sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491 + # via + # docker-compose + # pykwalify +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via + # flit + # readme-renderer +exceptiongroup==1.0.4 \ + --hash=sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828 \ + --hash=sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec + # via pytest +execnet==1.9.0 \ + --hash=sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5 \ + --hash=sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142 + # via pytest-xdist +faker==15.3.4 \ + --hash=sha256:2d5443724f640ce07658ca8ca8bbd40d26b58914e63eec6549727869aa67e2cc \ + --hash=sha256:c2a2ff9dd8dfd991109b517ab98d5cb465e857acb45f6b643a0e284a9eb2cc76 + # via tavern (pyproject.toml) +filelock==3.8.2 \ + 
--hash=sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2 \ + --hash=sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c + # via + # tox + # virtualenv +flask==2.2.2 \ + --hash=sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b \ + --hash=sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526 + # via tavern (pyproject.toml) +flit==3.8.0 \ + --hash=sha256:5ee0f88fd1cfa4160d1a8fa01237e96d06d677ae0403a0bbabbb277cb37c5e9c \ + --hash=sha256:d0f2a8f4bd45dc794befbf5839ecc0fd3830d65a57bd52b5997542fac5d5e937 + # via tavern (pyproject.toml) +flit-core==3.8.0 \ + --hash=sha256:64a29ec845164a6abe1136bf4bc5ae012bdfe758ed42fc7571a9059a7c80bd83 \ + --hash=sha256:b305b30c99526df5e63d6022dd2310a0a941a187bd3884f4c8ef0418df6c39f3 + # via flit +fluent-logger==0.10.0 \ + --hash=sha256:543637e5e62ec3fc3c92b44e5a4e148a3cea88a0f8ca4fae26c7e60fda7564c1 \ + --hash=sha256:678bda90c513ff0393964b64544ce41ef25669d2089ce6c3b63d9a18554b9bfa + # via tavern (pyproject.toml) +googleapis-common-protos==1.58.0 \ + --hash=sha256:c727251ec025947d545184ba17e3578840fc3a24a0516a020479edab660457df \ + --hash=sha256:ca3befcd4580dab6ad49356b46bf165bb68ff4b32389f028f1abd7c10ab9519a + # via grpcio-status +grpcio==1.51.1 \ + --hash=sha256:094e64236253590d9d4075665c77b329d707b6fca864dd62b144255e199b4f87 \ + --hash=sha256:0dc5354e38e5adf2498312f7241b14c7ce3484eefa0082db4297189dcbe272e6 \ + --hash=sha256:0e1a9e1b4a23808f1132aa35f968cd8e659f60af3ffd6fb00bcf9a65e7db279f \ + --hash=sha256:0fb93051331acbb75b49a2a0fd9239c6ba9528f6bdc1dd400ad1cb66cf864292 \ + --hash=sha256:16c71740640ba3a882f50b01bf58154681d44b51f09a5728180a8fdc66c67bd5 \ + --hash=sha256:172405ca6bdfedd6054c74c62085946e45ad4d9cec9f3c42b4c9a02546c4c7e9 \ + --hash=sha256:17ec9b13cec4a286b9e606b48191e560ca2f3bbdf3986f91e480a95d1582e1a7 \ + --hash=sha256:22b011674090594f1f3245960ced7386f6af35485a38901f8afee8ad01541dbd \ + --hash=sha256:24ac1154c4b2ab4a0c5326a76161547e70664cd2c39ba75f00fc8a2170964ea2 \ + --hash=sha256:257478300735ce3c98d65a930bbda3db172bd4e00968ba743e6a1154ea6edf10 \ + --hash=sha256:29cb97d41a4ead83b7bcad23bdb25bdd170b1e2cba16db6d3acbb090bc2de43c \ + --hash=sha256:2b170eaf51518275c9b6b22ccb59450537c5a8555326fd96ff7391b5dd75303c \ + --hash=sha256:31bb6bc7ff145e2771c9baf612f4b9ebbc9605ccdc5f3ff3d5553de7fc0e0d79 \ + --hash=sha256:3c2b3842dcf870912da31a503454a33a697392f60c5e2697c91d133130c2c85d \ + --hash=sha256:3f9b0023c2c92bebd1be72cdfca23004ea748be1813a66d684d49d67d836adde \ + --hash=sha256:471d39d3370ca923a316d49c8aac66356cea708a11e647e3bdc3d0b5de4f0a40 \ + --hash=sha256:49d680356a975d9c66a678eb2dde192d5dc427a7994fb977363634e781614f7c \ + --hash=sha256:4c4423ea38a7825b8fed8934d6d9aeebdf646c97e3c608c3b0bcf23616f33877 \ + --hash=sha256:506b9b7a4cede87d7219bfb31014d7b471cfc77157da9e820a737ec1ea4b0663 \ + --hash=sha256:538d981818e49b6ed1e9c8d5e5adf29f71c4e334e7d459bf47e9b7abb3c30e09 \ + --hash=sha256:59dffade859f157bcc55243714d57b286da6ae16469bf1ac0614d281b5f49b67 \ + --hash=sha256:5a6ebcdef0ef12005d56d38be30f5156d1cb3373b52e96f147f4a24b0ddb3a9d \ + --hash=sha256:5dca372268c6ab6372d37d6b9f9343e7e5b4bc09779f819f9470cd88b2ece3c3 \ + --hash=sha256:6df3b63538c362312bc5fa95fb965069c65c3ea91d7ce78ad9c47cab57226f54 \ + --hash=sha256:6f0b89967ee11f2b654c23b27086d88ad7bf08c0b3c2a280362f28c3698b2896 \ + --hash=sha256:75e29a90dc319f0ad4d87ba6d20083615a00d8276b51512e04ad7452b5c23b04 \ + --hash=sha256:7942b32a291421460d6a07883033e392167d30724aa84987e6956cd15f1a21b9 \ + 
--hash=sha256:9235dcd5144a83f9ca6f431bd0eccc46b90e2c22fe27b7f7d77cabb2fb515595 \ + --hash=sha256:97d67983189e2e45550eac194d6234fc38b8c3b5396c153821f2d906ed46e0ce \ + --hash=sha256:9ff42c5620b4e4530609e11afefa4a62ca91fa0abb045a8957e509ef84e54d30 \ + --hash=sha256:a8a0b77e992c64880e6efbe0086fe54dfc0bbd56f72a92d9e48264dcd2a3db98 \ + --hash=sha256:aacb54f7789ede5cbf1d007637f792d3e87f1c9841f57dd51abf89337d1b8472 \ + --hash=sha256:bc59f7ba87972ab236f8669d8ca7400f02a0eadf273ca00e02af64d588046f02 \ + --hash=sha256:cc2bece1737b44d878cc1510ea04469a8073dbbcdd762175168937ae4742dfb3 \ + --hash=sha256:cd3baccea2bc5c38aeb14e5b00167bd4e2373a373a5e4d8d850bd193edad150c \ + --hash=sha256:dad6533411d033b77f5369eafe87af8583178efd4039c41d7515d3336c53b4f1 \ + --hash=sha256:e223a9793522680beae44671b9ed8f6d25bbe5ddf8887e66aebad5e0686049ef \ + --hash=sha256:e473525c28251558337b5c1ad3fa969511e42304524a4e404065e165b084c9e4 \ + --hash=sha256:e4ef09f8997c4be5f3504cefa6b5c6cc3cf648274ce3cede84d4342a35d76db6 \ + --hash=sha256:e6dfc2b6567b1c261739b43d9c59d201c1b89e017afd9e684d85aa7a186c9f7a \ + --hash=sha256:eacad297ea60c72dd280d3353d93fb1dcca952ec11de6bb3c49d12a572ba31dd \ + --hash=sha256:f1158bccbb919da42544a4d3af5d9296a3358539ffa01018307337365a9a0c64 \ + --hash=sha256:f1fec3abaf274cdb85bf3878167cfde5ad4a4d97c68421afda95174de85ba813 \ + --hash=sha256:f96ace1540223f26fbe7c4ebbf8a98e3929a6aa0290c8033d12526847b291c0f \ + --hash=sha256:fbdbe9a849854fe484c00823f45b7baab159bdd4a46075302281998cb8719df5 + # via + # grpcio-reflection + # grpcio-status + # tavern (pyproject.toml) +grpcio-reflection==1.51.1 \ + --hash=sha256:b70af764a83e42a44f65df1edb232e972ab69e72bc7fbbad481e66c29a9d8cb8 \ + --hash=sha256:c07a93c0c36ef88fe475744289863b4787005eff4de0cc04213ecad718b01aae + # via tavern (pyproject.toml) +grpcio-status==1.51.1 \ + --hash=sha256:a52cbdc4b18f325bfc13d319ae7c7ae7a0fee07f3d9a005504d6097896d7a495 \ + --hash=sha256:ac2617a3095935ebd785e2228958f24b10a0d527a0c9eb5a0863c784f648a816 + # via tavern (pyproject.toml) +identify==2.5.10 \ + --hash=sha256:dce9e31fee7dbc45fea36a9e855c316b8fbf807e65a862f160840bb5a2bf5dfd \ + --hash=sha256:fb7c2feaeca6976a3ffa31ec3236a6911fbc51aec9acc111de2aed99f244ade2 + # via pre-commit +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # via requests +importlib-metadata==5.2.0 \ + --hash=sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f \ + --hash=sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd + # via + # flask + # keyring + # twine +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 + # via pytest +itsdangerous==2.1.2 \ + --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \ + --hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a + # via + # flask + # tavern (pyproject.toml) +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a + # via keyring +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + 
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via flask +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via tavern (pyproject.toml) +jsonschema==3.2.0 \ + --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ + --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a + # via + # docker-compose + # tavern (pyproject.toml) +keyring==23.13.1 \ + --hash=sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd \ + --hash=sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678 + # via twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + 
--hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via + # jinja2 + # werkzeug +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab + # via jaraco-classes +msgpack==1.0.4 \ + --hash=sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467 \ + --hash=sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae \ + --hash=sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92 \ + --hash=sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef \ + --hash=sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624 \ + --hash=sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227 \ + --hash=sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88 \ + --hash=sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9 \ + --hash=sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8 \ + --hash=sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd \ + --hash=sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6 \ + --hash=sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55 \ + --hash=sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e \ + --hash=sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2 \ + --hash=sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44 \ + --hash=sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6 \ + --hash=sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9 \ + --hash=sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab \ + --hash=sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae \ + --hash=sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa \ + --hash=sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9 \ + --hash=sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e \ + --hash=sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250 \ + --hash=sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce \ + --hash=sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075 \ + --hash=sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236 \ + --hash=sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae \ + --hash=sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e \ + --hash=sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f \ + --hash=sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08 \ + --hash=sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6 \ + 
--hash=sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d \ + --hash=sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43 \ + --hash=sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1 \ + --hash=sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6 \ + --hash=sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0 \ + --hash=sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c \ + --hash=sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff \ + --hash=sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db \ + --hash=sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243 \ + --hash=sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661 \ + --hash=sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba \ + --hash=sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e \ + --hash=sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb \ + --hash=sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52 \ + --hash=sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6 \ + --hash=sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1 \ + --hash=sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f \ + --hash=sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da \ + --hash=sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f \ + --hash=sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c \ + --hash=sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8 + # via fluent-logger +mypy==0.991 \ + --hash=sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d \ + --hash=sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6 \ + --hash=sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf \ + --hash=sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f \ + --hash=sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813 \ + --hash=sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33 \ + --hash=sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad \ + --hash=sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05 \ + --hash=sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297 \ + --hash=sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06 \ + --hash=sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd \ + --hash=sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243 \ + --hash=sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305 \ + --hash=sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476 \ + --hash=sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711 \ + --hash=sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70 \ + --hash=sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5 \ + --hash=sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461 \ + --hash=sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab \ + --hash=sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c \ + --hash=sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d \ + 
--hash=sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135 \ + --hash=sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93 \ + --hash=sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648 \ + --hash=sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a \ + --hash=sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb \ + --hash=sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3 \ + --hash=sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372 \ + --hash=sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb \ + --hash=sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef + # via tavern (pyproject.toml) +mypy-extensions==0.4.3 \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 + # via + # black + # mypy + # tavern (pyproject.toml) +nodeenv==1.7.0 \ + --hash=sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e \ + --hash=sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b + # via pre-commit +packaging==22.0 \ + --hash=sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3 \ + --hash=sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3 + # via + # black + # build + # docker + # pytest + # tox +paho-mqtt==1.5.1 \ + --hash=sha256:9feb068e822be7b3a116324e01fb6028eb1d66412bf98595ae72698965cb1cae + # via tavern (pyproject.toml) +paramiko==2.12.0 \ + --hash=sha256:376885c05c5d6aa6e1f4608aac2a6b5b0548b1add40274477324605903d9cd49 \ + --hash=sha256:b2df1a6325f6996ef55a8789d0462f5b502ea83b3c990cbb5bbe57345c6812c4 + # via docker +pathspec==0.10.3 \ + --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ + --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 + # via black +pbr==5.11.0 \ + --hash=sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe \ + --hash=sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a + # via stevedore +pep517==0.13.0 \ + --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ + --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 + # via build +pip-tools==6.12.1 \ + --hash=sha256:88efb7b29a923ffeac0713e6f23ef8529cc6175527d42b93f73756cc94387293 \ + --hash=sha256:f0c0c0ec57b58250afce458e2e6058b1f30a4263db895b7d72fd6311bf1dc6f7 + # via tavern (pyproject.toml) +pkginfo==1.9.2 \ + --hash=sha256:ac03e37e4d601aaee40f8087f63fc4a2a6c9814dda2c8fa6aab1b1829653bdfa \ + --hash=sha256:d580059503f2f4549ad6e4c106d7437356dbd430e2c7df99ee1efe03d75f691e + # via twine +platformdirs==2.6.0 \ + --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ + --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e + # via + # black + # virtualenv +pluggy==1.0.0 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 + # via + # allure-python-commons + # pytest + # tox +pre-commit==2.20.0 \ + --hash=sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7 \ + --hash=sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959 + # via tavern (pyproject.toml) +protobuf==4.22.0 \ + 
--hash=sha256:1669cb7524221a8e2d9008d0842453dbefdd0fcdd64d67672f657244867635fb \ + --hash=sha256:29288813aacaa302afa2381db1d6e0482165737b0afdf2811df5fa99185c457b \ + --hash=sha256:47d31bdf58222dd296976aa1646c68c6ee80b96d22e0a3c336c9174e253fd35e \ + --hash=sha256:652d8dfece122a24d98eebfef30e31e455d300efa41999d1182e015984ac5930 \ + --hash=sha256:7c535d126e7dcc714105ab20b418c4fedbd28f8b8afc42b7350b1e317bbbcc71 \ + --hash=sha256:86c3d20428b007537ba6792b475c0853bba7f66b1f60e610d913b77d94b486e4 \ + --hash=sha256:a33a273d21852f911b8bda47f39f4383fe7c061eb1814db2c76c9875c89c2491 \ + --hash=sha256:ab4d043865dd04e6b09386981fe8f80b39a1e46139fb4a3c206229d6b9f36ff6 \ + --hash=sha256:b2fea9dc8e3c0f32c38124790ef16cba2ee0628fe2022a52e435e1117bfef9b1 \ + --hash=sha256:c27f371f0159feb70e6ea52ed7e768b3f3a4c5676c1900a7e51a24740381650e \ + --hash=sha256:c3325803095fb4c2a48649c321d2fbde59f8fbfcb9bfc7a86df27d112831c571 \ + --hash=sha256:e474b63bab0a2ea32a7b26a4d8eec59e33e709321e5e16fb66e766b61b82a95e \ + --hash=sha256:e894e9ae603e963f0842498c4cd5d39c6a60f0d7e4c103df50ee939564298658 + # via + # googleapis-common-protos + # grpcio-reflection + # grpcio-status +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via + # tavern (pyproject.toml) + # tox +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich + # tavern (pyproject.toml) +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 + # via tavern (pyproject.toml) +pykwalify==1.8.0 \ + --hash=sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651 \ + --hash=sha256:796b2ad3ed4cb99b88308b533fb2f559c30fa6efb4fa9fda11347f483d245884 + # via tavern (pyproject.toml) +pynacl==1.5.0 \ + --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ + --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \ + --hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \ + --hash=sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 \ + --hash=sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92 \ + --hash=sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff \ + --hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba \ + --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \ + --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ + --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 + # via paramiko +pyrsistent==0.19.2 \ + --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ + --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ + --hash=sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a \ + --hash=sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95 \ + --hash=sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712 \ + 
--hash=sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73 \ + --hash=sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41 \ + --hash=sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b \ + --hash=sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78 \ + --hash=sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab \ + --hash=sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308 \ + --hash=sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425 \ + --hash=sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2 \ + --hash=sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e \ + --hash=sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6 \ + --hash=sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2 \ + --hash=sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a \ + --hash=sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291 \ + --hash=sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584 \ + --hash=sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a \ + --hash=sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0 \ + --hash=sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770 + # via jsonschema +pytest==7.2.0 \ + --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ + --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 + # via + # allure-pytest + # pytest-cov + # pytest-xdist + # tavern (pyproject.toml) +pytest-cov==4.0.0 \ + --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ + --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 + # via tavern (pyproject.toml) +pytest-xdist==3.1.0 \ + --hash=sha256:40fdb8f3544921c5dfcd486ac080ce22870e71d82ced6d2e78fa97c2addd480c \ + --hash=sha256:70a76f191d8a1d2d6be69fc440cdf85f3e4c03c08b520fd5dc5d338d6cf07d89 + # via tavern (pyproject.toml) +python-box==6.1.0 \ + --hash=sha256:11cbe62f0dace8a6e2a10d210a5e87b99ad1a1286865568862516794c923a988 \ + --hash=sha256:1d29eafaa287857751e27fbe9a08dd856480f0037fe988b221eba4dac33e5852 \ + --hash=sha256:3638d3559f19ece7fa29f6a6550bc64696cd3b65e3d4154df07a3d06982252ff \ + --hash=sha256:3f0036f91e13958d2b37d2bc74c1197aa36ffd66755342eb64910f63d8a2990f \ + --hash=sha256:53998c3b95e31d1f31e46279ef1d27ac30b137746927260901ee61457f8468a0 \ + --hash=sha256:594b0363b187df855ff8649488b1301dddbbeea769629b7caeb584efe779b841 \ + --hash=sha256:6e7c243b356cb36e2c0f0e5ed7850969fede6aa812a7f501de7768996c7744d7 \ + --hash=sha256:7b73f26e40a7adc57b9e39f5687d026dfa8a336f48aefaf852a223b4e37392ad \ + --hash=sha256:9dbd92b67c443a97326273c9239fce04d3b6958be815d293f96ab65bc4a9dae7 \ + --hash=sha256:ab13208b053525ef154a36a4a52873b98a12b18b946edd4c939a4d5080e9a218 \ + --hash=sha256:ac44b3b85714a4575cc273b5dbd39ef739f938ef6c522d6757704a29e7797d16 \ + --hash=sha256:af6bcee7e1abe9251e9a41ca9ab677e1f679f6059321cfbae7e78a3831e0b736 \ + --hash=sha256:bdec0a5f5a17b01fc538d292602a077aa8c641fb121e1900dff0591791af80e8 \ + --hash=sha256:c14aa4e72bf30f4d573e62ff8030a86548603a100c3fb534561dbedf4a83f454 \ + --hash=sha256:d199cd289b4f4d053770eadd70217c76214aac30b92a23adfb9627fd8558d300 \ + --hash=sha256:ed6d7fe47d756dc2d9dea448702cea103716580a2efee7c859954929295fe28e \ + 
--hash=sha256:fa4696b5e09ccf695bf05c16bb5ca1fcc95a141a71a31eb262eee8e2ac07189a + # via tavern (pyproject.toml) +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via + # faker + # pykwalify +python-dotenv==0.21.0 \ + --hash=sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5 \ + --hash=sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045 + # via docker-compose +pyyaml==5.4.1 \ + --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ + --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ + --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ + --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ + --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ + --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ + --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ + --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ + --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ + --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ + --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ + --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ + --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ + --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ + --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ + --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ + --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ + --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ + --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ + --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ + --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ + --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ + --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 + # via + # docker-compose + # pre-commit + # tavern (pyproject.toml) +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # docker + # docker-compose + # flit + # requests-toolbelt + # tavern (pyproject.toml) + # twine 
+requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 + # via twine +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via pykwalify +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + 
--hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +ruff==0.0.246 \ + --hash=sha256:2474a805c4244cfaf0390a745a0c5ea9e5452f52fbce0be74930fece8bd40b90 \ + --hash=sha256:449e6632c13902df06ac14ccfc7bad71841ab90bfa64e8cb3f1a2ea91e647df0 \ + --hash=sha256:45cde6f9df94fb393ffeeada2daca279569b9d53d1d95d49b9b5b418fe70bd23 \ + --hash=sha256:468f282e26d1845f4a06dcd76fdc355f4288208e6b97f061951a7ffd6725102e \ + --hash=sha256:529a7d72f48331b97367cd6d42273f3cafa764d9373e74b75561367135958546 \ + --hash=sha256:5839a213a90220845693c95d7e6a19ab26751b9e37047ef8f4a58dc49230c817 \ + --hash=sha256:589aff453085cad28b8d1f517161a6b37a6d359cda419f64c009e0f7ff424d72 \ + --hash=sha256:64af553f8ff4d1f51a24104ac8f81b5ce6df9c230d776fa4dd22db96699efdb0 \ + --hash=sha256:67d5fa9abdcb764a7cee242fb65e303662c9cb80104a2dd0657e96fca8a7c6d8 \ + --hash=sha256:734ff8fef2e7105cf6946e525b3e8cbed035edc9d58c4f47aac7205dbd1e55c0 \ + --hash=sha256:8173a00766b88b47431e8e744f577d06c6c52c0e18181ac29a701a9d5c035b39 \ + --hash=sha256:ca7d1ee44144460ae119a6212aaff77a671a5729d543b981c786c052011cdfe3 \ + --hash=sha256:dd4f58b9295615ebb01563a38a5594fcb4664bb6106b2ccd00b90c0f1d14cf8c \ + --hash=sha256:ebe6052bc87ee51d84af231ccd27e5338fdc30d8bf49e51bdcfceb44c51c5625 \ + --hash=sha256:f6004332134580f0ede29d86a9a16102ba07c25799e0ab9683359216a419366b \ + --hash=sha256:f8403e31e64b15c9b3e2745b0400e2f43eea81493ae0fa85e275ed0800a89c19 + # via tavern (pyproject.toml) +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # allure-pytest + # allure-python-commons + # bleach + # dockerpty + # jsonschema + # paramiko + # python-dateutil + # tox + # websocket-client +stevedore==4.1.1 \ + --hash=sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a \ + --hash=sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e + # via tavern (pyproject.toml) +texttable==1.6.7 \ + --hash=sha256:290348fb67f7746931bcdfd55ac7584ecd4e5b0846ab164333f0794b121760f2 \ + --hash=sha256:b7b68139aa8a6339d2c320ca8b1dc42d13a7831a346b446cb9eb385f0c76310c + # via docker-compose +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via pre-commit +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via + # black + # build + # coverage + # mypy + # pep517 + # pytest + # tox +tomli-w==1.0.0 \ + --hash=sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463 \ + --hash=sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9 + # via flit +tox==3.28.0 \ + --hash=sha256:57b5ab7e8bb3074edc3c0c0b4b192a4f3799d3723b2c5b76f1fa9f2d40316eea \ + --hash=sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640 + # via + # tavern (pyproject.toml) + # tox-travis +tox-travis==0.12 \ + --hash=sha256:442c96b078333c94e272d0e90e4582e35e0529ea98bcd2f7f96053d690c4e7a4 \ + 
--hash=sha256:465cd8f71ad878962a3fce0e9e2e213994e0ae4e0c30f87fe6af1b04ea282dc4 + # via tavern (pyproject.toml) +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 + # via tavern (pyproject.toml) +types-pyyaml==6.0.12.2 \ + --hash=sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b \ + --hash=sha256:6840819871c92deebe6a2067fb800c11b8a063632eb4e3e755914e7ab3604e83 + # via tavern (pyproject.toml) +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e + # via + # black + # mypy +urllib3==1.26.13 \ + --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ + --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 + # via + # docker + # requests + # twine +virtualenv==20.17.1 \ + --hash=sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4 \ + --hash=sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058 + # via + # pre-commit + # tox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +websocket-client==0.59.0 \ + --hash=sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32 \ + --hash=sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c + # via + # docker + # docker-compose +werkzeug==2.2.3 \ + --hash=sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe \ + --hash=sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612 + # via flask +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 + # via + # pip-tools + # tavern (pyproject.toml) +zipp==3.11.0 \ + --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ + --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 + # via importlib-metadata -# tests -tox -flask - -# Useful for development -black==20.8b1 -bump2version -pre-commit -pygments -twine -wheel +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# pip +# setuptools diff --git a/coverage.sh b/scripts/coverage.sh old mode 100644 new mode 100755 similarity index 57% rename from coverage.sh rename to scripts/coverage.sh index df31a7032..b654708f1 --- a/coverage.sh +++ b/scripts/coverage.sh @@ -2,9 +2,9 @@ set -ex -tox -c tox-integration.ini -e py38-generic -tox -c tox-integration.ini -e py38-mqtt -tox -e py38 +tox -c tox-integration.ini -e py39-generic +tox -c tox-integration.ini -e py39-mqtt +tox -e py39 coverage combine --append .coverage tests/integration/.coverage example/mqtt/.coverage coverage report -m diff --git a/scripts/release.sh b/scripts/release.sh new file mode 100755 index 000000000..7fe58664f --- /dev/null +++ b/scripts/release.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +# Releasing: +# 0. pip install tbump@https://github.com/michaelboulton/tbump/archive/714ba8957a3c84b625608ceca39811ebe56229dc.zip -c constraints.txt +# 1. tbump --tag-message "" +# 2. 
run this script + +rm -rf build/ dist/ ./*.egg-info +fd pycache -u | xargs rm -rf + +flit build --format wheel --no-setup-py +flit publish --format wheel --no-setup-py --repository pypi diff --git a/scripts/smoke.bash b/scripts/smoke.bash new file mode 100755 index 000000000..dceb58d52 --- /dev/null +++ b/scripts/smoke.bash @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +set -ex + +ruff tavern + +# Separate as isort can interfere with other testenvs +tox --parallel -c tox.ini \ + -e py3check + +tox --parallel -c tox.ini \ + -e py3 \ + -e py3mypy + +tox -c tox-integration.ini \ + -e py3-generic \ + -e py3-advanced \ + -e py3-cookies \ + -e py3-components \ + -e py3-hooks \ + -e py3-mqtt diff --git a/scripts/update-changelog.bash b/scripts/update-changelog.bash new file mode 100755 index 000000000..9a6b1dc6a --- /dev/null +++ b/scripts/update-changelog.bash @@ -0,0 +1,30 @@ +#!/usr/bin/env bash + +print_changelog() { + echo "# Changelog" + echo + + for current_tag in $(git tag --sort=creatordate | grep -v '2.0.0.*\(a\|b\|rc\)'); do + tag_date=$(git log -1 --pretty=format:'%ad' --date=short ${current_tag}) + + hashes="##" + if [[ "$current_tag" =~ .*0$ ]]; then + hashes="#" + fi + echo "$hashes $() $(git tag -n1 $current_tag) (${tag_date})" + + tag_content=$(git tag -n9 $current_tag | tail -n '+2') + if [ -n "$tag_content" ]; then + echo + cat < CHANGELOG.md +$(print_changelog) +EOF diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index c51e99dbe..000000000 --- a/setup.cfg +++ /dev/null @@ -1,66 +0,0 @@ -[metadata] -name = tavern -description = Simple testing of RESTful APIs -version = attr: tavern.__version__ -long_description = file: README.rst -author = Michael Boulton -author_email = boulton@zoetrope.io -url = https://taverntesting.github.io/ - -license = MIT -license_file = LICENSE - -keywords = - testing - pytest -classifiers = - Development Status :: 5 - Production/Stable - Intended Audience :: Developers - Framework :: Pytest - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Topic :: Utilities - Topic :: Software Development :: Testing - License :: OSI Approved :: MIT License - -[options] -packages = find: -include_package_data = True - -install_requires = - PyYAML>=5.3.1,<6 - pykwalify>=1.7.0,<1.8 - requests>=2.22.0,<3 - pyjwt>=1.7.1,<2 - paho-mqtt>=1.3.1,<=1.5.1 - jmespath<1 - pytest>=6,<7 - python-box>4,<6 - stevedore - - grpcio - grpcio-reflection - grpcio-status - google-api-python-client - -[options.packages.find] -exclude = - tests - -[options.entry_points] -console_scripts = - tavern-ci = tavern.entry:main -pytest11 = - tavern = tavern.testutils.pytesthook - -tavern_http = - requests = tavern._plugins.rest.tavernhook:TavernRestPlugin -tavern_mqtt = - paho-mqtt = tavern._plugins.mqtt.tavernhook -tavern_grpc = - grpc = tavern._plugins.grpc.tavernhook - -[aliases] -test=pytest diff --git a/setup.py b/setup.py deleted file mode 100644 index e9d9fbfa1..000000000 --- a/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -from setuptools import setup - -TESTS_REQUIRE = [ - "pytest-cov", - "colorlog", - "faker", - "flake8", - "pygments", - "pylint==2.6.0", - "black", - "mypy", - "mypy-extensions", - "isort==5.7.0", - "allure-pytest", -] - -setup( - name="tavern", - - tests_require=TESTS_REQUIRE, - extras_require={ - "tests": TESTS_REQUIRE - }, - - zip_safe=True -) diff --git a/smoke.bash b/smoke.bash deleted file mode 100755 index 7d9494954..000000000 --- 
a/smoke.bash +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -set -ex - -PYVER=38 - -tox -c tox.ini \ - -e py${PYVER}flakes \ - -e py${PYVER} \ - -e py${PYVER}black \ - -e py${PYVER}lint - -tox -c tox-integration.ini \ - -e py${PYVER}-generic \ - -e py${PYVER}-mqtt diff --git a/tavern/__init__.py b/tavern/__init__.py index 2f1c7fe3a..c12592b32 100644 --- a/tavern/__init__.py +++ b/tavern/__init__.py @@ -1,2 +1,2 @@ """Stop pytest warning about module already imported: PYTEST_DONT_REWRITE""" -__version__ = "1.14.0" +__version__ = "2.0.5" diff --git a/tavern/_plugins/__init__.py b/tavern/_core/__init__.py similarity index 100% rename from tavern/_plugins/__init__.py rename to tavern/_core/__init__.py diff --git a/tavern/util/dict_util.py b/tavern/_core/dict_util.py similarity index 87% rename from tavern/util/dict_util.py rename to tavern/_core/dict_util.py index d2c776c40..277dc252e 100644 --- a/tavern/util/dict_util.py +++ b/tavern/_core/dict_util.py @@ -1,13 +1,16 @@ -import collections +import contextlib import logging import os import re import string +from typing import Any, Dict, List, Mapping, Union -from box import Box, box +import box import jmespath +from box.box import Box -from tavern.util.loader import ( +from tavern._core import exceptions +from tavern._core.loader import ( ANYTHING, ForceIncludeToken, RegexSentinel, @@ -15,17 +18,17 @@ TypeSentinel, ) -from . import exceptions from .formatted_str import FormattedString +from .strict_util import StrictSetting, StrictSettingKinds, extract_strict_setting logger = logging.getLogger(__name__) -def _check_and_format_values(to_format, box_vars): +def _check_and_format_values(to_format, box_vars: Mapping[str, Any]) -> str: formatter = string.Formatter() would_format = formatter.parse(to_format) - for (_, field_name, _, _) in would_format: + for _, field_name, _, _ in would_format: if field_name is None: continue @@ -51,7 +54,7 @@ def _check_and_format_values(to_format, box_vars): return to_format.format(**box_vars) -def _attempt_find_include(to_format, box_vars): +def _attempt_find_include(to_format: str, box_vars: box.Box): formatter = string.Formatter() would_format = list(formatter.parse(to_format)) @@ -85,10 +88,10 @@ def _attempt_find_include(to_format, box_vars): would_replace = formatter.get_field(field_name, [], box_vars)[0] - return formatter.convert_field(would_replace, conversion) + return formatter.convert_field(would_replace, conversion) # type: ignore -def format_keys(val, variables, no_double_format=True): +def format_keys(val, variables: Mapping, no_double_format: bool = True): """recursively format a dictionary with the given values Args: @@ -102,7 +105,11 @@ def format_keys(val, variables, no_double_format=True): str,int,list,dict: recursively formatted values """ formatted = val - box_vars = Box(variables) + + if not isinstance(variables, Box): + box_vars = Box(variables) + else: + box_vars = variables if isinstance(val, dict): formatted = {} @@ -131,7 +138,7 @@ def format_keys(val, variables, no_double_format=True): return formatted -def recurse_access_key(data, query): +def recurse_access_key(data, query: str): """ Search for something in the given data using the given query. 
@@ -201,10 +208,8 @@ def _deprecated_recurse_access_key(current_val, keys): else: current_key = keys.pop(0) - try: + with contextlib.suppress(ValueError): current_key = int(current_key) - except ValueError: - pass try: return _deprecated_recurse_access_key(current_val[current_key], keys) @@ -218,7 +223,7 @@ def _deprecated_recurse_access_key(current_val, keys): raise -def deep_dict_merge(initial_dct, merge_dct): +def deep_dict_merge(initial_dct: Dict, merge_dct: Mapping) -> dict: """Recursive dict merge. Instead of updating only top-level keys, dict_merge recurses down into dicts nested to an arbitrary depth and returns the merged dict. Keys values present in merge_dct take @@ -230,16 +235,12 @@ def deep_dict_merge(initial_dct, merge_dct): merge_dct: dct merged into dct Returns: - dict: recursively merged dict + recursively merged dict """ dct = initial_dct.copy() for k in merge_dct: - if ( - k in dct - and isinstance(dct[k], dict) - and isinstance(merge_dct[k], collections.abc.Mapping) - ): + if k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], Mapping): dct[k] = deep_dict_merge(dct[k], merge_dct[k]) else: dct[k] = merge_dct[k] @@ -247,7 +248,7 @@ def deep_dict_merge(initial_dct, merge_dct): return dct -def check_expected_keys(expected, actual): +def check_expected_keys(expected, actual) -> None: """Check that a set of expected keys is a superset of the actual keys Args: @@ -317,7 +318,12 @@ def yield_keyvals(block): yield [sidx], sidx, val -def check_keys_match_recursive(expected_val, actual_val, keys, strict=True): +def check_keys_match_recursive( + expected_val: Any, + actual_val: Any, + keys: List[Union[str, int]], + strict: StrictSettingKinds = True, +) -> None: """Utility to recursively check response values expected and actual both have to be of the same type or it will raise an @@ -329,19 +335,19 @@ def check_keys_match_recursive(expected_val, actual_val, keys, strict=True): True >>> check_keys_match_recursive({"a": {"b": "c"}}, {"a": {"b": "d"}}, []) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): - File "/home/michael/code/tavern/tavern/tavern/util/dict_util.py", line 223, in check_keys_match_recursive - tavern.util.exceptions.KeyMismatchError: Key mismatch: (expected["a"]["b"] = 'c', actual["a"]["b"] = 'd') + File "/home/michael/code/tavern/tavern/tavern/_core.util/dict_util.py", line 223, in check_keys_match_recursive + tavern._core.exceptions.KeyMismatchError: Key mismatch: (expected["a"]["b"] = 'c', actual["a"]["b"] = 'd') Todo: This could be turned into a single-dispatch function for cleaner code and to remove a load of the isinstance checks Args: - expected_val (dict, list, str): expected value - actual_val (dict, list, str): actual value - keys (list): any keys which have been recursively parsed to get to this + expected_val: expected value + actual_val: actual value + keys: any keys which have been recursively parsed to get to this point. Used for debug output. - strict (bool): Whether 'strict' key checking should be done. If this is + strict: Whether 'strict' key checking should be done. 
If this is False, a mismatch in dictionary keys between the expected and the actual values will not raise an error (but a mismatch in value will raise an error) @@ -350,8 +356,6 @@ def check_keys_match_recursive(expected_val, actual_val, keys, strict=True): KeyMismatchError: expected_val and actual_val did not match """ - # pylint: disable=too-many-locals,too-many-statements - def full_err(): """Get error in the format: @@ -395,13 +399,15 @@ def _format_err(which): issubclass(actual_type, type(expected_val)) ) + strict_bool, strict_setting = extract_strict_setting(strict) + try: - assert actual_val == expected_val + assert actual_val == expected_val # noqa except AssertionError as e: # At this point, there is likely to be an error unless we're using any # of the type sentinels - if not (expected_val is ANYTHING): # pylint: disable=superfluous-parens + if expected_val is not ANYTHING: if not expected_matches: if isinstance(expected_val, RegexSentinel): msg = "Expected a string to match regex '{}' ({})".format( @@ -438,12 +444,12 @@ def _format_err(which): # If there are more keys in 'expected' compared to 'actual', # this is still a hard error and we shouldn't continue - if extra_expected_keys or strict: + if extra_expected_keys or strict_bool: raise exceptions.KeyMismatchError(full_msg) from e else: logger.debug( "Mismatch in returned data, continuing due to strict=%s: %s", - strict, + strict_bool, full_msg, exc_info=True, ) @@ -461,10 +467,10 @@ def _format_err(which): logger.debug( "Skipping comparing missing key '%s' due to strict=%s", key, - strict, + strict_bool, ) elif isinstance(expected_val, list): - if not strict: + if not strict_bool: missing = [] actual_iter = iter(actual_val) @@ -500,6 +506,8 @@ def _format_err(which): ) else: logger.debug("'%s' present in response", e_val) + if strict_setting == StrictSetting.LIST_ANY_ORDER: + actual_iter = iter(actual_val) break if missing: @@ -519,9 +527,9 @@ def _format_err(which): try: check_keys_match_recursive(e_val, a_val, keys + [i], strict) except exceptions.KeyMismatchError as sub_e: - # This should _ALWAYS_ raise an error, but it will be more - # obvious where the error came from (in python 3 at least) - # and will take ANYTHING into account + # This should _ALWAYS_ raise an error (unless the reason it didn't match was the + # 'anything' sentinel), but it will be more obvious where the error came from + # (in python 3 at least) and will take ANYTHING into account raise sub_e from e elif expected_val is ANYTHING: logger.debug("Actual value = '%s' - matches !anything", actual_val) diff --git a/tavern/util/exceptions.py b/tavern/_core/exceptions.py similarity index 83% rename from tavern/util/exceptions.py rename to tavern/_core/exceptions.py index 297a731ba..992eabbbc 100644 --- a/tavern/util/exceptions.py +++ b/tavern/_core/exceptions.py @@ -9,7 +9,7 @@ class BadSchemaError(TavernException): class TestFailError(TavernException): """Test failed somehow""" - def __init__(self, msg, failures=None): + def __init__(self, msg, failures=None) -> None: super().__init__(msg) self.failures = failures or [] @@ -58,10 +58,18 @@ class GRPCRequestException(TavernException): """Error making requests in GRPCRequest()""" +class ProtoCompilerException(TavernException): + """Some kind of error using protoc""" + + class MQTTRequestException(TavernException): """Error making requests in MQTTRequest()""" +class MQTTTopicException(TavernException): + """Internal (?) 
error with subscriptions""" + + class MQTTTLSError(TavernException): """Error with TLS arguments to MQTT client""" @@ -114,10 +122,10 @@ class InvalidFormattedJsonError(TavernException): """Tried to use the magic json format tag in an invalid way""" -class InvalidExtBlockException(TavernException): +class MisplacedExtBlockException(TavernException): """Tried to use the '$ext' block in a place it is no longer valid to use it""" - def __init__(self, block): + def __init__(self, block) -> None: super().__init__( "$ext function found in block {} - this has been moved to verify_response_with block - see documentation".format( block @@ -129,9 +137,18 @@ class InvalidRetryException(TavernException): """Invalid spec for max_retries""" -class ProtoGenError(TavernException): - """Error generating protobuf imports""" - - class RegexAccessError(TavernException): """Error accessing a key via regex""" + + +class DuplicateStrictError(TavernException): + """Tried to set stage strictness for multiple responses""" + + +class ConcurrentError(TavernException): + """Error while processing concurrent future""" + + +class UnexpectedExceptionError(TavernException): + """We expected a certain kind of exception in check_exception_raised but it was something + else""" diff --git a/tavern/util/extfunctions.py b/tavern/_core/extfunctions.py similarity index 55% rename from tavern/util/extfunctions.py rename to tavern/_core/extfunctions.py index 70afcacfe..e7b534ee7 100644 --- a/tavern/util/extfunctions.py +++ b/tavern/_core/extfunctions.py @@ -1,12 +1,27 @@ import functools import importlib import logging +from typing import Any, List, Mapping, Optional + +from tavern._core import exceptions -from . import exceptions from .dict_util import deep_dict_merge -def get_pykwalify_logger(module): +def is_ext_function(block: Any) -> bool: + """ + Whether the given object is an ext function block + + Args: + block: Any object + + Returns: + bool: If it is an ext function style dict + """ + return isinstance(block, dict) and block.get("$ext", None) is not None + + +def get_pykwalify_logger(module: Optional[str]) -> logging.Logger: """Get logger for this module Have to do it like this because the way that pykwalify load extension @@ -20,16 +35,16 @@ def get_pykwalify_logger(module): return logging.getLogger(module) -def _getlogger(): - return get_pykwalify_logger("tavern.util.extfunctions") +def _getlogger() -> logging.Logger: + return get_pykwalify_logger("tavern._core.extfunctions") -def import_ext_function(entrypoint): +def import_ext_function(entrypoint: str): """Given a function name in the form of a setuptools entry point, try to dynamically load and return it Args: - entrypoint (str): setuptools-style entrypoint in the form + entrypoint: setuptools-style entrypoint in the form module.submodule:function Returns: @@ -48,14 +63,14 @@ def import_ext_function(entrypoint): raise exceptions.InvalidExtFunctionError(msg) from e try: - module = importlib.import_module(module) + imported = importlib.import_module(module) except ImportError as e: msg = "Error importing module {}".format(module) logger.exception(msg) raise exceptions.InvalidExtFunctionError(msg) from e try: - function = getattr(module, funcname) + function = getattr(imported, funcname) except AttributeError as e: msg = "No function named {} in {}".format(funcname, module) logger.exception(msg) @@ -64,83 +79,95 @@ def import_ext_function(entrypoint): return function -def get_wrapped_response_function(ext): +def get_wrapped_response_function(ext: Mapping): """Wraps a 
ext function with arguments given in the test file This is similar to functools.wrap, but this makes sure that 'response' is always the first argument passed to the function Args: - ext (dict): $ext function dict with function, extra_args, and + ext: $ext function dict with function, extra_args, and extra_kwargs to pass Returns: function: Wrapped function """ - args = ext.get("extra_args") or () - kwargs = ext.get("extra_kwargs") or {} - try: - func = import_ext_function(ext["function"]) - except KeyError as e: - raise exceptions.BadSchemaError( - "No function specified in external function block" - ) from e + + func, args, kwargs = _get_ext_values(ext) @functools.wraps(func) def inner(response): result = func(response, *args, **kwargs) - _getlogger().info("Result of calling '%s': '%s'", func, result) + _getlogger().debug("Result of calling '%s': '%s'", func, result) return result - inner.func = func + inner.func = func # type: ignore return inner -def get_wrapped_create_function(ext): +def get_wrapped_create_function(ext: Mapping): """Same as get_wrapped_response_function, but don't require a response""" - args = ext.get("extra_args") or () - kwargs = ext.get("extra_kwargs") or {} - func = import_ext_function(ext["function"]) + + func, args, kwargs = _get_ext_values(ext) @functools.wraps(func) def inner(): result = func(*args, **kwargs) - _getlogger().info("Result of calling '%s': '%s'", func, result) + _getlogger().debug("Result of calling '%s': '%s'", func, result) return result - inner.func = func + inner.func = func # type: ignore return inner -def update_from_ext(request_args, keys_to_check, test_block_config): +def _get_ext_values(ext: Mapping): + if not isinstance(ext, Mapping): + raise exceptions.InvalidExtFunctionError( + "ext block should be a dict, but it was a {}".format(type(ext)) + ) + + args = ext.get("extra_args") or () + kwargs = ext.get("extra_kwargs") or {} + try: + func = import_ext_function(ext["function"]) + except KeyError as e: + raise exceptions.BadSchemaError( + "No function specified in external function block" + ) from e + + return func, args, kwargs + + +def update_from_ext(request_args: dict, keys_to_check: List[str]) -> None: """ Updates the request_args dict with any values from external functions Args: - request_args (dict): dictionary of request args - keys_to_check (list): list of keys in request to possibly update from - test_block_config (dict): whether to merge or replace values + request_args: dictionary of request args + keys_to_check: list of keys in request to possibly update from """ - merge_ext_values = test_block_config.get("merge_ext_values") - new_args = {} + logger = _getlogger() for key in keys_to_check: try: - func = get_wrapped_create_function(request_args[key].pop("$ext")) - except (KeyError, TypeError, AttributeError): - pass - else: - new_args[key] = func() - - _getlogger().debug("Will merge ext values? 
%s", merge_ext_values) - - if merge_ext_values: - merged_args = deep_dict_merge(request_args, new_args) - else: - merged_args = dict(request_args, **new_args) + block = request_args[key] + except KeyError: + logger.debug("No %s block", key) + continue + + try: + pop = block.pop("$ext") + except (KeyError, AttributeError, TypeError): + logger.debug("No ext functions in %s block", key) + continue + + func = get_wrapped_create_function(pop) + new_args[key] = func() + + merged_args = deep_dict_merge(request_args, new_args) request_args.update(**merged_args) diff --git a/tavern/util/formatted_str.py b/tavern/_core/formatted_str.py similarity index 100% rename from tavern/util/formatted_str.py rename to tavern/_core/formatted_str.py diff --git a/tavern/util/general.py b/tavern/_core/general.py similarity index 59% rename from tavern/util/general.py rename to tavern/_core/general.py index 1bab3e3b3..51984c907 100644 --- a/tavern/util/general.py +++ b/tavern/_core/general.py @@ -1,25 +1,27 @@ import logging +import os +from typing import List -from tavern.util.loader import load_single_document_yaml +from tavern._core.loader import load_single_document_yaml from .dict_util import deep_dict_merge logger = logging.getLogger(__name__) -def load_global_config(global_cfg_paths): +def load_global_config(global_cfg_paths: List[os.PathLike]) -> dict: """Given a list of file paths to global config files, load each of them and return the joined dictionary. This does a deep dict merge. Args: - global_cfg_paths (list(str)): List of filenames to load from + global_cfg_paths: List of filenames to load from Returns: - dict: joined global configs + joined global configs """ - global_cfg = {} + global_cfg: dict = {} if global_cfg_paths: logger.debug("Loading global config from %s", global_cfg_paths) @@ -28,3 +30,14 @@ def load_global_config(global_cfg_paths): global_cfg = deep_dict_merge(global_cfg, contents) return global_cfg + + +valid_http_methods = [ + "GET", + "PUT", + "POST", + "DELETE", + "PATCH", + "OPTIONS", + "HEAD", +] diff --git a/tavern/testutils/jmesutils.py b/tavern/_core/jmesutils.py similarity index 68% rename from tavern/testutils/jmesutils.py rename to tavern/_core/jmesutils.py index 0738ca4eb..ed54ae22a 100644 --- a/tavern/testutils/jmesutils.py +++ b/tavern/_core/jmesutils.py @@ -1,11 +1,12 @@ import operator import re +from typing import Any, Dict, List, Sized -from tavern.util import exceptions +from tavern._core import exceptions -def test_type(val, mytype): - """ Check value fits one of the types, if so return true, else false """ +def test_type(val, mytype) -> bool: + """Check value fits one of the types, if so return true, else false""" typelist = TYPES.get(str(mytype).lower()) if typelist is None: raise TypeError( @@ -13,11 +14,11 @@ def test_type(val, mytype): ) try: for testtype in typelist: - if isinstance(val, testtype): + if isinstance(val, testtype): # type: ignore return True return False except TypeError: - return isinstance(val, typelist) + return isinstance(val, typelist) # type: ignore COMPARATORS = { @@ -36,7 +37,7 @@ def test_type(val, mytype): "regex": lambda x, y: regex_compare(str(x), str(y)), "type": test_type, } -TYPES = { +TYPES: Dict[str, List[Any]] = { "none": [type(None)], "number": [int, float], "int": [int], @@ -48,12 +49,12 @@ def test_type(val, mytype): } -def regex_compare(_input, regex): +def regex_compare(_input, regex) -> bool: return bool(re.search(regex, _input)) -def safe_length(var): - """ Exception-safe length check, returns -1 if no length on type or 
error """ +def safe_length(var: Sized) -> int: + """Exception-safe length check, returns -1 if no length on type or error""" try: return len(var) except TypeError: @@ -61,12 +62,12 @@ def safe_length(var): def validate_comparison(each_comparison): - try: - assert set(each_comparison.keys()) == {"jmespath", "operator", "expected"} - except KeyError as e: + if extra := set(each_comparison.keys()) - {"jmespath", "operator", "expected"}: raise exceptions.BadSchemaError( - "Invalid keys given to JMES validation function" - ) from e + "Invalid keys given to JMES validation function (got extra keys: {})".format( + extra + ) + ) jmespath, _operator, expected = ( each_comparison["jmespath"], @@ -82,7 +83,9 @@ def validate_comparison(each_comparison): return jmespath, _operator, expected -def actual_validation(_operator, _actual, expected, _expression, expression): +def actual_validation( + _operator: str, _actual, expected, _expression, expression +) -> None: if not COMPARATORS[_operator](_actual, expected): raise exceptions.JMESError( "Validation '{}' ({}) failed!".format(expression, _expression) diff --git a/tavern/util/loader.py b/tavern/_core/loader.py similarity index 91% rename from tavern/util/loader.py rename to tavern/_core/loader.py index 4cac76f39..1c33cef36 100644 --- a/tavern/util/loader.py +++ b/tavern/_core/loader.py @@ -1,11 +1,11 @@ # https://gist.github.com/joshbode/569627ced3076931b02f -from abc import abstractmethod -from distutils.util import strtobool -from itertools import chain +import dataclasses import logging import os.path import re import uuid +from abc import abstractmethod +from itertools import chain import pytest import yaml @@ -16,14 +16,14 @@ from yaml.resolver import Resolver from yaml.scanner import Scanner -from tavern.util import exceptions -from tavern.util.exceptions import BadSchemaError +from tavern._core import exceptions +from tavern._core.exceptions import BadSchemaError +from tavern._core.strtobool import strtobool logger = logging.getLogger(__name__) def makeuuid(loader, node): - # pylint: disable=unused-argument return str(uuid.uuid4()) @@ -48,7 +48,7 @@ def compose_document(self): def create_node_class(cls): - class node_class(cls): # noqa + class node_class(cls): def __init__(self, x, start_mark, end_mark): cls.__init__(self, x) self.start_mark = start_mark @@ -80,19 +80,18 @@ def construct_yaml_seq(self, node): return list_node(obj, node.start_mark, node.end_mark) -SourceMappingConstructor.add_constructor( - u"tag:yaml.org,2002:map", SourceMappingConstructor.construct_yaml_map +SourceMappingConstructor.add_constructor( # type: ignore + "tag:yaml.org,2002:map", SourceMappingConstructor.construct_yaml_map ) -SourceMappingConstructor.add_constructor( - u"tag:yaml.org,2002:seq", SourceMappingConstructor.construct_yaml_seq +SourceMappingConstructor.add_constructor( # type: ignore + "tag:yaml.org,2002:seq", SourceMappingConstructor.construct_yaml_seq ) yaml.add_representer(dict_node, yaml.representer.SafeRepresenter.represent_dict) yaml.add_representer(list_node, yaml.representer.SafeRepresenter.represent_list) -# pylint: disable=too-many-ancestors class IncludeLoader( Reader, Scanner, @@ -126,7 +125,6 @@ def __init__(self, stream): def _get_include_dirs(loader): - # pylint: disable=protected-access loader_list = [loader._root] if IncludeLoader.env_path_list is None: @@ -142,7 +140,7 @@ def _get_include_dirs(loader): def find_include(loader, node): - """Locate an include file and return the abs path. 
""" + """Locate an include file and return the abs path.""" for directory in _get_include_dirs(loader): filename = os.path.abspath( os.path.join(directory, loader.construct_scalar(node)) @@ -233,6 +231,7 @@ class DictSentinel(TypeSentinel): constructor = dict +@dataclasses.dataclass class RegexSentinel(TypeSentinel): """Sentinel that matches a regex in a part of the response @@ -240,7 +239,7 @@ class RegexSentinel(TypeSentinel): """ constructor = str - compiled = None + compiled: re.Pattern def __str__(self): return "".format(self.compiled.pattern) @@ -255,9 +254,7 @@ def passes(self, string): @classmethod def from_yaml(cls, loader, node): - c = cls() - c.compiled = re.compile(node.value) - return c + return cls(re.compile(node.value)) class _RegexMatchSentinel(RegexSentinel): @@ -356,11 +353,11 @@ class FloatToken(TypeConvertToken): constructor = float -class StrToBoolConstructor(object): +class StrToBoolConstructor: """Using `bool` as a constructor directly will evaluate all strings to `True`.""" def __new__(cls, s): - return bool(strtobool(s)) + return strtobool(s) class BoolToken(TypeConvertToken): @@ -368,7 +365,7 @@ class BoolToken(TypeConvertToken): constructor = StrToBoolConstructor -class StrToRawConstructor(object): +class StrToRawConstructor: """Used when we want to ignore brace formatting syntax""" def __new__(cls, s): @@ -383,6 +380,16 @@ class RawStrToken(TypeConvertToken): class ForceIncludeToken(TypeConvertToken): """Magic tag that changes the way string formatting works""" + yaml_tag = "!force_original_structure" + + @staticmethod + def constructor(_): + raise ValueError + + +class DeprecatedForceIncludeToken(ForceIncludeToken): + """Old name for the above""" + yaml_tag = "!force_format_include" @staticmethod @@ -422,15 +429,15 @@ def to_yaml(cls, dumper, data): yaml.dumper.Dumper.add_representer(ApproxScalar, ApproxSentinel.to_yaml) -def load_single_document_yaml(filename): +def load_single_document_yaml(filename: os.PathLike) -> dict: """ Load a yaml file and expect only one document Args: - filename (str): path to document + filename: path to document Returns: - dict: content of file + content of file Raises: UnexpectedDocumentsError: If more than one document was in the file @@ -438,7 +445,7 @@ def load_single_document_yaml(filename): with open(filename, "r", encoding="utf-8") as fileobj: try: - contents = yaml.load(fileobj, Loader=IncludeLoader) + contents = yaml.load(fileobj, Loader=IncludeLoader) # noqa except yaml.composer.ComposerError as e: msg = "Expected only one document in this file but found multiple" raise exceptions.UnexpectedDocumentsError(msg) from e @@ -446,13 +453,10 @@ def load_single_document_yaml(filename): return contents -def error_on_empty_scalar(self, mark): # pylint: disable=unused-argument +def error_on_empty_scalar(self, mark): location = "{mark.name:s}:{mark.line:d} - column {mark.column:d}".format(mark=mark) error = "Error at {} - cannot define an empty value in test - either give it a value or explicitly set it to None".format( location ) raise exceptions.BadSchemaError(error) - - -yaml.parser.Parser.process_empty_scalar = error_on_empty_scalar # type:ignore diff --git a/tavern/plugins.py b/tavern/_core/plugins.py similarity index 59% rename from tavern/plugins.py rename to tavern/_core/plugins.py index f1a51e8c1..79c5b4000 100644 --- a/tavern/plugins.py +++ b/tavern/_core/plugins.py @@ -3,29 +3,32 @@ This is here mainly to make MQTT easier, this will almost defintiely change significantly if/when a proper plugin system is implemented! 
""" +import dataclasses import logging +from functools import partial +from typing import Any, List, Mapping, Optional import stevedore -from tavern.util.dict_util import format_keys - -from .util import exceptions +from tavern._core import exceptions +from tavern._core.dict_util import format_keys +from tavern._core.pytest.config import TestConfig +from tavern.request import BaseRequest logger = logging.getLogger(__name__) -class PluginHelperBase(object): +class PluginHelperBase: """Base for plugins""" def plugin_load_error(mgr, entry_point, err): """Handle import errors""" - # pylint: disable=unused-argument msg = "Error loading plugin {} - {}".format(entry_point, err) raise exceptions.PluginLoadError(msg) from err -def is_valid_reqresp_plugin(ext): +def is_valid_reqresp_plugin(ext: Any) -> bool: """Whether this is a valid 'reqresp' plugin Requires certain functions/variables to be present @@ -34,7 +37,7 @@ def is_valid_reqresp_plugin(ext): Not all of these are required for all request/response types probably Args: - ext (object): class or module plugin object + ext: class or module plugin object Returns: bool: Whether the plugin has everything we need to use it @@ -59,13 +62,11 @@ def is_valid_reqresp_plugin(ext): return all(hasattr(ext.plugin, i) for i in required) -class _PluginCache(object): - # pylint: disable=inconsistent-return-statements - - def __init__(self): - self.plugins = {} +@dataclasses.dataclass +class _PluginCache: + plugins: List[Any] = dataclasses.field(default_factory=list) - def __call__(self, config=None): + def __call__(self, config: Optional[TestConfig] = None): if not config and not self.plugins: raise exceptions.PluginLoadError("No config to load plugins from") elif self.plugins: @@ -87,7 +88,7 @@ def _load_plugins(self, test_block_config): - Different plugin names Args: - test_block_config (dict): available config for test + test_block_config (tavern.pytesthook.config.TestConfig): available config for test Raises: exceptions.MissingSettingsError: Description @@ -95,20 +96,20 @@ def _load_plugins(self, test_block_config): Returns: list: Loaded plugins, can be a class or a module """ - # pylint: disable=no-self-use plugins = [] + def enabled(current_backend, ext): + return ( + ext.name == test_block_config.tavern_internal.backends[current_backend] + ) + for backend in ["http", "mqtt", "grpc"]: namespace = "tavern_{}".format(backend) - def enabled(ext): - # pylint: disable=cell-var-from-loop - return ext.name == test_block_config["backends"][backend] - manager = stevedore.EnabledExtensionManager( namespace=namespace, - check_func=enabled, + check_func=partial(enabled, backend), verify_requirements=True, on_load_failure_callback=plugin_load_error, ) @@ -131,15 +132,15 @@ def enabled(ext): load_plugins = _PluginCache() -def get_extra_sessions(test_spec, test_block_config): +def get_extra_sessions(test_spec: Mapping, test_block_config: TestConfig) -> dict: """Get extra 'sessions' for any extra test types Args: - test_spec (dict): Spec for the test block - test_block_config (dict): available config for test + test_spec: Spec for the test block + test_block_config: available config for test Returns: - dict: mapping of name: session. Session should be a context manager. + mapping of name to session. Session should be a context manager. 
""" sessions = {} @@ -155,26 +156,28 @@ def get_extra_sessions(test_spec, test_block_config): "Initialising session for %s (%s)", p.name, p.plugin.session_type ) session_spec = test_spec.get(p.name, {}) - formatted = format_keys( - session_spec, test_block_config.get("variables", {}) - ) + formatted = format_keys(session_spec, test_block_config.variables) sessions[p.name] = p.plugin.session_type(**formatted) return sessions -def get_request_type(stage, test_block_config, sessions): +def get_request_type( + stage: Mapping, + test_block_config: TestConfig, + sessions: Mapping, +) -> BaseRequest: """Get the request object for this stage there can only be one Args: - stage (dict): spec for this stage - test_block_config (dict): variables for this test run - sessions (dict): all available sessions + stage: spec for this stage + test_block_config: variables for this test run + sessions: all available sessions Returns: - BaseRequest: request object with a run() method + request object with a run() method Raises: exceptions.DuplicateKeysError: More than one kind of request specified @@ -215,7 +218,39 @@ def get_request_type(stage, test_block_config, sessions): return request_maker -def get_expected(stage, test_block_config, sessions): +class ResponseVerifier(dict): + plugin_name: str + + +def _foreach_response(stage: Mapping, test_block_config: TestConfig, action): + """Do something for each response + + Args: + stage: Stage of test + test_block_config: Config for test + action ((p: {plugin, name}, response_block: dict) -> Any): function that takes (plugin, response block) + + Returns: + mapping of plugin name to list of expected (normally length 1) + """ + + plugins = load_plugins(test_block_config) + + retvals = {} + + for p in plugins: + response_block = stage.get(p.plugin.response_block_name) + if response_block is not None: + retvals[p.name] = action(p, response_block) + + return retvals + + +def get_expected( + stage: Mapping, + test_block_config: TestConfig, + sessions: Mapping, +): """Get expected responses for each type of request Though only 1 request can be made, it can cause multiple responses. @@ -225,55 +260,60 @@ def get_expected(stage, test_block_config, sessions): BEFORE running the request. 
Args: - stage (dict): test stage - test_block_config (dict): available configuration for this test - sessions (dict): all available sessions + stage: test stage + test_block_config: available configuration for this test + sessions: all available sessions Returns: - dict: mapping of request type: expected response dict + mapping of request type to expected response dict """ - plugins = load_plugins(test_block_config) - - expected = {} - - for p in plugins: - if p.plugin.response_block_name in stage: - logger.debug("Getting expected response for %s", p.name) - plugin_expected = p.plugin.get_expected_from_request( - stage, test_block_config, sessions[p.name] - ) - expected[p.name] = plugin_expected + def action(p, response_block): + plugin_expected = p.plugin.get_expected_from_request( + response_block, test_block_config, sessions[p.name] + ) + if plugin_expected: + plugin_expected = ResponseVerifier(**plugin_expected) + plugin_expected.plugin_name = p.name + return plugin_expected + else: + return None - return expected + return _foreach_response(stage, test_block_config, action) -def get_verifiers(stage, test_block_config, sessions, expected): +def get_verifiers( + stage: Mapping, + test_block_config: TestConfig, + sessions: Mapping, + expected: Mapping, +): """Get one or more response validators for this stage Args: - stage (dict): spec for this stage - test_block_config (dict): variables for this test run - sessions (dict): all available sessions - expected (dict): expected responses for this stage + stage: spec for this stage + test_block_config: variables for this test run + sessions: all available sessions + expected: expected responses for this stage Returns: - BaseResponse: response validator object with a verify(response) method + response validator object with a verify(response) method """ - plugins = load_plugins(test_block_config) + def action(p, _): + session = sessions[p.name] + logger.debug( + "Initialising verifier for %s (%s)", p.name, p.plugin.verifier_type + ) + verifiers = [] - verifiers = [] + plugin_expected = expected[p.name] - for p in plugins: - if p.plugin.response_block_name in stage: - session = sessions[p.name] - logger.debug( - "Initialising verifier for %s (%s)", p.name, p.plugin.verifier_type - ) - verifier = p.plugin.verifier_type( - session, stage["name"], expected[p.name], test_block_config - ) - verifiers.append(verifier) + verifier = p.plugin.verifier_type( + session, stage["name"], plugin_expected, test_block_config + ) + verifiers.append(verifier) + + return verifiers - return verifiers + return _foreach_response(stage, test_block_config, action) diff --git a/tavern/testutils/pytesthook/__init__.py b/tavern/_core/pytest/__init__.py similarity index 100% rename from tavern/testutils/pytesthook/__init__.py rename to tavern/_core/pytest/__init__.py diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py new file mode 100644 index 000000000..22da8ca0a --- /dev/null +++ b/tavern/_core/pytest/config.py @@ -0,0 +1,48 @@ +import copy +import dataclasses +from typing import Any + +from tavern._core.strict_util import StrictLevel + + +@dataclasses.dataclass(frozen=True) +class TavernInternalConfig: + """Internal config that should be used only by tavern""" + + pytest_hook_caller: Any + backends: dict + + +@dataclasses.dataclass(frozen=True) +class TestConfig: + """Tavern configuration - there is a global config, then test-specific config, and + finally stage-specific config, but they all use this structure + + Attributes: + follow_redirects: 
whether the test should follow redirects + variables: variables available for use in the stage + strict: Strictness for test/stage + stages: Any extra stages imported from other config files + """ + + variables: dict + strict: StrictLevel + follow_redirects: bool + stages: list + + tavern_internal: TavernInternalConfig + + def copy(self) -> "TestConfig": + """Returns a shallow copy of self""" + return copy.copy(self) + + def with_new_variables(self) -> "TestConfig": + """Returns a shallow copy of self but with the variables copied. This stops things being + copied between tests. Can't use deepcopy because the variables might contain things that + can't be pickled and hence can't be deep copied.""" + copied = self.copy() + return dataclasses.replace(copied, variables=copy.copy(self.variables)) + + def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": + """Create a copy of the config but with a new strictness setting""" + return dataclasses.replace(self, strict=new_strict) diff --git a/tavern/testutils/pytesthook/error.py b/tavern/_core/pytest/error.py similarity index 72% rename from tavern/testutils/pytesthook/error.py rename to tavern/_core/pytest/error.py index a0fde7d34..6512db003 100644 --- a/tavern/testutils/pytesthook/error.py +++ b/tavern/_core/pytest/error.py @@ -1,14 +1,17 @@ import json import logging import re +from io import StringIO +from typing import List, Mapping, Optional -from _pytest._code.code import FormattedExcinfo -import py import yaml +from _pytest._code.code import FormattedExcinfo +from _pytest._io import TerminalWriter -from tavern.util import exceptions -from tavern.util.dict_util import format_keys -from tavern.util.stage_lines import ( +from tavern._core import exceptions +from tavern._core.dict_util import format_keys +from tavern._core.report import prepare_yaml +from tavern._core.stage_lines import ( end_mark, get_stage_lines, read_relevant_lines, @@ -18,8 +21,8 @@ logger = logging.getLogger(__name__) -class ReprdError(object): - def __init__(self, exce, item): +class ReprdError: + def __init__(self, exce, item) -> None: self.exce = exce self.item = item @@ -33,25 +36,26 @@ def _get_available_format_keys(self): dict: variables for formatting test """ try: - # pylint: disable=protected-access - keys = self.exce._excinfo[1].test_block_config["variables"] + keys = self.exce._excinfo[1].test_block_config.variables except AttributeError: logger.warning("Unable to read stage variables - error output may be wrong") - keys = self.item.global_cfg + keys = self.item.global_cfg.variables return keys - def _print_format_variables(self, tw, code_lines): + def _print_format_variables( + self, tw: TerminalWriter, code_lines: List[str] + ) -> List[str]: """Print a list of the format variables and their value at this stage If the format variable is not defined, print it in red as '???' 
Args: - tw (TerminalWriter): Pytest TW instance - code_lines (list(str)): Source lines for this stage + tw: Pytest TW instance + code_lines: Source lines for this stage Returns: - list(str): List of all missing format variables + List of all missing format variables """ def read_formatted_vars(lines): @@ -63,7 +67,7 @@ def read_formatted_vars(lines): if match.group("format_var") is not None: yield match.group("format_var") - format_variables = list(read_formatted_vars(code_lines)) + format_variables = set(read_formatted_vars(code_lines)) keys = self._get_available_format_keys() @@ -102,8 +106,12 @@ def read_formatted_vars(lines): return missing def _print_test_stage( - self, tw, code_lines, missing_format_vars, line_start - ): # pylint: disable=no-self-use + self, + tw: TerminalWriter, + code_lines: List[str], + missing_format_vars: List[str], + line_start: Optional[int], + ) -> None: """Print the direct source lines from this test stage If we couldn't get the stage for some reason, print the entire test out. @@ -112,11 +120,11 @@ def _print_test_stage( them in red. Args: - tw (Termin): Pytest TW instance - code_lines (list(str)): Raw source for this stage - missing_format_vars (list(str)): List of all missing format + tw: Pytest TW instance + code_lines: Raw source for this stage + missing_format_vars: List of all missing format variables for this stage - line_start (int): Source line of this stage + line_start: Source line of this stage """ if line_start: tw.line( @@ -131,33 +139,39 @@ def _print_test_stage( else: tw.line(line, white=True) - def _print_formatted_stage(self, tw, stage): + def _print_formatted_stage(self, tw: TerminalWriter, stage: Mapping) -> None: """Print the 'formatted' stage that Tavern will actually use to send the request/process the response Args: - tw (TerminalWriter): Pytest TW instance - stage (dict): The 'final' stage used by Tavern + tw: Pytest TW instance + stage: The 'final' stage used by Tavern """ tw.line("Formatted stage:", white=True, bold=True) - # This will definitely exist - formatted_lines = yaml.dump(stage, default_flow_style=False).split("\n") - keys = self._get_available_format_keys() + # Format stage variables recursively + formatted_stage = format_keys(stage, keys) + + # Replace formatted strings with strings for dumping + formatted_stage = prepare_yaml(formatted_stage) + + # Dump formatted stage to YAML format + formatted_lines = yaml.dump(formatted_stage, default_flow_style=False).split( + "\n" + ) + for line in formatted_lines: if not line: continue - if "{}" not in line: - line = format_keys(line, keys) tw.line(" {}".format(line), white=True) - def _print_errors(self, tw): + def _print_errors(self, tw: TerminalWriter) -> None: """Print any errors in the 'normal' Pytest style Args: - tw (TerminalWriter): Pytest TW instance + tw: Pytest TW instance """ tw.line("Errors:", white=True, bold=True) @@ -168,13 +182,12 @@ def _print_errors(self, tw): for line in lines: tw.line(line, red=True, bold=True) - def toterminal(self, tw): + def toterminal(self, tw: TerminalWriter) -> None: """Print out a custom error message to the terminal""" # Try to get the stage so we can print it out. 
I'm not sure if the stage # will ever NOT be present, but better to check just in case try: - # pylint: disable=protected-access stage = self.exce._excinfo[1].stage except AttributeError: stage = None @@ -208,12 +221,12 @@ def toterminal(self, tw): self._print_errors(tw) @property - def longreprtext(self): - tw = py.io.TerminalWriter(stringio=True) # pylint: disable=no-member - tw.hasmarkup = False + def longreprtext(self) -> str: + # information. + io = StringIO() + tw = TerminalWriter(file=io) self.toterminal(tw) - exc = tw.stringio.getvalue() - return exc.strip() + return io.getvalue().strip() - def __str__(self): + def __str__(self) -> str: return self.longreprtext diff --git a/tavern/testutils/pytesthook/file.py b/tavern/_core/pytest/file.py similarity index 65% rename from tavern/testutils/pytesthook/file.py rename to tavern/_core/pytest/file.py index 48f2fb23e..0733cb01e 100644 --- a/tavern/testutils/pytesthook/file.py +++ b/tavern/_core/pytest/file.py @@ -2,14 +2,17 @@ import functools import itertools import logging +from typing import Dict, Iterator, List, Mapping import pytest import yaml +from box import Box -from tavern.schemas.files import verify_tests -from tavern.util import exceptions -from tavern.util.dict_util import format_keys, get_tavern_box -from tavern.util.loader import IncludeLoader +from tavern._core import exceptions +from tavern._core.dict_util import deep_dict_merge, format_keys, get_tavern_box +from tavern._core.extfunctions import get_wrapped_create_function, is_ext_function +from tavern._core.loader import IncludeLoader +from tavern._core.schema.files import verify_tests from .item import YamlItem from .util import load_global_cfg @@ -83,11 +86,11 @@ def _format_test_marks(original_marks, fmt_vars, test_name): return pytest_marks, formatted_marks -def _generate_parametrized_test_items(keys, vals_combination): +def _generate_parametrized_test_items(keys: List, vals_combination): """Generate test name from given key(s)/value(s) combination Args: - keys (list): list of keys to format name with + keys: list of keys to format name with vals_combination (tuple(str)): this combination of values for the key """ flattened_values = [] @@ -102,10 +105,54 @@ def _generate_parametrized_test_items(keys, vals_combination): variables[key] = value flattened_values += [value] else: + if not isinstance(value, (list, tuple)): + value = [value] + + if len(value) != len(key): + raise exceptions.BadSchemaError( + "Invalid match between numbers of keys and number of values in parametrize mark ({} keys, {} values)".format( + (key), (value) + ) + ) + for subkey, subvalue in zip(key, value): variables[subkey] = subvalue flattened_values += [subvalue] + def maybe_load_ext(v): + key, value = v + + if is_ext_function(value): + # If it is an ext function, load the new (or supplemental) value[s] + ext = value.pop("$ext") + f = get_wrapped_create_function(ext) + new_value = f() + + if len(value) == 0: + # Use only this new value + return key, new_value + elif isinstance(new_value, dict): + # Merge with some existing data. At this point 'value' is known to be a dict. + return key, deep_dict_merge(value, f()) + else: + # For example, if it's defined like + # + # - testkey: testval + # $ext: + # function: mod:func + # + # and 'mod:func' returns a string, it's impossible to 'merge' with the existing data. 
+ logger.error("Values still in 'val': %s", value) + raise exceptions.BadSchemaError( + "There were extra key/value pairs in the 'val' for this parametrize mark, but the ext function {} returned '{}' (of type {}) that was not a dictionary. It is impossible to merge these values.".format( + ext, new_value, type(new_value) + ) + ) + + return key, value + + variables = dict(map(maybe_load_ext, variables.items())) + logger.debug("Variables for this combination: %s", variables) logger.debug("Values for this combination: %s", flattened_values) @@ -116,7 +163,12 @@ def _generate_parametrized_test_items(keys, vals_combination): return variables, inner_formatted -def _get_parametrized_items(parent, test_spec, parametrize_marks, pytest_marks): +def _get_parametrized_items( + parent: pytest.File, + test_spec: Dict, + parametrize_marks: List[Dict], + pytest_marks: List[pytest.Mark], +) -> Iterator[YamlItem]: """Return new items with new format values available based on the mark This will change the name from something like 'test a thing' to 'test a @@ -128,9 +180,24 @@ def _get_parametrized_items(parent, test_spec, parametrize_marks, pytest_marks): doesn't appear to do anything. This could be removed? """ + logger.debug("parametrize marks: %s", parametrize_marks) + # These should be in the same order as specified in the input file vals = [i["parametrize"]["vals"] for i in parametrize_marks] + logger.debug("(possibly wrapped) values: %s", vals) + + def unwrap_map(value): + if is_ext_function(value): + ext = value.pop("$ext") + f = get_wrapped_create_function(ext) + new_value = f() + return new_value + + return value + + vals = list(map(unwrap_map, vals)) + try: combined = itertools.product(*vals) except TypeError as e: @@ -141,6 +208,13 @@ def _get_parametrized_items(parent, test_spec, parametrize_marks, pytest_marks): keys = [i["parametrize"]["key"] for i in parametrize_marks] for vals_combination in combined: + logger.debug("Generating test for %s/%s", keys, vals_combination) + + if len(vals_combination) != len(keys): + raise exceptions.BadSchemaError( + "Invalid match between numbers of keys and number of values in parametrize mark" + ) + variables, inner_formatted = _generate_parametrized_test_items( keys, vals_combination ) @@ -161,7 +235,7 @@ def _get_parametrized_items(parent, test_spec, parametrize_marks, pytest_marks): ) # And create the new item item_new = YamlItem.yamlitem_from_parent( - spec_new["test_name"], parent, spec_new, parent.fspath + spec_new["test_name"], parent, spec_new, parent.path ) item_new.add_markers(pytest_marks) @@ -171,41 +245,44 @@ def _get_parametrized_items(parent, test_spec, parametrize_marks, pytest_marks): class YamlFile(pytest.File): """Custom `File` class that loads each test block as a different test""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) # This (and the FakeObj below) are to make pytest-pspec not error out. - # The 'doctstring' for this is the filename, the 'docstring' for each + # The 'docstring' for this is the filename, the 'docstring' for each # individual test is the actual test name. 
- class FakeObj(object): - __doc__ = self.fspath + class FakeObj: + __doc__ = str(self.path) self.obj = FakeObj - def _get_test_fmt_vars(self, test_spec): + def _get_test_fmt_vars(self, test_spec: Mapping) -> dict: """Get any format variables that can be inferred for the test at this point Args: - test_spec (dict): Test specification, possibly with included config files + test_spec: Test specification, possibly with included config files Returns: - dict: available format variables + available format variables """ # Get included variables so we can do things like: # skipif: {my_integer} > 2 # skipif: 'https' in '{hostname}' # skipif: '{hostname}'.contains('ignoreme') - fmt_vars = {} + fmt_vars: Dict = {} global_cfg = load_global_cfg(self.config) - fmt_vars.update(**global_cfg.get("variables", {})) + fmt_vars.update(**global_cfg.variables) included = test_spec.get("includes", []) for i in included: fmt_vars.update(**i.get("variables", {})) - # Needed if something in a config file uses tavern.env_vars - tavern_box = get_tavern_box() + if self.session.config.option.collectonly: + tavern_box = Box(default_box=True) + else: + # Needed if something in a config file uses tavern.env_vars + tavern_box = get_tavern_box() try: fmt_vars = _format_without_inner(fmt_vars, tavern_box) @@ -217,20 +294,20 @@ def _get_test_fmt_vars(self, test_spec): tavern_box.merge_update(**fmt_vars) return tavern_box - def _generate_items(self, test_spec): + def _generate_items(self, test_spec: Dict) -> Iterator[YamlItem]: """Modify or generate tests based on test spec If there are any 'parametrize' marks, this will generate extra tests based on the values Args: - test_spec (dict): Test specification + test_spec: Test specification Yields: - YamlItem: Tavern YAML test + Tavern YAML test """ item = YamlItem.yamlitem_from_parent( - test_spec["test_name"], self, test_spec, self.fspath + test_spec["test_name"], self, test_spec, self.path ) original_marks = test_spec.get("marks", []) @@ -260,30 +337,34 @@ def _generate_items(self, test_spec): yield item - def collect(self): + def collect(self) -> Iterator[YamlItem]: """Load each document in the given input file into a different test Yields: - YamlItem: Essentially an individual pytest 'test object' + Pytest 'test objects' """ try: # Convert to a list so we can catch parser exceptions all_tests = list( - yaml.load_all(self.fspath.open(encoding="utf-8"), Loader=IncludeLoader) + yaml.load_all(self.path.open(encoding="utf-8"), Loader=IncludeLoader) ) except yaml.parser.ParserError as e: raise exceptions.BadSchemaError from e for test_spec in all_tests: if not test_spec: - logger.warning("Empty document in input file '%s'", self.fspath) + logger.warning("Empty document in input file '%s'", self.path) continue try: for i in self._generate_items(test_spec): i.initialise_fixture_attrs() yield i - except (TypeError, KeyError): - verify_tests(test_spec, with_plugins=False) - raise + except (TypeError, KeyError) as e: + try: + verify_tests(test_spec, with_plugins=False) + except Exception as e2: + raise e2 from e + else: + raise diff --git a/tavern/testutils/pytesthook/hooks.py b/tavern/_core/pytest/hooks.py similarity index 65% rename from tavern/testutils/pytesthook/hooks.py rename to tavern/_core/pytest/hooks.py index be66b3f76..c3e1c89b4 100644 --- a/tavern/testutils/pytesthook/hooks.py +++ b/tavern/_core/pytest/hooks.py @@ -1,24 +1,26 @@ +import os +import pathlib import re import pytest -from tavern.util import exceptions +from tavern._core import exceptions from .util import 
add_ini_options, add_parser_options, get_option_generic -def pytest_addoption(parser): +def pytest_addoption(parser: pytest.Parser) -> None: add_parser_options(parser.addoption, with_defaults=False) add_ini_options(parser) -def pytest_collect_file(parent, path): +def pytest_collect_file(parent, path: os.PathLike): """On collecting files, get any files that end in .tavern.yaml or .tavern.yml as tavern test files """ - if int(pytest.__version__.split(".")[0]) < 5: - raise exceptions.TavernException("Only pytest >=5 is supported") + if int(pytest.__version__.split(".", maxsplit=1)[0]) < 7: + raise exceptions.TavernException("Only pytest >=7 is supported") pattern = get_option_generic( parent.config, "tavern-file-path-regex", r".+\.tavern\.ya?ml$" @@ -33,21 +35,23 @@ def pytest_collect_file(parent, path): try: compiled = re.compile(pattern) - except Exception as e: # pylint: disable=broad-except + except Exception as e: raise exceptions.InvalidConfigurationException(e) from e match_tavern_file = compiled.search from .file import YamlFile - if match_tavern_file(path.strpath): - return YamlFile.from_parent(parent, fspath=path) + path = pathlib.Path(path) + + if match_tavern_file(str(path)): + return YamlFile.from_parent(parent, path=path) return None -def pytest_addhooks(pluginmanager): +def pytest_addhooks(pluginmanager) -> None: """Add our custom tavern hooks""" - from . import newhooks # pylint: disable=import-outside-toplevel + from . import newhooks pluginmanager.add_hookspecs(newhooks) diff --git a/tavern/testutils/pytesthook/item.py b/tavern/_core/pytest/item.py similarity index 72% rename from tavern/testutils/pytesthook/item.py rename to tavern/_core/pytest/item.py index 666d6bd83..2baff2fe2 100644 --- a/tavern/testutils/pytesthook/item.py +++ b/tavern/_core/pytest/item.py @@ -1,18 +1,23 @@ -import copy import logging +import pathlib +from typing import Optional, Tuple -from _pytest import fixtures import attr import pytest - -from tavern.core import run_test -from tavern.plugins import load_plugins -from tavern.schemas.files import verify_tests -from tavern.testutils.pytesthook import call_hook -from tavern.util import exceptions -from tavern.util.report import attach_text - -from .error import ReprdError +import yaml +from _pytest._code.code import ExceptionInfo +from _pytest.nodes import Node + +from tavern._core import exceptions +from tavern._core.loader import error_on_empty_scalar +from tavern._core.plugins import load_plugins +from tavern._core.pytest import call_hook +from tavern._core.pytest.error import ReprdError +from tavern._core.report import attach_text +from tavern._core.run import run_test +from tavern._core.schema.files import verify_tests + +from .config import TestConfig from .util import load_global_cfg logger = logging.getLogger(__name__) @@ -26,42 +31,60 @@ class YamlItem(pytest.Item): should be enough to track down what went wrong Attributes: - path (str): filename that this test came from - spec (dict): The whole dictionary of the test + path: filename that this test came from + spec: The whole dictionary of the test """ - def __init__(self, name, parent, spec, path): - super().__init__(name, parent) + # See https://github.com/taverntesting/tavern/issues/825 + _patched_yaml = False + + def __init__( + self, *, name: str, parent, spec, path: pathlib.Path, **kwargs + ) -> None: + super().__init__(name, parent, **kwargs) self.path = path self.spec = spec - self.global_cfg = {} + self.global_cfg: Optional[TestConfig] = None + + if not YamlItem._patched_yaml: + 
yaml.parser.Parser.process_empty_scalar = ( # type:ignore + error_on_empty_scalar + ) + + YamlItem._patched_yaml = True @classmethod - def yamlitem_from_parent(cls, name, parent, spec, path): + def yamlitem_from_parent(cls, name, parent: Node, spec, path: pathlib.Path): return cls.from_parent(parent, name=name, spec=spec, path=path) - def initialise_fixture_attrs(self): - # pylint: disable=protected-access,attribute-defined-outside-init - self.funcargs = {} + def initialise_fixture_attrs(self) -> None: + self.funcargs = {} # type: ignore # _get_direct_parametrize_args checks parametrize arguments in Python # functions, but we don't care about that in Tavern. - self.session._fixturemanager._get_direct_parametrize_args = lambda _: [] + self.session._fixturemanager._get_direct_parametrize_args = lambda _: [] # type: ignore fixtureinfo = self.session._fixturemanager.getfixtureinfo( self, self.obj, type(self), funcargs=False ) self._fixtureinfo = fixtureinfo self.fixturenames = fixtureinfo.names_closure - self._request = fixtures.FixtureRequest(self) + self._request = pytest.FixtureRequest(self, _ispytest=True) + + @property + def location(self): + """get location in file""" + location = super().location + location = (location[0], self.spec.start_mark.line, location[2]) + return location # Hack to stop issue with pytest-rerunfailures _initrequest = initialise_fixture_attrs - def setup(self): + def setup(self) -> None: super().setup() - self._request._fillfixtures() # pylint: disable=protected-access + self._request._fillfixtures() @property def obj(self): @@ -85,7 +108,7 @@ def fakefun(): def _obj(self): return self.obj - def add_markers(self, pytest_marks): + def add_markers(self, pytest_marks) -> None: for pm in pytest_marks: if pm.name == "usefixtures": if ( @@ -155,31 +178,23 @@ def _load_fixture_values(self): return values - def runtest(self): - # Do a deep copy because this sometimes still retains things from previous tests(?) - self.global_cfg = copy.deepcopy(load_global_cfg(self.config)) - - self.global_cfg.setdefault("variables", {}) + def runtest(self) -> None: + self.global_cfg = load_global_cfg(self.config) load_plugins(self.global_cfg) - self.global_cfg["tavern_internal"] = {"pytest_hook_caller": self.config.hook} - # INTERNAL - # NOTE - now that we can 'mark' tests, we could use pytest.mark.xfail - # instead. This doesn't differentiate between an error in verification - # and an error when running the test though. 
xfail = self.spec.get("_xfail", False) try: fixture_values = self._load_fixture_values() - self.global_cfg["variables"].update(fixture_values) + self.global_cfg.variables.update(fixture_values) call_hook( self.global_cfg, "pytest_tavern_beta_before_every_test_run", test_dict=self.spec, - variables=self.global_cfg["variables"], + variables=self.global_cfg.variables, ) verify_tests(self.spec) @@ -195,6 +210,7 @@ def runtest(self): stage["name"] = stage["id"] run_test(self.path, self.spec, self.global_cfg) + except exceptions.BadSchemaError: if xfail == "verify": logger.info("xfailing test while verifying schema") @@ -205,14 +221,26 @@ def runtest(self): logger.info("xfailing test when running") self.add_marker(pytest.mark.xfail, True) raise + else: + if xfail: + raise Exception("internal: xfail test did not fail '{}'".format(xfail)) # else: # if xfail: # logger.error("Expected test to fail") # raise exceptions.TestFailError( # "Expected test to fail at {} stage".format(xfail) # ) + finally: + call_hook( + self.global_cfg, + "pytest_tavern_beta_after_every_test_run", + test_dict=self.spec, + variables=self.global_cfg.variables, + ) - def repr_failure(self, excinfo, style=None): + def repr_failure( + self, excinfo: ExceptionInfo[BaseException], style: Optional[str] = None + ): """called when self.runtest() raises an exception. By default, will raise a custom formatted traceback if it's a tavern error. if not, will use the default @@ -234,5 +262,9 @@ def repr_failure(self, excinfo, style=None): attach_text(str(error), name="error_output") return error - def reportinfo(self): - return self.fspath, 0, "{s.path}::{s.name:s}".format(s=self) + def reportinfo(self) -> Tuple[pathlib.Path, int, str]: + return ( + self.path, + 0, + "{s.path}::{s.name:s}".format(s=self), + ) diff --git a/tavern/testutils/pytesthook/newhooks.py b/tavern/_core/pytest/newhooks.py similarity index 75% rename from tavern/testutils/pytesthook/newhooks.py rename to tavern/_core/pytest/newhooks.py index 7566da599..8fd494b2b 100644 --- a/tavern/testutils/pytesthook/newhooks.py +++ b/tavern/_core/pytest/newhooks.py @@ -1,10 +1,9 @@ -# pylint: disable=unused-argument import logging logger = logging.getLogger(__name__) -def pytest_tavern_beta_before_every_test_run(test_dict, variables): +def pytest_tavern_beta_before_every_test_run(test_dict, variables) -> None: """Called: - directly after fixtures are loaded for a test @@ -21,7 +20,18 @@ def pytest_tavern_beta_before_every_test_run(test_dict, variables): """ -def pytest_tavern_beta_after_every_response(expected, response): +def pytest_tavern_beta_after_every_test_run(test_dict, variables) -> None: + """Called: + + - After test run + + Args: + test_dict (dict): Test to run + variables (dict): Available variables + """ + + +def pytest_tavern_beta_after_every_response(expected, response) -> None: """Called after every _response_ - including MQTT/HTTP/etc Note: @@ -34,7 +44,7 @@ def pytest_tavern_beta_after_every_response(expected, response): """ -def pytest_tavern_beta_before_every_request(request_args): +def pytest_tavern_beta_before_every_request(request_args) -> None: """Called just before every request - including MQTT/HTTP/etc Note: @@ -46,12 +56,10 @@ def pytest_tavern_beta_before_every_request(request_args): """ -def call_hook(test_block_config, hookname, **kwargs): +def call_hook(test_block_config, hookname, **kwargs) -> None: """Utility to call the hooks""" try: - hook = getattr( - test_block_config["tavern_internal"]["pytest_hook_caller"], hookname - ) + hook = 
getattr(test_block_config.tavern_internal.pytest_hook_caller, hookname) except AttributeError: logger.critical("Error getting tavern hook!") raise diff --git a/tavern/testutils/pytesthook/util.py b/tavern/_core/pytest/util.py similarity index 70% rename from tavern/testutils/pytesthook/util.py rename to tavern/_core/pytest/util.py index 5194da1d4..88e70f743 100644 --- a/tavern/testutils/pytesthook/util.py +++ b/tavern/_core/pytest/util.py @@ -1,19 +1,23 @@ -from functools import lru_cache import logging +from functools import lru_cache +from typing import Any, Dict + +import pytest -from tavern.util.dict_util import format_keys, get_tavern_box -from tavern.util.general import load_global_config -from tavern.util.strict_util import StrictLevel +from tavern._core.dict_util import format_keys, get_tavern_box +from tavern._core.general import load_global_config +from tavern._core.pytest.config import TavernInternalConfig, TestConfig +from tavern._core.strict_util import StrictLevel logger = logging.getLogger(__name__) -def add_parser_options(parser_addoption, with_defaults=True): +def add_parser_options(parser_addoption, with_defaults: bool = True) -> None: """Add argparse options This is shared between the CLI and pytest (for now) - See also testutils.pytesthook.hooks.pytest_addoption + See also _core.pytesthook.hooks.pytest_addoption """ parser_addoption( "--tavern-global-cfg", @@ -40,7 +44,6 @@ def add_parser_options(parser_addoption, with_defaults=True): help="Default response matching strictness", default=None, nargs="+", - choices=["json", "headers", "redirect_query_params"], ) parser_addoption( "--tavern-use-default-traceback", @@ -61,18 +64,12 @@ def add_parser_options(parser_addoption, with_defaults=True): action="store", nargs=1, ) - parser_addoption( - "--tavern-merge-ext-function-values", - help="Merge values from external functions in http requests", - default=False, - action="store_true", - ) -def add_ini_options(parser): +def add_ini_options(parser: pytest.Parser) -> None: """Add an option to pass in a global config file for tavern - See also testutils.pytesthook.util.add_parser_options + See also _core.pytesthook._core.util.add_parser_options """ parser.addini( "tavern-global-cfg", @@ -113,57 +110,59 @@ def add_ini_options(parser): default=r".+\.tavern\.ya?ml$", type="args", ) - parser.addini( - "tavern-merge-ext-function-values", - help="Merge values from external functions in http requests", - default=False, - type="bool", - ) + + +def load_global_cfg(pytest_config: pytest.Config) -> TestConfig: + return _load_global_cfg(pytest_config).with_new_variables() @lru_cache() -def load_global_cfg(pytest_config): +def _load_global_cfg(pytest_config: pytest.Config) -> TestConfig: """Load globally included config files from cmdline/cfg file arguments Args: - pytest_config (pytest.Config): Pytest config object + pytest_config: Pytest config object Returns: - dict: variables/stages/etc from global config files + variables/stages/etc from global config files Raises: exceptions.UnexpectedKeysError: Invalid settings in one or more config files detected """ + # Load ini first ini_global_cfg_paths = pytest_config.getini("tavern-global-cfg") or [] # THEN load command line, to allow overwriting of values cmdline_global_cfg_paths = pytest_config.getoption("tavern_global_cfg") or [] all_paths = ini_global_cfg_paths + cmdline_global_cfg_paths - global_cfg = load_global_config(all_paths) + global_cfg_dict = load_global_config(all_paths) try: - loaded_variables = global_cfg["variables"] + 
loaded_variables = global_cfg_dict["variables"] except KeyError: logger.debug("Nothing to format in global config files") + variables = {} else: tavern_box = get_tavern_box() + variables = format_keys(loaded_variables, tavern_box) - global_cfg["variables"] = format_keys(loaded_variables, tavern_box) - - # Can be overridden in tests - global_cfg["strict"] = _load_global_strictness(pytest_config) - global_cfg["follow_redirects"] = _load_global_follow_redirects(pytest_config) - global_cfg["backends"] = _load_global_backends(pytest_config) - global_cfg["merge_ext_values"] = _load_global_merge_ext(pytest_config) - - logger.debug("Global config: %s", global_cfg) + global_cfg = TestConfig( + variables=variables, + strict=_load_global_strictness(pytest_config), + follow_redirects=_load_global_follow_redirects(pytest_config), + tavern_internal=TavernInternalConfig( + pytest_hook_caller=pytest_config.hook, + backends=_load_global_backends(pytest_config), + ), + stages=global_cfg_dict.get("stages", []), + ) return global_cfg -def _load_global_backends(pytest_config): +def _load_global_backends(pytest_config: pytest.Config) -> Dict[str, Any]: """Load which backend should be used""" backend_settings = {} @@ -176,7 +175,7 @@ def _load_global_backends(pytest_config): return backend_settings -def _load_global_strictness(pytest_config): +def _load_global_strictness(pytest_config: pytest.Config) -> StrictLevel: """Load the global 'strictness' setting""" options = get_option_generic(pytest_config, "tavern-strict", []) @@ -184,17 +183,12 @@ def _load_global_strictness(pytest_config): return StrictLevel.from_options(options) -def _load_global_follow_redirects(pytest_config): +def _load_global_follow_redirects(pytest_config: pytest.Config): """Load the global 'follow redirects' setting""" return get_option_generic(pytest_config, "tavern-always-follow-redirects", False) -def _load_global_merge_ext(pytest_config): - """Load the global setting about whether external values should be merged or not""" - return get_option_generic(pytest_config, "tavern-merge-ext-function-values", True) - - -def get_option_generic(pytest_config, flag, default): +def get_option_generic(pytest_config: pytest.Config, flag: str, default): """Get a configuration option or return the default Priority order is cmdline, then ini, then default""" diff --git a/tavern/util/report.py b/tavern/_core/report.py similarity index 70% rename from tavern/util/report.py rename to tavern/_core/report.py index 03f823fa0..432d9abe3 100644 --- a/tavern/util/report.py +++ b/tavern/_core/report.py @@ -4,31 +4,30 @@ import yaml try: - from allure import attach + from allure import attach, step from allure import attachment_type as at - from allure import step yaml_type = at.YAML except ImportError: yaml_type = None - def attach(*args, **kwargs): # pylint: disable=unused-argument + def attach(*args, **kwargs) -> None: logger.debug("Not attaching anything as allure is not installed") - def step(name): # pylint: disable=unused-argument + def step(name): def call(step_func): return step_func return call -from tavern.util.formatted_str import FormattedString -from tavern.util.stage_lines import get_stage_lines, read_relevant_lines +from tavern._core.formatted_str import FormattedString +from tavern._core.stage_lines import get_stage_lines, read_relevant_lines logger = logging.getLogger(__name__) -def _prepare_yaml(val): +def prepare_yaml(val): """Sanitises the formatted string into a format safe for dumping""" formatted = val @@ -38,16 +37,16 @@ def 
_prepare_yaml(val): for key in val: if isinstance(key, FormattedString): key = str(key) - formatted[key] = _prepare_yaml(val[key]) + formatted[key] = prepare_yaml(val[key]) elif isinstance(val, (list, tuple, set)): - formatted = [_prepare_yaml(item) for item in val] + formatted = [prepare_yaml(item) for item in val] elif isinstance(formatted, FormattedString): return str(formatted) return formatted -def attach_stage_content(stage): +def attach_stage_content(stage) -> None: first_line, last_line, _ = get_stage_lines(stage) code_lines = list(read_relevant_lines(stage, first_line, last_line)) @@ -56,12 +55,12 @@ def attach_stage_content(stage): def attach_yaml(payload, name): - prepared = _prepare_yaml(payload) + prepared = prepare_yaml(payload) dumped = yaml.safe_dump(prepared) return attach_text(dumped, name, yaml_type) -def attach_text(payload, name, attachment_type=None): +def attach_text(payload, name, attachment_type=None) -> None: return attach(payload, name=name, attachment_type=attachment_type) diff --git a/tavern/_core/run.py b/tavern/_core/run.py new file mode 100644 index 000000000..b950b1d53 --- /dev/null +++ b/tavern/_core/run.py @@ -0,0 +1,321 @@ +import copy +import functools +import logging +import pathlib +from contextlib import ExitStack +from copy import deepcopy +from typing import List, Mapping, MutableMapping + +import box + +from tavern._core import exceptions +from tavern._core.plugins import ( + get_expected, + get_extra_sessions, + get_request_type, + get_verifiers, +) +from tavern._core.strict_util import StrictLevel + +from .dict_util import format_keys, get_tavern_box +from .pytest import call_hook +from .pytest.config import TestConfig +from .report import attach_stage_content, wrap_step +from .strtobool import strtobool +from .testhelpers import delay, retry + +logger = logging.getLogger(__name__) + + +def _resolve_test_stages(test_spec: Mapping, available_stages: Mapping): + # Need to get a final list of stages in the tests (resolving refs) + test_stages = [] + for raw_stage in test_spec["stages"]: + stage = raw_stage + if stage.get("type") == "ref": + if "id" in stage: + ref_id = stage["id"] + if ref_id in available_stages: + # Make sure nothing downstream can change the globally + # defined stage. Just give the test a local copy. 
+ stage = deepcopy(available_stages[ref_id]) + logger.debug("found stage reference: %s", ref_id) + else: + logger.error("Bad stage: unknown stage referenced: %s", ref_id) + raise exceptions.InvalidStageReferenceError( + "Unknown stage reference: {}".format(ref_id) + ) + else: + logger.error("Bad stage: 'ref' type must specify 'id'") + raise exceptions.BadSchemaError("'ref' stage type must specify 'id'") + test_stages.append(stage) + + return test_stages + + +def _get_included_stages( + tavern_box: box.Box, + test_block_config: TestConfig, + test_spec: Mapping, + available_stages: List[dict], +) -> List[dict]: + """ + Get any stages which were included via config files which will be available + for use in this test + + Args: + available_stages: List of stages which already exist + tavern_box: Available parameters for fomatting at this point + test_block_config: Current test config dictionary + test_spec: Specification for current test + + Returns: + Fully resolved stages + """ + + def stage_ids(s): + return [i["id"] for i in s] + + if test_spec.get("includes"): + # Need to do this separately here so there is no confusion between global and included stages + for included in test_spec["includes"]: + for stage in included.get("stages", {}): + if stage["id"] in stage_ids(available_stages): + raise exceptions.DuplicateStageDefinitionError( + "Stage id '{}' defined in stage-included test which was already defined in global configuration".format( + stage["id"] + ) + ) + + included_stages = [] # type: ignore + + for included in test_spec["includes"]: + if "variables" in included: + formatted_include = format_keys(included["variables"], tavern_box) + test_block_config.variables.update(formatted_include) + + for stage in included.get("stages", []): + if stage["id"] in stage_ids(included_stages): + raise exceptions.DuplicateStageDefinitionError( + "Stage with specified id already defined: {}".format( + stage["id"] + ) + ) + included_stages.append(stage) + else: + included_stages = [] + + return included_stages + + +def run_test( + in_file: pathlib.Path, + test_spec: MutableMapping, + global_cfg: TestConfig, +) -> None: + """Run a single tavern test + + Note that each tavern test can consist of multiple requests (log in, + create, update, delete, etc). + + The global configuration is copied and used as an initial configuration for + this test. Any values which are saved from any tests are saved into this + test block and can be used for formatting in later stages in the test. 
+ + Args: + in_file: filename containing this test + test_spec: The specification for this test + global_cfg: Any global configuration for this test + + Raises: + TavernException: If any of the tests failed + """ + + # Initialise test config for this test with the global configuration before + # starting + test_block_config = global_cfg.copy() + default_global_stricness = global_cfg.strict + + tavern_box = get_tavern_box() + + if not test_spec: + logger.warning("Empty test block in %s", in_file) + return + + # Get included stages and resolve any into the test spec dictionary + available_stages = test_block_config.stages + included_stages = _get_included_stages( + tavern_box, test_block_config, test_spec, available_stages + ) + all_stages = {s["id"]: s for s in available_stages + included_stages} + test_spec["stages"] = _resolve_test_stages(test_spec, all_stages) + + test_block_config.variables["tavern"] = tavern_box["tavern"] + + test_block_name = test_spec["test_name"] + + logger.info("Running test : %s", test_block_name) + + with ExitStack() as stack: + sessions = get_extra_sessions(test_spec, test_block_config) + + for name, session in sessions.items(): + logger.debug("Entering context for %s", name) + stack.enter_context(session) + + def getonly(stage): + o = stage.get("only") + if o is None: + return False + elif isinstance(o, bool): + return o + else: + return strtobool(o) + + has_only = any(getonly(stage) for stage in test_spec["stages"]) + + # Run tests in a path in order + for idx, stage in enumerate(test_spec["stages"]): + if stage.get("skip"): + continue + if has_only and not getonly(stage): + continue + + test_block_config = test_block_config.with_strictness( + default_global_stricness + ) + test_block_config = test_block_config.with_strictness( + _calculate_stage_strictness(stage, test_block_config, test_spec) + ) + + # Wrap run_stage with retry helper + run_stage_with_retries = retry(stage, test_block_config)(run_stage) + + partial = functools.partial( + run_stage_with_retries, sessions, stage, test_block_config + ) + + allure_name = "Stage {}: {}".format( + idx, format_keys(stage["name"], test_block_config.variables) + ) + step = wrap_step(allure_name, partial) + + try: + step() + except exceptions.TavernException as e: + e.stage = stage # type: ignore + e.test_block_config = test_block_config # type: ignore + raise + + if getonly(stage): + break + + +def _calculate_stage_strictness( + stage: dict, test_block_config: TestConfig, test_spec: Mapping +) -> StrictLevel: + """Figure out the strictness for this stage + + Can be overridden per stage, or per test + + Priority is global (see pytest _core.util file) <= test <= stage + """ + stage_options = None + new_strict = test_block_config.strict + + if test_spec.get("strict", None) is not None: + stage_options = test_spec["strict"] + logger.debug("Getting test level strict setting: %s", stage_options) + + stage_strictness_set = None + + def update_stage_options(new_option): + if stage_strictness_set: + raise exceptions.DuplicateStrictError + logger.debug("Setting stage level strict setting: %s", new_option) + return new_option + + if stage.get("response", {}).get("strict", None) is not None: + stage_strictness_set = stage_options = update_stage_options( + stage["response"]["strict"] + ) + + mqtt_response = stage.get("mqtt_response", None) + if mqtt_response is not None: + if isinstance(mqtt_response, dict): + if mqtt_response.get("strict", None) is not None: + stage_strictness_set = stage_options = update_stage_options( + 
stage["mqtt_response"]["strict"] + ) + elif isinstance(mqtt_response, list): + for response in mqtt_response: + if response.get("strict", None) is not None: + stage_strictness_set = stage_options = update_stage_options( + response["strict"] + ) + else: + raise exceptions.BadSchemaError( + "mqtt_response was invalid type {}".format(type(mqtt_response)) + ) + + if stage_options is not None: + if stage_options is True: + new_strict = StrictLevel.all_on() + elif stage_options is False: + new_strict = StrictLevel.all_off() + else: + new_strict = StrictLevel.from_options(stage_options) + else: + logger.debug("Global default strictness used for this stage") + + logger.debug("Strict key checking for this stage is '%s'", test_block_config.strict) + + return new_strict + + +def run_stage( + sessions: Mapping, + stage: Mapping, + test_block_config: TestConfig, +) -> None: + """Run one stage from the test + + Args: + sessions: Dictionary of relevant 'session' objects used for this test + stage: specification of stage to be run + test_block_config: available variables for test + """ + stage = copy.deepcopy(stage) + name = stage["name"] + + attach_stage_content(stage) + + r = get_request_type(stage, test_block_config, sessions) + + tavern_box = test_block_config.variables["tavern"] + tavern_box.update(request_vars=r.request_vars) + + expected = get_expected(stage, test_block_config, sessions) + + delay(stage, "before", test_block_config.variables) + + logger.info("Running stage : %s", name) + + call_hook( + test_block_config, + "pytest_tavern_beta_before_every_request", + request_args=r.request_vars, + ) + + verifiers = get_verifiers(stage, test_block_config, sessions, expected) + + response = r.run() + + for response_type, response_verifiers in verifiers.items(): + logger.debug("Running verifiers for %s", response_type) + for v in response_verifiers: + saved = v.verify(response) + test_block_config.variables.update(saved) + + tavern_box.pop("request_vars") + delay(stage, "after", test_block_config.variables) diff --git a/tavern/_plugins/grpc/__init__.py b/tavern/_core/schema/__init__.py similarity index 100% rename from tavern/_plugins/grpc/__init__.py rename to tavern/_core/schema/__init__.py diff --git a/tavern/schemas/extensions.py b/tavern/_core/schema/extensions.py similarity index 77% rename from tavern/schemas/extensions.py rename to tavern/_core/schema/extensions.py index 0cf79bd5e..be9ab3148 100644 --- a/tavern/schemas/extensions.py +++ b/tavern/_core/schema/extensions.py @@ -1,14 +1,20 @@ import os import re +from typing import Union from grpc import StatusCode from pykwalify.types import is_bool, is_float, is_int -from tavern.util import exceptions -from tavern.util.exceptions import BadSchemaError -from tavern.util.extfunctions import get_pykwalify_logger, import_ext_function -from tavern.util.loader import ApproxScalar, BoolToken, FloatToken, IntToken -from tavern.util.strict_util import StrictLevel +from tavern._core import exceptions +from tavern._core.exceptions import BadSchemaError +from tavern._core.extfunctions import ( + get_pykwalify_logger, + import_ext_function, + is_ext_function, +) +from tavern._core.general import valid_http_methods +from tavern._core.loader import ApproxScalar, BoolToken, FloatToken, IntToken +from tavern._core.strict_util import StrictLevel # To extend pykwalify's type validation, extend its internal functions @@ -28,7 +34,6 @@ def validate(value): # These plug into the pykwalify extension function API def validator_like(validate, description): def 
validator(value, rule_obj, path): - # pylint: disable=unused-argument if validate(value): return True else: @@ -45,7 +50,7 @@ def validator(value, rule_obj, path): bool_variable = validator_like(is_bool_like, "bool-like") -def _validate_one_extension(input_value): +def _validate_one_extension(input_value) -> None: expected_keys = {"function", "extra_args", "extra_kwargs"} extra = set(input_value) - expected_keys @@ -57,7 +62,7 @@ def _validate_one_extension(input_value): try: import_ext_function(input_value["function"]) - except Exception as e: # pylint: disable=broad-except + except Exception as e: raise BadSchemaError("Couldn't load {}".format(input_value["function"])) from e extra_args = input_value.get("extra_args") @@ -74,7 +79,7 @@ def _validate_one_extension(input_value): ) -def validate_extensions(value, rule_obj, path): +def validate_extensions(value, rule_obj, path) -> bool: """Given a specification for calling a validation function, make sure that the arguments are valid (ie, function is valid, arguments are of the correct type...) @@ -93,8 +98,6 @@ def validate_extensions(value, rule_obj, path): BadSchemaError: Something in the validation function spec was wrong """ - # pylint: disable=unused-argument - if isinstance(value, list): for vf in value: _validate_one_extension(vf) @@ -104,8 +107,7 @@ def validate_extensions(value, rule_obj, path): return True -def validate_status_code_is_int_or_list_of_ints(value, rule_obj, path): - # pylint: disable=unused-argument +def validate_status_code_is_int_or_list_of_ints(value, rule_obj, path) -> bool: err_msg = "status_code has to be an integer or a list of integers (got {})".format( value ) @@ -143,8 +145,7 @@ def is_grpc_status(value): return False -def check_usefixtures(value, rule_obj, path): - # pylint: disable=unused-argument +def check_usefixtures(value, rule_obj, path) -> bool: err_msg = "'usefixtures' has to be a list with at least one item" if not isinstance(value, (list, tuple)): @@ -156,9 +157,8 @@ def check_usefixtures(value, rule_obj, path): return True -def verify_oneof_id_name(value, rule_obj, path): +def verify_oneof_id_name(value, rule_obj, path) -> bool: """Checks that if 'name' is not present, 'id' is""" - # pylint: disable=unused-argument name = value.get("name") if not name: @@ -171,55 +171,68 @@ def verify_oneof_id_name(value, rule_obj, path): return True -def check_parametrize_marks(value, rule_obj, path): - # pylint: disable=unused-argument - +def check_parametrize_marks(value, rule_obj, path) -> bool: key_or_keys = value["key"] vals = value["vals"] # At this point we can assume vals is a list - check anyway - if not isinstance(vals, list): + if not (isinstance(vals, list) or is_ext_function(vals)): raise BadSchemaError("'vals' should be a list") if isinstance(key_or_keys, str): - # example: - # - parametrize: - # key: edible - # vals: - # - rotten - # - fresh - # - unripe - err_msg = ( - "If 'key' in parametrize is a string, 'vals' must be a list of scalar items" - ) - for v in vals: - if isinstance(v, (list, dict)): - raise BadSchemaError(err_msg) - + # Vals can be anything + return True elif isinstance(key_or_keys, list): - # example: - # - parametrize: - # key: - # - edible - # - fruit - # vals: - # - [rotten, apple] - # - [fresh, orange] - # - [unripe, pear] err_msg = "If 'key' is a list, 'vals' must be a list of lists where each list is the same length as 'key'" - for v in vals: - if not isinstance(v, list): - raise BadSchemaError(err_msg) - elif len(v) != len(key_or_keys): + + # Checking for whether the ext 
function actually returns the correct + # values has to be deferred until the point where the function is + # actually called + if not is_ext_function(vals): + # broken example: + # - parametrize: + # key: + # - edible + # - fruit + # vals: + # a: b + if not isinstance(vals, list): raise BadSchemaError(err_msg) + # example: + # - parametrize: + # key: + # - edible + # - fruit + # vals: + # - [rotten, apple] + # - [fresh, orange] + # - [unripe, pear] + for v in vals: + if not isinstance(v, list): + # This catches the case like + # + # - parametrize: + # key: + # - edible + # - fruit + # vals: + # - fresh + # - orange + # + # This will parametrize 'edible' as [f, r, e, s, h] which is almost certainly not desired + raise BadSchemaError(err_msg) + if len(v) != len(key_or_keys): + # If the 'vals' list has more or less keys + raise BadSchemaError(err_msg) + else: raise BadSchemaError("'key' must be a string or a list") return True -def validate_data_key(value, rule_obj, path): +def validate_data_key(value, rule_obj, path) -> bool: """Validate the 'data' key in a http request From requests docs: @@ -231,7 +244,6 @@ def validate_data_key(value, rule_obj, path): We could handle lists of tuples, but it seems entirely pointless to maintain compatibility for something which is more verbose and does the same thing """ - # pylint: disable=unused-argument if isinstance(value, dict): # Fine @@ -263,13 +275,11 @@ def validate_data_key(value, rule_obj, path): return True -def validate_request_json(value, rule_obj, path): +def validate_request_json(value, rule_obj, path) -> bool: """Performs the above match, but also matches a dict or a list. This it just because it seems like you can't match a dict OR a list in pykwalify """ - # pylint: disable=unused-argument - def nested_values(d): if isinstance(d, dict): for v in d.values(): @@ -293,7 +303,7 @@ def nested_values(d): return True -def validate_json_with_ext(value, rule_obj, path): +def validate_json_with_ext(value, rule_obj, path) -> bool: """Validate json with extensions""" validate_request_json(value, rule_obj, path) @@ -307,9 +317,8 @@ def validate_json_with_ext(value, rule_obj, path): return True -def check_strict_key(value, rule_obj, path): +def check_strict_key(value, rule_obj, path) -> bool: """Make sure the 'strict' key is either a bool or a list""" - # pylint: disable=unused-argument if not isinstance(value, list) and not is_bool_like(value): raise BadSchemaError("'strict' has to be either a boolean or a list") @@ -325,9 +334,8 @@ def check_strict_key(value, rule_obj, path): return True -def validate_timeout_tuple_or_float(value, rule_obj, path): +def validate_timeout_tuple_or_float(value, rule_obj, path) -> bool: """Make sure timeout is a float/int or a tuple of floats/ints""" - # pylint: disable=unused-argument err_msg = "'timeout' must be either a float/int or a 2-tuple of floats/ints - got '{}' (type {})".format( value, type(value) @@ -350,9 +358,8 @@ def check_is_timeout_val(v): return True -def validate_verify_bool_or_str(value, rule_obj, path): +def validate_verify_bool_or_str(value: Union[bool, str], rule_obj, path) -> bool: """Make sure the 'verify' key is either a bool or a str""" - # pylint: disable=unused-argument if not isinstance(value, (bool, str)) and not is_bool_like(value): raise BadSchemaError( @@ -362,9 +369,8 @@ def validate_verify_bool_or_str(value, rule_obj, path): return True -def validate_cert_tuple_or_str(value, rule_obj, path): +def validate_cert_tuple_or_str(value, rule_obj, path) -> bool: """Make sure the 'cert' key is 
either a str or tuple""" - # pylint: disable=unused-argument err_msg = ( "The 'cert' key must be the path to a single file (containing the private key and the certificate) " @@ -383,13 +389,14 @@ def validate_cert_tuple_or_str(value, rule_obj, path): return True -def validate_file_spec(value, rule_obj, path): - """Validate file upload arguments """ - # pylint: disable=unused-argument +def validate_file_spec(value, rule_obj, path) -> bool: + """Validate file upload arguments""" if not isinstance(value, dict): raise BadSchemaError( - "File specification must be a mapping of file names to file specs" + "File specification must be a mapping of file names to file specs, got {}".format( + value + ) ) for _, filespec in value.items(): @@ -429,13 +436,12 @@ def validate_file_spec(value, rule_obj, path): def raise_body_error(value, rule_obj, path): """Raise an error about the deprecated 'body' key""" - # pylint: disable=unused-argument msg = "The 'body' key has been replaced with 'json' in 1.0 to make it more in line with other blocks. see https://github.com/taverntesting/tavern/issues/495 for details." raise BadSchemaError(msg) -def retry_variable(value, rule_obj, path): +def retry_variable(value: int, rule_obj, path) -> bool: """Check retry variables""" int_variable(value, rule_obj, path) @@ -445,3 +451,20 @@ def retry_variable(value, rule_obj, path): raise BadSchemaError("max_retries must be greater than 0") return True + + +def validate_http_method(value: str, rule_obj, path) -> bool: + """Check http method""" + + if not isinstance(value, str): + raise BadSchemaError("HTTP method should be a string") + + if value not in valid_http_methods: + logger = get_pykwalify_logger("tavern.schemas.extensions") + logger.debug( + "Givern HTTP method '%s' was not one of %s - assuming it will be templated", + value, + valid_http_methods, + ) + + return True diff --git a/tavern/schemas/files.py b/tavern/_core/schema/files.py similarity index 83% rename from tavern/schemas/files.py rename to tavern/_core/schema/files.py index 1038b31cc..a5bb4af67 100644 --- a/tavern/schemas/files.py +++ b/tavern/_core/schema/files.py @@ -1,28 +1,27 @@ import contextlib import copy -import functools import logging import os import tempfile +from typing import Dict import pykwalify -from pykwalify import core import yaml +from pykwalify import core -from tavern.plugins import load_plugins -from tavern.util.exceptions import BadSchemaError -from tavern.util.loader import IncludeLoader, load_single_document_yaml - -core.yaml.safe_load = functools.partial(yaml.load, Loader=IncludeLoader) +from tavern._core.exceptions import BadSchemaError +from tavern._core.loader import load_single_document_yaml +from tavern._core.plugins import load_plugins +from tavern._core.schema.jsonschema import verify_jsonschema logger = logging.getLogger(__name__) -class SchemaCache(object): +class SchemaCache: """Caches loaded schemas""" - def __init__(self): - self._loaded = {} + def __init__(self) -> None: + self._loaded: Dict[str, dict] = {} def _load_base_schema(self, schema_filename): try: @@ -52,8 +51,8 @@ def _load_schema_with_plugins(self, schema_filename): # Don't require a schema logger.debug("No schema defined for %s", p.name) else: - base_schema["mapping"].update( - plugin_schema.get("initialisation", {}) + base_schema["properties"].update( + plugin_schema.get("properties", {}) ) self._loaded[mangled] = base_schema @@ -81,13 +80,11 @@ def __call__(self, schema_filename, with_plugins): load_schema_file = SchemaCache() -def 
verify_generic(to_verify, schema): - """Verify a generic file against a given schema - +def verify_pykwalify(to_verify, schema) -> None: + """Verify a generic file against a given pykwalify schema Args: to_verify (dict): Filename of source tests to check schema (dict): Schema to verify against - Raises: BadSchemaError: Schema did not match """ @@ -132,7 +129,7 @@ def wrapfile(to_wrap): os.remove(wrapped_tmp.name) -def verify_tests(test_spec, with_plugins=True): +def verify_tests(test_spec, with_plugins: bool = True) -> None: """Verify that a specific test block is correct Todo: @@ -146,7 +143,7 @@ def verify_tests(test_spec, with_plugins=True): """ here = os.path.dirname(os.path.abspath(__file__)) - schema_filename = os.path.join(here, "tests.schema.yaml") + schema_filename = os.path.join(here, "tests.jsonschema.yaml") schema = load_schema_file(schema_filename, with_plugins) - verify_generic(test_spec, schema) + verify_jsonschema(test_spec, schema) diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py new file mode 100644 index 000000000..944dc0105 --- /dev/null +++ b/tavern/_core/schema/jsonschema.py @@ -0,0 +1,195 @@ +import logging +import re + +import jsonschema +from jsonschema import Draft7Validator, ValidationError +from jsonschema.validators import extend + +from tavern._core.dict_util import recurse_access_key +from tavern._core.exceptions import BadSchemaError +from tavern._core.loader import ( + AnythingSentinel, + BoolToken, + FloatToken, + IntToken, + RawStrToken, + TypeConvertToken, + TypeSentinel, +) +from tavern._core.schema.extensions import ( + check_parametrize_marks, + check_strict_key, + retry_variable, + validate_file_spec, + validate_http_method, + validate_json_with_ext, + validate_request_json, +) +from tavern._core.stage_lines import ( + get_stage_filename, + get_stage_lines, + read_relevant_lines, +) + +logger = logging.getLogger(__name__) + + +def is_str_or_bytes_or_token(checker, instance): + return Draft7Validator.TYPE_CHECKER.is_type(instance, "string") or isinstance( + instance, (bytes, RawStrToken, AnythingSentinel) + ) + + +def is_number_or_token(checker, instance): + return Draft7Validator.TYPE_CHECKER.is_type(instance, "number") or isinstance( + instance, (IntToken, FloatToken, AnythingSentinel) + ) + + +def is_integer_or_token(checker, instance): + return Draft7Validator.TYPE_CHECKER.is_type(instance, "integer") or isinstance( + instance, (IntToken, AnythingSentinel) + ) + + +def is_boolean_or_token(checker, instance): + return Draft7Validator.TYPE_CHECKER.is_type(instance, "boolean") or isinstance( + instance, (BoolToken, AnythingSentinel) + ) + + +def is_object_or_sentinel(checker, instance): + return ( + Draft7Validator.TYPE_CHECKER.is_type(instance, "object") + or isinstance(instance, (TypeSentinel, TypeConvertToken)) + or instance is None + ) + + +def oneOf(validator, oneOf, instance, schema): + """Patched version of 'oneof' that does not complain if something is matched by multiple branches""" + subschemas = enumerate(oneOf) + all_errors = [] + for index, subschema in subschemas: + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + first_valid = subschema + break + all_errors.extend(errs) + else: + yield ValidationError( + "%r is not valid under any of the given schemas" % (instance,), + context=all_errors, + ) + + more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)] + if more_valid: + more_valid.append(first_valid) + reprs = ", ".join(repr(schema) for schema 
in more_valid) + logger.debug("%r is valid under each of %s", instance, reprs) + + +CustomValidator = extend( + Draft7Validator, + type_checker=Draft7Validator.TYPE_CHECKER.redefine("object", is_object_or_sentinel) + .redefine("string", is_str_or_bytes_or_token) + .redefine("boolean", is_boolean_or_token) + .redefine("integer", is_integer_or_token) + .redefine("number", is_number_or_token), + validators={ + "oneOf": oneOf, + }, +) + + +def verify_jsonschema(to_verify, schema) -> None: + """Verify a generic file against a given jsonschema + + Args: + to_verify (dict): Filename of source tests to check + schema (dict): Schema to verify against + + Raises: + BadSchemaError: Schema did not match + """ + + validator = CustomValidator(schema) + + try: + validator.validate(to_verify) + except jsonschema.ValidationError as e: + real_context = [] + + # ignore these strings because they're red herrings + for c in e.context: + description = c.schema.get("description", "") + if description == "Reference to another stage from an included config file": + continue + + instance = c.instance + filename = get_stage_filename(instance) + if filename is None: + # Depending on what block raised the error, it mightbe difficult to tell what it was, so check the parent too + instance = e.instance + filename = get_stage_filename(instance) + + if filename: + with open(filename, "r", encoding="utf-8") as infile: + n_lines = len(infile.readlines()) + + first_line, last_line, _ = get_stage_lines(instance) + first_line = max(first_line - 2, 0) + last_line = min(last_line + 2, n_lines) + + reg = re.compile(r"^\s*$") + + lines = read_relevant_lines(instance, first_line, last_line) + lines = [line for line in lines if not reg.match(line.strip())] + content = "\n".join(list(lines)) + real_context.append( + f""" +{c.message} +{filename}: line {first_line}-{last_line}: + +{content} +""" + ) + else: + real_context.append( + f""" +{c.message} + + +""" + ) + + msg = "\n---\n" + "\n---\n".join([str(i) for i in real_context]) + raise BadSchemaError(msg) from None + + extra_checks = { + "stages[*].mqtt_publish.json[]": validate_request_json, + "stages[*].mqtt_response.payload[]": validate_request_json, + "stages[*].request.json[]": validate_request_json, + "stages[*].request.data[]": validate_request_json, + "stages[*].request.params[]": validate_request_json, + "stages[*].request.headers[]": validate_request_json, + "stages[*].request.method[]": validate_http_method, + "stages[*].request.save[]": validate_json_with_ext, + "stages[*].request.files[]": validate_file_spec, + "marks[*].parametrize[]": check_parametrize_marks, + "stages[*].response.strict[]": validate_json_with_ext, + "stages[*].max_retries[]": retry_variable, + "strict": check_strict_key, + } + + for path, func in extra_checks.items(): + data = recurse_access_key(to_verify, path) + if data: + if path.endswith("[]"): + if not isinstance(data, list): + raise BadSchemaError + + for element in data: + func(element, None, path) + else: + func(data, None, path) diff --git a/tavern/_core/schema/tests.jsonschema.yaml b/tavern/_core/schema/tests.jsonschema.yaml new file mode 100644 index 000000000..ebd62f086 --- /dev/null +++ b/tavern/_core/schema/tests.jsonschema.yaml @@ -0,0 +1,427 @@ +$schema: "http://json-schema.org/draft-07/schema#" +$id: "https://raw.githubusercontent.com/taverntesting/tavern/master/tavern/schemas/tests.jsonschema.yaml" + +title: Tavern +description: "Schema for Tavern test files" + +### + +definitions: + strict_block: + oneOf: + - type: string + - type: 
boolean + - type: array + items: + type: string + + verify_block: + type: object + additionalProperties: false + + required: + - function + properties: + function: + type: string + description: Path to function in the form import.path:name + + extra_args: + type: array + + extra_kwargs: + type: object + + any_json: + oneOf: + - type: array + - type: object + - type: number + - type: string + - type: boolean + + included_file: + type: object + additionalProperties: false + + required: + - name + + properties: + name: + type: string + description: Name for this included file + + description: + type: string + description: Extra description for included file + + variables: + type: object + description: Variables to use in tests + + stages: + type: array + description: Stages to reference from tests + + items: + $ref: "#/definitions/stage" + + http_request: + type: object + additionalProperties: false + description: HTTP request to perform as part of stage + + required: + - url + + properties: + url: + type: string + description: URL to make request to + + cert: + description: Certificate to use - either a path to a certificate and key in one file, or a two item list containing the certificate and key separately + oneOf: + - type: string + - type: array + minItems: 2 + maxItems: 2 + items: + type: string + + auth: + description: Authorisation to use for request - a list containing username and password + type: array + minItems: 2 + maxItems: 2 + items: + type: string + + verify: + description: Whether to verify the server's certificates + oneOf: + - type: boolean + default: false + - type: string + + method: + description: HTTP method to use for request + default: GET + type: string + + follow_redirects: + type: boolean + description: Whether to follow redirects from 3xx responses + default: false + + stream: + type: boolean + description: Whether to stream the download from the request + default: false + + cookies: + type: array + description: Which cookies to use in the request + + items: + oneOf: + - type: string + - type: object + + json: + description: JSON body to send in request body + $ref: "#/definitions/any_json" + + params: + description: Query parameters + type: object + + headers: + description: Headers for request + type: object + + data: + description: Form data to send in request + oneOf: + - type: object + - type: string + + timeout: + description: How long to wait for requests to time out + oneOf: + - type: number + - type: array + minItems: 2 + maxItems: 2 + items: + type: number + + file_body: + type: string + description: Path to a file to upload as the request body + + files: + type: object + description: Files to send as part of the request + + clear_session_cookies: + description: Whether to clear sesion cookies before running this request + type: boolean + + mqtt_publish: + type: object + description: Publish MQTT message + additionalProperties: false + + properties: + topic: + type: string + description: Topic to publish on + + payload: + type: string + description: Raw payload to post + + json: + description: JSON payload to post + $ref: "#/definitions/any_json" + + qos: + type: integer + description: QoS level to use for request + default: 0 + + retain: + type: boolean + description: Whether the message should be retained + default: false + + mqtt_response: + type: object + additionalProperties: false + description: Expected MQTT response + + properties: + unexpected: + type: boolean + description: Receiving this message fails the test + + topic: + type: string + 
description: Topic message should be received on + + payload: + description: Expected raw payload in response + oneOf: + - type: number + - type: integer + - type: string + - type: boolean + + json: + description: Expected JSON payload in response + $ref: "#/definitions/any_json" + + timeout: + type: number + description: How long to wait for response to arrive + + qos: + type: integer + description: QoS level that message should be received on + minimum: 0 + maximum: 2 + + verify_response_with: + oneOf: + - $ref: "#/definitions/verify_block" + - type: array + items: + $ref: "#/definitions/verify_block" + + save: + type: object + description: Which objects to save from the response + + http_response: + type: object + additionalProperties: false + description: Expected HTTP response + + properties: + strict: + $ref: "#/definitions/strict_block" + + status_code: + description: Status code(s) to match + oneOf: + - type: integer + - type: array + minItems: 1 + items: + type: integer + + cookies: + type: array + description: Cookies expected to be returned + uniqueItems: true + minItems: 1 + + items: + type: string + + json: + description: Expected JSON response + $ref: "#/definitions/any_json" + + verify_response_with: + oneOf: + - $ref: "#/definitions/verify_block" + - type: array + items: + $ref: "#/definitions/verify_block" + + headers: + description: Headers expected in response + type: object + + save: + type: object + description: Which objects to save from the response + + stage_ref: + type: object + description: Reference to another stage from an included config file + additionalProperties: false + + required: + - type + - id + + properties: + type: + type: string + pattern: ^ref$ + + id: + type: string + + stage: + type: object + description: One stage in a test + additionalProperties: false + required: + - name + + properties: + id: + type: string + description: ID of stage for use in stage references + + max_retries: + type: integer + description: Number of times to retry this request + default: 0 + + skip: + type: boolean + description: Whether to skip this stage + default: false + + only: + type: boolean + description: Only run this stage + default: false + + delay_before: + type: number + description: How long to delay before running stage + + delay_after: + type: number + description: How long to delay after running stage + + name: + type: string + description: Name of this stage + + mqtt_publish: + $ref: "#/definitions/mqtt_publish" + + mqtt_response: + oneOf: + - $ref: "#/definitions/mqtt_response" + - type: array + items: + $ref: "#/definitions/mqtt_response" + + request: + $ref: "#/definitions/http_request" + + response: + $ref: "#/definitions/http_response" + +### + +type: object +additionalProperties: false +required: + - test_name + - stages + +properties: + test_name: + type: string + description: Name of test + + _xfail: + type: string + enum: + - verify + - run + + marks: + type: array + description: Pytest marks to use on test + items: + anyOf: + - type: string + - type: object + additionalProperties: false + properties: + filterwarnings: + type: string + + skipif: + type: string + + usefixtures: + type: array + items: + type: string + + parametrize: + type: object + required: + - key + - vals + + strict: + $ref: "#/definitions/strict_block" + + includes: + type: array + minItems: 1 + items: + $ref: "#/definitions/included_file" + + stages: + type: array + description: Stages in test + minItems: 1 + + items: + oneOf: + - $ref: "#/definitions/stage" + - $ref: 
"#/definitions/stage_ref" diff --git a/tavern/schemas/tests.schema.yaml b/tavern/_core/schema/tests.schema.yaml similarity index 95% rename from tavern/schemas/tests.schema.yaml rename to tavern/_core/schema/tests.schema.yaml index 101599c6a..ea1494aac 100644 --- a/tavern/schemas/tests.schema.yaml +++ b/tavern/_core/schema/tests.schema.yaml @@ -88,6 +88,9 @@ schema;stage: type: map required: false mapping: + unexpected: + type: bool + required: false topic: type: str required: true @@ -228,14 +231,7 @@ schema;stage: method: type: str - enum: - - GET - - PUT - - POST - - DELETE - - PATCH - - OPTIONS - - HEAD + func: validate_http_method timeout: type: any @@ -252,12 +248,10 @@ schema;stage: func: validate_verify_bool_or_str required: false - meta: - type: seq + clear_session_cookies: + type: any + func: bool_variable required: false - sequence: - - type: str - unique: true response: type: map @@ -349,13 +343,13 @@ mapping: required: true sequence: - type: str + - type: bool - type: int - type: float - type: seq sequence: - - type: str - - type: int - - type: float + - type: any + - include: any_map _xfail: type: str diff --git a/tavern/util/stage_lines.py b/tavern/_core/stage_lines.py similarity index 88% rename from tavern/util/stage_lines.py rename to tavern/_core/stage_lines.py index 325afb837..d6511161b 100644 --- a/tavern/util/stage_lines.py +++ b/tavern/_core/stage_lines.py @@ -15,7 +15,7 @@ def get_stage_lines(stage): def read_relevant_lines(yaml_block, first_line, last_line): """Get lines between start and end mark""" - filename = start_mark(yaml_block).name + filename = get_stage_filename(yaml_block) if filename is None: logger.warning("unable to read yaml block") @@ -27,6 +27,10 @@ def read_relevant_lines(yaml_block, first_line, last_line): yield line.split("#", 1)[0].rstrip() +def get_stage_filename(yaml_block): + return start_mark(yaml_block).name + + class EmptyBlock: line = 0 name = None diff --git a/tavern/_core/strict_util.py b/tavern/_core/strict_util.py new file mode 100644 index 000000000..3530b6aaa --- /dev/null +++ b/tavern/_core/strict_util.py @@ -0,0 +1,164 @@ +import dataclasses +import enum +import logging +import re +from typing import List, Optional, Tuple, Union + +from tavern._core import exceptions +from tavern._core.strtobool import strtobool + +logger = logging.getLogger(__name__) + + +class StrictSetting(enum.Enum): + """The actual setting for a particular block""" + + ON = 1 + OFF = 2 + UNSET = 3 + LIST_ANY_ORDER = 4 + + +valid_keys = ["json", "headers", "redirect_query_params"] + +valid_switches = ["on", "off", "list_any_order"] + + +def strict_setting_factory(str_setting: Optional[str]) -> StrictSetting: + """Converts from cmdline/setting file to an enum""" + if str_setting is None: + return StrictSetting.UNSET + else: + if str_setting == "list_any_order": + return StrictSetting.LIST_ANY_ORDER + + parsed = strtobool(str_setting) + + if parsed: + return StrictSetting.ON + else: + return StrictSetting.OFF + + +@dataclasses.dataclass(frozen=True) +class StrictOption: + """The section and the setting. 
The setting is only stored here because json works slightly + differently, otherwise it's redundant""" + + section: str + setting: StrictSetting + + def is_on(self) -> bool: + if self.section == "json": + # Must be specifically disabled for response body + return self.setting not in [StrictSetting.OFF, StrictSetting.LIST_ANY_ORDER] + else: + # Off by default for everything else + return self.setting in [StrictSetting.ON] + + +def validate_and_parse_option(key: str) -> StrictOption: + regex = re.compile( + "(?P
{sections})(:(?P{switches}))?".format( + sections="|".join(valid_keys), switches="|".join(valid_switches) + ) + ) + + match = regex.fullmatch(key) + + if not match: + raise exceptions.InvalidConfigurationException( + "Invalid value for 'strict' given - expected one of {}, got '{}'".format( + ["{}[:on/off]".format(key) for key in valid_keys], key + ) + ) + + as_dict = match.groupdict() + + if as_dict["section"] != "json" and as_dict["setting"] == "list_any_order": + logger.warning( + "Using 'list_any_order' key outside of 'json' section has no meaning" + ) + + return StrictOption(as_dict["section"], strict_setting_factory(as_dict["setting"])) + + +@dataclasses.dataclass(frozen=True) +class StrictLevel: + """Strictness settings for every block in a response + + TODO: change the name of this class, it's awful""" + + json: StrictOption = dataclasses.field( + default=StrictOption("json", strict_setting_factory(None)) + ) + headers: StrictOption = dataclasses.field( + default=StrictOption("headers", strict_setting_factory(None)) + ) + redirect_query_params: StrictOption = dataclasses.field( + default=StrictOption("redirect_query_params", strict_setting_factory(None)) + ) + + @classmethod + def from_options(cls, options: Union[List[str], str]) -> "StrictLevel": + if isinstance(options, str): + options = [options] + elif not isinstance(options, list): + raise exceptions.InvalidConfigurationException( + "'strict' setting should be a list of strings" + ) + + logger.debug("Parsing options to strict level: %s", options) + + parsed = [validate_and_parse_option(key) for key in options] + + return cls(**{i.section: i for i in parsed}) + + def option_for(self, section: str) -> StrictOption: + """Provides a string-based way of getting strict settings for a section""" + try: + return getattr(self, section) + except AttributeError as e: + raise exceptions.InvalidConfigurationException( + "No setting for '{}'".format(section) + ) from e + + @classmethod + def all_on(cls) -> "StrictLevel": + return cls.from_options([i + ":on" for i in valid_keys]) + + @classmethod + def all_off(cls) -> "StrictLevel": + return cls.from_options([i + ":off" for i in valid_keys]) + + +StrictSettingKinds = Union[None, bool, StrictSetting, StrictOption] + + +def extract_strict_setting(strict: StrictSettingKinds) -> Tuple[bool, StrictSetting]: + """Takes either a bool, StrictOption, or a StrictSetting and return the bool representation + and StrictSetting representation""" + + logger.debug("Parsing a '%s': %s", type(strict), strict) + + if isinstance(strict, StrictSetting): + strict_setting = strict + strict = strict == StrictSetting.ON + elif isinstance(strict, StrictOption): + strict_setting = strict.setting + strict = strict.is_on() + elif isinstance(strict, bool): + strict_setting = strict_setting_factory(str(strict)) + elif strict is None: + strict = False + strict_setting = strict_setting_factory("false") + else: + raise exceptions.InvalidConfigurationException( + "Unable to parse strict setting '{}' of type '{}'".format( + strict, type(strict) + ) + ) + + logger.debug("Got strict as '%s', setting as '%s'", strict, strict_setting) + + return strict, strict_setting diff --git a/tavern/_core/strtobool.py b/tavern/_core/strtobool.py new file mode 100644 index 000000000..2ead9e834 --- /dev/null +++ b/tavern/_core/strtobool.py @@ -0,0 +1,10 @@ +def strtobool(val: str) -> bool: + """Copied and slightly modified from distutils as it's being removed in a future version of + Python""" + val = val.lower() + if val in ("y", "yes", "t", 
"true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError("invalid truth value %r" % (val,)) diff --git a/tavern/util/retry.py b/tavern/_core/testhelpers.py similarity index 76% rename from tavern/util/retry.py rename to tavern/_core/testhelpers.py index f6a74cd33..095c34782 100644 --- a/tavern/util/retry.py +++ b/tavern/_core/testhelpers.py @@ -1,19 +1,39 @@ -from functools import wraps import logging +import time +from functools import wraps +from typing import Mapping -from . import exceptions -from .delay import delay -from .dict_util import format_keys +from tavern._core import exceptions +from tavern._core.dict_util import format_keys +from tavern._core.pytest.config import TestConfig logger = logging.getLogger(__name__) -def retry(stage, test_block_config): - """Look for retry and try to repeat the stage `retry` times. +def delay(stage, when, variables) -> None: + """Look for delay_before/delay_after and sleep Args: - test_block_config (dict): Configuration for current test stage (dict): test stage + when (str): 'before' or 'after' + variables (dict): Variables to format with + """ + + try: + length = format_keys(stage["delay_{}".format(when)], variables) + except KeyError: + pass + else: + logger.debug("Delaying %s request for %.2f seconds", when, length) + time.sleep(length) + + +def retry(stage: Mapping, test_block_config: TestConfig): + """Look for retry and try to repeat the stage `retry` times. + + Args: + stage: test stage + test_block_config: Configuration for current test """ if "max_retries" in stage: @@ -54,7 +74,7 @@ def wrapped(*args, **kwargs): stage["name"], i + 1, ) - delay(stage, "after", test_block_config["variables"]) + delay(stage, "after", test_block_config.variables) else: logger.error( "Stage '%s' did not succeed in %i retries.", @@ -81,11 +101,11 @@ def wrapped(*args, **kwargs): return retry_wrapper -def maybe_format_max_retries(max_retries, test_block_config): +def maybe_format_max_retries(max_retries, test_block_config: TestConfig) -> int: """Possibly handle max_retries validation""" # Probably a format variable, or just invalid (in which case it will fail further down) - max_retries = format_keys(max_retries, test_block_config["variables"]) + max_retries = format_keys(max_retries, test_block_config.variables) # Missing type token will mean that max_retries is still a string and will fail here # Could auto convert here as well, but keep it consistent and just fail diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index a1d4ef37f..af41cfd36 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,20 +1,20 @@ -from distutils.spawn import find_executable -from importlib import import_module import logging import os import pkgutil import subprocess import sys import warnings +from distutils.spawn import find_executable +from importlib import import_module +import grpc from google.protobuf import descriptor_pb2, json_format from google.protobuf import symbol_database as _symbol_database -import grpc from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc from grpc_status import rpc_status -from tavern.util import exceptions -from tavern.util.dict_util import check_expected_keys +from tavern._core import exceptions +from tavern._core.dict_util import check_expected_keys logger = logging.getLogger(__name__) @@ -22,11 +22,19 @@ warnings.simplefilter("ignore") warnings.warn("deprecated", DeprecationWarning) -# Find the 
Protocol Compiler. -if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): - protoc = os.environ.get("PROTOC") -else: - protoc = find_executable("protoc") + +def find_protoc() -> str: + # Find the Protocol Compiler. + if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): + return os.environ["PROTOC"] + + if protoc := find_executable("protoc"): + return protoc + + raise exceptions.ProtoCompilerException + + +protoc = find_protoc() def _generate_proto_import(source, output): @@ -34,14 +42,10 @@ def _generate_proto_import(source, output): .proto file. Does nothing if the output already exists and is newer than the input.""" - if protoc is None: - raise exceptions.ProtoGenError( - "protoc is not installed nor found in ../src. Please compile it " - "or install the binary package.\n" - ) - if not os.path.exists(source): - raise exceptions.ProtoGenError("Can't find required file: %s\n" % source) + raise exceptions.ProtoCompilerException( + "Can't find required file: {}".format(source) + ) if not os.path.exists(output): os.makedirs(output) @@ -56,11 +60,9 @@ def _generate_proto_import(source, output): protoc_command = [protoc, "-I" + source, "--python_out=" + output] protoc_command.extend(protos) - call = subprocess.run(protoc_command, capture_output=True) - try: - call.check_returncode() - except subprocess.CalledProcessError as e: - raise exceptions.ProtoGenError(call.stderr) from e + call = subprocess.run(protoc_command) + if call.returncode != 0: + raise exceptions.ProtoCompilerException(call.stderr) def _import_grpc_module(output): @@ -72,7 +74,7 @@ def _import_grpc_module(output): output_path.extend(mod.__path__) sys.path.extend(output_path) - for (_, name, _) in pkgutil.iter_modules(output_path): + for _, name, _ in pkgutil.iter_modules(output_path): import_module("." + name, package=output) @@ -91,7 +93,7 @@ def __init__(self, **kwargs): check_expected_keys(expected_blocks["connect"], _connect_args) metadata = kwargs.pop("metadata", {}) - self._metadata = metadata.items() + self._metadata = [(key, value) for key, value in metadata.items()] _proto_args = kwargs.pop("proto", {}) check_expected_keys(expected_blocks["proto"], _proto_args) @@ -117,13 +119,12 @@ def __init__(self, **kwargs): self.channels = {} self.sym_db = _symbol_database.Default() - if _proto_args: - proto_module = _proto_args.get("module", "proto") - if "source" in _proto_args: - proto_source = _proto_args["source"] - _generate_proto_import(proto_source, proto_module) + proto_module = _proto_args.get("module", "proto") + if "source" in _proto_args: + proto_source = _proto_args["source"] + _generate_proto_import(proto_source, proto_module) - _import_grpc_module(proto_module) + _import_grpc_module(proto_module) def _register_file_descriptor(self, service_proto): for i in range(len(service_proto.file_descriptor_proto)): @@ -198,7 +199,9 @@ def _make_call_request(self, host, full_service): try: self._get_reflection_info(channel, service_name=service) - except grpc.RpcError as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + except ( + grpc.RpcError + ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. 
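The protoc lookup above reads as the following standalone sketch. The find_protoc_sketch name is invented, shutil.which is used in place of the deprecated distutils.spawn.find_executable, and a plain RuntimeError stands in for the patch's ProtoCompilerException.

    import os
    from shutil import which

    def find_protoc_sketch() -> str:
        """Resolve the protobuf compiler: PROTOC env var first, then the PATH."""
        env_protoc = os.environ.get("PROTOC")
        if env_protoc and os.path.exists(env_protoc):
            return env_protoc
        found = which("protoc")
        if found:
            return found
        raise RuntimeError("protoc not found - install it or point PROTOC at the binary")
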
logger.error("Call failure: %s", rpc_error) status = rpc_status.from_call(rpc_error) if status is None: diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index b28f45fc6..77bc6304e 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -4,9 +4,9 @@ from box import Box -from tavern.request.base import BaseRequest -from tavern.util import exceptions -from tavern.util.dict_util import check_expected_keys, format_keys +from tavern._core import exceptions +from tavern._core.dict_util import check_expected_keys, format_keys +from tavern.request import BaseRequest logger = logging.getLogger(__name__) diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index d8a4e8718..42410bbec 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -3,15 +3,15 @@ from google.protobuf import json_format from grpc import StatusCode -from tavern.response.base import BaseResponse -from tavern.util.exceptions import TestFailError +from tavern._core.exceptions import TestFailError +from tavern.response import BaseResponse logger = logging.getLogger(__name__) class GRPCResponse(BaseResponse): def __init__(self, client, name, expected, test_block_config): - super().__init__(name, expected, test_block_config) + super(GRPCResponse, self).__init__(name, expected, test_block_config) self._client = client @@ -23,6 +23,37 @@ def __str__(self): else: return "" + def _validate_block(self, blockname, block): + """Validate a block of the response + + Args: + blockname (str): which part of the response is being checked + block (dict): The actual part being checked + """ + try: + expected_block = self.expected[blockname] or {} + except KeyError: + expected_block = {} + + if isinstance(expected_block, dict): + if expected_block.pop("$ext", None): + logger.warning( + "$ext function found in block %s - this has been moved to verify_response_with block - see documentation", + blockname, + ) + + logger.debug("Validating response %s against %s", blockname, expected_block) + + # 'strict' could be a list, in which case we only want to enable strict + # key checking for that specific bit of the response + test_strictness = self.test_block_config["strict"] + if isinstance(test_strictness, list): + block_strictness = blockname in test_strictness + else: + block_strictness = test_strictness + + self.recurse_check_key_match(expected_block, block, blockname, block_strictness) + def verify(self, response): # Get any keys to save saved = {} diff --git a/tavern/_plugins/grpc/schema.yaml b/tavern/_plugins/grpc/schema.yaml index a9a7b4a73..771a0c212 100644 --- a/tavern/_plugins/grpc/schema.yaml +++ b/tavern/_plugins/grpc/schema.yaml @@ -42,4 +42,4 @@ initialisation: type: str module: required: false - type: str \ No newline at end of file + type: str diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py index e7f4c4f2e..dd6f16d58 100644 --- a/tavern/_plugins/grpc/tavernhook.py +++ b/tavern/_plugins/grpc/tavernhook.py @@ -3,8 +3,7 @@ import yaml -from tavern.util.dict_util import format_keys - +from ..._core.dict_util import format_keys from .client import GRPCClient from .request import GRPCRequest from .response import GRPCResponse diff --git a/tavern/_plugins/mqtt/__init__.py b/tavern/_plugins/mqtt/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/_plugins/mqtt/client.py b/tavern/_plugins/mqtt/client.py index 9b0a2772e..32e1d0fcd 100644 --- 
a/tavern/_plugins/mqtt/client.py +++ b/tavern/_plugins/mqtt/client.py @@ -1,13 +1,15 @@ +import dataclasses import logging -from queue import Empty, Full, Queue import ssl import threading import time +from queue import Empty, Full, Queue +from typing import Dict, List, Mapping, MutableMapping, Optional import paho.mqtt.client as paho -from tavern.util import exceptions -from tavern.util.dict_util import check_expected_keys +from tavern._core import exceptions +from tavern._core.dict_util import check_expected_keys # MQTT error values _err_vals = { @@ -33,48 +35,79 @@ logger = logging.getLogger(__name__) -class _Subscription(object): - def __init__(self, topic, subscribed=False): - self.topic = topic - self.subscribed = subscribed +def root_topic(topic): + return topic.split("+")[0].split("#")[0] + + +@dataclasses.dataclass +class _Subscription: + topic: str + subscribed: bool = False + + # Arbitrary number, could just be 1 and only accept 1 message per stages + # but we might want to raise an error if more than 1 message is received + # during a test stage. + queue: Queue = dataclasses.field(default_factory=lambda: Queue(maxsize=30)) + + +def check_file_exists(key, filename) -> None: + try: + with open(filename, "r", encoding="utf-8"): + pass + except IOError as e: + raise exceptions.MQTTTLSError( + "Couldn't load '{}' from '{}'".format(key, filename) + ) from e -def _handle_tls_args(tls_args): +def _handle_tls_args( + tls_args: MutableMapping, +) -> Optional[Mapping]: """Make sure TLS options are valid""" if not tls_args: return None - if "enable" in tls_args: - if not tls_args["enable"]: - # if enable=false, return immediately - return None + if "enable" in tls_args and not tls_args["enable"]: + # if enable=false, return immediately + return None + + _check_and_update_common_tls_args(tls_args, ["certfile", "keyfile"]) + + return tls_args + + +def _handle_ssl_context_args( + ssl_context_args: MutableMapping, +) -> Optional[Mapping]: + """Make sure SSL Context options are valid""" + if not ssl_context_args: + return None + + _check_and_update_common_tls_args( + ssl_context_args, ["certfile", "keyfile", "cafile"] + ) + + return ssl_context_args + + +def _check_and_update_common_tls_args( + tls_args: MutableMapping, check_file_keys: List[str] +): + """Checks common args between ssl/tls args""" + + # could be moved to schema validation stage + for key in check_file_keys: + if key in tls_args: + check_file_exists(key, tls_args[key]) if "keyfile" in tls_args and "certfile" not in tls_args: raise exceptions.MQTTTLSError( "If specifying a TLS keyfile, a certfile also needs to be specified" ) - def check_file_exists(key): - try: - with open(tls_args[key], "r", encoding="utf-8"): - pass - except IOError as e: - raise exceptions.MQTTTLSError( - "Couldn't load '{}' from '{}'".format(key, tls_args[key]) - ) from e - except KeyError: - pass - - # could be moved to schema validation stage - check_file_exists("certfile") - check_file_exists("keyfile") - - # This shouldn't raise an AttributeError because it's enumerated - try: + if "cert_reqs" in tls_args: tls_args["cert_reqs"] = getattr(ssl, tls_args["cert_reqs"]) - except KeyError: - pass try: tls_args["tls_version"] = getattr(ssl, tls_args["tls_version"]) @@ -88,13 +121,9 @@ def check_file_exists(key): except KeyError: pass - return tls_args - -class MQTTClient(object): - # pylint: disable=too-many-instance-attributes - - def __init__(self, **kwargs): +class MQTTClient: + def __init__(self, **kwargs) -> None: expected_blocks = { "client": { 
"client_id", @@ -116,6 +145,15 @@ def __init__(self, **kwargs): "ciphers", }, "auth": {"username", "password"}, + "ssl_context": { + "ca_certs", + "certfile", + "keyfile", + "password", + "tls_version", + "ciphers", + "alpn_protocols", + }, } logger.debug("Initialising MQTT client with %s", kwargs) @@ -135,17 +173,28 @@ def __init__(self, **kwargs): if "host" not in self._connect_args: msg = "Need 'host' in 'connect' block for mqtt" - logger.error(msg) raise exceptions.MissingKeysError(msg) self._connect_timeout = self._connect_args.pop("timeout", 3) - # If there is any tls kwarg (including 'enable'), enable tls + # If there is any tls or ssl_context kwarg, configure tls encryption file_tls_args = kwargs.pop("tls", {}) + file_ssl_context_args = kwargs.pop("ssl_context", {}) + + if file_tls_args and file_ssl_context_args: + msg = ( + "'tls' and 'ssl_context' are both specified but are mutually exclusive" + ) + raise exceptions.MQTTTLSError(msg) + check_expected_keys(expected_blocks["tls"], file_tls_args) self._tls_args = _handle_tls_args(file_tls_args) logger.debug("TLS is %s", "enabled" if self._tls_args else "disabled") + # If there is any SSL kwarg, enable tls through the SSL context + check_expected_keys(expected_blocks["ssl_context"], file_ssl_context_args) + self._ssl_context_args = _handle_ssl_context_args(file_ssl_context_args) + logger.debug("Paho client args: %s", self._client_args) self._client = paho.Client(**self._client_args) self._client.enable_logger() @@ -154,6 +203,11 @@ def __init__(self, **kwargs): self._client.username_pw_set(**self._auth_args) self._client.on_message = self._on_message + self._client.on_connect = self._on_connect + self._client.on_disconnect = self._on_disconnect + self._client.on_connect_fail = self._on_connect_fail + self._client.on_socket_open = self._on_socket_open + self._client.on_socket_close = self._on_socket_close if self._tls_args: try: @@ -167,59 +221,164 @@ def __init__(self, **kwargs): "Unexpected SSL error enabling TLS" ) from e - # Arbitrary number, could just be 1 and only accept 1 message per stages - # but we might want to raise an error if more than 1 message is received - # during a test stage. 
- self._message_queue = Queue(maxsize=10) - self._userdata = {"queue": self._message_queue} - self._client.user_data_set(self._userdata) + if self._ssl_context_args: + # Create SSLContext object + tls_version = self._ssl_context_args.get("tls_version") + if tls_version is None: + # If the python version supports it, use highest TLS version automatically + if hasattr(ssl, "PROTOCOL_TLS_CLIENT"): + tls_version = ssl.PROTOCOL_TLS_CLIENT + elif hasattr(ssl, "PROTOCOL_TLS"): + tls_version = ssl.PROTOCOL_TLS + else: + tls_version = ssl.PROTOCOL_TLSv1_2 + ca_certs = self._ssl_context_args.get("cert_reqs") + context = ssl.create_default_context(cafile=ca_certs) + + certfile = self._ssl_context_args.get("certfile") + keyfile = self._ssl_context_args.get("keyfile") + password = self._ssl_context_args.get("password") + + # Configure context + if certfile is not None: + context.load_cert_chain(certfile, keyfile, password) + + cert_reqs = self._ssl_context_args.get("cert_reqs") + if cert_reqs == ssl.CERT_NONE and hasattr(context, "check_hostname"): + context.check_hostname = False + + context.verify_mode = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if ca_certs is not None: + context.load_verify_locations(ca_certs) + else: + context.load_default_certs() + + ciphers = self._ssl_context_args.get("cipthers") + if ciphers is not None: + context.set_ciphers(ciphers) - # Topics to subscribe to - mapping of subscription message id to a tuple - # of (topic, sub_status) where sub_status is true or false based on - # whether it has finished subscribing or not - self._subscribed = {} + alpn_protocols = self._ssl_context_args.get("alpn_protocols") + if alpn_protocols is not None: + context.set_alpn_protocols(alpn_protocols) + + self._client.tls_set_context(context) + + if cert_reqs != ssl.CERT_NONE: + # Default to secure, sets context.check_hostname attribute + # if available + self._client.tls_insecure_set(False) + else: + # But with ssl.CERT_NONE, we can not check_hostname + self._client.tls_insecure_set(True) + + # Topics to subscribe to - mapping of subscription message id to subscription object + self._subscribed: Dict[int, _Subscription] = {} # Lock to ensure there is no race condition when subscribing self._subscribe_lock = threading.RLock() # callback self._client.on_subscribe = self._on_subscribe + # Mapping of topic -> subscription id, for indexing into self._subscribed + self._subscription_mappings: Dict[str, int] = {} + self._userdata = { + "_subscription_mappings": self._subscription_mappings, + "_subscribed": self._subscribed, + } + self._client.user_data_set(self._userdata) + @staticmethod - def _on_message(client, userdata, message): + def _on_message(client, userdata, message) -> None: """Add any messages received to the queue Todo: If the queue is faull trigger an error in main thread somehow """ - # pylint: disable=unused-argument logger.info("Received mqtt message on %s", message.topic) + sanitised = root_topic(message.topic) + try: - userdata["queue"].put(message) + userdata["_subscribed"][ + userdata["_subscription_mappings"][sanitised] + ].queue.put(message) + except KeyError as e: + raise exceptions.MQTTTopicException( + "Message received on unregistered topic: {}".format(message.topic) + ) from e except Full: logger.exception("message queue full") - def message_received(self, timeout=1): + @staticmethod + def _on_connect(client, userdata, flags, rc) -> None: + logger.debug( + "Client '%s' successfully connected to the broker with result code '%s'", + 
client._client_id.decode(), + paho.connack_string(rc), + ) + + @staticmethod + def _on_disconnect(client, userdata, rc) -> None: + if rc == paho.CONNACK_ACCEPTED: + logger.debug( + "Client '%s' successfully disconnected from the broker with result code '%s'", + client._client_id.decode(), + paho.connack_string(rc), + ) + else: + logger.warning( + "Client %s failed to disconnect cleanly due to %s, possibly from a network error", + client._client_id.decode(), + paho.connack_string(rc), + ) + + @staticmethod + def _on_connect_fail(client, userdata) -> None: + logger.error( + "Failed to connect client '%s' to the broker", client._client_id.decode() + ) + + @staticmethod + def _on_socket_open(client, userdata, socket) -> None: + logger.debug("MQTT socket opened") + + @staticmethod + def _on_socket_close(client, userdata, socket) -> None: + logger.debug("MQTT socket closed") + + def message_received(self, topic: str, timeout: int = 1): """Check that a message is in the message queue Args: + topic (str): topic to fetch message for timeout (int): How long to wait before signalling that the message was not received. Returns: - bool: whether the message was received within the timeout + paho.MQTTMessage: whether the message was received within the timeout Todo: Allow regexes for topic names? Better validation for mqtt payloads """ + sanitised = root_topic(topic) + try: - msg = self._message_queue.get(block=True, timeout=timeout) + with self._subscribe_lock: + queue = self._subscribed[self._subscription_mappings[sanitised]].queue + except KeyError as e: + raise exceptions.MQTTTopicException( + "Unregistered topic: {}".format(topic) + ) from e + + try: + msg = queue.get(block=True, timeout=timeout) except Empty: logger.error("Message not received after %d seconds", timeout) return None - else: - return msg + + return msg def publish(self, topic, payload=None, qos=None, retain=None): """publish message using paho library""" @@ -243,7 +402,7 @@ def publish(self, topic, payload=None, qos=None, retain=None): return msg - def _wait_for_subscriptions(self): + def _wait_for_subscriptions(self) -> None: """Wait for all pending subscriptions to finish""" logger.debug("Checking subscriptions") @@ -281,29 +440,32 @@ def not_finished_subscribing_to(): if not to_wait_for: logger.debug("Finished subscribing to all topics") - def subscribe(self, topic, *args, **kwargs): + def subscribe(self, topic: str, *args, **kwargs) -> None: """Subscribe to topic should be called for every expected message in mqtt_response """ logger.debug("Subscribing to topic '%s'", topic) - with self._subscribe_lock: - (status, mid) = self._client.subscribe(topic, *args, **kwargs) + (status, mid) = self._client.subscribe(topic, *args, **kwargs) - if status == 0: - self._subscribed[mid] = _Subscription(topic, False) - else: - logger.error("Error subscribing to '%s'", topic) + if status == 0: + sanitised = root_topic(topic) + with self._subscribe_lock: + self._subscription_mappings[sanitised] = mid + self._subscribed[mid] = _Subscription(topic) + else: + raise exceptions.MQTTError( + "Error subscribing to '{}' (err code {})".format(topic, status) + ) - def unsubscribe_all(self): + def unsubscribe_all(self) -> None: """Unsubscribe from all topics""" with self._subscribe_lock: for subscription in self._subscribed.values(): self._client.unsubscribe(subscription.topic) - def _on_subscribe(self, client, userdata, mid, granted_qos): - # pylint: disable=unused-argument + def _on_subscribe(self, client, userdata, mid: int, granted_qos) -> None: with 
self._subscribe_lock: if mid in self._subscribed: self._subscribed[mid].subscribed = True @@ -311,22 +473,22 @@ def _on_subscribe(self, client, userdata, mid, granted_qos): "Successfully subscribed to '%s'", self._subscribed[mid].topic ) else: + logger.debug("Only tracking: %s", self._subscribed.keys()) logger.warning( "Got SUBACK message with mid '%s', but did not recognise that mid - will try later", mid, ) - def __enter__(self): + def __enter__(self) -> "MQTTClient": logger.debug("Connecting to %s", self._connect_args) self._client.connect_async(**self._connect_args) self._client.loop_start() - elapsed = 0 + elapsed = 0.0 while elapsed < self._connect_timeout: - # pylint: disable=protected-access - if self._client._state == paho.mqtt_cs_connected: + if self._client.is_connected(): logger.debug("Connected to broker at %s", self._connect_args["host"]) return self else: @@ -343,9 +505,9 @@ def __enter__(self): ) raise exceptions.MQTTError - def __exit__(self, *args): + def __exit__(self, *args) -> None: self._disconnect() - def _disconnect(self): + def _disconnect(self) -> None: self._client.disconnect() self._client.loop_stop() diff --git a/tavern/_plugins/mqtt/jsonschema.yaml b/tavern/_plugins/mqtt/jsonschema.yaml new file mode 100644 index 000000000..7d1fb7edf --- /dev/null +++ b/tavern/_plugins/mqtt/jsonschema.yaml @@ -0,0 +1,169 @@ +$schema: "http://json-schema.org/draft-07/schema#" + +title: Paho MQTT schema +description: Schema for paho-mqtt connection + +### + +type: object +additionalProperties: false +required: + - paho-mqtt + +properties: + paho-mqtt: + type: object + description: Connection options for paho-mqtt + additionalProperties: false + required: + - connect + + properties: + client: + description: Arguments to pass to the paho-mqtt client constructor + + type: object + additionalProperties: false + + properties: + client_id: + type: string + description: MQTT client ID + + clean_session: + type: boolean + description: Whether to start a clean session + + transport: + type: string + description: Whether to use raw TCP or websockets to connect + enum: + - tcp + - websockets + + connect: + description: Connection options + + type: object + additionalProperties: false + + required: + - host + + properties: + host: + type: string + description: Host to connect to + + port: + type: integer + description: Port to use with connection + + keepalive: + type: number + description: How often to send keepalive packets + + timeout: + type: number + description: How long to wait for connection before giving up + + tls: + description: Basic custom options to control secure connection + + type: object + additionalProperties: false + + properties: + enable: + type: boolean + description: Whether to enable TLS + default: true + + ca_certs: + type: string + description: Path to CA cert bundle + + certfile: + type: string + description: Path to certificate for server + + keyfile: + type: string + description: Path to private key for client + + cert_reqs: + type: string + description: Controls connection with cert + enum: + - CERT_NONE + - CERT_OPTIONAL + - CERT_REQUIRED + + tls_version: + type: string + description: TLS version to use + + ciphers: + type: string + description: Allowed ciphers to use with connection + + ssl_context: + description: Advanced custom options to control secure connection using SSLContext + + type: object + additionalProperties: false + + properties: + ca_certs: + type: string + description: Path to CA cert bundle + + certfile: + type: string + description: Path 
to certificate for server + + keyfile: + type: string + description: Path to private key for client + + password: + type: string + description: Password for keyfile + + cert_reqs: + type: string + description: Controls connection with cert + enum: + - CERT_NONE + - CERT_OPTIONAL + - CERT_REQUIRED + + tls_version: + type: string + description: TLS version to use + + ciphers: + type: string + description: Allowed ciphers to use with connection + + alpn_protocols: + type: array + description: | + Which protocols the socket should advertise during the SSL/TLS handshake. + See https://docs.python.org/3/library/ssl.html#ssl.SSLContext.set_alpn_protocols + + auth: + description: Username and password for basic authorisation + + type: object + additionalProperties: false + + required: + - username + + properties: + username: + type: string + + password: + type: string diff --git a/tavern/_plugins/mqtt/request.py b/tavern/_plugins/mqtt/request.py index 28fe6ca6e..0a9de87a9 100644 --- a/tavern/_plugins/mqtt/request.py +++ b/tavern/_plugins/mqtt/request.py @@ -1,33 +1,34 @@ import functools import json import logging +from typing import Dict -from box import Box +from box.box import Box -from tavern.request.base import BaseRequest -from tavern.util import exceptions -from tavern.util.dict_util import check_expected_keys, format_keys -from tavern.util.extfunctions import update_from_ext -from tavern.util.report import attach_yaml +from tavern._core import exceptions +from tavern._core.dict_util import check_expected_keys, format_keys +from tavern._core.extfunctions import update_from_ext +from tavern._core.pytest.config import TestConfig +from tavern._core.report import attach_yaml +from tavern._plugins.mqtt.client import MQTTClient +from tavern.request import BaseRequest logger = logging.getLogger(__name__) -def get_publish_args(rspec, test_block_config): - """Format mqtt request args +def get_publish_args(rspec: Dict, test_block_config: TestConfig) -> dict: + """Format mqtt request args and update using ext functions""" - Todo: - Anything else to do here? - """ - - fspec = format_keys(rspec, test_block_config["variables"]) + fspec = format_keys(rspec, test_block_config.variables) - if "json" in rspec: - if "payload" in rspec: + if "json" in fspec: + if "payload" in fspec: raise exceptions.BadSchemaError( "Can only specify one of 'payload' or 'json' in MQTT request" ) + update_from_ext(fspec, ["json"]) + fspec["payload"] = json.dumps(fspec.pop("json")) return fspec @@ -39,20 +40,22 @@ class MQTTRequest(BaseRequest): Similar to RestRequest, publishes a single message. """ - def __init__(self, client, rspec, test_block_config): + def __init__( + self, client: MQTTClient, rspec: Dict, test_block_config: TestConfig + ) -> None: expected = {"topic", "payload", "json", "qos", "retain"} check_expected_keys(expected, rspec) publish_args = get_publish_args(rspec, test_block_config) - update_from_ext(publish_args, ["json"], test_block_config) + self._publish_args = publish_args self._prepared = functools.partial(client.publish, **publish_args) # Need to do this here because get_publish_args will modify the original # input, which we might want to use to format. 
No error handling because # all the error handling is done in the previous call - self._original_publish_args = format_keys(rspec, test_block_config["variables"]) + self._original_publish_args = format_keys(rspec, test_block_config.variables) # TODO # From paho: @@ -73,5 +76,5 @@ def run(self): raise exceptions.MQTTRequestException from e @property - def request_vars(self): + def request_vars(self) -> Box: return Box(self._original_publish_args) diff --git a/tavern/_plugins/mqtt/response.py b/tavern/_plugins/mqtt/response.py index 52d451c4c..73513f837 100644 --- a/tavern/_plugins/mqtt/response.py +++ b/tavern/_plugins/mqtt/response.py @@ -1,24 +1,37 @@ +import concurrent +import concurrent.futures +import contextlib +import itertools import json import logging import time +from dataclasses import dataclass +from typing import List, Optional, Tuple, Union -from tavern.response.base import BaseResponse -from tavern.testutils.pytesthook.newhooks import call_hook -from tavern.util import exceptions -from tavern.util.dict_util import check_keys_match_recursive -from tavern.util.loader import ANYTHING -from tavern.util.report import attach_yaml +from paho.mqtt.client import MQTTMessage + +from tavern._core import exceptions +from tavern._core.dict_util import check_keys_match_recursive +from tavern._core.loader import ANYTHING +from tavern._core.pytest.newhooks import call_hook +from tavern._core.report import attach_yaml +from tavern._core.strict_util import StrictSetting +from tavern.response import BaseResponse + +from .client import MQTTClient logger = logging.getLogger(__name__) +_default_timeout = 1 + class MQTTResponse(BaseResponse): - def __init__(self, client, name, expected, test_block_config): + def __init__(self, client: MQTTClient, name, expected, test_block_config) -> None: super().__init__(name, expected, test_block_config) self._client = client - self.received_messages = [] + self.received_messages = [] # type: ignore def __str__(self): if self.response: @@ -26,74 +39,145 @@ def __str__(self): else: return "" - def _get_payload_vals(self): - # TODO move this check to initialisation/schema checking - if "json" in self.expected: - if "payload" in self.expected: - raise exceptions.BadSchemaError( - "Can only specify one of 'payload' or 'json' in MQTT response" + def verify(self, response) -> dict: + """Ensure mqtt message has arrived + + Args: + response: not used except for debug printing + """ + + self.response = response + + try: + return self._await_response() + finally: + self._client.unsubscribe_all() + + def _await_response(self) -> dict: + """Actually wait for response + + Returns: + dict: things to save to variables for the rest of this test + """ + + # Get into class with metadata attached + expected = self.expected["mqtt_responses"] + + by_topic = { + m: list(v) for m, v in itertools.groupby(expected, lambda x: x["topic"]) + } + + correct_messages: List["_ReturnedMessage"] = [] + warnings: List[str] = [] + + with concurrent.futures.ThreadPoolExecutor() as executor: + futures = [] + + for topic, expected_for_topic in by_topic.items(): + logger.debug("Starting thread for messages on topic '%s'", topic) + futures.append( + executor.submit( + self._await_messages_on_topic, topic, expected_for_topic + ) ) - payload = self.expected["json"] - json_payload = True + for future in concurrent.futures.as_completed(futures): + # for future in futures: + try: + messages, warnings = future.result() + except Exception as e: + raise exceptions.ConcurrentError( + "unexpected error getting 
result from future" + ) from e + else: + warnings.extend(warnings) + correct_messages.extend(messages) - if payload.pop("$ext", None): - raise exceptions.InvalidExtBlockException( + if self.errors: + if warnings: + self._adderr("\n".join(warnings)) + + raise exceptions.TestFailError( + "Test '{:s}' failed:\n{:s}".format(self.name, self._str_errors()), + failures=self.errors, + ) + + saved = {} + + for msg in correct_messages: + # Check saving things from the payload and from json + saved.update( + self.maybe_get_save_values_from_save_block( + "payload", + msg.msg.payload, + outer_save_block=msg.expected, + ) + ) + saved.update( + self.maybe_get_save_values_from_save_block( "json", + msg.msg.payload, + outer_save_block=msg.expected, ) - elif "payload" in self.expected: - payload = self.expected["payload"] - json_payload = False - else: - payload = None - json_payload = False - - return payload, json_payload + ) - def _await_response(self): - """Actually wait for response""" + saved.update(self.maybe_get_save_values_from_ext(msg.msg, msg.expected)) - # pylint: disable=too-many-statements + # Trying to save might have introduced errors, so check again + if self.errors: + raise exceptions.TestFailError( + "Saving results from test '{:s}' failed:\n{:s}".format( + self.name, self._str_errors() + ), + failures=self.errors, + ) - topic = self.expected["topic"] - timeout = self.expected.get("timeout", 1) + return saved - test_strictness = self.test_block_config["strict"] - block_strictness = test_strictness.setting_for("json").is_on() + def _await_messages_on_topic( + self, topic: str, expected + ) -> Tuple[List["_ReturnedMessage"], List[str]]: + """ + Waits for the specific message - expected_payload, expect_json_payload = self._get_payload_vals() + Args: + expected (list): expected response for this block - # Any warnings to do with the request - # eg, if a message was received but it didn't match, message had payload, etc. 
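# A standalone sketch of the fan-out/fan-in shape used by the reworked _await_response
# above: one worker per expected topic, each returning (matched messages, warnings),
# merged back on the main thread. The names await_all and wait_on_topic and the sample
# data are illustrative only, not Tavern's API.
import concurrent.futures
from typing import Dict, List, Tuple

def wait_on_topic(topic: str, expected: List[dict]) -> Tuple[List[str], List[str]]:
    # Stand-in worker: the real plugin blocks on a per-topic queue until every
    # expected message arrives or its timeout expires.
    return ["<message on {}>".format(topic)], []

def await_all(by_topic: Dict[str, List[dict]]) -> Tuple[List[str], List[str]]:
    matched: List[str] = []
    warnings: List[str] = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [
            executor.submit(wait_on_topic, topic, expected)
            for topic, expected in by_topic.items()
        ]
        for future in concurrent.futures.as_completed(futures):
            messages, topic_warnings = future.result()
            matched.extend(messages)
            warnings.extend(topic_warnings)
    return matched, warnings

print(await_all({"devices/+/status": [{"json": {"ok": True}}]}))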
- warnings = [] + Returns: + tuple(msg, list): The correct message (if any) and warnings from processing the message + """ - def addwarning(w, *args, **kwargs): - logger.warning(w, *args, **kwargs) - warnings.append(w % args) + timeout = max(m.get("timeout", _default_timeout) for m in expected) - time_spent = 0 + # A list of verifiers that can be used to validate messages for this topic + verifiers = [_MessageVerifier(self.test_block_config, v) for v in expected] - msg = None + correct_messages = [] + warnings = [] - while time_spent < timeout: + time_spent = 0.0 + while (time_spent < timeout) and verifiers: t0 = time.time() - msg = self._client.message_received(timeout - time_spent) + msg = self._client.message_received(topic, timeout - time_spent) if not msg: # timed out break + logger.debug("Seeing if message '%s' matched expected", msg) + call_hook( self.test_block_config, "pytest_tavern_beta_after_every_response", - expected=self.expected, + expected=expected, response=msg, ) self.received_messages.append(msg) - msg.payload = msg.payload.decode("utf8") + with contextlib.suppress(AttributeError): + msg.payload = msg.payload.decode("utf8") attach_yaml( { @@ -104,109 +188,167 @@ def addwarning(w, *args, **kwargs): name="rest_response", ) - if expect_json_payload: - try: - msg.payload = json.loads(msg.payload) - except json.decoder.JSONDecodeError: - addwarning( - "Expected a json payload but got '%s'", - msg.payload, - exc_info=True, - ) - msg = None - continue - - if expected_payload is None: - # pylint: disable=no-else-break - if msg.payload is None or msg.payload == "": - logger.info( - "Got message with no payload (as expected) on '%s'", topic - ) - break - else: - addwarning( - "Message had payload '%s' but we expected no payload", - msg.payload, - ) - elif expected_payload is ANYTHING: - logger.info("Got message on %s matching !anything token", topic) - break - elif msg.payload != expected_payload: - if expect_json_payload: - try: - check_keys_match_recursive( - expected_payload, msg.payload, [], strict=block_strictness + found: List[int] = [] + for i, v in enumerate(verifiers): + if v.is_valid(msg): + correct_messages.append(_ReturnedMessage(v.expected, msg)) + if found: + logger.warning( + "Message was matched by multiple mqtt_response blocks" ) - except exceptions.KeyMismatchError: - # Just want to log the mismatch - pass - else: - logger.info( - "Got expected message in '%s' with payload '%s'", - msg.topic, - msg.payload, - ) - break + found.append(i) + warnings.extend(v.popwarnings()) + verifiers = [v for (i, v) in enumerate(verifiers) if i not in found] - addwarning( - "Got unexpected payload on topic '%s': '%s' (expected '%s')", - msg.topic, - msg.payload, - expected_payload, + time_spent += time.time() - t0 + + if verifiers: + for v in verifiers: + self._adderr( + "Expected '%s' on topic '%s' but no such message received", + v.expected_payload, + topic, ) - elif msg.topic != topic: + + for msg in correct_messages: + if msg.expected.get("unexpected"): + self._adderr( + "Got '%s' on topic '%s' marked as unexpected", + msg.expected["payload"], + topic, + ) + + self._maybe_run_validate_functions(msg) + + return correct_messages, warnings + + +@dataclass +class _ReturnedMessage: + """An actual message returned from the API and it's matching 'expected' block.""" + + expected: dict + msg: MQTTMessage + + +class _MessageVerifier: + def __init__(self, test_block_config, expected) -> None: + self.expires = time.time() + expected.get("timeout", _default_timeout) + + self.expected = 
expected + self.expected_payload, self.expect_json_payload = self._get_payload_vals( + expected + ) + + test_strictness = test_block_config.strict + self.block_strictness: StrictSetting = test_strictness.option_for("json") + + # Any warnings to do with the request + # eg, if a message was received but it didn't match, message had payload, etc. + self.warnings: List[str] = [] + + def is_valid(self, msg: MQTTMessage) -> bool: + if time.time() > self.expires: + return False + + topic = self.expected["topic"] + + def addwarning(w, *args, **kwargs): + logger.warning(w, *args, **kwargs) + self.warnings.append(w % args) + + if self.expect_json_payload: + try: + msg.payload = json.loads(msg.payload) + except json.decoder.JSONDecodeError: addwarning( - "Got unexpected message in '%s' with payload '%s'", - msg.topic, + "Expected a json payload but got '%s'", msg.payload, + exc_info=True, ) + return False + + if self.expected_payload is None: + if msg.payload is None or msg.payload == "": + logger.info("Got message with no payload (as expected) on '%s'", topic) + return True else: - logger.info( - "Got expected message in '%s' with payload '%s'", - msg.topic, + addwarning( + "Message had payload '%s' but we expected no payload", msg.payload, ) - break - - msg = None - time_spent += time.time() - t0 - - if msg: - self._maybe_run_validate_functions(msg) - else: - self._adderr( - "Expected '%s' on topic '%s' but no such message received", - expected_payload, - topic, + elif self.expected_payload is ANYTHING: + logger.info("Got message on %s matching !anything token", topic) + return True + elif msg.payload != self.expected_payload: + if self.expect_json_payload: + try: + check_keys_match_recursive( + self.expected_payload, + msg.payload, + [], + strict=self.block_strictness, + ) + except exceptions.KeyMismatchError: + # Just want to log the mismatch + pass + else: + logger.info( + "Got expected message in '%s' with expected payload", + msg.topic, + ) + logger.debug("Matched payload was '%s", msg.payload) + return True + + addwarning( + "Got unexpected payload on topic '%s': '%s' (expected '%s')", + msg.topic, + msg.payload, + self.expected_payload, ) - - if self.errors: - if warnings: - self._adderr("\n".join(warnings)) - - raise exceptions.TestFailError( - "Test '{:s}' failed:\n{:s}".format(self.name, self._str_errors()), - failures=self.errors, + else: + logger.info( + "Got expected message in '%s' with expected payload", + msg.topic, ) + logger.debug("Matched payload was '%s", msg.payload) + return True - saved = {} + return False - saved.update(self.maybe_get_save_values_from_save_block("json", msg.payload)) + @staticmethod + def _get_payload_vals(expected) -> Tuple[Optional[Union[str, dict]], bool]: + """Gets the payload from the 'expected' block - saved.update(self.maybe_get_save_values_from_ext(msg, self.expected)) - - return saved + Returns: + tuple: First element is the expected payload, second element is whether it's + expected to be json or not + """ + # TODO move this check to initialisation/schema checking + if "json" in expected: + if "payload" in expected: + raise exceptions.BadSchemaError( + "Can only specify one of 'payload' or 'json' in MQTT response" + ) - def verify(self, response): - """Ensure mqtt message has arrived + payload = expected["json"] + json_payload = True - Args: - response: not used - """ + if payload.pop("$ext", None): + raise exceptions.MisplacedExtBlockException( + "json", + ) + elif "payload" in expected: + payload = expected["payload"] + json_payload = False + 
else: + payload = None + json_payload = False - self.response = response + return payload, json_payload - try: - return self._await_response() - finally: - self._client.unsubscribe_all() + def popwarnings(self) -> List[str]: + popped = [] + while self.warnings: + popped.append(self.warnings.pop(0)) + return popped diff --git a/tavern/_plugins/mqtt/schema.yaml b/tavern/_plugins/mqtt/schema.yaml index d4bb8ef58..9dac6888a 100644 --- a/tavern/_plugins/mqtt/schema.yaml +++ b/tavern/_plugins/mqtt/schema.yaml @@ -7,7 +7,6 @@ initialisation: required: false type: map mapping: - client: required: false type: map @@ -82,6 +81,51 @@ initialisation: required: false type: str + ssl_context: + required: false + type: map + mapping: + ca_certs: + required: false + type: str + + certfile: + required: false + type: str + + keyfile: + required: false + type: str + + password: + required: false + type: str + # This is the password for the keyfile, and is only needed if the keyfile is password encrypted + # If not supplied, but the keyfile is password protect, the ssl module will prompt for a password in terminal + + cert_reqs: + required: false + type: str + enum: + - CERT_NONE + - CERT_OPTIONAL + - CERT_REQUIRED + + tls_version: + required: false + type: str + # This could be an enum but there's lots of them, and which ones are + # actually valid changes based on which version of python you're + # using. Just let any ssl errors propagate through + + ciphers: + required: false + type: str + + alpn_protocols: + required: false + type: array + auth: required: false type: map diff --git a/tavern/_plugins/mqtt/tavernhook.py b/tavern/_plugins/mqtt/tavernhook.py index 493989b30..9b68d18dc 100644 --- a/tavern/_plugins/mqtt/tavernhook.py +++ b/tavern/_plugins/mqtt/tavernhook.py @@ -3,7 +3,7 @@ import yaml -from tavern.util.dict_util import format_keys +from tavern._core.dict_util import format_keys from .client import MQTTClient from .request import MQTTRequest @@ -11,24 +11,27 @@ logger = logging.getLogger(__name__) - session_type = MQTTClient request_type = MQTTRequest request_block_name = "mqtt_publish" -def get_expected_from_request(stage, test_block_config, session): +def get_expected_from_request(response_block, test_block_config, session): + expected = None + # mqtt response is not required - m_expected = stage.get("mqtt_response") - if m_expected: - # format so we can subscribe to the right topic - f_expected = format_keys(m_expected, test_block_config["variables"]) - mqtt_client = session - mqtt_client.subscribe(f_expected["topic"], f_expected.get("qos", 1)) - expected = f_expected - else: - expected = {} + if response_block: + expected = {"mqtt_responses": []} + if isinstance(response_block, dict): + response_block = [response_block] + + for response in response_block: + # format so we can subscribe to the right topic + f_expected = format_keys(response, test_block_config.variables) + mqtt_client = session + mqtt_client.subscribe(f_expected["topic"], f_expected.get("qos", 1)) + expected["mqtt_responses"].append(f_expected) return expected @@ -36,6 +39,6 @@ def get_expected_from_request(stage, test_block_config, session): verifier_type = MQTTResponse response_block_name = "mqtt_response" -schema_path = join(abspath(dirname(__file__)), "schema.yaml") +schema_path = join(abspath(dirname(__file__)), "jsonschema.yaml") with open(schema_path, "r", encoding="utf-8") as schema_file: schema = yaml.load(schema_file, Loader=yaml.SafeLoader) diff --git a/tavern/_plugins/rest/__init__.py 
b/tavern/_plugins/rest/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/_plugins/rest/request.py b/tavern/_plugins/rest/request.py index 7e4fd3b6f..1639b9905 100644 --- a/tavern/_plugins/rest/request.py +++ b/tavern/_plugins/rest/request.py @@ -1,28 +1,31 @@ import contextlib -from contextlib import ExitStack -from itertools import filterfalse, tee import json import logging import mimetypes import os -from urllib.parse import quote_plus import warnings +from contextlib import ExitStack +from itertools import filterfalse, tee +from typing import Mapping, MutableMapping, Optional, Union +from urllib.parse import quote_plus -from box import Box import requests +from box.box import Box from requests.cookies import cookiejar_from_dict from requests.utils import dict_from_cookiejar -from tavern.request.base import BaseRequest -from tavern.util import exceptions -from tavern.util.dict_util import check_expected_keys, deep_dict_merge, format_keys -from tavern.util.extfunctions import update_from_ext -from tavern.util.report import attach_yaml +from tavern._core import exceptions +from tavern._core.dict_util import check_expected_keys, deep_dict_merge, format_keys +from tavern._core.extfunctions import update_from_ext +from tavern._core.general import valid_http_methods +from tavern._core.pytest.config import TestConfig +from tavern._core.report import attach_yaml +from tavern.request import BaseRequest logger = logging.getLogger(__name__) -def get_request_args(rspec, test_block_config): +def get_request_args(rspec: MutableMapping, test_block_config: TestConfig) -> dict: """Format the test spec given values inthe global config Todo: @@ -30,29 +33,23 @@ def get_request_args(rspec, test_block_config): can be generated from a function Args: - rspec (dict): Test spec - test_block_config (dict): Test block config + rspec: Test spec + test_block_config: Test block config Returns: - dict: Formatted test spec + Formatted test spec Raises: BadSchemaError: Tried to pass a body in a GET request """ - # pylint: disable=too-many-locals,too-many-statements - - request_args = {} - - # Ones that are required and are enforced to be present by the schema - required_in_file = ["method", "url"] - - optional_with_default = {"verify": True, "stream": False} - if "method" not in rspec: logger.debug("Using default GET method") rspec["method"] = "GET" + if "headers" not in rspec: + rspec["headers"] = {} + content_keys = ["data", "json", "files", "file_body"] in_request = [c for c in content_keys if c in rspec] @@ -67,23 +64,71 @@ def get_request_args(rspec, test_block_config): "send {})".format(" and ".join(in_request)) ) - headers = rspec.get("headers", {}) - has_content_header = "content-type" in [h.lower() for h in headers.keys()] + normalised_headers = {k.lower(): v for k, v in rspec["headers"].items()} + + def get_header(name): + return normalised_headers.get(name, None) + + content_header = get_header("content-type") + encoding_header = get_header("content-encoding") if "files" in rspec: - if has_content_header: + if content_header: logger.warning( - "Tried to specify a content-type header while sending a file - this will be ignored" + "Tried to specify a content-type header while sending multipart files - this will be ignored" ) rspec["headers"] = { - i: j for i, j in headers.items() if i.lower() != "content-type" + i: j + for i, j in normalised_headers.items() + if i.lower() != "content-type" } - fspec = format_keys(rspec, test_block_config["variables"]) + fspec = 
format_keys(rspec, test_block_config.variables) + + if fspec["method"] not in valid_http_methods: + raise exceptions.BadSchemaError( + "Unknown HTTP method {}".format(fspec["method"]) + ) + + # If the user is using the file_body key, try to guess what type of file/encoding it is. + filename = fspec.get("file_body") + if filename: + with ExitStack() as stack: + file_spec = guess_filespec(filename, stack, test_block_config) + fspec["file_body"] = filename + if len(file_spec) == 2: + logger.debug( + "No content type or encoding inferred from file_body for %s", + filename, + ) + + if len(file_spec) >= 3: + inferred_content_type = file_spec[2] + if content_header: + logger.info( + "inferred content type '%s' from %s, but using user specified content type '%s'", + inferred_content_type, + filename, + content_header, + ) + else: + fspec["headers"]["content-type"] = inferred_content_type + + if len(file_spec) == 4: + inferred_content_encoding = file_spec[3] + if encoding_header: + logger.info( + "inferred content encoding '%s' from %s, but using user specified encoding '%s", + inferred_content_encoding, + filename, + encoding_header, + ) + else: + fspec["headers"].update(**inferred_content_encoding) + + ######################################### - send_in_body = fspec.get("file_body") - if send_in_body: - request_args["file_body"] = send_in_body + request_args = {} def add_request_args(keys, optional): for key in keys: @@ -96,6 +141,12 @@ def add_request_args(keys, optional): # This should never happen raise + # Ones that are required and are enforced to be present by the schema + required_in_file = ["method", "url"] + + optional_with_default = {"verify": True, "stream": False} + + add_request_args(["file_body"], True) add_request_args(required_in_file, False) add_request_args(RestRequest.optional_in_file, True) @@ -148,7 +199,7 @@ def add_request_args(keys, optional): @contextlib.contextmanager -def _set_cookies_for_request(session, request_args): +def _set_cookies_for_request(session: requests.Session, request_args: Mapping): """ Possibly reset session cookies for a single request then set them back. If no cookies were present in the request arguments, do nothing. 
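For reference, a minimal sketch of the pattern _set_cookies_for_request uses here (the helper name override_cookies and the example cookie are illustrative, not part of the patch): temporarily swap the session's cookie jar for a single request, then restore the original jar afterwards.

import contextlib
import requests
from requests.cookies import cookiejar_from_dict
from requests.utils import dict_from_cookiejar

@contextlib.contextmanager
def override_cookies(session: requests.Session, cookies: dict):
    old_cookies = dict_from_cookiejar(session.cookies)
    session.cookies = cookiejar_from_dict({})  # start from a clean jar
    session.cookies.update(cookies)
    try:
        yield session
    finally:
        # Put the original cookies back so later stages are unaffected
        session.cookies = cookiejar_from_dict(old_cookies)

with override_cookies(requests.Session(), {"session_id": "abc123"}) as s:
    pass  # s.get(...) would be sent with only the overridden cookies

Restoring the jar in the finally block keeps later stages of the same test unaffected even if the request raises.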
@@ -157,8 +208,8 @@ def _set_cookies_for_request(session, request_args): the cookies anyway Args: - session (requests.Session): Current session - request_args (dict): current request arguments + session: Current session + request_args: current request arguments """ if "cookies" in request_args: old_cookies = dict_from_cookiejar(session.cookies) @@ -169,13 +220,13 @@ def _set_cookies_for_request(session, request_args): yield -def _check_allow_redirects(rspec, test_block_config): +def _check_allow_redirects(rspec: dict, test_block_config: TestConfig): """ Check for allow_redirects flag in settings/stage Args: - rspec (dict): request dictionary - test_block_config (dict): config available for test + rspec: request dictionary + test_block_config: config available for test Returns: bool: Whether to allow redirects for this stage or not @@ -184,7 +235,7 @@ def _check_allow_redirects(rspec, test_block_config): allow_redirects = False # Then check to see if we should follow redirects based on settings - global_follow_redirects = test_block_config.get("follow_redirects") + global_follow_redirects = test_block_config.follow_redirects if global_follow_redirects is not None: allow_redirects = global_follow_redirects @@ -204,23 +255,25 @@ def _check_allow_redirects(rspec, test_block_config): return allow_redirects -def _read_expected_cookies(session, rspec, test_block_config): +def _read_expected_cookies( + session: requests.Session, rspec: Mapping, test_block_config: TestConfig +) -> Optional[dict]: """ Read cookies to inject into request, ignoring others which are present Args: - session (Session): session object - rspec (dict): test spec - test_block_config (dict): config available for test + session: session object + rspec: test spec + test_block_config: config available for test Returns: - dict: cookies to use in request, if any + cookies to use in request, if any """ # Need to do this down here - it is separate from getting request args as # it depends on the state of the session existing_cookies = session.cookies.get_dict() cookies_to_use = format_keys( - rspec.get("cookies", None), test_block_config["variables"] + rspec.get("cookies", None), test_block_config.variables ) if cookies_to_use is None: @@ -273,7 +326,7 @@ def partition(pred, iterable): return deep_dict_merge(from_cookiejar, from_extra) -def _read_filespec(filespec): +def _read_filespec(filespec: Union[str, dict]): """ Get configuration for uploading file @@ -283,7 +336,7 @@ def _read_filespec(filespec): filespec: Either a string with the path to a file or a dictionary with file_path and possible content_type and/or content_encoding Returns: - tuple: (file path, content type, content encoding) + (file path, content type, content encoding) """ if isinstance(filespec, str): return filespec, None, None @@ -300,51 +353,26 @@ def _read_filespec(filespec): ) -def _get_file_arguments(request_args, stack, test_block_config): +def _get_file_arguments( + request_args: dict, stack: ExitStack, test_block_config: TestConfig +) -> dict: """Get corect arguments for anything that should be passed as a file to requests Args: - test_block_config (dict): config for test - stack (ExitStack): context stack to add file objects to so they're + request_args: args passed to requests + test_block_config: config for test + stack: context stack to add file objects to so they're closed correctly after use Returns: - dict: mapping of {"files": ...} to pass directly to requests + mapping of 'files' block to pass directly to requests """ files_to_send = {} for key, 
filespec in request_args.get("files", {}).items(): - if not mimetypes.inited: - mimetypes.init() - - filepath, content_type, encoding = _read_filespec(filespec) - filepath = format_keys(filepath, test_block_config["variables"]) - - filename = os.path.basename(filepath) - - # a 2-tuple ('filename', fileobj) - file_spec = [filename, stack.enter_context(open(filepath, "rb"))] - - # Try to guess as well, but don't override what the user specified - guessed_content_type, guessed_encoding = mimetypes.guess_type(filepath) - content_type = content_type or guessed_content_type - encoding = encoding or guessed_encoding - - # If it doesn't have a mimetype, or can't guess it, don't - # send the content type for the file - if content_type: - # a 3-tuple ('filename', fileobj, 'content_type') - logger.debug("content_type for '%s' = '%s'", filename, content_type) - file_spec.append(content_type) - if encoding: - # or a 4-tuple ('filename', fileobj, 'content_type', custom_headers) - logger.debug("encoding for '%s' = '%s'", filename, encoding) - # encoding is None for no encoding or the name of the - # program used to encode (e.g. compress or gzip). The - # encoding is suitable for use as a Content-Encoding header. - file_spec.append({"Content-Encoding": encoding}) + file_spec = guess_filespec(filespec, stack, test_block_config) files_to_send[key] = tuple(file_spec) @@ -354,6 +382,58 @@ def _get_file_arguments(request_args, stack, test_block_config): return {} +def guess_filespec( + filespec: Union[str, dict], stack: ExitStack, test_block_config: TestConfig +): + """tries to guess the content type and encoding from a file. + + Args: + test_block_config: config for test/stage + stack: exit stack to add open files context to + filespec: a string path to a file or a dictionary of the file path, content type, and encoding. + + Returns: + tuple: A tuple of either length 2 (filename and file object), 3 (as before, with ceontent type), or 4 (as before, with with content encoding) + + Notes: + If a 4-tuple is returned, the last element is a dictionary of headers to send to requests, _not_ the raw encoding value. + """ + if not mimetypes.inited: + mimetypes.init() + + filepath, content_type, encoding = _read_filespec(filespec) + + filepath = format_keys(filepath, test_block_config.variables) + filename = os.path.basename(filepath) + + # a 2-tuple ('filename', fileobj) + file_spec = [ + filename, + stack.enter_context(open(filepath, "rb")), + ] + + # Try to guess as well, but don't override what the user specified + guessed_content_type, guessed_encoding = mimetypes.guess_type(filepath) + content_type = content_type or guessed_content_type + encoding = encoding or guessed_encoding + + # If it doesn't have a mimetype, or can't guess it, don't + # send the content type for the file + if content_type: + # a 3-tuple ('filename', fileobj, 'content_type') + logger.debug("content_type for '%s' = '%s'", filename, content_type) + file_spec.append(content_type) + if encoding: + # or a 4-tuple ('filename', fileobj, 'content_type', custom_headers) + logger.debug("encoding for '%s' = '%s'", filename, encoding) + # encoding is None for no encoding or the name of the + # program used to encode (e.g. compress or gzip). The + # encoding is suitable for use as a Content-Encoding header. 
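# As a hedged illustration of the tuple being assembled in this function (the helper name
# build_file_spec and the sample file are made up), this is the shape requests accepts for
# each entry in 'files': ('filename', fileobj), optionally extended with a content type
# and a headers dict derived from the guessed encoding.
import mimetypes
import os

def build_file_spec(filepath: str, fileobj):
    if not mimetypes.inited:
        mimetypes.init()
    spec = [os.path.basename(filepath), fileobj]  # 2-tuple: ('filename', fileobj)
    content_type, encoding = mimetypes.guess_type(filepath)
    if content_type:
        spec.append(content_type)  # 3-tuple: add the guessed content type
        if encoding:
            # 4-tuple: extra headers; a guessed encoding (e.g. 'gzip') becomes Content-Encoding
            spec.append({"Content-Encoding": encoding})
    return tuple(spec)

print(build_file_spec("report.csv.gz", b"...raw bytes..."))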
+ file_spec.append({"Content-Encoding": encoding}) + + return file_spec + + class RestRequest(BaseRequest): optional_in_file = [ "json", @@ -369,13 +449,15 @@ class RestRequest(BaseRequest): # "auth" ] - def __init__(self, session, rspec, test_block_config): + def __init__( + self, session: requests.Session, rspec: dict, test_block_config: TestConfig + ) -> None: """Prepare request Args: - session (requests.Session): existing session - rspec (dict): test spec - test_block_config (dict): Any configuration for this the block of + session: existing session + rspec: test spec + test_block_config : Any configuration for this the block of tests Raises: @@ -383,10 +465,8 @@ def __init__(self, session, rspec, test_block_config): spec. Only valid keyword args to requests can be passed """ - if "meta" in rspec: - meta = rspec.pop("meta") - if meta and "clear_session_cookies" in meta: - session.cookies.clear_session_cookies() + if rspec.pop("clear_session_cookies", False): + session.cookies.clear_session_cookies() expected = { "method", @@ -413,7 +493,6 @@ def __init__(self, session, rspec, test_block_config): update_from_ext( request_args, RestRequest.optional_in_file, - test_block_config, ) # Used further down, but pop it asap to avoid unwanted side effects @@ -447,6 +526,7 @@ def prepared_request(): # These are mutually exclusive if file_body: + # Any headers will have been set in the above function file = stack.enter_context(open(file_body, "rb")) request_args.update(data=file) else: @@ -454,6 +534,10 @@ def prepared_request(): _get_file_arguments(request_args, stack, test_block_config) ) + headers = self._request_args.get("headers", {}) + for k, v in headers.items(): + headers[str(k)] = str(v) + return session.request(**self._request_args) self._prepared = prepared_request @@ -480,5 +564,5 @@ def run(self): raise exceptions.RestRequestException from e @property - def request_vars(self): + def request_vars(self) -> Box: return Box(self._request_args) diff --git a/tavern/_plugins/rest/response.py b/tavern/_plugins/rest/response.py index a158dcd6f..ca54b116f 100644 --- a/tavern/_plugins/rest/response.py +++ b/tavern/_plugins/rest/response.py @@ -1,29 +1,30 @@ +import contextlib import json import logging +from typing import Dict, Mapping, Optional from urllib.parse import parse_qs, urlparse +import requests from requests.status_codes import _codes # type:ignore -from tavern.response.base import BaseResponse, indent_err_text -from tavern.testutils.pytesthook.newhooks import call_hook -from tavern.util import exceptions -from tavern.util.dict_util import deep_dict_merge -from tavern.util.report import attach_yaml +from tavern._core import exceptions +from tavern._core.dict_util import deep_dict_merge +from tavern._core.pytest.newhooks import call_hook +from tavern._core.report import attach_yaml +from tavern.response import BaseResponse, indent_err_text logger = logging.getLogger(__name__) class RestResponse(BaseResponse): - def __init__(self, session, name, expected, test_block_config): - # pylint: disable=unused-argument - + def __init__(self, session, name: str, expected, test_block_config) -> None: defaults = {"status_code": 200} super().__init__(name, deep_dict_merge(defaults, expected), test_block_config) - self.status_code = None + self.status_code: Optional[int] = None - def check_code(code): + def check_code(code: int) -> None: if int(code) not in _codes: logger.warning("Unexpected status code '%s'", code) @@ -37,13 +38,13 @@ def check_code(code): except TypeError as e: raise 
exceptions.BadSchemaError("Invalid code") from e - def __str__(self): + def __str__(self) -> str: if self.response: return self.response.text.strip() else: return "" - def _verbose_log_response(self, response): + def _verbose_log_response(self, response) -> None: """Verbosely log the response object, with query params etc.""" logger.info("Response: '%s'", response) @@ -64,10 +65,8 @@ def log_dict_block(block, name): log_dict_block(response.headers, "Headers") - try: + with contextlib.suppress(ValueError): log_dict_block(response.json(), "Body") - except ValueError: - pass redirect_query_params = self._get_redirect_query_params(response) if redirect_query_params: @@ -76,7 +75,7 @@ def log_dict_block(block, name): logger.debug("Redirect location: %s", to_path) log_dict_block(redirect_query_params, "Redirect URL query parameters") - def _get_redirect_query_params(self, response): + def _get_redirect_query_params(self, response) -> Dict[str, str]: """If there was a redirect header, get any query parameters from it""" try: @@ -96,7 +95,7 @@ def _get_redirect_query_params(self, response): return redirect_query_params - def _check_status_code(self, status_code, body): + def _check_status_code(self, status_code, body) -> None: expected_code = self.expected["status_code"] if (isinstance(expected_code, int) and status_code == expected_code) or ( @@ -122,7 +121,7 @@ def _check_status_code(self, status_code, body): "Status code was %s, expected %s", status_code, expected_code ) - def verify(self, response): + def verify(self, response: requests.Response) -> dict: """Verify response against expected values and returns any values that we wanted to save for use in future requests @@ -130,10 +129,10 @@ def verify(self, response): matching values, validating a schema, etc... Args: - response (requests.Response): response object + response: response object Returns: - dict: Any saved values + Any saved values Raises: TestFailError: Something went wrong with validating the response @@ -204,3 +203,33 @@ def verify(self, response): ) return saved + + def _validate_block(self, blockname: str, block: Mapping) -> None: + """Validate a block of the response + + Args: + blockname: which part of the response is being checked + block: The actual part being checked + """ + try: + expected_block = self.expected[blockname] + except KeyError: + expected_block = None + + if isinstance(expected_block, dict): + if expected_block.pop("$ext", None): + raise exceptions.MisplacedExtBlockException( + blockname, + ) + + if blockname == "headers" and expected_block is not None: + # Special case for headers. 
These need to be checked in a case + # insensitive manner + block = {i.lower(): j for i, j in block.items()} + expected_block = {i.lower(): j for i, j in expected_block.items()} + + logger.debug("Validating response %s against %s", blockname, expected_block) + + test_strictness = self.test_block_config.strict + block_strictness = test_strictness.option_for(blockname) + self.recurse_check_key_match(expected_block, block, blockname, block_strictness) diff --git a/tavern/_plugins/rest/tavernhook.py b/tavern/_plugins/rest/tavernhook.py index b25bfafa6..208e32d36 100644 --- a/tavern/_plugins/rest/tavernhook.py +++ b/tavern/_plugins/rest/tavernhook.py @@ -2,9 +2,9 @@ import requests -from tavern.plugins import PluginHelperBase -from tavern.util import exceptions -from tavern.util.dict_util import format_keys +from tavern._core import exceptions +from tavern._core.dict_util import format_keys +from tavern._core.plugins import PluginHelperBase from .request import RestRequest from .response import RestResponse @@ -19,15 +19,13 @@ class TavernRestPlugin(PluginHelperBase): request_block_name = "request" @staticmethod - def get_expected_from_request(stage, test_block_config, session): - # pylint: disable=unused-argument - try: - r_expected = stage["response"] - except KeyError as e: - logger.error("Need a 'response' block if a 'request' is being sent") - raise exceptions.MissingSettingsError from e - - f_expected = format_keys(r_expected, test_block_config["variables"]) + def get_expected_from_request(response_block, test_block_config, session): + if response_block is None: + raise exceptions.MissingSettingsError( + "no response block specified for HTTP test stage" + ) + + f_expected = format_keys(response_block, test_block_config.variables) return f_expected verifier_type = RestResponse diff --git a/tavern/core.py b/tavern/core.py index db4aea835..c4c289dfd 100644 --- a/tavern/core.py +++ b/tavern/core.py @@ -1,286 +1,26 @@ -from contextlib import ExitStack -from copy import deepcopy -from distutils.util import strtobool -import functools -import logging import os +from contextlib import ExitStack +from typing import Union import pytest -from tavern.schemas.files import wrapfile -from tavern.util.strict_util import StrictLevel - -from .plugins import get_expected, get_extra_sessions, get_request_type, get_verifiers -from .testutils.pytesthook import call_hook -from .util import exceptions -from .util.delay import delay -from .util.dict_util import format_keys, get_tavern_box -from .util.report import attach_stage_content, wrap_step -from .util.retry import retry - -logger = logging.getLogger(__name__) - - -def _resolve_test_stages(test_spec, available_stages): - # Need to get a final list of stages in the tests (resolving refs) - test_stages = [] - for raw_stage in test_spec["stages"]: - stage = raw_stage - if stage.get("type") == "ref": - if "id" in stage: - ref_id = stage["id"] - if ref_id in available_stages: - # Make sure nothing downstream can change the globally - # defined stage. Just give the test a local copy. 
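# A condensed, standalone sketch of the ref-resolution behaviour in this (relocated) block;
# the function name resolve_stages and the sample data are illustrative, not the patch's API.
# 'ref' stages are replaced by a deep copy of the globally defined stage so that a test
# cannot mutate the shared definition.
from copy import deepcopy

def resolve_stages(stages, available):
    resolved = []
    for stage in stages:
        if stage.get("type") == "ref":
            ref_id = stage["id"]
            if ref_id not in available:
                raise KeyError("Unknown stage reference: {}".format(ref_id))
            stage = deepcopy(available[ref_id])  # local copy, global stays untouched
        resolved.append(stage)
    return resolved

available = {"login": {"id": "login", "name": "log in", "request": {"url": "..."}}}
print(resolve_stages([{"type": "ref", "id": "login"}], available))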
- stage = deepcopy(available_stages[ref_id]) - logger.debug("found stage reference: %s", ref_id) - else: - logger.error("Bad stage: unknown stage referenced: %s", ref_id) - raise exceptions.InvalidStageReferenceError( - "Unknown stage reference: {}".format(ref_id) - ) - else: - logger.error("Bad stage: 'ref' type must specify 'id'") - raise exceptions.BadSchemaError("'ref' stage type must specify 'id'") - test_stages.append(stage) - - return test_stages - - -def _get_included_stages(tavern_box, test_block_config, test_spec, available_stages): - """ - Get any stages which were included via config files which will be available - for use in this test - - Args: - available_stages (list): List of stages which already exist - tavern_box (box.Box): Available parameters for fomatting at this point - test_block_config (dict): Current test config dictionary - test_spec (dict): Specification for current test - - Returns: - list: Fully resolved - """ - - def stage_ids(s): - return [i["id"] for i in s] - - if test_spec.get("includes"): - # Need to do this separately here so there is no confusion between global and included stages - for included in test_spec["includes"]: - for stage in included.get("stages", {}): - if stage["id"] in stage_ids(available_stages): - raise exceptions.DuplicateStageDefinitionError( - "Stage id '{}' defined in stage-included test which was already defined in global configuration".format( - stage["id"] - ) - ) - - included_stages = [] - - for included in test_spec["includes"]: - if "variables" in included: - formatted_include = format_keys(included["variables"], tavern_box) - test_block_config["variables"].update(formatted_include) - - for stage in included.get("stages", []): - if stage["id"] in stage_ids(included_stages): - raise exceptions.DuplicateStageDefinitionError( - "Stage with specified id already defined: {}".format( - stage["id"] - ) - ) - included_stages.append(stage) - else: - included_stages = [] - - return included_stages - - -def run_test(in_file, test_spec, global_cfg): - """Run a single tavern test - - Note that each tavern test can consist of multiple requests (log in, - create, update, delete, etc). +from tavern._core import exceptions +from tavern._core.schema.files import wrapfile - The global configuration is copied and used as an initial configuration for - this test. Any values which are saved from any tests are saved into this - test block and can be used for formatting in later stages in the test. 
- Args: - in_file (str): filename containing this test - test_spec (dict): The specification for this test - global_cfg (dict): Any global configuration for this test - - No Longer Raises: - TavernException: If any of the tests failed - """ - - # pylint: disable=too-many-locals - - # Initialise test config for this test with the global configuration before - # starting - test_block_config = dict(global_cfg) - default_global_stricness = global_cfg["strict"] - - if "variables" not in test_block_config: - test_block_config["variables"] = {} - - tavern_box = get_tavern_box() - - if not test_spec: - logger.warning("Empty test block in %s", in_file) - return - - # Get included stages and resolve any into the test spec dictionary - available_stages = test_block_config.get("stages", []) - included_stages = _get_included_stages( - tavern_box, test_block_config, test_spec, available_stages - ) - all_stages = {s["id"]: s for s in available_stages + included_stages} - test_spec["stages"] = _resolve_test_stages(test_spec, all_stages) - - test_block_config["variables"]["tavern"] = tavern_box["tavern"] - - test_block_name = test_spec["test_name"] - - logger.info("Running test : %s", test_block_name) - - with ExitStack() as stack: - sessions = get_extra_sessions(test_spec, test_block_config) - - for name, session in sessions.items(): - logger.debug("Entering context for %s", name) - stack.enter_context(session) - - def getonly(stage): - o = stage.get("only") - if o is None: - return False - elif isinstance(o, bool): - return o - else: - return strtobool(o) - - has_only = any(getonly(stage) for stage in test_spec["stages"]) - - # Run tests in a path in order - for idx, stage in enumerate(test_spec["stages"]): - if stage.get("skip"): - continue - if has_only and not getonly(stage): - continue - - test_block_config["strict"] = default_global_stricness - _calculate_stage_strictness(stage, test_block_config, test_spec) - - # Wrap run_stage with retry helper - run_stage_with_retries = retry(stage, test_block_config)(run_stage) - - partial = functools.partial( - run_stage_with_retries, sessions, stage, test_block_config - ) - - allure_name = "Stage {}: {}".format(idx, stage["name"]) - step = wrap_step(allure_name, partial) - - try: - step() - except exceptions.TavernException as e: - e.stage = stage - e.test_block_config = test_block_config - raise - - if getonly(stage): - break - - -def _calculate_stage_strictness(stage, test_block_config, test_spec): - """Figure out the strictness for this stage - - Can be overridden per stage, or per test - - Priority is global (see pytest util file) <= test <= stage - """ - stage_options = None - - if test_spec.get("strict", None) is not None: - stage_options = test_spec["strict"] - - if stage.get("response", {}).get("strict", None) is not None: - stage_options = stage["response"]["strict"] - elif stage.get("mqtt_response", {}).get("strict", None) is not None: - stage_options = stage["mqtt_response"]["strict"] - - if stage_options is not None: - logger.debug("Overriding global strictness") - if stage_options is True: - strict_level = StrictLevel.all_on() - elif stage_options is False: - strict_level = StrictLevel.all_off() - else: - strict_level = StrictLevel.from_options(stage_options) - - test_block_config["strict"] = strict_level - else: - logger.debug("Global default strictness used for this stage") - - logger.debug( - "Strict key checking for this stage is '%s'", test_block_config["strict"] - ) - - -def run_stage(sessions, stage, test_block_config): - """Run one stage 
from the test - - Args: - sessions (dict): Dictionary of relevant 'session' objects used for this test - stage (dict): specification of stage to be run - test_block_config (dict): available variables for test - """ - name = stage["name"] - - attach_stage_content(stage) - - r = get_request_type(stage, test_block_config, sessions) - - tavern_box = test_block_config["variables"]["tavern"] - tavern_box.update(request_vars=r.request_vars) - - expected = get_expected(stage, test_block_config, sessions) - - delay(stage, "before", test_block_config["variables"]) - - logger.info("Running stage : %s", name) - - call_hook( - test_block_config, - "pytest_tavern_beta_before_every_request", - request_args=r.request_vars, - ) - - response = r.run() - - verifiers = get_verifiers(stage, test_block_config, sessions, expected) - for v in verifiers: - saved = v.verify(response) - test_block_config["variables"].update(saved) - - tavern_box.pop("request_vars") - delay(stage, "after", test_block_config["variables"]) - - -def _get_or_wrap_global_cfg(stack, tavern_global_cfg): +def _get_or_wrap_global_cfg( + stack: ExitStack, tavern_global_cfg: Union[dict, str] +) -> str: """ Try to parse global configuration from given argument. Args: - stack (ExitStack): context stack for wrapping file if a dictionary is given - tavern_global_cfg (dict, str): Dictionary or string. It should be a + stack: context stack for wrapping file if a dictionary is given + tavern_global_cfg: Dictionary or string. It should be a path to a file or a dictionary with configuration. Returns: - str: path to global config file + path to global config file Raises: InvalidSettingsError: If global config was not of the right type or a given path @@ -289,6 +29,7 @@ def _get_or_wrap_global_cfg(stack, tavern_global_cfg): Todo: Once python 2 is dropped, allow this to take a 'path like object' """ + if isinstance(tavern_global_cfg, str): if not os.path.exists(tavern_global_cfg): raise exceptions.InvalidSettingsError( @@ -308,18 +49,18 @@ def _get_or_wrap_global_cfg(stack, tavern_global_cfg): def run( - in_file, + in_file: str, tavern_global_cfg=None, tavern_mqtt_backend=None, tavern_http_backend=None, tavern_grpc_backend=None, tavern_strict=None, pytest_args=None, -): # pylint: disable=too-many-arguments +): """Run all tests contained in a file using pytest.main() Args: - in_file (str): file to run tests on + in_file: file to run tests on tavern_global_cfg (str, dict): Extra global config tavern_mqtt_backend (str, optional): name of MQTT plugin to use. If not specified, uses tavern-mqtt @@ -333,7 +74,7 @@ def run( to Pytest as if they were command line arguments Returns: - bool: Whether ALL tests passed or not + Whether ALL tests passed or not """ pytest_args = pytest_args or [] diff --git a/tavern/entry.py b/tavern/entry.py index 447ecec97..df48dd47e 100644 --- a/tavern/entry.py +++ b/tavern/entry.py @@ -1,13 +1,13 @@ import argparse -from argparse import ArgumentParser import logging.config +from argparse import ArgumentParser from textwrap import dedent from .core import run class TavernArgParser(ArgumentParser): - def __init__(self): + def __init__(self) -> None: description = """Parse yaml + make requests against an API Any extra arguments will be passed directly to Pytest. 
Run py.test --help for a list""" diff --git a/tavern/testutils/helpers.py b/tavern/helpers.py similarity index 55% rename from tavern/testutils/helpers.py rename to tavern/helpers.py index 987cf3e0b..8cb10878a 100644 --- a/tavern/testutils/helpers.py +++ b/tavern/helpers.py @@ -2,26 +2,30 @@ import json import logging import re +from typing import Dict, List, Optional -from box import Box import jmespath import jwt +import requests +from box.box import Box -from tavern.schemas.files import verify_generic -from tavern.testutils.jmesutils import actual_validation, validate_comparison -from tavern.util import exceptions -from tavern.util.dict_util import check_keys_match_recursive, recurse_access_key +from tavern._core import exceptions +from tavern._core.dict_util import check_keys_match_recursive, recurse_access_key +from tavern._core.jmesutils import actual_validation, validate_comparison +from tavern._core.schema.files import verify_pykwalify logger = logging.getLogger(__name__) -def check_exception_raised(response, exception_location): +def check_exception_raised( + response: requests.Response, exception_location: str +) -> None: """Make sure the result from the server is the same as the exception we expect to raise Args: - response (requests.Response): response object - exception_location (str): entry point style location of exception + response: response object + exception_location: entry point style location of exception """ dumped = json.loads(response.content.decode("utf8")) @@ -30,29 +34,41 @@ def check_exception_raised(response, exception_location): module = importlib.import_module(module_name) exception = getattr(module, exception_name) - if "title" in dumped: - assert dumped["title"] == exception.error_title - elif "error" in dumped: - assert dumped["error"] == exception.error_title + for possible_title in ["title", "error"]: + if possible_title in dumped: + try: + assert dumped[possible_title] == exception.error_title # noqa + except AssertionError as e: + raise exceptions.UnexpectedExceptionError( + "Incorrect title of exception" + ) from e actual_description = dumped.get("description", dumped.get("error_description")) expected_description = getattr( - exception, "error_description", getattr(exception, "description") + exception, "error_description", exception.description ) try: - assert actual_description == expected_description - except AssertionError: + assert actual_description == expected_description # noqa + except AssertionError as e: # If it has a format, ignore this error. Would be annoying to say how to # format things in the validator, especially if it's a set/dict which is # unordered + # TODO: improve logic? Use a regex like '{.+?}' instead? 
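# Roughly what the surrounding check in check_exception_raised does, as a self-contained
# sketch (the FakeApiError class and its fields are invented for illustration): the
# description is only compared verbatim when it contains no str.format() placeholders,
# and the status code comes from the first word of a "404 Not Found" style status string.
class FakeApiError:
    error_title = "Not found"
    description = "No item with id {item_id}"
    status = "404 Not Found"

def matches(dumped: dict, status_code: int, exc=FakeApiError) -> bool:
    if dumped.get("title", dumped.get("error")) != exc.error_title:
        return False
    expected_description = getattr(exc, "error_description", exc.description)
    actual_description = dumped.get("description", dumped.get("error_description"))
    if not any(c in expected_description for c in "{}"):
        # Only compare verbatim when there is nothing left to format in
        if actual_description != expected_description:
            return False
    return status_code == int(exc.status.split()[0])

print(matches({"title": "Not found", "description": "No item with id 3"}, 404))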
if not any(i in expected_description for i in "{}"): - raise + raise exceptions.UnexpectedExceptionError( + "exception description did not match" + ) from e - assert response.status_code == int(exception.status.split()[0]) + try: + assert response.status_code == int(exception.status.split()[0]) # noqa + except AssertionError as e: + raise exceptions.UnexpectedExceptionError( + "exception status code did not match" + ) from e -def validate_jwt(response, jwt_key, **kwargs): +def validate_jwt(response, jwt_key, **kwargs) -> Dict[str, Box]: """Make sure a jwt is valid This uses the pyjwt library to decode the jwt, so any keyword args needed @@ -72,6 +88,7 @@ def validate_jwt(response, jwt_key, **kwargs): dict: dictionary of jwt: boxed jwt claims """ token = response.json()[jwt_key] + decoded = jwt.decode(token, **kwargs) logger.debug("Decoded jwt to %s", decoded) @@ -79,7 +96,7 @@ def validate_jwt(response, jwt_key, **kwargs): return {"jwt": Box(decoded)} -def validate_pykwalify(response, schema): +def validate_pykwalify(response, schema) -> None: """Make sure the response matches a given schema Args: @@ -94,20 +111,26 @@ def validate_pykwalify(response, schema): ) from e else: - verify_generic(to_verify, schema) + verify_pykwalify(to_verify, schema) -def validate_regex(response, expression, *, header=None, in_jmespath=None): +def validate_regex( + response: requests.Response, + expression: str, + *, + header: Optional[str] = None, + in_jmespath: Optional[str] = None, +) -> Dict[str, Box]: """Make sure the response matches a regex expression Args: - response (requests.Response): requests.Response object - expression (str): Regex expression to use - header (str): Match against a particular header instead of the body - in_jmespath (str): if present, jmespath to access before trying to match + response: requests.Response object + expression: Regex expression to use + header: Match against a particular header instead of the body + in_jmespath: if present, jmespath to access before trying to match Returns: - dict: dictionary of regex: boxed name capture groups + mapping of regex to boxed name capture groups """ if header and in_jmespath: @@ -148,12 +171,12 @@ def validate_regex(response, expression, *, header=None, in_jmespath=None): return {"regex": Box(match.groupdict())} -def validate_content(response, comparisons): +def validate_content(response: requests.Response, comparisons: List[str]) -> None: """Asserts expected value with actual value using JMES path expression Args: - response (Response): reqeusts.Response object. - comparisons(list): + response: reqeusts.Response object. + comparisons: A list of dict containing the following keys: 1. jmespath : JMES path expression to extract data from. 2. operator : Operator to use to compare data. 
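The hunk that follows reworks validate_content and adds check_jmespath_match. As a rough, standalone illustration of the underlying idea (the function name check_jmespath and the sample response are not from the patch), a JMES path expression is evaluated against the parsed body and the result compared with an expected value:

import jmespath

def check_jmespath(parsed_response, query: str, expected=None):
    actual = jmespath.search(query, parsed_response)
    if actual is None:
        raise AssertionError("JMES path '{}' not found in response".format(query))
    if expected is not None and actual != expected:
        raise AssertionError("'{}' gave '{}', expected '{}'".format(query, actual, expected))
    return actual

response_body = {"users": [{"name": "ada", "admin": True}]}
print(check_jmespath(response_body, "users[0].name", "ada"))

The real helper additionally distinguishes "present at all" from "equals an expected value" and reuses check_keys_match_recursive for the comparison.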
@@ -168,12 +191,35 @@ def validate_content(response, comparisons): expession = " ".join([str(path), str(_operator), str(expected)]) parsed_expession = " ".join([str(actual), str(_operator), str(expected)]) - if _operator == "eq" and 0: - check_keys_match_recursive(expected, actual, []) - else: - try: - actual_validation( - _operator, actual, expected, parsed_expession, expession - ) - except AssertionError as e: - raise exceptions.JMESError("Error validating JMES") from e + try: + actual_validation(_operator, actual, expected, parsed_expession, expession) + except AssertionError as e: + raise exceptions.JMESError("Error validating JMES") from e + + +def check_jmespath_match(parsed_response, query: str, expected: Optional[str] = None): + """ + Check that the JMES path given in 'query' is present in the given response + + Args: + parsed_response: Response list or dict + query: JMES query + expected: Possible value to match against. If None, + 'query' will just check that _something_ is present + """ + actual = jmespath.search(query, parsed_response) + + msg = "JMES path '{}' not found in response".format(query) + + if actual is None: + raise exceptions.JMESError(msg) + + if expected is not None: + # Reuse dict util helper as it should behave the same + check_keys_match_recursive(expected, actual, [], True) + elif not actual and not (actual == expected): + # This can return an empty list, but it might be what we expect. if not, + # raise an exception + raise exceptions.JMESError(msg) + + return actual diff --git a/tavern/request/base.py b/tavern/request.py similarity index 84% rename from tavern/request/base.py rename to tavern/request.py index bd8bf5e92..cc77bd47d 100644 --- a/tavern/request/base.py +++ b/tavern/request.py @@ -1,13 +1,15 @@ -from abc import abstractmethod import logging +from abc import abstractmethod + +import box logger = logging.getLogger(__name__) -class BaseRequest(object): +class BaseRequest: @property @abstractmethod - def request_vars(self): + def request_vars(self) -> box.Box: """ Variables used in the request diff --git a/tavern/request/__init__.py b/tavern/request/__init__.py deleted file mode 100644 index 319558f54..000000000 --- a/tavern/request/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .base import BaseRequest - -__all__ = ["BaseRequest"] diff --git a/tavern/response/base.py b/tavern/response.py similarity index 53% rename from tavern/response/base.py rename to tavern/response.py index 1ab4e4e9e..8bee7a6a1 100644 --- a/tavern/response/base.py +++ b/tavern/response.py @@ -1,42 +1,45 @@ +import logging +import traceback from abc import abstractmethod from collections.abc import Mapping -import logging from textwrap import indent -import traceback +from typing import Any, List, Optional -from tavern.util import exceptions -from tavern.util.dict_util import check_keys_match_recursive, recurse_access_key -from tavern.util.extfunctions import get_wrapped_response_function +from tavern._core import exceptions +from tavern._core.dict_util import check_keys_match_recursive, recurse_access_key +from tavern._core.extfunctions import get_wrapped_response_function +from tavern._core.pytest.config import TestConfig +from tavern._core.strict_util import StrictOption logger = logging.getLogger(__name__) -def indent_err_text(err): +def indent_err_text(err: str) -> str: if err == "null": err = "" return indent(err, " " * 4) -class BaseResponse(object): - def __init__(self, name, expected, test_block_config): +class BaseResponse: + def __init__(self, name: str, expected, 
test_block_config: TestConfig) -> None: self.name = name # all errors in this response - self.errors = [] + self.errors: List[str] = [] - self.validate_functions = [] + self.validate_functions: List = [] self._check_for_validate_functions(expected) self.test_block_config = test_block_config self.expected = expected - self.response = None + self.response: Optional[Any] = None - def _str_errors(self): + def _str_errors(self) -> str: return "- " + "\n- ".join(self.errors) - def _adderr(self, msg, *args, e=None): + def _adderr(self, msg, *args, e=None) -> None: if e: logger.exception(msg, *args) else: @@ -47,30 +50,48 @@ def _adderr(self, msg, *args, e=None): def verify(self, response): """Verify response against expected values and returns any values that we wanted to save for use in future requests + + It is expected that anything subclassing this can throw an exception indicating that the response + verification failed. """ - def recurse_check_key_match(self, expected_block, block, blockname, strict): + def recurse_check_key_match( + self, + expected_block: Optional[Mapping], + block: Mapping, + blockname: str, + strict: StrictOption, + ) -> None: """Valid returned data against expected data Todo: Optionally use a validation library too Args: - expected_block (dict): expected data - block (dict): actual data - blockname (str): 'name' of this block (params, mqtt, etc) for error messages + strict: strictness setting for this block + expected_block: expected data + block: actual data + blockname: 'name' of this block (params, mqtt, etc) for error messages """ - if not expected_block: + if expected_block is None: logger.debug("No expected %s to check against", blockname) return # This should be done _before_ it gets to this point - typically in get_expected_from_request from plugin # expected_block = format_keys( - # expected_block, self.test_block_config["variables"] + # expected_block, self.test_block_config.variables # ) if block is None: + if not expected_block: + logger.debug( + "No %s in response to check, but not erroring because expected was %s", + blockname, + expected_block, + ) + return + self._adderr( "expected %s in the %s, but there was no response %s", expected_block, @@ -89,7 +110,7 @@ def recurse_check_key_match(self, expected_block, block, blockname, strict): except exceptions.KeyMismatchError as e: self._adderr(e.args[0], e=e) - def _check_for_validate_functions(self, response_block): + def _check_for_validate_functions(self, response_block) -> None: """ See if there were any functions specified in the response block and save them for later use @@ -118,14 +139,14 @@ def check_deprecated_validate(name): if isinstance(block, dict): check_ext_functions(block.get("$ext", None)) if nfuncs != len(self.validate_functions): - raise exceptions.InvalidExtBlockException( + raise exceptions.MisplacedExtBlockException( name, ) # Could put in an isinstance check here check_deprecated_validate("json") - def _maybe_run_validate_functions(self, response): + def _maybe_run_validate_functions(self, response) -> None: """Run validation functions if available Note: @@ -141,7 +162,7 @@ def _maybe_run_validate_functions(self, response): for vf in self.validate_functions: try: vf(response) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self._adderr( "Error calling validate function '%s':\n%s", vf.func, @@ -149,27 +170,32 @@ def _maybe_run_validate_functions(self, response): e=e, ) - def maybe_get_save_values_from_ext(self, response, expected): + def 
maybe_get_save_values_from_ext( + self, response: Any, read_save_from: Mapping + ) -> dict: """If there is an $ext function in the save block, call it and save the response Args: - expected (dict): the expected response (incl body/json/headers/mqtt topic/etc etc) - Actual contents depends on which type of response is being checked - response (object): response object. - Actual contents depends on which type of response is being checked + response: response object. Actual contents depends on which type of + response is being checked + read_save_from: the expected response (incl + body/json/headers/mqtt topic/etc etc) containing a spec for which things + should be saved from the response. Actual contents depends on which type of + response is being checked Returns: - dict: mapping of name: value of things to save + mapping of name to value of things to save """ + try: - wrapped = get_wrapped_response_function(expected["save"]["$ext"]) + wrapped = get_wrapped_response_function(read_save_from["save"]["$ext"]) except KeyError: logger.debug("No save function for this stage") return {} try: - to_save = wrapped(response) - except Exception as e: # pylint: disable=broad-except + saved = wrapped(response) + except Exception as e: self._adderr( "Error calling save function '%s':\n%s", wrapped.func, @@ -178,86 +204,87 @@ def maybe_get_save_values_from_ext(self, response, expected): ) return {} - if isinstance(to_save, dict): - return to_save - elif to_save is not None: + logger.debug("saved %s from ext function", saved) + + if isinstance(saved, dict): + return saved + elif saved is not None: self._adderr( - "Unexpected return value '%s' from $ext save function", to_save + "Unexpected return value '%s' from $ext save function (expected a dict or None)", + saved, ) return {} - def maybe_get_save_values_from_save_block(self, key, to_check): - """Save a value from a specific block in the response - - This is different from maybe_get_save_values_from_ext - depends on the kind of response + def maybe_get_save_values_from_save_block( + self, + key: str, + save_from: Optional[Mapping], + *, + outer_save_block: Optional[Mapping] = None, + ) -> dict: + """Save a value from a specific block in the response. 
- Args: - to_check (dict): An element of the response from which the given key - is extracted - key (str): Key to use + See docs for maybe_get_save_values_from_given_block for more info - Returns: - dict: dictionary of save_name: value, where save_name is the key we - wanted to save this value as + Keyword Args: + outer_save_block: Read things to save from this block instead of self.expected """ - saved = {} + + logger.debug("save from: %s", save_from) + + read_save_from = outer_save_block or self.expected + logger.debug("save spec: %s", read_save_from.get("save")) try: - expected = self.expected["save"][key] + to_save = read_save_from["save"][key] except KeyError: logger.debug("Nothing expected to save for %s", key) return {} - if not to_check: - self._adderr("No %s in response (wanted to save %s)", key, expected) - else: - for save_as, joined_key in expected.items(): - try: - saved[save_as] = recurse_access_key(to_check, joined_key) - except ( - exceptions.InvalidQueryResultTypeError, - exceptions.KeySearchNotFoundError, - ) as e: - self._adderr( - "Wanted to save '%s' from '%s', but it did not exist in the response", - joined_key, - key, - e=e, - ) - - if saved: - logger.debug("Saved %s for '%s' from response", saved, key) + return self.maybe_get_save_values_from_given_block(key, save_from, to_save) - return saved + def maybe_get_save_values_from_given_block( + self, + key: str, + save_from: Optional[Mapping], + to_save: Mapping, + ) -> dict: + """Save a value from a specific block in the response. - def _validate_block(self, blockname, block): - """Validate a block of the response + This is different from maybe_get_save_values_from_ext - depends on the kind of response Args: - blockname (str): which part of the response is being checked - block (dict): The actual part being checked + key: Name of key being used to save, for debugging + save_from: An element of the response from which values are being saved + to_save: block containing information about things to save + + Returns: + dict: dictionary of save_name: value, where save_name is the key we + wanted to save this value as """ - try: - expected_block = self.expected[blockname] or {} - except KeyError: - expected_block = {} - if isinstance(expected_block, dict): - if expected_block.pop("$ext", None): - raise exceptions.InvalidExtBlockException( - blockname, - ) + saved = {} - if blockname == "headers": - # Special case for headers. 
These need to be checked in a case - # insensitive manner - block = {i.lower(): j for i, j in block.items()} - expected_block = {i.lower(): j for i, j in expected_block.items()} + if not save_from: + self._adderr("No %s in response (wanted to save %s)", key, to_save) + return {} - logger.debug("Validating response %s against %s", blockname, expected_block) + for save_as, joined_key in to_save.items(): + try: + saved[save_as] = recurse_access_key(save_from, joined_key) + except ( + exceptions.InvalidQueryResultTypeError, + exceptions.KeySearchNotFoundError, + ) as e: + self._adderr( + "Wanted to save '%s' from '%s', but it did not exist in the response", + joined_key, + key, + e=e, + ) - test_strictness = self.test_block_config["strict"] - strict_setting = blockname if blockname != "body" else "json" - block_strictness = test_strictness.setting_for(strict_setting).is_on() - self.recurse_check_key_match(expected_block, block, blockname, block_strictness) + if saved: + logger.debug("Saved %s for '%s' from response", saved, key) + + return saved diff --git a/tavern/response/__init__.py b/tavern/response/__init__.py deleted file mode 100644 index edc42b3d8..000000000 --- a/tavern/response/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .base import BaseResponse - -__all__ = ["BaseResponse"] diff --git a/tavern/schemas/__init__.py b/tavern/schemas/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/testutils/__init__.py b/tavern/testutils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/util/__init__.py b/tavern/util/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/util/delay.py b/tavern/util/delay.py deleted file mode 100644 index 5ec440f73..000000000 --- a/tavern/util/delay.py +++ /dev/null @@ -1,24 +0,0 @@ -import logging -import time - -from .dict_util import format_keys - -logger = logging.getLogger(__name__) - - -def delay(stage, when, variables): - """Look for delay_before/delay_after and sleep - - Args: - stage (dict): test stage - when (str): 'before' or 'after' - variables (dict): Variables to format with - """ - - try: - length = format_keys(stage["delay_{}".format(when)], variables) - except KeyError: - pass - else: - logger.debug("Delaying %s request for %.2f seconds", when, length) - time.sleep(length) diff --git a/tavern/util/jmespath_util.py b/tavern/util/jmespath_util.py deleted file mode 100644 index c72510763..000000000 --- a/tavern/util/jmespath_util.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging - -import jmespath - -from tavern.util import exceptions -from tavern.util.dict_util import check_keys_match_recursive - -logger = logging.getLogger(__name__) - - -def check_jmespath_match(parsed_response, query, expected=None): - """ - Check that the JMES path given in 'query' is present in the given response - - Args: - parsed_response (dict, list): Response list or dict - query (str): JMES query - expected (str, optional): Possible value to match against. 
If None, - 'query' will just check that _something_ is present - """ - actual = jmespath.search(query, parsed_response) - - msg = "JMES path '{}' not found in response".format(query) - - if actual is None: - raise exceptions.JMESError(msg) - - if expected is not None: - # Reuse dict util helper as it should behave the same - check_keys_match_recursive(expected, actual, [], True) - elif not actual and not (actual == expected): # pylint: disable=superfluous-parens - # This can return an empty list, but it might be what we expect. if not, - # raise an exception - raise exceptions.JMESError(msg) - - return actual diff --git a/tavern/util/strict_util.py b/tavern/util/strict_util.py deleted file mode 100644 index 5cf00ae7b..000000000 --- a/tavern/util/strict_util.py +++ /dev/null @@ -1,98 +0,0 @@ -from distutils.util import strtobool -import enum -import re - -import attr - -from tavern.util import exceptions - - -class _StrictSetting(enum.Enum): - ON = 1 - OFF = 2 - UNSET = 3 - - -valid_keys = ["json", "headers", "redirect_query_params"] - - -def setting_factory(str_setting): - """Converts from cmdline/setting file to an enum""" - if str_setting is None: - return _StrictSetting.UNSET - else: - parsed = strtobool(str_setting) - - if parsed: - return _StrictSetting.ON - else: - return _StrictSetting.OFF - - -@attr.s(frozen=True) -class _StrictOption: - section = attr.ib(type=str) - setting = attr.ib(type=_StrictSetting) - - def is_on(self): - if self.section == "json": - # Must be specifically disabled for response body - return self.setting != _StrictSetting.OFF - else: - # Off by default for everything else - return self.setting == _StrictSetting.ON - - -def validate_and_parse_option(key): - regex = r"(?P
{})(:(?Pon|off))?".format("|".join(valid_keys)) - - match = re.fullmatch(regex, key) - - if not match: - raise exceptions.InvalidConfigurationException( - "Invalid value for 'strict' given - expected one of {}, got '{}'".format( - ["{}[:on/off]".format(key) for key in valid_keys], key - ) - ) - - as_dict = match.groupdict() - return _StrictOption(as_dict["section"], setting_factory(as_dict["setting"])) - - -@attr.s(frozen=True) -class StrictLevel: - json = attr.ib(default=_StrictOption("json", setting_factory(None))) - headers = attr.ib(default=_StrictOption("headers", setting_factory(None))) - redirect_query_params = attr.ib( - default=_StrictOption("redirect_query_params", setting_factory(None)) - ) - - @classmethod - def from_options(cls, options): - if isinstance(options, str): - options = [options] - elif not isinstance(options, list): - raise exceptions.InvalidConfigurationException( - "'strict' setting should be a list of strings" - ) - - parsed = [validate_and_parse_option(key) for key in options] - - return cls(**{i.section: i for i in parsed}) - - def setting_for(self, section): - """Provides a string-based way of getting strict settings for a section""" - try: - return getattr(self, section) - except AttributeError as e: - raise exceptions.InvalidConfigurationException( - "No strictness setting for '{}'".format(section) - ) from e - - @classmethod - def all_on(cls): - return cls.from_options([i + ":on" for i in valid_keys]) - - @classmethod - def all_off(cls): - return cls.from_options([i + ":off" for i in valid_keys]) diff --git a/tests/conftest.py b/tests/conftest.py index 7027a0500..a770e583b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,8 +2,13 @@ import os import pytest +import stevedore import yaml +import tavern +import tavern._plugins.mqtt.tavernhook as mqtt_plugin +from tavern._plugins.rest.tavernhook import TavernRestPlugin as rest_plugin + @pytest.fixture(scope="function", autouse=True) def run_all(): @@ -11,3 +16,20 @@ def run_all(): with open(os.path.join(current_dir, "logging.yaml"), "r") as spec_file: settings = yaml.load(spec_file, Loader=yaml.SafeLoader) logging.config.dictConfig(settings) + + +@pytest.fixture(scope="session", autouse=True) +def set_plugins(): + def extension(name, point): + return stevedore.extension.Extension(name, point, point, point) + + tavern._core.plugins.load_plugins.plugins = [ + extension( + "requests", + rest_plugin, + ), + extension( + "paho-mqtt", + mqtt_plugin, + ), + ] diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index 84e2e3dd9..68b46a7f7 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -1,11 +1,13 @@ -FROM python:3.5-alpine +FROM python:3.10-alpine + +RUN pip3 install pyjwt~=2.4.0 flask~=2.0.3 + +ENV FLASK_DEBUG=1 +ENV PYTHONUNBUFFERED=0 -RUN pip install flask pyjwt COPY server.py / ENV FLASK_APP=/server.py -ENV PYTHONUNBUFFERED=0 -ENV FLASK_DEBUG=1 CMD ["flask", "run", "--host=0.0.0.0"] diff --git a/tests/integration/OK.json.gz b/tests/integration/OK.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..6c1a58372f11908799b4a16b757dde73a26aa3a2 GIT binary patch literal 33 pcmb2|=HOWKZ)+kGv%j}qR&jnFL-42XVUL&?c=`UcvNA9*007G>3mE_a literal 0 HcmV?d00001 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index ac912bbc0..a7461c1d4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -15,7 +15,7 @@ def sdkofsok(str_fixture): def bluerhug(request): # This doesn't really do anything at the 
moment. In future it might yield # the result or something, but it's a bit difficult to do at the moment. - response = yield "hello" + yield "hello" @pytest.fixture(scope="session", autouse=True) diff --git a/tests/integration/docker-compose.yaml b/tests/integration/docker-compose.yaml index 333af49bf..1a382008e 100644 --- a/tests/integration/docker-compose.yaml +++ b/tests/integration/docker-compose.yaml @@ -1,5 +1,5 @@ --- -version: '2' +version: "2" services: server: diff --git a/tests/integration/ext_functions.py b/tests/integration/ext_functions.py index 5d559492e..5c28e5fb7 100644 --- a/tests/integration/ext_functions.py +++ b/tests/integration/ext_functions.py @@ -1,2 +1,10 @@ def return_hello(): return {"hello": "there"} + + +def return_goodbye_string(): + return "goodbye" + + +def return_list_vals(): + return [{"a_value": "b_value"}, 2] diff --git a/tests/integration/server.py b/tests/integration/server.py index 1735a93d9..d409b89fb 100644 --- a/tests/integration/server.py +++ b/tests/integration/server.py @@ -1,16 +1,22 @@ import base64 +import gzip import itertools import json +import math import mimetypes import os import time -from urllib.parse import unquote_plus import uuid +from datetime import datetime, timedelta +from hashlib import sha512 +from urllib.parse import unquote_plus -from flask import Flask, Response, jsonify, redirect, request -import math +import jwt +from flask import Flask, Response, jsonify, make_response, redirect, request, session +from itsdangerous import URLSafeTimedSerializer app = Flask(__name__) +app.config.update(SECRET_KEY="secret") @app.route("/token", methods=["GET"]) @@ -77,23 +83,24 @@ def upload_fake_file(): if not request.files: return "", 401 + return _handle_files() + + +def _handle_files(): if not mimetypes.inited: mimetypes.init() - - for key, item in request.files.items(): + for item in request.files.values(): if item.filename: filetype = ".{}".format(item.filename.split(".")[-1]) if filetype in mimetypes.suffix_map: if not item.content_type: return "", 400 - # Try to download each of the files downloaded to /tmp and # then remove them for key in request.files: file_to_save = request.files[key] path = os.path.join("/tmp", file_to_save.filename) file_to_save.save(path) - return "", 200 @@ -109,23 +116,7 @@ def upload_fake_file_and_data(): if not request.content_type.startswith("multipart/form-data"): return "", 403 - if not mimetypes.inited: - mimetypes.init() - - for key, item in request.files.items(): - if item.filename: - filetype = ".{}".format(item.filename.split(".")[-1]) - if filetype in mimetypes.suffix_map: - if not item.content_type: - return "", 400 - - # Try to download each of the files downloaded to /tmp - for key in request.files: - file_to_save = request.files[key] - path = os.path.join("/tmp", file_to_save.filename) - file_to_save.save(path) - - return "", 200 + return _handle_files() @app.route("/nested/again", methods=["GET"]) @@ -174,7 +165,7 @@ def status_code_return(): @app.route("/echo", methods=["POST"]) def echo_values(): - body = request.get_json() + body = request.get_json(silent=True) response = body return jsonify(response), 200 @@ -210,6 +201,34 @@ def expect_raw_data(): return jsonify(response), code +@app.route("/expect_compressed_data", methods=["POST"]) +def expect_compressed_data(): + content_type_header = request.headers.get("content-type") + if content_type_header != "application/json": + return jsonify("invalid content type " + content_type_header), 400 + + content_encoding_header = 
request.headers.get("content-encoding") + if content_encoding_header != "gzip": + return jsonify("invalid content encoding " + content_encoding_header), 400 + + compressed_data = request.stream.read() + + decompressed = gzip.decompress(compressed_data) + + raw_data = decompressed.decode("utf8").strip() + + loaded = json.loads(raw_data) + + if loaded == "OK": + response = {"status": "ok"} + code = 200 + else: + response = {"status": "err: '{}'".format(raw_data)} + code = 400 + + return jsonify(response), code + + @app.route("/form_data", methods=["POST"]) def echo_form_values(): body = request.get_data() @@ -239,7 +258,7 @@ def poll(): def _maybe_get_cookie_name(): - return (request.get_json() or {}).get("cookie_name", "tavern-cookie") + return (request.get_json(silent=True) or {}).get("cookie_name", "tavern-cookie") @app.route("/get_cookie", methods=["POST"]) @@ -267,6 +286,19 @@ def redirect_to_other_endpoint(): return redirect("/redirect/destination", 302) +@app.route("/redirect/loop", methods=["GET"]) +def redirect_loop(): + try: + if redirect_loop.tries > 50: + return redirect("/redirect/destination", 302) + else: + redirect_loop.tries += 1 + except AttributeError: + redirect_loop.tries = 1 + + return redirect("/redirect/loop", 302) + + @app.route("/redirect/destination", methods=["GET"]) def get_redirected_to_here(): return jsonify({"status": "successful redirect"}), 200 @@ -315,3 +347,96 @@ def return_with_dot(): @app.route("/uuid/v4", methods=["GET"]) def get_uuid_v4(): return jsonify({"uuid": uuid.uuid4()}), 200 + + +@app.route("/707-regression", methods=["GET"]) +def get_707(): + return jsonify({"a": 1, "b": {"first": 10, "second": 20}, "c": 2}) + + +users = {"mark": {"password": "password", "regular": "foo", "protected": "bar"}} + +serializer = URLSafeTimedSerializer( + secret_key="secret", + salt="cookie", + signer_kwargs={"key_derivation": "hmac", "digest_method": sha512}, +) + + +@app.route("/withsession/login", methods=["POST"]) +def login(): + r = request.get_json() + username = r["username"] + password = r["password"] + + if password == users[username]["password"]: + session["user"] = username + response = make_response("", 200) + response.set_cookie( + "remember", + value=serializer.dumps(username), + expires=datetime.utcnow() + timedelta(days=30), + httponly=True, + ) + return response + + return "", 401 + + +@app.route("/withsession/regular", methods=["GET"]) +def regular(): + username = session.get("user") + + if not username: + remember = request.cookies.get("remember") + if remember: + username = serializer.loads(remember, max_age=3600) + + if username: + return jsonify(regular=users[username]["regular"]), 200 + + return "", 401 + + +@app.route("/withsession/protected", methods=["GET"]) +def protected(): + username = session.get("user") + if username: + return jsonify(protected=users[username]["protected"]), 200 + return "", 401 + + +@app.route("/606-regression-list", methods=["GET"]) +def get_606_list(): + return jsonify([]) + + +@app.route("/606-regression-dict", methods=["GET"]) +def get_606_dict(): + return jsonify({}) + + +@app.route("/magic-multi-method", methods=["GET", "POST", "DELETE"]) +def get_any_method(): + return jsonify({"method": request.method}) + + +@app.route("/get_jwt", methods=["POST"]) +def get_jwt(): + secret = "240c8c9c-39b9-426b-9503-3126f96c2eaf" + audience = "testserver" + + r = request.get_json() + + if r["user"] != "test-user" or r["password"] != "correct-password": + return jsonify({"error": "Incorrect username/password"}), 401 + + 
payload = { + "sub": "test-user", + "aud": audience, + "exp": datetime.utcnow() + timedelta(hours=1), + } + + token = jwt.encode(payload, secret, algorithm="HS256") + + return jsonify({"jwt": token}) diff --git a/tests/integration/test_allure.tavern.yaml b/tests/integration/test_allure.tavern.yaml new file mode 100644 index 000000000..42aac74c9 --- /dev/null +++ b/tests/integration/test_allure.tavern.yaml @@ -0,0 +1,15 @@ +--- +test_name: "Test test name can have formatting in it: {host}" + +includes: + - !include common.yaml + +stages: + - name: "Test stage name can have formatting in it: {host}" + request: + url: "{host}/echo" + method: POST + json: hi + response: + status_code: 200 + json: hi diff --git a/tests/integration/test_auth_key.tavern.yaml b/tests/integration/test_auth_key.tavern.yaml index 3764218e3..e2e84ebf8 100644 --- a/tests/integration/test_auth_key.tavern.yaml +++ b/tests/integration/test_auth_key.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test basic auth header stages: @@ -18,7 +17,6 @@ stages: auth_pass: fakepass --- - test_name: Test basic auth header with wrong username/pass stages: diff --git a/tests/integration/test_certs.tavern.yaml b/tests/integration/test_certs.tavern.yaml index 0c7b6c231..0533b431d 100644 --- a/tests/integration/test_certs.tavern.yaml +++ b/tests/integration/test_certs.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test cannot pass an invalid value to 'cert' _xfail: verify @@ -18,7 +17,6 @@ stages: value: "abc" --- - test_name: Test cannot pass too many values to 'cert' _xfail: verify diff --git a/example/remember/test_server.tavern.yaml b/tests/integration/test_cookie_remember.tavern.yaml similarity index 63% rename from example/remember/test_server.tavern.yaml rename to tests/integration/test_cookie_remember.tavern.yaml index 8493ef4d3..a82dbe68c 100644 --- a/example/remember/test_server.tavern.yaml +++ b/tests/integration/test_cookie_remember.tavern.yaml @@ -4,7 +4,7 @@ test_name: test after browser close stages: - name: login request: - url: http://localhost:5000/login + url: "{global_host}/withsession/login" method: POST json: username: mark @@ -17,10 +17,19 @@ stages: - name: get regular request: - url: http://localhost:5000/regular + url: "{global_host}/withsession/protected" method: GET - meta: - - clear_session_cookies # Simulate browser close before request. 
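For illustration (editor's sketch, not part of the patch): with this change each stage opts in or out of the simulated browser close explicitly, e.g.

      clear_session_cookies: True    # wipe session cookies before this request, as a browser close would
      clear_session_cookies: False   # keep the session cookie obtained at the login stage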
+ clear_session_cookies: False + response: + json: + protected: bar + status_code: 200 + + - name: get regular + request: + url: "{global_host}/withsession/regular" + method: GET + clear_session_cookies: True # This flows through to the next stage as well response: json: regular: foo @@ -28,7 +37,7 @@ stages: - name: get protected stale request: - url: http://localhost:5000/protected + url: "{global_host}/withsession/protected" method: GET response: status_code: 401 @@ -39,7 +48,7 @@ test_name: test without browser close stages: - name: login again request: - url: http://localhost:5000/login + url: "{global_host}/withsession/login" method: POST json: username: mark @@ -52,7 +61,7 @@ stages: - name: get protected fresh request: - url: http://localhost:5000/protected + url: "{global_host}/withsession/protected" method: GET response: json: @@ -65,7 +74,7 @@ test_name: test without login stages: - name: get regular request: - url: http://localhost:5000/regular + url: "{global_host}/withsession/regular" method: GET response: status_code: 401 diff --git a/tests/integration/test_data_key.tavern.yaml b/tests/integration/test_data_key.tavern.yaml index 25d602620..3beb418ed 100644 --- a/tests/integration/test_data_key.tavern.yaml +++ b/tests/integration/test_data_key.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test sending form encoded data works includes: @@ -18,7 +17,6 @@ stages: id: *sent_uuid --- - test_name: Test sending raw data includes: @@ -46,7 +44,6 @@ stages: status: denied --- - test_name: Test sending base64 data includes: @@ -84,7 +81,6 @@ stages: status: "err: 'ERR'" --- - test_name: Test sending a list in 'data' raises an error _xfail: verify @@ -103,7 +99,6 @@ stages: status: ok --- - test_name: Test sending a float in 'data' raises an error _xfail: verify @@ -120,7 +115,6 @@ stages: status: ok --- - test_name: Test sending JSON and data at the same time fails _xfail: verify diff --git a/tests/integration/test_env_var_format.tavern.yaml b/tests/integration/test_env_var_format.tavern.yaml index 6bbbf8e05..6da193421 100644 --- a/tests/integration/test_env_var_format.tavern.yaml +++ b/tests/integration/test_env_var_format.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test getting format vars from environment variables includes: diff --git a/tests/integration/test_error.tavern.yaml b/tests/integration/test_error.tavern.yaml index fea71fb8f..f89220691 100644 --- a/tests/integration/test_error.tavern.yaml +++ b/tests/integration/test_error.tavern.yaml @@ -1,20 +1,18 @@ --- - test_name: Test yielding fixture includes: -- !include common.yaml + - !include common.yaml stages: -- name: do something - request: - method: DELETE - url: "{host}/echo" - json: - { "id": 0 } - response: - status_code: 200 - json: - value: { "id": 0 } + - name: do something + request: + method: DELETE + url: "{host}/echo" + json: { "id": 0 } + response: + status_code: 200 + json: + value: { "id": 0 } _xfail: run diff --git a/tests/integration/test_external_functions.tavern.yaml b/tests/integration/test_external_functions.tavern.yaml index ac306650c..bd4da1690 100644 --- a/tests/integration/test_external_functions.tavern.yaml +++ b/tests/integration/test_external_functions.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Use one function includes: @@ -13,12 +12,11 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: '' --- - test_name: Use one function in a list includes: @@ -32,12 +30,11 @@ 
stages: response: status_code: 200 verify_response_with: - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: expression: '' --- - test_name: Use two functions includes: @@ -51,15 +48,14 @@ stages: response: status_code: 200 verify_response_with: - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: expression: '' - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: expression: '' --- - test_name: Test first function failing will cause test to fail includes: @@ -75,15 +71,14 @@ stages: response: status_code: 200 verify_response_with: - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: - expression: 'bkllelkkkkkkkkkkfff' - - function: tavern.testutils.helpers:validate_regex + expression: "bkllelkkkkkkkkkkfff" + - function: tavern.helpers:validate_regex extra_kwargs: expression: '' --- - test_name: Test second function failing will cause test to fail includes: @@ -99,15 +94,14 @@ stages: response: status_code: 200 verify_response_with: - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: expression: '' - - function: tavern.testutils.helpers:validate_regex + - function: tavern.helpers:validate_regex extra_kwargs: - expression: 'bkllelkkkkkkkkkkfff' + expression: "bkllelkkkkkkkkkkfff" --- - test_name: Test merging in input (depends on option being enabled) includes: @@ -129,7 +123,6 @@ stages: hello: "there" --- - test_name: Test generating query params from ext functions stages: @@ -146,7 +139,6 @@ stages: hello: "there" --- - test_name: Test can still pass json in a query param stages: diff --git a/tests/integration/test_files.tavern.yaml b/tests/integration/test_files.tavern.yaml index 95ef486e2..764549226 100644 --- a/tests/integration/test_files.tavern.yaml +++ b/tests/integration/test_files.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test files can be uploaded with tavern includes: @@ -17,7 +16,6 @@ stages: status_code: 200 --- - test_name: Test files can be uploaded with a formatted file name includes: @@ -35,7 +33,6 @@ stages: status_code: 200 --- - test_name: Test files can be uploaded alongside data includes: @@ -54,7 +51,6 @@ stages: status_code: 200 --- - test_name: Test extra headers don't break content-type includes: @@ -74,7 +70,6 @@ stages: status_code: 200 --- - test_name: Test sending a text file will send the correct content type includes: @@ -91,7 +86,6 @@ stages: status_code: 200 --- - test_name: Test long form file upload includes: @@ -110,7 +104,6 @@ stages: status_code: 200 --- - test_name: Test sending file body includes: @@ -128,7 +121,23 @@ stages: status: ok --- +test_name: Test sending file body with appropriate encoding + +includes: + - !include common.yaml +stages: + - name: Upload gzipped json file body + request: + url: "{host}/expect_compressed_data" + method: POST + file_body: OK.json.gz + response: + status_code: 200 + json: + status: ok + +--- test_name: Test sending file body from variable ref includes: @@ -146,7 +155,6 @@ stages: status: ok --- - test_name: Test sending bad file body includes: @@ -162,7 +170,6 @@ stages: status_code: 400 --- - test_name: Test mutually exclusive with files _xfail: verify @@ -182,7 +189,6 @@ stages: status_code: 200 --- - test_name: Test mutually exclusive with data _xfail: verify @@ -201,7 +207,6 @@ stages: status_code: 200 --- - 
test_name: Test mutually exclusive with json _xfail: verify diff --git a/tests/integration/test_fixtures.tavern.yaml b/tests/integration/test_fixtures.tavern.yaml index adddde1e1..db4422f4e 100644 --- a/tests/integration/test_fixtures.tavern.yaml +++ b/tests/integration/test_fixtures.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test empty usefixtures errors includes: @@ -27,7 +26,6 @@ stages: value: "{yield_str_fixture}" --- - test_name: Test usefixtures being a mapping errors includes: @@ -51,7 +49,6 @@ stages: value: "{yield_str_fixture}" --- - test_name: Test usefixtures includes: @@ -75,7 +72,6 @@ stages: value: "{yield_str_fixture}" --- - test_name: Test yielding fixture includes: @@ -99,7 +95,6 @@ stages: json: value: "{yield_str_fixture}" --- - test_name: Test yielding fixture includes: @@ -108,8 +103,7 @@ includes: _xfail: verify marks: - usefixtures: - thing + usefixtures: thing stages: - name: Echo back a unicode value and make sure it matches @@ -123,7 +117,6 @@ stages: json: value: "{yield_str_fixture}" --- - test_name: Test autouse fixture includes: diff --git a/tests/integration/test_follow_redirects.tavern.yaml b/tests/integration/test_follow_redirects.tavern.yaml index 747f1c4b8..95261455e 100644 --- a/tests/integration/test_follow_redirects.tavern.yaml +++ b/tests/integration/test_follow_redirects.tavern.yaml @@ -1,5 +1,4 @@ --- - includes: - !include common.yaml @@ -11,9 +10,22 @@ stages: url: "{host}/redirect/source" response: status_code: 302 - --- +includes: + - !include common.yaml +test_name: Test redirecting loops + +stages: + - name: Expect a 302 without setting the flag + max_retries: 2 + request: + follow_redirects: true + url: "{host}/redirect/loop" + response: + status_code: 200 + +--- includes: - !include common.yaml diff --git a/tests/integration/test_header_comparisons.tavern.yaml b/tests/integration/test_header_comparisons.tavern.yaml index 513e5e3a4..e1d61a501 100644 --- a/tests/integration/test_header_comparisons.tavern.yaml +++ b/tests/integration/test_header_comparisons.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test matching both headers includes: @@ -37,7 +36,6 @@ stages: atestheader: orange --- - test_name: Test mismatch in header value _xfail: run diff --git a/tests/integration/test_helpers.tavern.yaml b/tests/integration/test_helpers.tavern.yaml new file mode 100644 index 000000000..ff444d139 --- /dev/null +++ b/tests/integration/test_helpers.tavern.yaml @@ -0,0 +1,57 @@ +--- +test_name: Make sure JWT verification works + +includes: + - !include common.yaml + +stages: + - name: login + request: + url: "{host}/get_jwt" + json: + user: test-user + password: correct-password + method: POST + response: + status_code: 200 + verify_response_with: + function: tavern.helpers:validate_jwt + extra_kwargs: + jwt_key: "jwt" + key: 240c8c9c-39b9-426b-9503-3126f96c2eaf + algorithms: [HS256] + options: + verify_signature: true + verify_aud: true + verify_exp: true + audience: testserver + +--- +test_name: Make sure JWT rejects the wrong algorithm + +includes: + - !include common.yaml + +stages: + - name: login + request: + url: "{host}/get_jwt" + json: + user: test-user + password: correct-password + method: POST + response: + status_code: 200 + verify_response_with: + function: tavern.helpers:validate_jwt + extra_kwargs: + jwt_key: "jwt" + key: 240c8c9c-39b9-426b-9503-3126f96c2eaf + algorithms: [RS256] + options: + verify_signature: true + verify_aud: true + verify_exp: true + audience: testserver + +_xfail: run diff --git 
a/tests/integration/test_jmes.tavern.yaml b/tests/integration/test_jmes.tavern.yaml index abc927291..05e07b59e 100644 --- a/tests/integration/test_jmes.tavern.yaml +++ b/tests/integration/test_jmes.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: test dict comparisons includes: @@ -13,7 +12,7 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_content + function: tavern.helpers:validate_content extra_kwargs: comparisons: - jmespath: "an_integer" @@ -33,7 +32,6 @@ stages: expected: value --- - test_name: test list comparisons includes: @@ -47,7 +45,7 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_content + function: tavern.helpers:validate_content extra_kwargs: comparisons: - jmespath: "[1]" @@ -72,7 +70,6 @@ stages: expected: -3.0 --- - test_name: Test we can save a single value with jmespath includes: @@ -102,7 +99,6 @@ stages: value: c82bfa63-fd2a-419a-8c06-21cb283fd9f7 --- - test_name: Test saving an item from a list includes: @@ -143,7 +139,6 @@ stages: value: "b" --- - test_name: Test saving an item from a dict inside a list includes: @@ -184,7 +179,6 @@ stages: value: "c" --- - test_name: Test we can save a single value with jmespath if it has a dot in includes: @@ -203,7 +197,6 @@ stages: saved_with_dot_b: '"data.b"' --- - test_name: Test 'negative' jmespath tests includes: @@ -220,7 +213,7 @@ stages: pages: 0 data: [] verify_response_with: - - function: tavern.testutils.helpers:validate_content + - function: tavern.helpers:validate_content extra_kwargs: comparisons: - jmespath: pages diff --git a/tests/integration/test_minimal.tavern.yaml b/tests/integration/test_minimal.tavern.yaml new file mode 100644 index 000000000..f66d3314b --- /dev/null +++ b/tests/integration/test_minimal.tavern.yaml @@ -0,0 +1,21 @@ +--- +# Every test file has one or more tests... +test_name: Get some fake data from the JSON placeholder API + +# ...and each test has one or more stages (e.g. an HTTP request) +stages: + - name: Make sure we have the right ID + + # Define the request to be made... 
+ request: + url: https://jsonplaceholder.typicode.com/posts/1 + method: GET + + # ...and the expected response code and body + response: + status_code: 200 + json: + id: 1 + userId: 1 + title: "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + body: "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" diff --git a/tests/integration/test_parametrize.tavern.yaml b/tests/integration/test_parametrize.tavern.yaml index 26c5381e2..f0f4c395f 100644 --- a/tests/integration/test_parametrize.tavern.yaml +++ b/tests/integration/test_parametrize.tavern.yaml @@ -1,5 +1,45 @@ --- +test_name: Test parametrizing using 'vals' directly and not in the list +marks: + - parametrize: + key: mycoolvalue + vals: + $ext: + function: ext_functions:return_list_vals + +stages: + - name: Echo back parametrized value + request: + url: "{global_host}/echo" + method: POST + json: !force_format_include "{mycoolvalue}" + response: + status_code: 200 + json: !force_format_include "{tavern.request_vars.json}" + +--- +test_name: Test parametrizing using 'vals' directly and not in the list, list key + +marks: + - parametrize: + key: + - mycoolvalue + vals: + $ext: + function: ext_functions:return_list_vals + +stages: + - name: Echo back parametrized value + request: + url: "{global_host}/echo" + method: POST + json: !force_format_include "{mycoolvalue}" + response: + status_code: 200 + json: !force_format_include "{tavern.request_vars.json}" + +--- test_name: Test echo parametrized includes: @@ -26,7 +66,6 @@ stages: value: "{to_send}" --- - test_name: Test multiple parametrized values includes: @@ -59,7 +98,6 @@ stages: value: "{fruit}-{edible}" --- - test_name: Test multiple parametrized values, mismatched amounts includes: @@ -90,7 +128,6 @@ stages: value: "{fruit}-{edible}" --- - test_name: Test skip parametrized includes: @@ -118,7 +155,6 @@ stages: value: "klskdfiogj4iji34o" --- - test_name: Test skipif parametrized includes: @@ -173,7 +209,6 @@ stages: # value: "{to_send}" --- - test_name: Test invalid parametrize vals raises an error includes: @@ -200,7 +235,6 @@ stages: value: "{to_send}" --- - test_name: Test parametrize without include marks: @@ -222,7 +256,6 @@ stages: value: "{to_send}" --- - test_name: Test combined parametrizing includes: @@ -251,7 +284,6 @@ stages: value: "{fruit}-{edible}" --- - test_name: Test combined parametrizing with normal parametrizing includes: @@ -285,7 +317,6 @@ stages: value: "{fruit}-{edible}_{to_send}" --- - test_name: Test double combined parametrizing includes: @@ -321,15 +352,13 @@ stages: value: "{fruit}-{edible}_{first_half}-{second_half}" --- - test_name: Test include marks from a file includes: - !include common.yaml marks: - - parametrize: - !include parametrize_includes.yaml + - parametrize: !include parametrize_includes.yaml - parametrize: key: - first_half @@ -350,22 +379,86 @@ stages: json: value: "{fruit}-{edible}_{first_half}-{second_half}" ---- - -# NOTE: This might pass in future, if we allow parametrizing/storing JSON blocks rather than just single variables. 
-test_name: Test failing when vals is a list and key isn't +# Now fails at collection time +#--- +# +#test_name: Test failing when key is a list and vals isn't +# +#_xfail: verify +# +#includes: +# - !include common.yaml +# +#marks: +# - parametrize: +# key: +# - edible +# - fruit +# vals: +# - fresh +# - orange +# +#stages: +# - name: Echo back a unicode value and make sure it matches +# request: +# url: "{host}/echo" +# method: POST +# json: +# value: "{fruit}-{edible}" +# response: +# status_code: 200 +# json: +# value: "{fruit}-{edible}-nope" -_xfail: verify +# Now fails at collection time +#--- +# +#test_name: Test failing when keys and values list lengths do not match +# +#_xfail: verify +# +#includes: +# - !include common.yaml +# +#marks: +# - parametrize: +# key: +# - edible +# - fruit +# vals: +# - [fresh] +# +#stages: +# - name: Echo back a unicode value and make sure it matches +# request: +# url: "{host}/echo" +# method: POST +# json: +# value: "{fruit}-{edible}" +# response: +# status_code: 200 +# json: +# value: "{fruit}-{edible}-nope" +--- +test_name: Test parametrize from thing in common.yaml includes: - !include common.yaml marks: + - parametrize: + key: generic_str + vals: + # normal string + - "{v_str}" + # from env var + - "{second_part}" - parametrize: key: edible vals: - - [spear, mint] - - [jack, fruit] + - rotten + - fresh + - unripe stages: - name: Echo back a unicode value and make sure it matches @@ -373,157 +466,314 @@ stages: url: "{host}/echo" method: POST json: - value: "{fruit}-{edible}" + value: "{generic_str}-{edible}" response: status_code: 200 json: - value: "{fruit}-{edible}-nope" + value: "{generic_str}-{edible}" --- +test_name: Test parametrize from thing in global config -test_name: Test failing when key is a list and vals isn't +marks: + - parametrize: + key: generic_str + vals: + # normal string + - "{global_test_string_1}" + # from env var + - "{global_test_string_2}" + - parametrize: + key: edible + vals: + - rotten + - fresh + - unripe -_xfail: verify +stages: + - name: Echo back a unicode value and make sure it matches + request: + url: "{global_host}/echo" + method: POST + json: + value: "{generic_str}-{edible}" + response: + status_code: 200 + json: + value: "{generic_str}-{edible}" -includes: - - !include common.yaml +--- +test_name: Test that double formatting something in marks: - parametrize: key: - - edible - - fruit + - line + - text vals: - - fresh - - orange + # NOTE: "\" requires doubling, !raw will take care of "{" and "}" + - [1, "XYZ[\\]^_`abcdefghijk"] + - [2, !raw "lmnopqrstuvwxyz{|}~*"] stages: - - name: Echo back a unicode value and make sure it matches + - name: Echo back parametrized text request: - url: "{host}/echo" + url: "{global_host}/echo" method: POST json: - value: "{fruit}-{edible}" + value: "{line}-{text}" + response: + status_code: 200 + json: + value: "{line}-{text}" + +--- +test_name: Test parametrizing http method + +marks: + - parametrize: + key: method + vals: + - POST + - GET + - DELETE + +stages: + - name: Make a request to the magic endpoint and expect method back + request: + url: "{global_host}/magic-multi-method" + method: "{method}" response: status_code: 200 json: - value: "{fruit}-{edible}-nope" + method: "{method}" --- +test_name: Test parametrizing http method badly -test_name: Test failing when keys and values list lengths do not match +marks: + - parametrize: + key: method + vals: + - Brean _xfail: verify +stages: + - name: Make a request to the magic endpoint and expect method back + request: + 
url: "{global_host}/magic-multi-method" + method: "{method}" + response: + status_code: 200 + json: + method: "{method}" + +--- +test_name: Test sending a list of keys + includes: - !include common.yaml marks: - parametrize: - key: - - edible - - fruit + key: edible vals: - - [fresh] + - [not, edible, at, all] stages: - - name: Echo back a unicode value and make sure it matches + - name: make sure list is sent and returned request: url: "{host}/echo" method: POST json: - value: "{fruit}-{edible}" + value: !force_format_include "{edible}" response: status_code: 200 json: - value: "{fruit}-{edible}-nope" + value: + - not + - edible + - at + - all --- +test_name: Test sending a list of list of keys -test_name: Test parametrize from thing in common.yaml includes: - !include common.yaml marks: - parametrize: - key: generic_str - vals: - # normal string - - "{v_str}" - # from env var - - "{second_part}" - - parametrize: - key: edible + key: + - edible + - fruit vals: - - rotten - - fresh - - unripe + - [rotten, apple] + - [poisonous, pear] stages: - - name: Echo back a unicode value and make sure it matches + - name: make sure list is sent and returned request: url: "{host}/echo" method: POST json: - value: "{generic_str}-{edible}" + edibility: "{edible}" + fruit: "{fruit}" response: status_code: 200 json: - value: "{generic_str}-{edible}" + edibility: "{edible}" + fruit: "{fruit}" --- +test_name: Test sending a list of list of keys where one is not a string -test_name: Test parametrize from thing in global config +includes: + - !include common.yaml marks: - parametrize: - key: generic_str + key: + - fruit + - colours vals: - # normal string - - "{global_test_string_1}" - # from env var - - "{global_test_string_2}" + - [apple, [red, green, pink]] + - [pear, [yellow, green]] + +stages: + - name: make sure list and sublist is sent and returned + request: + url: "{host}/echo" + method: POST + json: + fruit: "{fruit}" + colours: !force_format_include "{colours}" + response: + status_code: 200 + json: + fruit: "{fruit}" + colours: !force_format_include "{tavern.request_vars.json.colours}" + +--- +test_name: Test parametrizing with an ext function + +marks: - parametrize: - key: edible + key: value_to_get vals: - - rotten - - fresh - - unripe + - goodbye + - $ext: + function: ext_functions:return_goodbye_string stages: - - name: Echo back a unicode value and make sure it matches + - name: Echo back parametrized value request: url: "{global_host}/echo" method: POST json: - value: "{generic_str}-{edible}" + value: "{value_to_get}" response: status_code: 200 json: - value: "{generic_str}-{edible}" + value: "goodbye" --- - -test_name: Test that double formatting something in +test_name: Test parametrizing with an ext function that returns a dict marks: - parametrize: - key: - - line - - text + key: value_to_get vals: - # NOTE: "\" requires doubling, !raw will take care of "{" and "}" - - [1, "XYZ[\\]^_`abcdefghijk"] - - [2, !raw "lmnopqrstuvwxyz{|}~*"] + - hello: there + - $ext: + function: ext_functions:return_hello stages: - - name: Echo back parametrized text + - name: Echo back parametrized value request: url: "{global_host}/echo" method: POST + json: !force_format_include "{value_to_get}" + response: + status_code: 200 json: - value: "{line}-{text}" + hello: "there" + +--- +test_name: Test parametrizing with an ext function that returns a dict with supplemental data + +marks: + - parametrize: + key: value_to_get + vals: + - and: this + hello: there + - and: this + $ext: + function: 
ext_functions:return_hello + +stages: + - name: Echo back parametrized value + request: + url: "{global_host}/echo" + method: POST + json: !force_format_include "{value_to_get}" response: status_code: 200 json: - value: "{line}-{text}" + hello: "there" + and: this + +#--- +# +# NOTE: This fails immediately because it's impossible to resolve at the test level +# +#test_name: Test parametrizing with an ext function that returns a dict with supplemental data, but wrong function type +# +#_xfail: verify +# +#marks: +#- parametrize: +# key: value_to_get +# vals: +# - and: this +# $ext: +# function: ext_functions:return_goodbye_string +# +#stages: +#- name: Echo back parametrized value +# request: +# url: "{global_host}/echo" +# method: POST +# json: !force_format_include "{value_to_get}" +# response: +# status_code: 200 +# json: {} +# +--- +test_name: Test parametrizing random different data types in the same test + +marks: + - parametrize: + key: value_to_send + vals: + - a + - [b, c] + - more: stuff + - yet: [more, stuff] + - $ext: + function: ext_functions:return_goodbye_string + - and: this + $ext: + function: ext_functions:return_hello + +stages: + - name: Echo back parametrized value + request: + url: "{global_host}/echo" + method: POST + json: !force_format_include "{value_to_send}" + response: + status_code: 200 + json: !force_format_include "{tavern.request_vars.json}" diff --git a/tests/integration/test_regex.tavern.yaml b/tests/integration/test_regex.tavern.yaml index 1ac980344..711c586f7 100644 --- a/tests/integration/test_regex.tavern.yaml +++ b/tests/integration/test_regex.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Make sure server response matches regex includes: @@ -13,12 +12,11 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: '' --- - test_name: Use saved value includes: @@ -32,7 +30,7 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: '' @@ -44,7 +42,7 @@ stages: status_code: 200 save: $ext: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: '.*)\?token=(?P.*)\">' @@ -64,40 +62,39 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_regex + function: tavern.helpers:validate_regex extra_kwargs: expression: '(?<=Hello)[wW]orld\d+$' header: X-Integration-Value --- - test_name: Match something in part of the request stages: -- name: simple match - request: - url: "{global_host}/echo" - method: POST - json: - fake: code=abc123&state=f - fake2: code=abc123&state=f - fake3: code=abc124&state=f - response: - status_code: 200 - save: - $ext: - function: tavern.testutils.helpers:validate_regex - extra_kwargs: - expression: "code=(?P.*)&state" - in_jmespath: "fake3" + - name: simple match + request: + url: "{global_host}/echo" + method: POST + json: + fake: code=abc123&state=f + fake2: code=abc123&state=f + fake3: code=abc124&state=f + response: + status_code: 200 + save: + $ext: + function: tavern.helpers:validate_regex + extra_kwargs: + expression: "code=(?P.*)&state" + in_jmespath: "fake3" -- name: Reuse thing specified in first request - request: - url: "{global_host}/echo" - method: POST - json: - fake: "{regex.code_token}" - response: - status_code: 200 - json: - fake: abc124 + - name: Reuse thing 
specified in first request + request: + url: "{global_host}/echo" + method: POST + json: + fake: "{regex.code_token}" + response: + status_code: 200 + json: + fake: abc124 diff --git a/tests/integration/test_response_types.tavern.yaml b/tests/integration/test_response_types.tavern.yaml index 36a420060..465c50005 100644 --- a/tests/integration/test_response_types.tavern.yaml +++ b/tests/integration/test_response_types.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Make sure it can handle list responses includes: @@ -32,7 +31,6 @@ stages: json: [a, b, c, 1, 2, 3, -1.0, -2.0, -3.0] --- - test_name: Test unicode responses includes: @@ -51,7 +49,6 @@ stages: value: 手机号格式不正确 --- - test_name: Test string as top-level JSON type includes: @@ -68,7 +65,6 @@ stages: json: "json_string" --- - test_name: Test boolean as top-level JSON type includes: @@ -85,7 +81,6 @@ stages: json: False --- - test_name: Test number as top-level JSON type includes: @@ -102,14 +97,13 @@ stages: json: 1337 --- - test_name: Test null as top-level JSON type includes: - !include common.yaml stages: - - name: Echo back a number value and make sure it matches + - name: Echo back a null value and make sure it matches request: url: "{host}/echo" method: POST diff --git a/tests/integration/test_retry.tavern.yaml b/tests/integration/test_retry.tavern.yaml index b3604033a..0904aeb42 100644 --- a/tests/integration/test_retry.tavern.yaml +++ b/tests/integration/test_retry.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Make sure tavern repeats request includes: @@ -17,7 +16,6 @@ stages: status: ready --- - test_name: Setting max_retries to a float should fail - doesn't make sense includes: @@ -37,7 +35,6 @@ stages: status: ready --- - test_name: Format max retry variable correctly includes: @@ -45,7 +42,7 @@ includes: stages: - name: polling - max_retries: !int '{retry_max}' + max_retries: !int "{retry_max}" request: url: "{host}/poll" method: GET @@ -55,7 +52,6 @@ stages: status: ready --- - test_name: Format max retry variable fails if not using type token includes: @@ -65,7 +61,7 @@ _xfail: verify stages: - name: polling - max_retries: '{retry_max}' + max_retries: "{retry_max}" request: url: "{host}/poll" method: GET @@ -75,7 +71,6 @@ stages: status: ready --- - test_name: Format max retry variable fails if invalid value includes: @@ -85,7 +80,7 @@ _xfail: run stages: - name: polling - max_retries: !int '{negative_int}' + max_retries: !int "{negative_int}" request: url: "{host}/poll" method: GET @@ -95,7 +90,6 @@ stages: status: ready --- - test_name: Format max retry variable fails if using wrong type token includes: @@ -105,7 +99,7 @@ _xfail: verify stages: - name: polling - max_retries: !float '{retry_max}' + max_retries: !float "{retry_max}" request: url: "{host}/poll" method: GET @@ -115,7 +109,6 @@ stages: status: ready --- - test_name: Setting max_retries to less than 0 should fail includes: @@ -135,7 +128,6 @@ stages: status: ready --- - test_name: Setting max_retries to something other than an int should fail includes: diff --git a/tests/integration/test_save_dict_value.tavern.yaml b/tests/integration/test_save_dict_value.tavern.yaml index cd43f427a..e9640901f 100644 --- a/tests/integration/test_save_dict_value.tavern.yaml +++ b/tests/integration/test_save_dict_value.tavern.yaml @@ -9,7 +9,6 @@ # a_bool: true --- - test_name: Test saving a dict stages: diff --git a/tests/integration/test_selective_tests.tavern.yaml b/tests/integration/test_selective_tests.tavern.yaml index bd5e44a8f..a603be425 100644 --- 
a/tests/integration/test_selective_tests.tavern.yaml +++ b/tests/integration/test_selective_tests.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test 'only' keyword for test isolation includes: @@ -28,7 +27,6 @@ stages: status_code: 999 --- - test_name: Test 'only' keyword for test isolation includes: @@ -47,4 +45,4 @@ stages: url: "{host}/fake_list" method: GET response: - status_code: 200 \ No newline at end of file + status_code: 200 diff --git a/tests/integration/test_skipped_tests.tavern.yaml b/tests/integration/test_skipped_tests.tavern.yaml index 0f14de69e..b06389e60 100644 --- a/tests/integration/test_skipped_tests.tavern.yaml +++ b/tests/integration/test_skipped_tests.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test 'skip' keyword for selectively ignoring tests includes: @@ -21,7 +20,6 @@ stages: status_code: 200 --- - test_name: Test unconditional skip with pytest marker includes: @@ -39,7 +37,6 @@ stages: status_code: 999 --- - test_name: Test skipif with pytest marker includes: @@ -57,7 +54,6 @@ stages: status_code: 999 --- - test_name: Test skipif with pytest marker with a formatted integer includes: @@ -75,7 +71,6 @@ stages: status_code: 999 --- - test_name: Test skipif with pytest marker with a formatted string includes: @@ -93,7 +88,6 @@ stages: status_code: 999 --- - test_name: Test skipif failure goes on to test failure includes: @@ -114,7 +108,6 @@ stages: status_code: 999 --- - test_name: Test skipif with env var includes: @@ -133,7 +126,6 @@ stages: status_code: 200 --- - test_name: Test skipif with env var, negative includes: diff --git a/tests/integration/test_status_codes.tavern.yaml b/tests/integration/test_status_codes.tavern.yaml index 9fc741eef..ce9bca9e7 100644 --- a/tests/integration/test_status_codes.tavern.yaml +++ b/tests/integration/test_status_codes.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test matching one of multiple status codes includes: @@ -27,7 +26,6 @@ stages: - 200 --- - test_name: Test missing from status code list fails _xfail: run @@ -48,7 +46,6 @@ stages: - 200 --- - test_name: Test using invalid status code format fails at verification _xfail: verify @@ -69,7 +66,6 @@ stages: second: 200 --- - test_name: Test using invalid status code value fails at verification _xfail: verify diff --git a/tests/integration/test_stream.tavern.yaml b/tests/integration/test_stream.tavern.yaml index 0c138f9df..466785c17 100644 --- a/tests/integration/test_stream.tavern.yaml +++ b/tests/integration/test_stream.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test streaming (downloading) file includes: diff --git a/tests/integration/test_strict_key_checks.tavern.yaml b/tests/integration/test_strict_key_checks.tavern.yaml index 8b0f07cff..b0b54a485 100644 --- a/tests/integration/test_strict_key_checks.tavern.yaml +++ b/tests/integration/test_strict_key_checks.tavern.yaml @@ -9,7 +9,6 @@ # a_bool: true --- - test_name: Test setting 'strict' to a string fails _xfail: verify @@ -24,7 +23,6 @@ stages: status_code: 200 --- - test_name: Test setting 'strict' to a dict fails _xfail: verify @@ -40,7 +38,6 @@ stages: status_code: 200 --- - test_name: Test setting 'strict' to a list with invalid values fails _xfail: verify @@ -56,7 +53,6 @@ stages: status_code: 200 --- - test_name: Test key matching matches headers case insensitive includes: @@ -72,7 +68,6 @@ stages: x-InTeGrAtIoN-vALUe: _HelloWorld1 --- - test_name: Test strict key matching against body fails _xfail: run @@ -101,7 +96,6 @@ stages: # a_bool: true --- - test_name: Test strict key matching against body fails 
with dict missing when specified in test _xfail: run @@ -129,7 +123,6 @@ stages: a_bool: true --- - test_name: Test strict key matching against body fails with dict missing when specified in stage _xfail: run @@ -156,7 +149,6 @@ stages: a_bool: true --- - test_name: Test strict key matching against headers with mismatch body passes # same as above, but changing 'strict' to only check headers should work @@ -185,7 +177,6 @@ stages: # a_bool: true --- - test_name: Test strict key matching against exact body is fine when strict is specified in the test strict: @@ -211,7 +202,6 @@ stages: a_bool: true --- - test_name: Test strict key matching against exact body is fine when strict is specified in the stage strict: @@ -237,7 +227,6 @@ stages: a_bool: true --- - test_name: Test strict key matching works for specific test stages includes: @@ -295,7 +284,6 @@ stages: a_bool: true --- - test_name: Test non-strict key matching one list item strict: @@ -319,7 +307,7 @@ stages: url: "{host}/fake_list" response: status_code: 200 -# Use new syntax + # Use new syntax strict: - json:off json: @@ -335,7 +323,6 @@ stages: - 2 --- - test_name: Test strict key matching one list item fails _xfail: run @@ -357,7 +344,6 @@ stages: - c --- - test_name: Test strict key matching works for specific test stages with false includes: @@ -378,7 +364,6 @@ stages: a_bool: true --- - test_name: Test strict key matching works for specific test stages with true _xfail: run @@ -401,7 +386,6 @@ stages: a_bool: true --- - test_name: Test non-strict key matching one list item strict: @@ -421,7 +405,89 @@ stages: - c --- +test_name: Test matching any order on json + +strict: + - json:list_any_order + +includes: + - !include common.yaml + +stages: + - name: match some things in list, in any order + request: + url: "{host}/fake_list" + response: + status_code: 200 + json: + - 2 + - c + - a + - -3.0 + - 1 + +--- +test_name: Test matching any order on json + +includes: + - !include common.yaml + +stages: + - name: match some things in list, in any order + request: + url: "{host}/fake_list" + response: + strict: + - json:list_any_order + status_code: 200 + json: + - 2 + - c + - a + - -3.0 + - 1 + +--- +test_name: Test matching any order on json nested + +strict: + - json:list_any_order + +includes: + - !include common.yaml + +stages: + - name: match some things in list, in any order + request: + url: "{host}/nested_list" + response: + status_code: 200 + json: + top: + - b + - key: val + - a + +--- +test_name: Test matching any order on json nested + +includes: + - !include common.yaml + +stages: + - name: match some things in list, in any order + request: + url: "{host}/nested_list" + response: + strict: + - json:list_any_order + status_code: 200 + json: + top: + - b + - key: val +--- test_name: Test non-strict key matching in one stage does not leak over to the next _xfail: run @@ -448,7 +514,6 @@ stages: - c --- - test_name: Test strict key matching one list item fails _xfail: run @@ -468,3 +533,116 @@ stages: strict: true json: - c + +--- +test_name: strict test + +_xfail: run + +stages: + - name: Simple get + request: + url: "{global_host}/707-regression" + method: GET + response: + status_code: 200 + json: + a: 1 + b: + first: 10 + # second: 20 + c: 2 + +--- +test_name: test empty list matches with strict json on + +strict: + - json:on + +stages: + - name: Simple get + request: + url: "{global_host}/606-regression-list" + method: GET + response: + status_code: 200 + json: [] + +--- +test_name: test full list does not match with 
strict json on + +_xfail: run + +strict: + - json:on + +stages: + - name: Simple get + request: + url: "{global_host}/fake_list" + method: GET + response: + status_code: 200 + json: [] + +--- +test_name: test full list matches with strict json off + +strict: + - json:off + +stages: + - name: Simple get + request: + url: "{global_host}/fake_list" + method: GET + response: + status_code: 200 + json: [] + +--- +test_name: test empty dict matches with strict json on + +strict: + - json:on + +stages: + - name: Simple get + request: + url: "{global_host}/606-regression-dict" + method: GET + response: + status_code: 200 + json: {} + +--- +test_name: test full dict does not match with strict json on + +_xfail: run + +strict: + - json:on + +stages: + - name: Simple get + request: + url: "{global_host}/fake_dictionary" + method: GET + response: + status_code: 200 + json: {} + +--- +test_name: test full dict matches with strict json off + +strict: + - json:off + +stages: + - name: Simple get + request: + url: "{global_host}/fake_dictionary" + method: GET + response: + status_code: 200 + json: {} diff --git a/tests/integration/test_timeout.tavern.yaml b/tests/integration/test_timeout.tavern.yaml index d91f44171..87114ab34 100644 --- a/tests/integration/test_timeout.tavern.yaml +++ b/tests/integration/test_timeout.tavern.yaml @@ -1,38 +1,35 @@ --- - test_name: Test timeout to server includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test single timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: 0.4 - response: - status_code: 200 + - name: Test single timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: 0.4 + response: + status_code: 200 --- - test_name: Test timeout to server tuple includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test tuple timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: - - 0.1 - - 0.4 - response: - status_code: 200 + - name: Test tuple timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: + - 0.1 + - 0.4 + response: + status_code: 200 --- - test_name: Test timeout to server actually times out _xfail: run @@ -50,79 +47,75 @@ stages: status_code: 200 --- - test_name: Test timeout to server tuple actually times out _xfail: run includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test tuple timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: - - 0.1 - - 0.1 - response: - status_code: 200 + - name: Test tuple timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: + - 0.1 + - 0.1 + response: + status_code: 200 --- - test_name: Test timeout tuple too short _xfail: verify includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test tuple timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: - - 0.1 - response: - status_code: 200 + - name: Test tuple timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: + - 0.1 + response: + status_code: 200 --- - test_name: Test timeout tuple too long _xfail: verify includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test tuple timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: - - 0.1 - - 0.4 - - 1 - response: - status_code: 200 + - name: Test tuple timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: + - 0.1 + - 0.4 + - 
1 + response: + status_code: 200 --- - test_name: Test timeout wrong type _xfail: verify includes: -- !include common.yaml + - !include common.yaml stages: -- name: Test incorrect timeout parameter - request: - url: "{host}/get_thing_slow" - method: GET - timeout: hello - response: - status_code: 200 + - name: Test incorrect timeout parameter + request: + url: "{host}/get_thing_slow" + method: GET + timeout: hello + response: + status_code: 200 diff --git a/tests/integration/test_typetokens.tavern.yaml b/tests/integration/test_typetokens.tavern.yaml index 78ca4dc1f..fa620bca5 100644 --- a/tests/integration/test_typetokens.tavern.yaml +++ b/tests/integration/test_typetokens.tavern.yaml @@ -8,7 +8,6 @@ # a_string: abc --- - test_name: Test 'anything' token will match any response includes: @@ -23,7 +22,6 @@ stages: status_code: 200 json: !anything --- - test_name: Test 'anything' token will match any response, from included stage includes: @@ -34,7 +32,6 @@ stages: id: typetoken-anything-match --- - test_name: Test bool type match strict: @@ -63,7 +60,6 @@ stages: a_bool: !anybool --- - test_name: Test integer type match strict: @@ -92,7 +88,6 @@ stages: an_integer: !anyint --- - test_name: Test list type match includes: @@ -108,7 +103,6 @@ stages: json: !anylist --- - test_name: Test dict type match includes: @@ -124,7 +118,6 @@ stages: json: !anydict --- - test_name: Test string type match strict: @@ -153,7 +146,6 @@ stages: a_string: !anystr --- - test_name: Test all at once includes: @@ -175,7 +167,6 @@ stages: a_bool: !anybool --- - test_name: Match list item responses includes: @@ -200,7 +191,6 @@ stages: - !anyfloat --- - test_name: Match whole list 'anything' includes: @@ -217,7 +207,6 @@ stages: top: !anything --- - test_name: Match list items anything includes: @@ -237,7 +226,6 @@ stages: - !anything --- - test_name: Test converting to a bool from a formatted string includes: @@ -255,7 +243,7 @@ stages: response: status_code: 200 - # We could use strtobool from distutils to make this pass, but it's a bit + # We could use strtobool to make this pass, but it's a bit # of magic # - name: Convert bool from a string # request: @@ -269,7 +257,6 @@ stages: # status_code: 200 --- - test_name: Test using a converted bool as part of the validated schema includes: @@ -285,7 +272,6 @@ stages: status_code: 200 --- - test_name: Test can't use approx numbers in a request includes: @@ -307,7 +293,6 @@ stages: status_code: 200 --- - # This actually raises an error when first loading the file, so it's not easy to # test like this @@ -346,7 +331,6 @@ stages: pi: !approx 3.1415926 --- - test_name: Test converting to an integer includes: @@ -389,7 +373,6 @@ stages: # status_code: 200 --- - test_name: Test using a converted int as part of the validated schema includes: @@ -404,7 +387,6 @@ stages: status_code: !int "{status_200}" --- - test_name: Test conversion to an float from included files includes: @@ -456,7 +438,6 @@ stages: status_code: 200 --- - test_name: Test using a converted float as part of the validated schema includes: @@ -472,7 +453,6 @@ stages: status_code: 200 --- - test_name: Test saving specific types between stages includes: @@ -505,7 +485,6 @@ stages: status_code: 200 --- - test_name: Ignore variable syntax with double braces includes: @@ -524,7 +503,6 @@ stages: status_code: 200 --- - test_name: Test not converting a raw string (ignore variable like syntax) includes: @@ -543,7 +521,6 @@ stages: status_code: 200 --- - test_name: Test raw token works in response as well includes: @@ 
-562,7 +539,6 @@ stages: value: !raw '{"query": "{ val1 { val2 { val3 { val4, val5 } } } }"}' --- - test_name: Test magic format token includes: @@ -582,13 +558,12 @@ stages: request: url: "{host}/echo" method: POST - json: !force_format_include "{whole_body}" + json: !force_original_structure "{whole_body}" response: status_code: 200 - json: !force_format_include "{tavern.request_vars.json}" + json: !force_original_structure "{tavern.request_vars.json}" --- - test_name: Test magic format token with list includes: @@ -609,7 +584,7 @@ stages: request: url: "{host}/echo" method: POST - json: !force_format_include "{whole_list_body}" + json: !force_original_structure "{whole_list_body}" response: status_code: 200 json: @@ -624,7 +599,41 @@ stages: - -3.0 --- +test_name: Test old tag still works + +includes: + - !include common.yaml + +stages: + - name: get dictionary + request: + url: "{host}/fake_list" + response: + status_code: 200 + json: !anylist + save: + json: + whole_list_body: "@" + - name: reuse dictionary + request: + url: "{host}/echo" + method: POST + json: !force_format_include "{whole_list_body}" + response: + status_code: 200 + json: + - a + - b + - c + - 1 + - 2 + - 3 + - -1.0 + - -2.0 + - -3.0 + +--- test_name: Match a regex at top level includes: @@ -640,7 +649,6 @@ stages: json: !re_match "c82bfa63-.*" --- - test_name: Match a regex in a nested thing includes: @@ -663,7 +671,6 @@ stages: inner: !re_match "value" --- - test_name: Match a regex number doesnt work because its the wrong type _xfail: run @@ -683,7 +690,6 @@ stages: pi: !re_match "3.14.*" --- - test_name: Match a uuid v4 includes: diff --git a/tests/integration/test_validate_pykwalify.tavern.yaml b/tests/integration/test_validate_pykwalify.tavern.yaml index 1d622fa41..0aa96d52e 100644 --- a/tests/integration/test_validate_pykwalify.tavern.yaml +++ b/tests/integration/test_validate_pykwalify.tavern.yaml @@ -1,5 +1,4 @@ --- - test_name: Test validating with extension function includes: @@ -13,7 +12,7 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_pykwalify + function: tavern.helpers:validate_pykwalify extra_kwargs: schema: type: map @@ -34,7 +33,6 @@ stages: required: true --- - test_name: Test validating with extension function mismatch _xfail: run @@ -50,7 +48,7 @@ stages: response: status_code: 200 verify_response_with: - function: tavern.testutils.helpers:validate_pykwalify + function: tavern.helpers:validate_pykwalify extra_kwargs: schema: type: mapping diff --git a/tests/logging.yaml b/tests/logging.yaml index 888fd4342..eb2a2be95 100644 --- a/tests/logging.yaml +++ b/tests/logging.yaml @@ -1,31 +1,31 @@ --- version: 1 formatters: - default: - # colorlog is really useful - (): colorlog.ColoredFormatter - format: "%(asctime)s [%(bold)s%(log_color)s%(levelname)s%(reset)s]: (%(bold)s%(name)s:%(lineno)d%(reset)s) %(message)s" - style: "%" - datefmt: "%X" - log_colors: - DEBUG: cyan - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red,bg_white + default: + # colorlog is really useful + (): colorlog.ColoredFormatter + format: "%(asctime)s [%(bold)s%(log_color)s%(levelname)s%(reset)s]: (%(bold)s%(name)s:%(lineno)d%(reset)s) %(message)s" + style: "%" + datefmt: "%X" + log_colors: + DEBUG: cyan + INFO: green + WARNING: yellow + ERROR: red + CRITICAL: red,bg_white handlers: - # print to stderr in tests. This will only show up if the test fails - stderr: - class: colorlog.StreamHandler - formatter: default + # print to stderr in tests. 
This will only show up if the test fails + stderr: + class: colorlog.StreamHandler + formatter: default loggers: - paho: - handlers: - - stderr - level: DEBUG - tavern: - handlers: - - stderr - level: DEBUG + paho: + handlers: + - stderr + level: DEBUG + tavern: + handlers: + - stderr + level: DEBUG diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index ed6bc50ec..57ab621d4 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,23 +1,35 @@ +import copy from unittest.mock import Mock import pytest -from tavern.util.strict_util import StrictLevel +from tavern._core.plugins import load_plugins +from tavern._core.pytest.config import TavernInternalConfig, TestConfig +from tavern._core.strict_util import StrictLevel +_includes = TestConfig( + variables={ + "request": {"prefix": "www.", "url": "google.com"}, + "test_auth_token": "abc123", + "code": "def456", + "callback_url": "www.yahoo.co.uk", + "request_topic": "/abc", + }, + strict=StrictLevel.all_on(), + tavern_internal=TavernInternalConfig( + pytest_hook_caller=Mock(), + backends={"mqtt": "paho-mqtt", "http": "requests", "grpc": "grpc"}, + ), + follow_redirects=False, + stages=[], +) -@pytest.fixture(name="includes") + +@pytest.fixture(scope="function", name="includes") def fix_example_includes(): - includes = { - "variables": { - "request": {"prefix": "www.", "url": "google.com"}, - "test_auth_token": "abc123", - "code": "def456", - "callback_url": "www.yahoo.co.uk", - "request_topic": "/abc", - }, - "backends": {"mqtt": "paho-mqtt", "http": "requests", "grpc": "grpc"}, - "strict": StrictLevel.all_on(), - "tavern_internal": {"pytest_hook_caller": Mock()}, - } + return copy.deepcopy(_includes) + - return includes.copy() +@pytest.fixture(scope="session", autouse=True) +def initialise_plugins(): + load_plugins(_includes) diff --git a/tests/unit/response/test_mqtt_response.py b/tests/unit/response/test_mqtt_response.py index c6d9d220f..b4099f0c9 100644 --- a/tests/unit/response/test_mqtt_response.py +++ b/tests/unit/response/test_mqtt_response.py @@ -1,21 +1,28 @@ -from unittest.mock import Mock +import random +import re +import threading +from unittest.mock import Mock, patch import pytest +from tavern._core import exceptions +from tavern._core.loader import ANYTHING +from tavern._core.strict_util import StrictLevel from tavern._plugins.mqtt.client import MQTTClient from tavern._plugins.mqtt.response import MQTTResponse -from tavern.util import exceptions -from tavern.util.strict_util import StrictLevel -def test_nothing_returned_fails(): +def test_nothing_returned_fails(includes): """Raises an error if no message was received""" fake_client = Mock(spec=MQTTClient, message_received=Mock(return_value=None)) - expected = {"topic": "/a/b/c", "payload": "hello"} + expected = {"mqtt_responses": [{"topic": "/a/b/c", "payload": "hello"}]} verifier = MQTTResponse( - fake_client, "Test stage", expected, {"strict": StrictLevel.all_on()} + fake_client, + "Test stage", + expected, + includes.with_strictness(StrictLevel.all_on()), ) with pytest.raises(exceptions.TestFailError): @@ -31,8 +38,9 @@ def __init__(self, returned): self.timestamp = 0 -class TestResponse(object): - def _get_fake_verifier(self, expected, fake_messages, includes): +class TestResponse: + @staticmethod + def _get_fake_verifier(expected, fake_messages, includes): """Given a list of messages, return a mocked version of the MQTT response verifier which will take messages off the front of this list as if they were published @@ -43,20 +51,40 @@ def 
_get_fake_verifier(self, expected, fake_messages, includes): if not isinstance(fake_messages, list): pytest.fail("Need to pass a list of messages") - def yield_all_messages(): - msg_copy = fake_messages[:] + msg_lock = threading.RLock() + + responses: dict[str, list[FakeMessage]] = { + message.topic: [] for message in fake_messages + } + for message in fake_messages: + responses[message.topic].append(message) - def inner(timeout): + def yield_all_messages(): + def inner(topic, timeout): try: - return msg_copy.pop(0) - except IndexError: - return None + msg_lock.acquire() + + r = responses[topic] + if len(r) == 0: + return None + + return r.pop(random.randint(0, len(r) - 1)) + finally: + msg_lock.release() return inner - fake_client = Mock(spec=MQTTClient, message_received=yield_all_messages()) + fake_client = Mock( + spec=MQTTClient, + message_received=yield_all_messages(), + ) + + if not isinstance(expected, list): + expected = [expected] - return MQTTResponse(fake_client, "Test stage", expected, includes) + return MQTTResponse( + fake_client, "Test stage", {"mqtt_responses": expected}, includes + ) def test_message_on_same_topic_fails(self, includes): """Correct topic, wrong message""" @@ -87,6 +115,55 @@ def test_correct_message(self, includes): assert len(verifier.received_messages) == 1 assert verifier.received_messages[0].topic == fake_message.topic + @pytest.mark.parametrize("n_messages", (1, 2)) + def test_ext_function_called_save(self, includes, n_messages: int): + """Make sure that it calls ext functions appropriately on individual MQTT + responses and saved the response""" + expecteds = [] + fake_messages = [] + for i in range(n_messages): + expected = { + "topic": "/a/b/c/{}".format(i + 1), + "payload": "hello", + "save": { + "$ext": {"function": "function_name_{}".format(i + 1)}, + }, + } + + fake_message = FakeMessage(expected) + + expecteds += [expected] + fake_messages += [fake_message] + + verifier = self._get_fake_verifier(expecteds, fake_messages, includes) + + def fake_get_wrapped_response(): + def wrap(ext): + def actual(response, *args, **kwargs): + match = re.match(r"function_name_(?P\d+)", ext["function"]) + assert match + message_number = match.group("idx") + return {"saved_topic_{}".format(message_number): response.topic} + + return actual + + return wrap + + with patch( + "tavern.response.get_wrapped_response_function", + new_callable=fake_get_wrapped_response, + ): + saved = verifier.verify(None) + + assert len(verifier.received_messages) == n_messages + + for i in range(n_messages): + assert verifier.received_messages[i].topic == fake_messages[i].topic + + assert len(saved) == n_messages + + assert saved["saved_topic_{}".format(i + 1)] == expecteds[i]["topic"] + def test_correct_message_eventually(self, includes): """One wrong messge, then the correct one""" @@ -101,6 +178,121 @@ def test_correct_message_eventually(self, includes): verifier.verify(expected) + assert len(verifier.received_messages) >= 1 + received_topics = [m.topic for m in verifier.received_messages] + assert fake_message_good.topic in received_topics + + def test_unexpected_fail(self, includes): + """Messages marked unexpected fail test""" + + expected = {"topic": "/a/b/c", "payload": "hello", "unexpected": True} + + fake_message = FakeMessage(expected) + + verifier = self._get_fake_verifier(expected, [fake_message], includes) + + with pytest.raises(exceptions.TestFailError): + verifier.verify(expected) + + assert len(verifier.received_messages) == 1 + assert 
verifier.received_messages[0].topic == fake_message.topic + + @pytest.mark.parametrize("r", range(10)) + def test_multiple_messages(self, includes, r): + """One wrong message, two correct ones""" + + expected = [ + {"topic": "/a/b/c", "payload": "hello"}, + {"topic": "/d/e/f", "payload": "hellog"}, + ] + + fake_message_good_1 = FakeMessage(expected[0]) + fake_message_good_2 = FakeMessage(expected[1]) + fake_message_bad = FakeMessage({"topic": "/a/b/c", "payload": "goodbye"}) + + messages = [fake_message_bad, fake_message_good_1, fake_message_good_2] + random.shuffle(messages) + + verifier = self._get_fake_verifier( + expected, + messages, + includes, + ) + + verifier.verify(expected) + + assert len(verifier.received_messages) >= 2 + received_topics = [m.topic for m in verifier.received_messages] + assert fake_message_good_1.topic in received_topics + assert fake_message_good_2.topic in received_topics + + @pytest.mark.parametrize("r", range(10)) + def test_different_order(self, includes, r): + """Messages coming in a different order""" + + expected = [ + {"topic": "/a/b/c", "payload": "hello"}, + {"topic": "/d/e/f", "payload": "hellog"}, + ] + + fake_message_good_1 = FakeMessage(expected[0]) + fake_message_good_2 = FakeMessage(expected[1]) + + messages = [fake_message_good_2, fake_message_good_1] + random.shuffle(messages) + + verifier = self._get_fake_verifier(expected, messages, includes) + + verifier.verify(expected) + + assert len(verifier.received_messages) == 2 + received_topics = [m.topic for m in verifier.received_messages] + assert fake_message_good_1.topic in received_topics + assert fake_message_good_2.topic in received_topics + + # FIXME: Add tests for 'ext' functions are called in the right order + + @pytest.mark.parametrize( + "payload", + ( + ( + "!anything", + ANYTHING, + ), + ( + "null", + None, + ), + ( + "goog", + "goog", + ), + ), + ) + @pytest.mark.parametrize("r", range(10)) + def test_same_topic(self, includes, r, payload): + """Messages coming in a different order""" + + expected = [ + {"topic": "/a/b/c", "payload": "hello"}, + {"topic": "/a/b/c", "payload": payload[0]}, + ] + + fake_message_good_1 = FakeMessage(expected[0]) + fake_message_good_2 = FakeMessage(expected[1]) + + messages = [fake_message_good_2, fake_message_good_1] + random.shuffle(messages) + + verifier = self._get_fake_verifier(expected, messages, includes) + + loaded = [ + {"topic": "/a/b/c", "payload": "hello"}, + {"topic": "/a/b/c", "payload": payload[1]}, + ] + verifier.verify(loaded) + assert len(verifier.received_messages) == 2 - assert verifier.received_messages[0].topic == fake_message_bad.topic - assert verifier.received_messages[1].topic == fake_message_good.topic + received_topics = [m.topic for m in verifier.received_messages] + assert fake_message_good_1.topic in received_topics + assert fake_message_good_2.topic in received_topics diff --git a/tests/unit/response/test_rest.py b/tests/unit/response/test_rest.py index 9ec3a3d6d..fe3d79af4 100644 --- a/tests/unit/response/test_rest.py +++ b/tests/unit/response/test_rest.py @@ -2,10 +2,10 @@ import pytest +from tavern._core import exceptions +from tavern._core.dict_util import format_keys +from tavern._core.loader import ANYTHING from tavern._plugins.rest.response import RestResponse -from tavern.util import exceptions -from tavern.util.dict_util import format_keys -from tavern.util.loader import ANYTHING @pytest.fixture(name="example_response") @@ -343,7 +343,7 @@ def test_saved_value_in_validate(self, nested_response, nested_schema, 
includes) r = RestResponse( Mock(), "Test 1", - format_keys(nested_schema, includes["variables"]), + format_keys(nested_schema, includes.variables), includes, ) diff --git a/tests/unit/test_call_run.py b/tests/unit/test_call_run.py index 0ae417da7..30972c1af 100644 --- a/tests/unit/test_call_run.py +++ b/tests/unit/test_call_run.py @@ -2,8 +2,8 @@ import pytest +from tavern._core import exceptions from tavern.core import run -from tavern.util import exceptions @pytest.fixture(autouse=True) diff --git a/tests/unit/test_core.py b/tests/unit/test_core.py index 6a94dfa85..32d2b1985 100644 --- a/tests/unit/test_core.py +++ b/tests/unit/test_core.py @@ -1,16 +1,18 @@ -from copy import deepcopy +import dataclasses import json import os -from unittest.mock import MagicMock, Mock, patch import uuid +from copy import deepcopy +from unittest.mock import MagicMock, Mock, patch import paho.mqtt.client as paho import pytest import requests +from tavern._core import exceptions +from tavern._core.pytest.util import load_global_cfg +from tavern._core.run import run_test from tavern._plugins.mqtt.client import MQTTClient -from tavern.core import run_test -from tavern.util import exceptions @pytest.fixture(name="fulltest") @@ -115,7 +117,6 @@ def test_invalid_headers(self, fulltest, mockargs, includes): class TestIncludeStages: @pytest.fixture def fake_stages(self): - stages = [ { "id": "my_external_stage", @@ -170,7 +171,7 @@ def test_global_stage(self, fulltest, mockargs, includes, fake_stages): newtest["includes"] = stage_includes newtest["stages"].insert(0, {"type": "ref", "id": "my_external_stage"}) - includes["stages"] = fake_stages + includes = dataclasses.replace(includes, stages=fake_stages) with patch( "tavern._plugins.rest.request.requests.Session.request", @@ -190,7 +191,7 @@ def test_both_stages(self, fulltest, mockargs, includes, fake_stages): newtest["includes"] = stage_includes newtest["stages"].insert(0, {"type": "ref", "id": "my_external_stage"}) - includes["stages"] = fake_stages + includes = dataclasses.replace(includes, stages=fake_stages) with pytest.raises(exceptions.DuplicateStageDefinitionError): with patch( @@ -273,7 +274,7 @@ def test_sleep_before(self, fulltest, mockargs, includes): "tavern._plugins.rest.request.requests.Session.request", return_value=mock_response, ) as pmock: - with patch("tavern.util.delay.time.sleep") as smock: + with patch("tavern._core.testhelpers.time.sleep") as smock: run_test("heif", fulltest, includes) assert pmock.called @@ -290,7 +291,7 @@ def test_sleep_after(self, fulltest, mockargs, includes): "tavern._plugins.rest.request.requests.Session.request", return_value=mock_response, ) as pmock: - with patch("tavern.util.delay.time.sleep") as smock: + with patch("tavern._core.testhelpers.time.sleep") as smock: run_test("heif", fulltest, includes) assert pmock.called @@ -384,7 +385,9 @@ def test_format_request_var_value(self, fulltest, mockargs, includes, request_ke with patch( "tavern._plugins.rest.request.requests.Session.request", return_value=mock_response, - ) as pmock: + ) as pmock, patch( + "tavern._plugins.rest.request.valid_http_methods", ["POST", sent_value] + ): run_test("heif", fulltest, includes) assert pmock.called @@ -397,7 +400,9 @@ class TestFormatMQTTVarsJson: def fix_mqtt_publish_test(self): spec = { "test_name": "An mqtt test with a single stage", - "mqtt": {"connect": "localhost"}, + "paho-mqtt": { + "connect": {"host": "localhost"}, + }, "stages": [ { "name": "step 1", @@ -432,11 +437,13 @@ def test_format_request_var_dict(self, 
fulltest, includes): mock_response = Mock(**mockargs) fake_client = MagicMock( - spec=MQTTClient, message_received=Mock(return_value=mock_response) + spec=MQTTClient, + message_received=Mock(return_value=mock_response), ) - with patch("tavern._plugins.mqtt.client.paho.Client", fake_client), patch( - "tavern.core.get_extra_sessions", return_value={"paho-mqtt": fake_client} + with patch( + "tavern._core.run.get_extra_sessions", + return_value={"paho-mqtt": fake_client}, ) as pmock: run_test("heif", fulltest, includes) @@ -450,7 +457,9 @@ class TestFormatMQTTVarsPlain: def fix_mqtt_publish_test(self): spec = { "test_name": "An mqtt test with a single stage", - "mqtt": {"connect": "localhost"}, + "paho-mqtt": { + "connect": {"host": "localhost"}, + }, "stages": [ { "name": "step 1", @@ -482,9 +491,20 @@ def test_format_request_var_value(self, fulltest, includes): spec=MQTTClient, message_received=Mock(return_value=mock_response) ) - with patch("tavern._plugins.mqtt.client.paho.Client", fake_client), patch( - "tavern.core.get_extra_sessions", return_value={"paho-mqtt": fake_client} + with patch( + "tavern._core.run.get_extra_sessions", + return_value={"paho-mqtt": fake_client}, ) as pmock: run_test("heif", fulltest, includes) assert pmock.called + + +def test_copy_config(pytestconfig): + cfg_1 = load_global_cfg(pytestconfig) + + cfg_1.variables["test1"] = "abc" + + cfg_2 = load_global_cfg(pytestconfig) + + assert cfg_2.variables.get("test1") is None diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 85048b047..0d3da1c4c 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -1,3 +1,4 @@ +import contextlib import json import sys import tempfile @@ -8,28 +9,24 @@ import pytest import yaml -from tavern.core import run -from tavern.schemas.extensions import validate_file_spec -from tavern.testutils.helpers import ( - validate_content, - validate_pykwalify, - validate_regex, -) -from tavern.testutils.pytesthook.item import YamlItem -from tavern.util import exceptions -from tavern.util.dict_util import _check_and_format_values, format_keys -from tavern.util.loader import ForceIncludeToken -from tavern.util.strict_util import ( +from tavern._core import exceptions +from tavern._core.dict_util import _check_and_format_values, format_keys +from tavern._core.loader import ForceIncludeToken +from tavern._core.pytest.item import YamlItem +from tavern._core.schema.extensions import validate_file_spec +from tavern._core.strict_util import ( StrictLevel, - _StrictSetting, + StrictSetting, validate_and_parse_option, ) +from tavern.core import run +from tavern.helpers import validate_content, validate_pykwalify, validate_regex class FakeResponse: def __init__(self, text): self.text = text - self.headers = dict(test_header=text) + self.headers = {"test_header": text} class TestRegex: @@ -119,6 +116,22 @@ def test_extra_args(self): assert not pmock.called +class TestOptionParsing: + valid = [ + "{0:s}:{1:s}".format(section, setting) + for section in ("json", "headers", "redirect_params") + for setting in ("on", "off") + ] + + @pytest.mark.parametrize("optval", valid) + def test_strictness_parsing_good(self, pytestconfig, optval): + args = pytestconfig._parser.parse_known_args( + ["--tavern-strict={}".format(optval)] + ) + assert "tavern_strict" in args + assert args.tavern_strict == [optval] + + class TestTavernRepr: @pytest.fixture(name="fake_item") def fix_fake_item(self, request): @@ -127,6 +140,13 @@ def fix_fake_item(self, request): ) return item + 
@pytest.fixture(autouse=True, scope="session") + def add_opts(self, pytestconfig): + from tavern._core.pytest.hooks import pytest_addoption + + with contextlib.suppress(ValueError): + pytest_addoption(pytestconfig._parser) + def _make_fake_exc_info(self, exc_type): # Copied from pytest tests class FakeExcinfo(_pytest._code.ExceptionInfo): @@ -143,7 +163,7 @@ def test_not_called_for_normal_exception(self, fake_item): """Does not call tavern repr for non tavern errors""" fake_info = self._make_fake_exc_info(RuntimeError) - with patch("tavern.testutils.pytesthook.item.ReprdError") as rmock: + with patch("tavern._core.pytest.item.ReprdError") as rmock: fake_item.repr_failure(fake_info) assert not rmock.called @@ -154,7 +174,7 @@ def test_not_called_for_badschema_tavern_exception_(self, fake_item, ini_flag): fake_info = self._make_fake_exc_info(exceptions.BadSchemaError) with patch.object(fake_item.config, "getini", return_value=ini_flag): - with patch("tavern.testutils.pytesthook.item.ReprdError") as rmock: + with patch("tavern._core.pytest.item.ReprdError") as rmock: fake_item.repr_failure(fake_info) assert not rmock.called @@ -164,7 +184,7 @@ def test_not_called_ini(self, fake_item): fake_info = self._make_fake_exc_info(exceptions.InvalidSettingsError) with patch.object(fake_item.config, "getini", return_value=True): - with patch("tavern.testutils.pytesthook.item.ReprdError") as rmock: + with patch("tavern._core.pytest.item.ReprdError") as rmock: fake_item.repr_failure(fake_info) assert not rmock.called @@ -174,7 +194,7 @@ def test_not_called_cli(self, fake_item): fake_info = self._make_fake_exc_info(exceptions.InvalidSettingsError) with patch.object(fake_item.config, "getoption", return_value=True): - with patch("tavern.testutils.pytesthook.item.ReprdError") as rmock: + with patch("tavern._core.pytest.item.ReprdError") as rmock: fake_item.repr_failure(fake_info) assert not rmock.called @@ -182,7 +202,7 @@ def test_not_called_cli(self, fake_item): @pytest.fixture(name="nested_response") def fix_nested_response(): - class response_content(object): + class response_content: content = { "top": { "Thing": "value", @@ -225,6 +245,7 @@ def test_incorrect_value(self, nested_response): validate_content(nested_response, comparisons) +@pytest.mark.xfail class TestPykwalifyExtension: def test_validate_schema_correct(self, nested_response): correct_schema = dedent( @@ -271,12 +292,12 @@ def test_validate_schema_incorrect(self, nested_response): ) -class TestCheckParseValues(object): +class TestCheckParseValues: @pytest.mark.parametrize( "item", [[134], {"a": 2}, yaml, yaml.load, yaml.SafeLoader] ) def test_warns_bad_type(self, item): - with patch("tavern.util.dict_util.logger.warning") as wmock: + with patch("tavern._core.dict_util.logger.warning") as wmock: _check_and_format_values("{fd}", {"fd": item}) assert wmock.called_with( @@ -285,15 +306,15 @@ def test_warns_bad_type(self, item): ) ) - @pytest.mark.parametrize("item", [1, "a", 1.3, format_keys("{s}", dict(s=2))]) + @pytest.mark.parametrize("item", [1, "a", 1.3, format_keys("{s}", {"s": 2})]) def test_no_warn_good_type(self, item): - with patch("tavern.util.dict_util.logger.warning") as wmock: + with patch("tavern._core.dict_util.logger.warning") as wmock: _check_and_format_values("{fd}", {"fd": item}) assert not wmock.called -class TestFormatWithJson(object): +class TestFormatWithJson: @pytest.mark.parametrize( "item", [[134], {"a": 2}, yaml, yaml.load, yaml.SafeLoader] ) @@ -319,7 +340,7 @@ def test_bad_format_string_multiple(self): 
format_keys(ForceIncludeToken("{a}{b}"), {"fd": "123"}) -class TestCheckFileSpec(object): +class TestCheckFileSpec: def _wrap_test_block(self, dowith): validate_file_spec({"files": dowith}, Mock(), Mock()) @@ -373,38 +394,38 @@ def test_fails_bad_setting(self, setting): validate_and_parse_option("json:{}".format(setting)) @pytest.mark.parametrize("section", ["json", "headers", "redirect_query_params"]) - def test_defaults(self, section): - level = StrictLevel([]) + def test_defaults_good(self, section): + level = StrictLevel() if section == "json": - assert level.setting_for(section) + assert level.option_for(section).is_on() else: - assert not level.setting_for(section) + assert not level.option_for(section).is_on() @pytest.mark.parametrize("section", ["true", "1", "hi", ""]) - def test_defaults(self, section): - level = StrictLevel([]) + def test_defaults_bad(self, section): + level = StrictLevel() with pytest.raises(exceptions.InvalidConfigurationException): - level.setting_for(section) + level.option_for(section) # These tests could be removed, they are testing implementation details... @pytest.mark.parametrize("section", ["json", "headers", "redirect_query_params"]) def test_set_on(self, section): level = StrictLevel.from_options([section + ":on"]) - assert level.setting_for(section).setting == _StrictSetting.ON - assert level.setting_for(section).is_on() + assert level.option_for(section).setting == StrictSetting.ON + assert level.option_for(section).is_on() @pytest.mark.parametrize("section", ["json", "headers", "redirect_query_params"]) def test_set_off(self, section): level = StrictLevel.from_options([section + ":off"]) - assert level.setting_for(section).setting == _StrictSetting.OFF - assert not level.setting_for(section).is_on() + assert level.option_for(section).setting == StrictSetting.OFF + assert not level.option_for(section).is_on() @pytest.mark.parametrize("section", ["json", "headers", "redirect_query_params"]) def test_unset(self, section): level = StrictLevel.from_options([section]) - assert level.setting_for(section).setting == _StrictSetting.UNSET + assert level.option_for(section).setting == StrictSetting.UNSET diff --git a/tests/unit/test_mqtt.py b/tests/unit/test_mqtt.py index 57989b740..1d55b06fa 100644 --- a/tests/unit/test_mqtt.py +++ b/tests/unit/test_mqtt.py @@ -1,13 +1,12 @@ -import contextlib -import threading +from typing import Dict from unittest.mock import MagicMock, Mock, patch import paho.mqtt.client as paho import pytest +from tavern._core import exceptions from tavern._plugins.mqtt.client import MQTTClient, _handle_tls_args, _Subscription from tavern._plugins.mqtt.request import MQTTRequest -from tavern.util import exceptions def test_host_required(): @@ -20,25 +19,37 @@ def test_host_required(): MQTTClient(**args) -class TestClient(object): - @pytest.fixture(name="fake_client") - def fix_fake_client(self): - args = {"connect": {"host": "localhost"}} +@pytest.fixture(name="fake_client") +def fix_fake_client(): + args = {"connect": {"host": "localhost"}} + + mqtt_client = MQTTClient(**args) + + mqtt_client._subscribed[2] = _Subscription("abc") + mqtt_client._subscription_mappings["abc"] = 2 + + return mqtt_client - return MQTTClient(**args) + +class TestClient: + def test_no_queue(self, fake_client): + """Trying to fetch from a nonexistent queue raised exception""" + + with pytest.raises(exceptions.MQTTTopicException): + fake_client.message_received("", 0) def test_no_message(self, fake_client): """No message in queue returns None""" - assert 
fake_client.message_received(0) is None + assert fake_client.message_received("abc", 0) is None def test_message_queued(self, fake_client): """Returns message in queue""" message = "abc123" - fake_client._userdata["queue"].put(message) - assert fake_client.message_received(0) == message + fake_client._userdata["_subscribed"][2].queue.put(message) + assert fake_client.message_received("abc", 0) == message def test_context_connection_failure(self, fake_client): """Unable to connect on __enter__ raises MQTTError""" @@ -87,7 +98,7 @@ class FakeMessage: fake_client.publish("abc", "123") -class TestTLS(object): +class TestTLS: def test_missing_cert_gives_error(self): """Missing TLS cert gives an error""" args = {"certfile": "/lcliueurhug/ropko3kork32"} @@ -129,7 +140,7 @@ def test_unknown_fields(self, req, includes): def test_missing_format(self, req, includes): """All format variables should be present""" - del includes["variables"]["request_topic"] + del includes.variables["request_topic"] with pytest.raises(exceptions.MissingFormatError): MQTTRequest(Mock(), req, includes) @@ -139,7 +150,7 @@ def test_correct_format(self, req, includes): MQTTRequest(Mock(), req, includes) -class TestSubscription(object): +class TestSubscription: @staticmethod def get_mock_client_with(subcribe_action): mock_paho = Mock(spec=paho.Client, subscribe=subcribe_action) @@ -147,6 +158,7 @@ def get_mock_client_with(subcribe_action): spec=MQTTClient, _client=mock_paho, _subscribed={}, + _subscription_mappings={}, _subscribe_lock=MagicMock(), ) return mock_client @@ -160,7 +172,7 @@ def subscribe_success(topic, *args, **kwargs): MQTTClient.subscribe(mock_client, "abc") assert mock_client._subscribed[123].topic == "abc" - assert mock_client._subscribed[123].subscribed == False + assert mock_client._subscribed[123].subscribed is False def test_no_subscribe_on_err(self): def subscribe_err(topic, *args, **kwargs): @@ -168,7 +180,8 @@ def subscribe_err(topic, *args, **kwargs): mock_client = TestSubscription.get_mock_client_with(subscribe_err) - MQTTClient.subscribe(mock_client, "abc") + with pytest.raises(exceptions.MQTTError): + MQTTClient.subscribe(mock_client, "abc") assert mock_client._subscribed == {} @@ -181,3 +194,33 @@ def subscribe_success(topic, *args, **kwargs): MQTTClient._on_subscribe(mock_client, "abc", {}, 123, 0) assert mock_client._subscribed == {} + + +class TestExtFunctions: + @pytest.fixture() + def basic_mqtt_request_args(self) -> Dict: + return { + "topic": "/a/b/c", + } + + def test_basic(self, fake_client, basic_mqtt_request_args, includes): + MQTTRequest(fake_client, basic_mqtt_request_args, includes) + + def test_ext_function_bad(self, fake_client, basic_mqtt_request_args, includes): + basic_mqtt_request_args["json"] = {"$ext": "kk"} + + with pytest.raises(exceptions.InvalidExtFunctionError): + MQTTRequest(fake_client, basic_mqtt_request_args, includes) + + def test_ext_function_good(self, fake_client, basic_mqtt_request_args, includes): + basic_mqtt_request_args["json"] = { + "$ext": { + "function": "operator:add", + "extra_args": (1, 2), + } + } + + m = MQTTRequest(fake_client, basic_mqtt_request_args, includes) + + assert "payload" in m._publish_args + assert m._publish_args["payload"] == "3" diff --git a/tests/unit/test_pytest_hooks.py b/tests/unit/test_pytest_hooks.py index 1ea6e5490..d4c128098 100644 --- a/tests/unit/test_pytest_hooks.py +++ b/tests/unit/test_pytest_hooks.py @@ -1,35 +1,43 @@ +import os +import pathlib from dataclasses import dataclass -from unittest.mock import Mock +from 
unittest.mock import Mock, patch -from faker import Faker -import py -from py._path.local import LocalPath import pytest +from faker import Faker -from tavern.testutils.pytesthook.file import YamlFile, _get_parametrized_items +from tavern._core import exceptions +from tavern._core.pytest.file import YamlFile, _get_parametrized_items @dataclass class MockArgs: session: pytest.Session parent: pytest.File - fspath: LocalPath + path: pathlib.Path def mock_args(): """Get a basic test config to initialise a YamlFile object with""" - fspath = py.path.local("abc") + path = pathlib.Path("abc") - cargs = {"rootdir": "abc", "fspath": fspath} + cargs = {"rootdir": "abc", "path": path} - config = Mock(**cargs) + config = Mock(**cargs, rootpath="abc") session = Mock(_initialpaths=[], config=config) - parent = Mock(config=config, parent=None, nodeid="sdlfs", **cargs, session=session) + parent = Mock( + spec=os.PathLike, + config=config, + parent=None, + nodeid="sdlfs", + **cargs, + session=session, + ) - return MockArgs(session, parent, fspath) + return MockArgs(session, parent, path) def get_basic_parametrize_mark(faker): @@ -49,7 +57,7 @@ def get_joined_parametrize_mark(faker): def get_parametrised_tests(marks): args = mock_args() - y = YamlFile.from_parent(args.parent, fspath=args.fspath) + y = YamlFile.from_parent(args.parent, path=args.path) y.session = args.session spec = {"test_name": "a test", "stages": []} @@ -69,7 +77,7 @@ def test_none(): @pytest.mark.parametrize("faker", [Faker(), Faker("zh_CN")]) -class TestMakeFile(object): +class TestMakeFile: def test_only_single(self, faker): marks = [get_basic_parametrize_mark(faker)] @@ -146,3 +154,66 @@ def test_double_double_single(self, faker): # [w, x, y, z, 1, 2] # etc. assert len(tests) == 36 + + @pytest.mark.parametrize( + ("keys", "values"), + ( + ("a", ["b", "c", "d"]), + (["a"], ["b", "c", "d"]), + ("a", {"k": "v"}), + (["a"], {"k": "v"}), + (["a", "b"], [["b", "c"]]), + (["a", "b"], [["b", "c"], [{"a": "b"}, {"a": "b"}]]), + (["a", "b"], [["b", "c"], ["b", "c"], ["d", "e"]]), + ), + ) + def test_ext_function_top_level(self, faker, keys, values): + with patch( + "tavern._core.pytest.file.get_wrapped_create_function", + lambda _: lambda: values, + ): + marks = [ + {"parametrize": {"key": keys, "vals": {"$ext": {"function": "a:v"}}}} + ] + + tests = get_parametrised_tests(marks) + + assert len(tests) == len(values) + + @pytest.mark.parametrize( + ("keys", "values"), + ( + # must return a list of lists + (["a", "b"], {"a": "b"}), + # must return a list of lists + (["a", "b"], [{"a": "b"}]), + # must return a list of lists + (["a", "b"], [{"a": "b"}, {"a": "b"}]), + # must return a list of lists + (["a", "b"], "b"), + # must return a list of lists + (["a", "b"], ["b", "c"]), + # must return a list of lists, where each element is also 3 long + (["a", "b"], [["b", "c", "e"]]), + # must return a list of lists, where each element is also 3 long + (["a", "b"], [["b"]]), + ), + ) + def test_ext_function_top_level_invalid(self, faker, keys, values): + with patch( + "tavern._core.pytest.file.get_wrapped_create_function", + lambda _: lambda: values, + ): + marks = [ + {"parametrize": {"key": keys, "vals": {"$ext": {"function": "a:v"}}}} + ] + + with pytest.raises(exceptions.BadSchemaError): + get_parametrised_tests(marks) + + +def test_doc_string(): + args = mock_args() + y = YamlFile.from_parent(args.parent, path=args.path) + + assert isinstance(y.obj.__doc__, str) diff --git a/tests/unit/test_request.py b/tests/unit/test_request.py index 
d059bd9e3..2efdbe727 100644 --- a/tests/unit/test_request.py +++ b/tests/unit/test_request.py @@ -1,12 +1,15 @@ -from contextlib import ExitStack +import dataclasses import os import tempfile +from contextlib import ExitStack from unittest.mock import Mock import pytest import requests from requests.cookies import RequestsCookieJar +from tavern._core import exceptions +from tavern._core.extfunctions import update_from_ext from tavern._plugins.rest.request import ( RestRequest, _check_allow_redirects, @@ -14,8 +17,6 @@ _read_expected_cookies, get_request_args, ) -from tavern.util import exceptions -from tavern.util.extfunctions import update_from_ext @pytest.fixture(name="req") @@ -38,7 +39,7 @@ def fix_example_request(): return spec.copy() -class TestRequests(object): +class TestRequests: def test_unknown_fields(self, req, includes): """Unkown args should raise an error""" req["fodokfowe"] = "Hello" @@ -48,7 +49,7 @@ def test_unknown_fields(self, req, includes): def test_missing_format(self, req, includes): """All format variables should be present""" - del includes["variables"]["code"] + del includes.variables["code"] with pytest.raises(exceptions.MissingFormatError): RestRequest(Mock(), req, includes) @@ -63,11 +64,11 @@ def test_bad_get_body(self, req, includes): ) -class TestHttpRedirects(object): +class TestHttpRedirects: def test_session_called_no_redirects(self, req, includes): """Always disable redirects by defauly""" - assert _check_allow_redirects(req, includes) == False + assert _check_allow_redirects(req, includes) is False @pytest.mark.parametrize("do_follow", [True, False]) def test_session_do_follow_redirects_based_on_test(self, req, includes, do_follow): @@ -83,12 +84,12 @@ def test_session_do_follow_redirects_based_on_global_flag( ): """Globally enable following redirects in test""" - includes["follow_redirects"] = do_follow + includes = dataclasses.replace(includes, follow_redirects=do_follow) assert _check_allow_redirects(req, includes) == do_follow -class TestCookies(object): +class TestCookies: @pytest.fixture def mock_session(self): return Mock(spec=requests.Session, cookies=RequestsCookieJar()) @@ -107,7 +108,7 @@ def test_available_not_waited(self, req, includes): cookiejar.set("a", 2) mock_session = Mock(spec=requests.Session, cookies=cookiejar) - assert _read_expected_cookies(mock_session, req, includes) == None + assert _read_expected_cookies(mock_session, req, includes) is None def test_ask_for_nothing(self, req, includes): """explicitly ask fo rno cookies""" @@ -147,7 +148,7 @@ def test_format_cookies(self, req, includes): cookiejar.set("a", 2) req["cookies"] = ["{cookiename}"] - includes["variables"]["cookiename"] = "a" + includes.variables["cookiename"] = "a" mock_session = Mock(spec=requests.Session, cookies=cookiejar) @@ -179,7 +180,7 @@ def test_no_duplicate_cookie(self, req, includes): _read_expected_cookies(mock_session, req, includes) -class TestRequestArgs(object): +class TestRequestArgs: def test_default_method(self, req, includes): del req["method"] del req["data"] @@ -303,8 +304,7 @@ def test_verity_with_valid_values(self, req, includes, verify_values): class TestExtFunctions: - @pytest.mark.parametrize("merge_values", (True, False, None)) - def test_get_from_function(self, req, merge_values): + def test_get_from_function(self, req, includes): """Make sure ext functions work in request This is a bit of a silly example because we're passing a dictionary @@ -319,12 +319,9 @@ def test_get_from_function(self, req, merge_values): **original_json, } - 
update_from_ext(req, ["json"], {"merge_ext_values": merge_values}) + update_from_ext(req, ["json"]) - if merge_values: - assert req["json"] == dict(**to_copy, **original_json) - else: - assert req["json"] == to_copy + assert req["json"] == dict(**to_copy, **original_json) class TestOptionalDefaults: @@ -340,20 +337,60 @@ def test_passthrough_verify(self, req, includes, verify): class TestFileBody: - def test_file_body(self, req, includes): + def test_file_body_format(self, req, includes): """Test getting file body""" req.pop("data") - req["file_body"] = "{callback_url}" - includes["abcdef"] = "Hello" + with tempfile.NamedTemporaryFile(encoding="utf8", mode="w") as tmpin: + tmpin.write("OK") + includes.variables["tmpfile_loc"] = tmpin.name - args = get_request_args(req, includes) + req["file_body"] = "{tmpfile_loc}" + + args = get_request_args(req, includes) + + assert args["file_body"] == tmpin.name + + def test_file_body_content_type(self, req, includes): + """Test inferring content type etc. works""" + + req.pop("data") + req.pop("headers") + + with tempfile.NamedTemporaryFile( + encoding="utf8", mode="w", suffix=".json" + ) as tmpin: + tmpin.write("OK") + + req["file_body"] = tmpin.name + + args = get_request_args(req, includes) + + assert args["file_body"] == tmpin.name + assert args["headers"]["content-type"] == "application/json" + + def test_file_body_content_encoding(self, req, includes): + """Test inferring content type etc. works""" + + req.pop("data") + req.pop("headers") + + with tempfile.NamedTemporaryFile( + encoding="utf8", mode="w", suffix=".tar.gz" + ) as tmpin: + tmpin.write("OK") + + req["file_body"] = tmpin.name + + args = get_request_args(req, includes) - assert args["file_body"] == includes["variables"]["callback_url"] + assert args["file_body"] == tmpin.name + assert args["headers"]["content-type"] == "application/x-tar" + assert args["headers"]["Content-Encoding"] == "gzip" -class TestGetFiles(object): +class TestGetFiles: @pytest.fixture def mock_stack(self): return Mock(spec=ExitStack) @@ -422,7 +459,7 @@ def test_format_filename(self, mock_stack, includes, file_args): """Filenames should be formatted in short and long styles""" with tempfile.NamedTemporaryFile(suffix=".json") as tfile: - includes["variables"]["tmpname"] = tfile.name + includes.variables["tmpname"] = tfile.name request_args = {"files": {"file1": tfile.name}} file_spec = _get_file_arguments(request_args, mock_stack, includes) diff --git a/tests/unit/test_schema.py b/tests/unit/test_schema.py index 3dd0e8cb6..3cedd9e57 100644 --- a/tests/unit/test_schema.py +++ b/tests/unit/test_schema.py @@ -6,9 +6,9 @@ import pytest import yaml -from tavern.schemas.files import verify_tests -from tavern.util.exceptions import BadSchemaError -from tavern.util.loader import load_single_document_yaml +from tavern._core.exceptions import BadSchemaError +from tavern._core.loader import load_single_document_yaml +from tavern._core.schema.files import verify_tests @pytest.fixture(name="test_dict") @@ -106,7 +106,7 @@ def test_timeout_tuple_fail(self, test_dict, incorrect_value): class TestCert: - @pytest.mark.parametrize("correct_value", ("a", ("a", "b"), ["a", "b"])) + @pytest.mark.parametrize("correct_value", ("a", ["a", "b"])) def test_cert_as_string_tuple_list(self, test_dict, correct_value): test_dict["stages"][0]["request"]["cert"] = correct_value verify_tests(test_dict) diff --git a/tests/unit/test_strict_util.py b/tests/unit/test_strict_util.py new file mode 100644 index 000000000..5cdd23e78 --- /dev/null +++ 
b/tests/unit/test_strict_util.py @@ -0,0 +1,35 @@ +import pytest + +from tavern._core.strict_util import StrictOption, StrictSetting, extract_strict_setting + + +@pytest.mark.parametrize( + "strict", [True, StrictSetting.ON, StrictOption("json", StrictSetting.ON)] +) +def test_extract_strict_setting_true(strict): + as_bool, as_setting = extract_strict_setting(strict) + assert as_bool is True + if isinstance(strict, StrictSetting): + assert as_setting == strict + if isinstance(strict, StrictOption): + assert as_setting == strict.setting + + +@pytest.mark.parametrize( + "strict", + [ + False, + StrictSetting.OFF, + StrictSetting.LIST_ANY_ORDER, + StrictSetting.UNSET, + StrictOption("json", StrictSetting.OFF), + None, + ], +) +def test_extract_strict_setting_false(strict): + as_bool, as_setting = extract_strict_setting(strict) + assert as_bool is False + if isinstance(strict, StrictSetting): + assert as_setting == strict + if isinstance(strict, StrictOption): + assert as_setting == strict.setting diff --git a/tests/unit/test_utilities.py b/tests/unit/test_utilities.py index 8c917b065..5db6cadf1 100644 --- a/tests/unit/test_utilities.py +++ b/tests/unit/test_utilities.py @@ -1,24 +1,22 @@ -from collections import OrderedDict import contextlib import copy import os import tempfile +from collections import OrderedDict from textwrap import dedent from unittest.mock import Mock, patch import pytest import yaml -from tavern.schemas.extensions import validate_extensions -from tavern.schemas.files import wrapfile -from tavern.util import exceptions -from tavern.util.dict_util import ( +from tavern._core import exceptions +from tavern._core.dict_util import ( check_keys_match_recursive, deep_dict_merge, format_keys, recurse_access_key, ) -from tavern.util.loader import ( +from tavern._core.loader import ( ANYTHING, DictSentinel, FloatSentinel, @@ -29,6 +27,8 @@ construct_include, load_single_document_yaml, ) +from tavern._core.schema.extensions import validate_extensions +from tavern._core.schema.files import wrapfile class TestValidateFunctions: @@ -173,7 +173,7 @@ def test_match_nested_anything_list(self): def test_match_ordered(self): """Should be able to match an ordereddict""" - first = dict(a=1, b=2) + first = {"a": 1, "b": 2} second = OrderedDict(b=2, a=1) @@ -443,14 +443,14 @@ def test_load_extensions(self, suffix): example = {"a": "b"} with TestLoadFile.magic_wrap(example, suffix) as tmpfile: - with patch("tavern.util.loader.os.path.join", return_value=tmpfile): + with patch("tavern._core.loader.os.path.join", return_value=tmpfile): assert example == construct_include(Mock(), Mock()) def test_load_bad_extension(self): example = {"a": "b"} with TestLoadFile.magic_wrap(example, ".bllakjf") as tmpfile: - with patch("tavern.util.loader.os.path.join", return_value=tmpfile): + with patch("tavern._core.loader.os.path.join", return_value=tmpfile): with pytest.raises(exceptions.BadSchemaError): construct_include(Mock(), Mock()) @@ -467,13 +467,13 @@ def test_include_path(self): Mock(), ) - with patch("tavern.util.loader.IncludeLoader.env_path_list", None): + with patch("tavern._core.loader.IncludeLoader.env_path_list", None): assert example == construct_include( Mock(_root=tmppath, construct_scalar=lambda x: tmpfilename), Mock() ) os.environ[IncludeLoader.env_var_name] = tmppath - with patch("tavern.util.loader.IncludeLoader.env_path_list", None): + with patch("tavern._core.loader.IncludeLoader.env_path_list", None): assert example == construct_include( Mock( _root="/does-not-exist", 
construct_scalar=lambda x: tmpfilename diff --git a/tox-integration.ini b/tox-integration.ini index e3d04ffa0..12de6ec93 100644 --- a/tox-integration.ini +++ b/tox-integration.ini @@ -1,15 +1,17 @@ [tox] -envlist = {py36,py37,py38,pypy3}-{generic,cookies,mqtt,grpc,advanced,components,noextra,hooks} +envlist = py3-{generic,cookies,mqtt,grpc,advanced,components,noextra,hooks} skip_missing_interpreters = true +isolated_build = True [testenv] +basepython = python3.10 passenv = DOCKER_TLS_VERIFY DOCKER_HOST DOCKER_CERT_PATH DOCKER_BUILDKIT setenv = TEST_HOST = http://localhost:5003 SECOND_URL_PART = again PYTHONPATH = . changedir = - grpc: example/grpc_ + grpc: example/grpc mqtt: example/mqtt cookies: example/cookies advanced: example/advanced @@ -20,6 +22,7 @@ changedir = deps = docker-compose flask + allure-pytest pyjwt pytest-xdist pytest-cov @@ -29,12 +32,13 @@ commands = ; docker-compose stop ; docker-compose build docker-compose up --build -d - python -m pytest --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --tavern-merge-ext-function-values --cov tavern + python -m pytest --collect-only + python -m pytest --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --cov tavern - generic: py.test --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --tavern-merge-ext-function-values -n 3 - generic: tavern-ci --stdout . --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --tavern-merge-ext-function-values - generic: python -c "from tavern.core import run; exit(run('.', '{toxinidir}/tests/integration/global_cfg.yaml', pytest_args=['--tavern-merge-ext-function-values']))" - generic: python -c "from tavern.core import run; exit(run('.', pytest_args=['--tavern-merge-ext-function-values', '--tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml']))" + generic: py.test --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml -n 3 + generic: tavern-ci --stdout . --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml + generic: python -c "from tavern.core import run; exit(run('.', '{toxinidir}/tests/integration/global_cfg.yaml', pytest_args=[]))" + generic: python -c "from tavern.core import run; exit(run('.', pytest_args=['--tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml']))" cookies: tavern-ci --stdout test_server.tavern.yaml cookies: python -c "from tavern.core import run; exit(run('test_server.tavern.yaml', pytest_args=[]))" diff --git a/tox.ini b/tox.ini index e1754c7cc..70494974e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,40 +1,28 @@ [tox] -envlist = py36,py37,py38,py38lint,py38black,py38mypy +envlist = py3,py3mypy,py3check skip_missing_interpreters = true +isolated_build = True [testenv] +passenv = XDG_CACHE_HOME +basepython = python3.10 +whitelist_externals = + mypy +install_command = python -m pip install {opts} {packages} -c constraints.txt extras = - tests + dev commands = {envbindir}/python -m pytest --cov-report term-missing --cov tavern -[testenv:py38lint] -basepython = python3.8 +[testenv:py3check] commands = - pylint tavern/ + pre-commit run --all-files -[testenv:py38flakes] -skip_install = true -basepython = python3.8 -commands = - flake8 tavern - -[testenv:py38black] -basepython = python3.8 -commands = - black --check tavern - black --check tests/unit - -[testenv:py38isort] -basepython = python3.8 -commands = - isort . --check --settings-path={toxinidir}/.isort.cfg --diff - isort . 
--check --settings-path={toxinidir}/.isort.cfg - -[testenv:py38mypy] -basepython = python3.8 +[testenv:py3mypy] setenv = MYPYPATH = {toxinidir} +deps = + types-PyYAML + types-requests commands = - mypy -p tavern --config-file {toxinidir}/mypy.ini - + mypy -p tavern --config-file {toxinidir}/pyproject.toml --install-types --non-interactive From 21a4a050c1f8dfecc3f90a5bb6ecd721febaf55b Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Tue, 21 Feb 2023 16:16:19 +0000 Subject: [PATCH 15/72] Fix imports --- constraints.txt | 58 ++++++++++++- pyproject.toml | 1 + requirements.txt | 144 ++++++++++++++++++++++++++++++- scripts/smoke.bash | 4 +- tavern/_core/__init__.py | 0 tavern/_core/pytest/file.py | 2 +- tavern/_core/pytest/hooks.py | 1 + tavern/_core/pytest/item.py | 2 +- tavern/_core/pytest/util.py | 1 + tavern/_core/schema/__init__.py | 0 tavern/_plugins/__init__.py | 0 tavern/_plugins/grpc/__init__.py | 0 tavern/_plugins/mqtt/__init__.py | 0 tavern/_plugins/rest/__init__.py | 0 14 files changed, 205 insertions(+), 8 deletions(-) create mode 100644 tavern/_core/__init__.py create mode 100644 tavern/_core/schema/__init__.py create mode 100644 tavern/_plugins/__init__.py create mode 100644 tavern/_plugins/grpc/__init__.py create mode 100644 tavern/_plugins/mqtt/__init__.py create mode 100644 tavern/_plugins/rest/__init__.py diff --git a/constraints.txt b/constraints.txt index 3131fdc38..7e8c9d55a 100644 --- a/constraints.txt +++ b/constraints.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --all-extras --output-file=constraints.txt --resolver=backtracking --strip-extras pyproject.toml @@ -23,6 +23,8 @@ build==0.9.0 # via pip-tools bump2version==1.0.1 # via tavern (pyproject.toml) +cachetools==5.3.0 + # via google-auth certifi==2022.12.7 # via requests cffi==1.15.1 @@ -86,12 +88,41 @@ flit-core==3.8.0 # via flit fluent-logger==0.10.0 # via tavern (pyproject.toml) +google-api-core==2.11.0 + # via google-api-python-client +google-api-python-client==2.79.0 + # via tavern (pyproject.toml) +google-auth==2.16.1 + # via + # google-api-core + # google-api-python-client + # google-auth-httplib2 +google-auth-httplib2==0.1.0 + # via google-api-python-client +googleapis-common-protos==1.58.0 + # via + # google-api-core + # grpcio-status +grpcio==1.51.1 + # via + # grpcio-reflection + # grpcio-status + # tavern (pyproject.toml) +grpcio-reflection==1.51.1 + # via tavern (pyproject.toml) +grpcio-status==1.51.1 + # via tavern (pyproject.toml) +httplib2==0.21.0 + # via + # google-api-python-client + # google-auth-httplib2 identify==2.5.10 # via pre-commit idna==3.4 # via requests importlib-metadata==5.2.0 # via + # flask # keyring # twine iniconfig==1.1.1 @@ -165,10 +196,22 @@ pluggy==1.0.0 # tox pre-commit==2.20.0 # via tavern (pyproject.toml) +protobuf==4.22.0 + # via + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # grpcio-status py==1.11.0 # via # tavern (pyproject.toml) # tox +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth pycparser==2.21 # via cffi pygments==2.13.0 @@ -182,6 +225,8 @@ pykwalify==1.8.0 # via tavern (pyproject.toml) pynacl==1.5.0 # via paramiko +pyparsing==3.0.9 + # via httplib2 pyrsistent==0.19.2 # via jsonschema pytest==7.2.0 @@ -214,6 +259,7 @@ requests==2.28.1 # docker # docker-compose # flit + # google-api-core # requests-toolbelt # tavern (pyproject.toml) # twine @@ -223,6 
+269,8 @@ rfc3986==2.0.0 # via twine rich==12.6.0 # via twine +rsa==4.9 + # via google-auth ruamel-yaml==0.17.21 # via pykwalify ruamel-yaml-clib==0.2.7 @@ -237,6 +285,8 @@ six==1.16.0 # allure-python-commons # bleach # dockerpty + # google-auth + # google-auth-httplib2 # jsonschema # paramiko # python-dateutil @@ -270,7 +320,11 @@ twine==4.0.2 types-pyyaml==6.0.12.2 # via tavern (pyproject.toml) typing-extensions==4.4.0 - # via mypy + # via + # black + # mypy +uritemplate==4.1.1 + # via google-api-python-client urllib3==1.26.13 # via # docker diff --git a/pyproject.toml b/pyproject.toml index 1a95883f8..03096a02b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ dependencies = [ "grpcio", "grpcio-reflection", "grpcio-status", + "google-api-python-client", ] requires-python = ">=3.8" diff --git a/requirements.txt b/requirements.txt index 380b70d8c..3452236c2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --all-extras --generate-hashes --output-file=requirements.txt --resolver=backtracking pyproject.toml @@ -81,6 +81,10 @@ bump2version==1.0.1 \ --hash=sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410 \ --hash=sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6 # via tavern (pyproject.toml) +cachetools==5.3.0 \ + --hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \ + --hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4 + # via google-auth certifi==2022.12.7 \ --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 @@ -320,6 +324,95 @@ fluent-logger==0.10.0 \ --hash=sha256:543637e5e62ec3fc3c92b44e5a4e148a3cea88a0f8ca4fae26c7e60fda7564c1 \ --hash=sha256:678bda90c513ff0393964b64544ce41ef25669d2089ce6c3b63d9a18554b9bfa # via tavern (pyproject.toml) +google-api-core==2.11.0 \ + --hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \ + --hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e + # via google-api-python-client +google-api-python-client==2.79.0 \ + --hash=sha256:577c0aeae1eb3c754eacb9122d369d67609fef759bc6a4fa16cafeab4f30019b \ + --hash=sha256:b9b6dc5f139892310093ba75d0df4c78f48655078953c923957dab1ec86129e7 + # via tavern (pyproject.toml) +google-auth==2.16.1 \ + --hash=sha256:5fd170986bce6bfd7bb5c845c4b8362edb1e0cba901e062196e83f8bb5d5d32c \ + --hash=sha256:75d76ea857df65938e1f71dcbcd7d0cd48e3f80b34b8870ba229c9292081f7ef + # via + # google-api-core + # google-api-python-client + # google-auth-httplib2 +google-auth-httplib2==0.1.0 \ + --hash=sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10 \ + --hash=sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac + # via google-api-python-client +googleapis-common-protos==1.58.0 \ + --hash=sha256:c727251ec025947d545184ba17e3578840fc3a24a0516a020479edab660457df \ + --hash=sha256:ca3befcd4580dab6ad49356b46bf165bb68ff4b32389f028f1abd7c10ab9519a + # via + # google-api-core + # grpcio-status +grpcio==1.51.1 \ + --hash=sha256:094e64236253590d9d4075665c77b329d707b6fca864dd62b144255e199b4f87 \ + --hash=sha256:0dc5354e38e5adf2498312f7241b14c7ce3484eefa0082db4297189dcbe272e6 \ + --hash=sha256:0e1a9e1b4a23808f1132aa35f968cd8e659f60af3ffd6fb00bcf9a65e7db279f \ + 
--hash=sha256:0fb93051331acbb75b49a2a0fd9239c6ba9528f6bdc1dd400ad1cb66cf864292 \ + --hash=sha256:16c71740640ba3a882f50b01bf58154681d44b51f09a5728180a8fdc66c67bd5 \ + --hash=sha256:172405ca6bdfedd6054c74c62085946e45ad4d9cec9f3c42b4c9a02546c4c7e9 \ + --hash=sha256:17ec9b13cec4a286b9e606b48191e560ca2f3bbdf3986f91e480a95d1582e1a7 \ + --hash=sha256:22b011674090594f1f3245960ced7386f6af35485a38901f8afee8ad01541dbd \ + --hash=sha256:24ac1154c4b2ab4a0c5326a76161547e70664cd2c39ba75f00fc8a2170964ea2 \ + --hash=sha256:257478300735ce3c98d65a930bbda3db172bd4e00968ba743e6a1154ea6edf10 \ + --hash=sha256:29cb97d41a4ead83b7bcad23bdb25bdd170b1e2cba16db6d3acbb090bc2de43c \ + --hash=sha256:2b170eaf51518275c9b6b22ccb59450537c5a8555326fd96ff7391b5dd75303c \ + --hash=sha256:31bb6bc7ff145e2771c9baf612f4b9ebbc9605ccdc5f3ff3d5553de7fc0e0d79 \ + --hash=sha256:3c2b3842dcf870912da31a503454a33a697392f60c5e2697c91d133130c2c85d \ + --hash=sha256:3f9b0023c2c92bebd1be72cdfca23004ea748be1813a66d684d49d67d836adde \ + --hash=sha256:471d39d3370ca923a316d49c8aac66356cea708a11e647e3bdc3d0b5de4f0a40 \ + --hash=sha256:49d680356a975d9c66a678eb2dde192d5dc427a7994fb977363634e781614f7c \ + --hash=sha256:4c4423ea38a7825b8fed8934d6d9aeebdf646c97e3c608c3b0bcf23616f33877 \ + --hash=sha256:506b9b7a4cede87d7219bfb31014d7b471cfc77157da9e820a737ec1ea4b0663 \ + --hash=sha256:538d981818e49b6ed1e9c8d5e5adf29f71c4e334e7d459bf47e9b7abb3c30e09 \ + --hash=sha256:59dffade859f157bcc55243714d57b286da6ae16469bf1ac0614d281b5f49b67 \ + --hash=sha256:5a6ebcdef0ef12005d56d38be30f5156d1cb3373b52e96f147f4a24b0ddb3a9d \ + --hash=sha256:5dca372268c6ab6372d37d6b9f9343e7e5b4bc09779f819f9470cd88b2ece3c3 \ + --hash=sha256:6df3b63538c362312bc5fa95fb965069c65c3ea91d7ce78ad9c47cab57226f54 \ + --hash=sha256:6f0b89967ee11f2b654c23b27086d88ad7bf08c0b3c2a280362f28c3698b2896 \ + --hash=sha256:75e29a90dc319f0ad4d87ba6d20083615a00d8276b51512e04ad7452b5c23b04 \ + --hash=sha256:7942b32a291421460d6a07883033e392167d30724aa84987e6956cd15f1a21b9 \ + --hash=sha256:9235dcd5144a83f9ca6f431bd0eccc46b90e2c22fe27b7f7d77cabb2fb515595 \ + --hash=sha256:97d67983189e2e45550eac194d6234fc38b8c3b5396c153821f2d906ed46e0ce \ + --hash=sha256:9ff42c5620b4e4530609e11afefa4a62ca91fa0abb045a8957e509ef84e54d30 \ + --hash=sha256:a8a0b77e992c64880e6efbe0086fe54dfc0bbd56f72a92d9e48264dcd2a3db98 \ + --hash=sha256:aacb54f7789ede5cbf1d007637f792d3e87f1c9841f57dd51abf89337d1b8472 \ + --hash=sha256:bc59f7ba87972ab236f8669d8ca7400f02a0eadf273ca00e02af64d588046f02 \ + --hash=sha256:cc2bece1737b44d878cc1510ea04469a8073dbbcdd762175168937ae4742dfb3 \ + --hash=sha256:cd3baccea2bc5c38aeb14e5b00167bd4e2373a373a5e4d8d850bd193edad150c \ + --hash=sha256:dad6533411d033b77f5369eafe87af8583178efd4039c41d7515d3336c53b4f1 \ + --hash=sha256:e223a9793522680beae44671b9ed8f6d25bbe5ddf8887e66aebad5e0686049ef \ + --hash=sha256:e473525c28251558337b5c1ad3fa969511e42304524a4e404065e165b084c9e4 \ + --hash=sha256:e4ef09f8997c4be5f3504cefa6b5c6cc3cf648274ce3cede84d4342a35d76db6 \ + --hash=sha256:e6dfc2b6567b1c261739b43d9c59d201c1b89e017afd9e684d85aa7a186c9f7a \ + --hash=sha256:eacad297ea60c72dd280d3353d93fb1dcca952ec11de6bb3c49d12a572ba31dd \ + --hash=sha256:f1158bccbb919da42544a4d3af5d9296a3358539ffa01018307337365a9a0c64 \ + --hash=sha256:f1fec3abaf274cdb85bf3878167cfde5ad4a4d97c68421afda95174de85ba813 \ + --hash=sha256:f96ace1540223f26fbe7c4ebbf8a98e3929a6aa0290c8033d12526847b291c0f \ + --hash=sha256:fbdbe9a849854fe484c00823f45b7baab159bdd4a46075302281998cb8719df5 + # via + # grpcio-reflection + # grpcio-status + # tavern 
(pyproject.toml) +grpcio-reflection==1.51.1 \ + --hash=sha256:b70af764a83e42a44f65df1edb232e972ab69e72bc7fbbad481e66c29a9d8cb8 \ + --hash=sha256:c07a93c0c36ef88fe475744289863b4787005eff4de0cc04213ecad718b01aae + # via tavern (pyproject.toml) +grpcio-status==1.51.1 \ + --hash=sha256:a52cbdc4b18f325bfc13d319ae7c7ae7a0fee07f3d9a005504d6097896d7a495 \ + --hash=sha256:ac2617a3095935ebd785e2228958f24b10a0d527a0c9eb5a0863c784f648a816 + # via tavern (pyproject.toml) +httplib2==0.21.0 \ + --hash=sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01 \ + --hash=sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34 + # via + # google-api-python-client + # google-auth-httplib2 identify==2.5.10 \ --hash=sha256:dce9e31fee7dbc45fea36a9e855c316b8fbf807e65a862f160840bb5a2bf5dfd \ --hash=sha256:fb7c2feaeca6976a3ffa31ec3236a6911fbc51aec9acc111de2aed99f244ade2 @@ -332,6 +425,7 @@ importlib-metadata==5.2.0 \ --hash=sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f \ --hash=sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd # via + # flask # keyring # twine iniconfig==1.1.1 \ @@ -570,12 +664,41 @@ pre-commit==2.20.0 \ --hash=sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7 \ --hash=sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959 # via tavern (pyproject.toml) +protobuf==4.22.0 \ + --hash=sha256:1669cb7524221a8e2d9008d0842453dbefdd0fcdd64d67672f657244867635fb \ + --hash=sha256:29288813aacaa302afa2381db1d6e0482165737b0afdf2811df5fa99185c457b \ + --hash=sha256:47d31bdf58222dd296976aa1646c68c6ee80b96d22e0a3c336c9174e253fd35e \ + --hash=sha256:652d8dfece122a24d98eebfef30e31e455d300efa41999d1182e015984ac5930 \ + --hash=sha256:7c535d126e7dcc714105ab20b418c4fedbd28f8b8afc42b7350b1e317bbbcc71 \ + --hash=sha256:86c3d20428b007537ba6792b475c0853bba7f66b1f60e610d913b77d94b486e4 \ + --hash=sha256:a33a273d21852f911b8bda47f39f4383fe7c061eb1814db2c76c9875c89c2491 \ + --hash=sha256:ab4d043865dd04e6b09386981fe8f80b39a1e46139fb4a3c206229d6b9f36ff6 \ + --hash=sha256:b2fea9dc8e3c0f32c38124790ef16cba2ee0628fe2022a52e435e1117bfef9b1 \ + --hash=sha256:c27f371f0159feb70e6ea52ed7e768b3f3a4c5676c1900a7e51a24740381650e \ + --hash=sha256:c3325803095fb4c2a48649c321d2fbde59f8fbfcb9bfc7a86df27d112831c571 \ + --hash=sha256:e474b63bab0a2ea32a7b26a4d8eec59e33e709321e5e16fb66e766b61b82a95e \ + --hash=sha256:e894e9ae603e963f0842498c4cd5d39c6a60f0d7e4c103df50ee939564298658 + # via + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # grpcio-status py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # via # tavern (pyproject.toml) # tox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 @@ -607,6 +730,10 @@ pynacl==1.5.0 \ --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ 
--hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 # via paramiko +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via httplib2 pyrsistent==0.19.2 \ --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ @@ -721,6 +848,7 @@ requests==2.28.1 \ # docker # docker-compose # flit + # google-api-core # requests-toolbelt # tavern (pyproject.toml) # twine @@ -736,6 +864,10 @@ rich==12.6.0 \ --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth ruamel-yaml==0.17.21 \ --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af @@ -806,6 +938,8 @@ six==1.16.0 \ # allure-python-commons # bleach # dockerpty + # google-auth + # google-auth-httplib2 # jsonschema # paramiko # python-dateutil @@ -859,7 +993,13 @@ types-pyyaml==6.0.12.2 \ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e - # via mypy + # via + # black + # mypy +uritemplate==4.1.1 \ + --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \ + --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e + # via google-api-python-client urllib3==1.26.13 \ --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 diff --git a/scripts/smoke.bash b/scripts/smoke.bash index dceb58d52..ef4984834 100755 --- a/scripts/smoke.bash +++ b/scripts/smoke.bash @@ -14,8 +14,8 @@ tox --parallel -c tox.ini \ tox -c tox-integration.ini \ -e py3-generic \ + -e py3-mqtt \ -e py3-advanced \ -e py3-cookies \ -e py3-components \ - -e py3-hooks \ - -e py3-mqtt + -e py3-hooks diff --git a/tavern/_core/__init__.py b/tavern/_core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_core/pytest/file.py b/tavern/_core/pytest/file.py index 3e5c2829f..0733cb01e 100644 --- a/tavern/_core/pytest/file.py +++ b/tavern/_core/pytest/file.py @@ -4,10 +4,10 @@ import logging from typing import Dict, Iterator, List, Mapping +import pytest import yaml from box import Box -import pytest from tavern._core import exceptions from tavern._core.dict_util import deep_dict_merge, format_keys, get_tavern_box from tavern._core.extfunctions import get_wrapped_create_function, is_ext_function diff --git a/tavern/_core/pytest/hooks.py b/tavern/_core/pytest/hooks.py index 2078e440d..c3e1c89b4 100644 --- a/tavern/_core/pytest/hooks.py +++ b/tavern/_core/pytest/hooks.py @@ -3,6 +3,7 @@ import re import pytest + from tavern._core import exceptions from .util import add_ini_options, add_parser_options, get_option_generic diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index 171d84642..2baff2fe2 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -3,11 +3,11 @@ from typing 
import Optional, Tuple import attr +import pytest import yaml from _pytest._code.code import ExceptionInfo from _pytest.nodes import Node -import pytest from tavern._core import exceptions from tavern._core.loader import error_on_empty_scalar from tavern._core.plugins import load_plugins diff --git a/tavern/_core/pytest/util.py b/tavern/_core/pytest/util.py index efa8358f1..88e70f743 100644 --- a/tavern/_core/pytest/util.py +++ b/tavern/_core/pytest/util.py @@ -3,6 +3,7 @@ from typing import Any, Dict import pytest + from tavern._core.dict_util import format_keys, get_tavern_box from tavern._core.general import load_global_config from tavern._core.pytest.config import TavernInternalConfig, TestConfig diff --git a/tavern/_core/schema/__init__.py b/tavern/_core/schema/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_plugins/__init__.py b/tavern/_plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_plugins/grpc/__init__.py b/tavern/_plugins/grpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_plugins/mqtt/__init__.py b/tavern/_plugins/mqtt/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_plugins/rest/__init__.py b/tavern/_plugins/rest/__init__.py new file mode 100644 index 000000000..e69de29bb From ee0c30c33d1b24c8b0785d82729aaddd3f92af22 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 2 Apr 2023 11:14:00 +0100 Subject: [PATCH 16/72] Put backends in one file --- tavern/_core/plugins.py | 10 +++++----- tavern/_core/pytest/config.py | 8 +++++++- tavern/_core/pytest/util.py | 3 +-- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/tavern/_core/plugins.py b/tavern/_core/plugins.py index 79c5b4000..bfe5fde40 100644 --- a/tavern/_core/plugins.py +++ b/tavern/_core/plugins.py @@ -77,7 +77,7 @@ def __call__(self, config: Optional[TestConfig] = None): self.plugins = self._load_plugins(config) return self.plugins - def _load_plugins(self, test_block_config): + def _load_plugins(self, test_block_config: TestConfig) -> List[Any]: """Load plugins from the 'tavern' entrypoint namespace This can be a module or a class as long as it defines the right things @@ -88,13 +88,13 @@ def _load_plugins(self, test_block_config): - Different plugin names Args: - test_block_config (tavern.pytesthook.config.TestConfig): available config for test + test_block_config: available config for test Raises: - exceptions.MissingSettingsError: Description + exceptions.MissingSettingsError: invalid entry points set Returns: - list: Loaded plugins, can be a class or a module + Loaded plugins, can be a class or a module """ plugins = [] @@ -104,7 +104,7 @@ def enabled(current_backend, ext): ext.name == test_block_config.tavern_internal.backends[current_backend] ) - for backend in ["http", "mqtt", "grpc"]: + for backend in test_block_config.backends(): namespace = "tavern_{}".format(backend) manager = stevedore.EnabledExtensionManager( diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py index 22da8ca0a..62d498a70 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -1,6 +1,6 @@ import copy import dataclasses -from typing import Any +from typing import Any, List from tavern._core.strict_util import StrictLevel @@ -46,3 +46,9 @@ def with_new_variables(self) -> "TestConfig": def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": """Create a copy of the config but with a new strictness setting""" return 
dataclasses.replace(self, strict=new_strict) + + @staticmethod + def backends() -> List[str]: + # TODO: This is here in case in future we want to be able to turn some of these + # on or off + return ["http", "mqtt", "grpc"] diff --git a/tavern/_core/pytest/util.py b/tavern/_core/pytest/util.py index 88e70f743..b972faa5a 100644 --- a/tavern/_core/pytest/util.py +++ b/tavern/_core/pytest/util.py @@ -166,8 +166,7 @@ def _load_global_backends(pytest_config: pytest.Config) -> Dict[str, Any]: """Load which backend should be used""" backend_settings = {} - backends = ["http", "mqtt", "grpc"] - for b in backends: + for b in TestConfig.backends(): backend_settings[b] = get_option_generic( pytest_config, "tavern-{}-backend".format(b), None ) From 9ee78d7c469ed3b6db226b9ebb241c0c00e3b016 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 2 Apr 2023 11:51:10 +0100 Subject: [PATCH 17/72] Update version in grpc docker file --- example/grpc/server.Dockerfile | 2 +- scripts/smoke.bash | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/example/grpc/server.Dockerfile b/example/grpc/server.Dockerfile index d6271641e..db6184af0 100644 --- a/example/grpc/server.Dockerfile +++ b/example/grpc/server.Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.5-slim-jessie +FROM python:3.9-slim-buster RUN pip install grpcio grpcio-tools grpcio-reflection diff --git a/scripts/smoke.bash b/scripts/smoke.bash index ef4984834..cc581927d 100755 --- a/scripts/smoke.bash +++ b/scripts/smoke.bash @@ -15,6 +15,7 @@ tox --parallel -c tox.ini \ tox -c tox-integration.ini \ -e py3-generic \ -e py3-mqtt \ + -e py3-grpc \ -e py3-advanced \ -e py3-cookies \ -e py3-components \ From d537dba54ff2675aead54d261fe54f52a61b8b88 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 2 Apr 2023 12:53:46 +0100 Subject: [PATCH 18/72] Regenerate grpc definitions appropriately --- constraints.txt | 4 + .../grpc/{server.Dockerfile => Dockerfile} | 2 +- example/grpc/docker-compose.yaml | 4 +- example/grpc/helloworld.proto | 15 ++ example/grpc/helloworld_pb2.py | 139 +++--------------- example/grpc/helloworld_pb2.pyi | 22 +++ example/grpc/helloworld_pb2_grpc.py | 81 ++++++---- example/grpc/regenerate.sh | 3 + example/grpc/server.py | 2 +- pyproject.toml | 1 + requirements.txt | 49 ++++++ 11 files changed, 167 insertions(+), 155 deletions(-) rename example/grpc/{server.Dockerfile => Dockerfile} (62%) create mode 100644 example/grpc/helloworld.proto create mode 100644 example/grpc/helloworld_pb2.pyi create mode 100755 example/grpc/regenerate.sh diff --git a/constraints.txt b/constraints.txt index 7e8c9d55a..5eea5dacd 100644 --- a/constraints.txt +++ b/constraints.txt @@ -107,11 +107,14 @@ grpcio==1.51.1 # via # grpcio-reflection # grpcio-status + # grpcio-tools # tavern (pyproject.toml) grpcio-reflection==1.51.1 # via tavern (pyproject.toml) grpcio-status==1.51.1 # via tavern (pyproject.toml) +grpcio-tools==1.51.1 + # via tavern (pyproject.toml) httplib2==0.21.0 # via # google-api-python-client @@ -202,6 +205,7 @@ protobuf==4.22.0 # googleapis-common-protos # grpcio-reflection # grpcio-status + # grpcio-tools py==1.11.0 # via # tavern (pyproject.toml) diff --git a/example/grpc/server.Dockerfile b/example/grpc/Dockerfile similarity index 62% rename from example/grpc/server.Dockerfile rename to example/grpc/Dockerfile index db6184af0..2b3bbe32e 100644 --- a/example/grpc/server.Dockerfile +++ b/example/grpc/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.9-slim-buster -RUN pip install grpcio grpcio-tools grpcio-reflection +RUN pip install 
grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 COPY server.py / COPY helloworld_pb2.py / diff --git a/example/grpc/docker-compose.yaml b/example/grpc/docker-compose.yaml index 980aa7c43..bb5f6869a 100644 --- a/example/grpc/docker-compose.yaml +++ b/example/grpc/docker-compose.yaml @@ -5,6 +5,6 @@ services: server: build: context: . - dockerfile: server.Dockerfile + dockerfile: Dockerfile ports: - - "50051:50051" \ No newline at end of file + - "50051:50051" diff --git a/example/grpc/helloworld.proto b/example/grpc/helloworld.proto new file mode 100644 index 000000000..3c7d77d46 --- /dev/null +++ b/example/grpc/helloworld.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +option py_generic_services = true; + +message HelloRequest { + string name = 1; +} + +message HelloReply { + string message = 1; +} + +service Greeter { + rpc SayHello (HelloRequest) returns (HelloReply) {} +} diff --git a/example/grpc/helloworld_pb2.py b/example/grpc/helloworld_pb2.py index 1bb6ce053..b1840443f 100644 --- a/example/grpc/helloworld_pb2.py +++ b/example/grpc/helloworld_pb2.py @@ -1,13 +1,11 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: helloworld.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,120 +13,19 @@ -DESCRIPTOR = _descriptor.FileDescriptor( - name='helloworld.proto', - package='helloworld', - syntax='proto3', - serialized_pb=_b('\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3') -) - - - - -_HELLOREQUEST = _descriptor.Descriptor( - name='HelloRequest', - full_name='helloworld.HelloRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='helloworld.HelloRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=32, - serialized_end=60, -) - - -_HELLOREPLY = _descriptor.Descriptor( - name='HelloReply', - full_name='helloworld.HelloReply', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='message', full_name='helloworld.HelloReply.message', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=62, - serialized_end=91, -) - -DESCRIPTOR.message_types_by_name['HelloRequest'] = _HELLOREQUEST -DESCRIPTOR.message_types_by_name['HelloReply'] = _HELLOREPLY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HelloRequest = _reflection.GeneratedProtocolMessageType('HelloRequest', (_message.Message,), dict( - DESCRIPTOR = _HELLOREQUEST, - __module__ = 'helloworld_pb2' - # @@protoc_insertion_point(class_scope:helloworld.HelloRequest) - )) -_sym_db.RegisterMessage(HelloRequest) - -HelloReply = _reflection.GeneratedProtocolMessageType('HelloReply', (_message.Message,), dict( - DESCRIPTOR = _HELLOREPLY, - __module__ = 'helloworld_pb2' - # @@protoc_insertion_point(class_scope:helloworld.HelloReply) - )) -_sym_db.RegisterMessage(HelloReply) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW')) - -_GREETER = _descriptor.ServiceDescriptor( - name='Greeter', - full_name='helloworld.Greeter', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=93, - serialized_end=166, - methods=[ - _descriptor.MethodDescriptor( - name='SayHello', - full_name='helloworld.Greeter.SayHello', - index=0, - containing_service=None, - input_type=_HELLOREQUEST, - output_type=_HELLOREPLY, - options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_GREETER) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t23\n\x07Greeter\x12(\n\x08SayHello\x12\r.HelloRequest\x1a\x0b.HelloReply\"\x00\x42\x03\x90\x01\x01\x62\x06proto3') -DESCRIPTOR.services_by_name['Greeter'] = _GREETER +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: -# @@protoc_insertion_point(module_scope) \ No newline at end of file + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\220\001\001' + _HELLOREQUEST._serialized_start=20 + _HELLOREQUEST._serialized_end=48 + _HELLOREPLY._serialized_start=50 + _HELLOREPLY._serialized_end=79 + _GREETER._serialized_start=81 + _GREETER._serialized_end=132 +_builder.BuildServices(DESCRIPTOR, 'helloworld_pb2', globals()) +# @@protoc_insertion_point(module_scope) diff --git a/example/grpc/helloworld_pb2.pyi b/example/grpc/helloworld_pb2.pyi new file mode 100644 index 000000000..28c1e554c --- /dev/null +++ b/example/grpc/helloworld_pb2.pyi @@ -0,0 +1,22 @@ +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import service as _service +from typing import ClassVar as _ClassVar, Optional as _Optional + +DESCRIPTOR: _descriptor.FileDescriptor + +class HelloReply(_message.Message): + __slots__ = ["message"] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + message: str + def __init__(self, message: _Optional[str] = ...) -> None: ... + +class HelloRequest(_message.Message): + __slots__ = ["name"] + NAME_FIELD_NUMBER: _ClassVar[int] + name: str + def __init__(self, name: _Optional[str] = ...) -> None: ... + +class Greeter(_service.service): ... + +class Greeter_Stub(Greeter): ... 
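(Not part of the patch.) The regenerated stubs above expose `HelloRequest` and `HelloReply` as ordinary protobuf message classes built from the serialized descriptor, so a quick serialize/parse round-trip is enough to confirm the new descriptor loads correctly. A minimal sketch, assuming the regenerated `helloworld_pb2` module from `example/grpc` is importable:

    # Round-trip sketch for the regenerated stubs (assumes example/grpc is on sys.path).
    import helloworld_pb2

    # Build a request using the single field declared in helloworld.proto.
    request = helloworld_pb2.HelloRequest(name="John")

    # Serialize and parse it back; a mismatch here would point at a broken descriptor.
    data = request.SerializeToString()
    parsed = helloworld_pb2.HelloRequest.FromString(data)
    assert parsed.name == "John"
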
diff --git a/example/grpc/helloworld_pb2_grpc.py b/example/grpc/helloworld_pb2_grpc.py index b55ac8e7a..601ec741f 100644 --- a/example/grpc/helloworld_pb2_grpc.py +++ b/example/grpc/helloworld_pb2_grpc.py @@ -1,45 +1,66 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc import helloworld_pb2 as helloworld__pb2 class GreeterStub(object): - """The greeting service definition. - """ + """Missing associated documentation comment in .proto file.""" - def __init__(self, channel): - """Constructor. - Args: - channel: A grpc.Channel. - """ - self.SayHello = channel.unary_unary( - '/helloworld.Greeter/SayHello', - request_serializer=helloworld__pb2.HelloRequest.SerializeToString, - response_deserializer=helloworld__pb2.HelloReply.FromString, - ) + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.SayHello = channel.unary_unary( + '/Greeter/SayHello', + request_serializer=helloworld__pb2.HelloRequest.SerializeToString, + response_deserializer=helloworld__pb2.HelloReply.FromString, + ) class GreeterServicer(object): - """The greeting service definition. - """ + """Missing associated documentation comment in .proto file.""" - def SayHello(self, request, context): - """Sends a greeting - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + def SayHello(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_GreeterServicer_to_server(servicer, server): - rpc_method_handlers = { - 'SayHello': grpc.unary_unary_rpc_method_handler( - servicer.SayHello, - request_deserializer=helloworld__pb2.HelloRequest.FromString, - response_serializer=helloworld__pb2.HelloReply.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'helloworld.Greeter', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) \ No newline at end of file + rpc_method_handlers = { + 'SayHello': grpc.unary_unary_rpc_method_handler( + servicer.SayHello, + request_deserializer=helloworld__pb2.HelloRequest.FromString, + response_serializer=helloworld__pb2.HelloReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'Greeter', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class Greeter(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def SayHello(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/Greeter/SayHello', + helloworld__pb2.HelloRequest.SerializeToString, + helloworld__pb2.HelloReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/example/grpc/regenerate.sh b/example/grpc/regenerate.sh new file mode 100755 index 000000000..9a9a54aef --- /dev/null +++ b/example/grpc/regenerate.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +python -m grpc_tools.protoc --proto_path=$(pwd) --pyi_out=$(pwd) --python_out=$(pwd) --grpc_python_out=$(pwd) helloworld.proto diff --git a/example/grpc/server.py b/example/grpc/server.py index 6bad7eac2..f65904793 100644 --- a/example/grpc/server.py +++ b/example/grpc/server.py @@ -32,4 +32,4 @@ def serve(): if __name__ == '__main__': logging.basicConfig() - serve() \ No newline at end of file + serve() diff --git a/pyproject.toml b/pyproject.toml index 407095a46..11b73b508 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,6 +85,7 @@ dev = [ "tox-travis", "twine", "wheel", + "grpcio-tools", # This has to be installed separately, otherwise you can't upload to pypi # "tbump@https://github.com/michaelboulton/tbump/archive/714ba8957a3c84b625608ceca39811ebe56229dc.zip", ] diff --git a/requirements.txt b/requirements.txt index 3452236c2..a2b70ad99 100644 --- a/requirements.txt +++ b/requirements.txt @@ -398,6 +398,7 @@ grpcio==1.51.1 \ # via # grpcio-reflection # grpcio-status + # grpcio-tools # tavern (pyproject.toml) grpcio-reflection==1.51.1 \ --hash=sha256:b70af764a83e42a44f65df1edb232e972ab69e72bc7fbbad481e66c29a9d8cb8 \ @@ -407,6 +408,53 @@ grpcio-status==1.51.1 \ --hash=sha256:a52cbdc4b18f325bfc13d319ae7c7ae7a0fee07f3d9a005504d6097896d7a495 \ --hash=sha256:ac2617a3095935ebd785e2228958f24b10a0d527a0c9eb5a0863c784f648a816 # via tavern (pyproject.toml) +grpcio-tools==1.51.1 \ + --hash=sha256:048793747339f327ea091d8f022c6756d89713d8080dffde5ce7380cc348ea8e \ + --hash=sha256:055819992ddd30c642a7fd6f344a03747be3afa95cb910f8a2e5efaabd41cde5 \ + --hash=sha256:0a218f64e667f3332b74080bdc5440aaf0fa6700ae07a0b54ecf085aaef2aa9f \ + --hash=sha256:14e82c2b3ee7e300611c2c729d411b3b911e4cca5f4ec14787457a2fb72ff9d4 \ + --hash=sha256:15b8acf4eaa0ebe37e2f69108de49efd935b7abe9c7e58ba737490b99906aa76 \ + --hash=sha256:16b8b915625dc6eb2ea7efdfb06f1fae44a9066c9016453a2ca120c034f33090 \ + --hash=sha256:1c44b57a6770b78a1eafe355878ff1ec59a2fa07455a2cbd522c071eedae04d4 \ + --hash=sha256:2281180490c475d09b7aa05dabafa5e09de9902176931e7295113f636c2b5360 \ + --hash=sha256:27113b354f7587684eb55125733e6e5be1f489458abfe12344dabd918d8dcc54 \ + --hash=sha256:331a897306adeec3c67470431ea8d8b4972b689d32966f94506d91f4dac20952 \ + --hash=sha256:392ad4cd004f7b843cf7d916d9a15b2d6585965bfef235be1c88d8f8649777e5 \ + --hash=sha256:3a671466158ed74c07ee070fb940ed783acf59ba6e6e53cb4de8fd63819c6c7f \ + --hash=sha256:40ef70e8c5d0310dedff9af502b520b4c7e215bce94094527fb959150a0c594a \ + --hash=sha256:4957f1ffa16598aa5379505fcbaeb47d65693a46b0817f4ee61db76707092aeb \ + --hash=sha256:49624394805568acd7d767dea5a00d970fca5ad8f395fe0161eeea0de5133eba \ + --hash=sha256:4e3249a2ec435b3b972610c66c8a714c188844500d564c910f57a2771dc61978 \ + 
--hash=sha256:531586c5598a99658249f3c5e92826d6d2bb117abd6ffc88527d1e1d9eaef924 \ + --hash=sha256:566809d9942e78821b279af70f3cf159a328127f9f3d5fee8d83ad8b2d27b2fe \ + --hash=sha256:64d8ad369417759f5fdb8ffb7cbd6374fecc06ab51c9a226dee9bbd7d311c3b5 \ + --hash=sha256:674b340f2f7bb2adbc3f15144bd37ce5ea83239f78b68dbbd0ea3cba00107e2b \ + --hash=sha256:67b304282cad38642587ebae68617e450e1ad4fa1c0c8b19e9e30274dbb32716 \ + --hash=sha256:6b83d7fc2597c6d392c225177d1fbbcff74900f8cc40b33236987fd1ff841330 \ + --hash=sha256:6d6626a6e4dbe843df96dc8c08dd244d2191a75324f54bfa4ebaa3e76b0b1958 \ + --hash=sha256:6e72a30be1746ea0749a8486d0ca0120c0b2757fe84fc246a5144b1ef66d7b89 \ + --hash=sha256:794f26a09b70f4f101df5cf54c6c12dc1b65747ab1dee5bda02c2991389ade56 \ + --hash=sha256:79c06d2577cb4d977922bbf01234de3b20f73d1784d3cbe3179deee1bdb9a60b \ + --hash=sha256:87bc5f3e3698c65907d397003c64d25c3ea84e3d6aa46dac133bd98bf66835ee \ + --hash=sha256:8e62d23d3fed9d4f81738f98dd193dbd2e21aed4a8f0dd715e75b5439e649727 \ + --hash=sha256:98777b5031f1b3c58b688815ffa83435c103b2152c26eb144f80f4a4bb34addb \ + --hash=sha256:9906fb6bf6d9c30c23d85153f12d130f44325afe8f9ebe58aa7a6c82ecade9d8 \ + --hash=sha256:9dfe6c12b0e2c07f6a4a91a9912ef4e5bd007672533891a44e6f433ffbf7c3b1 \ + --hash=sha256:a66b3a5d18a7615f0f828b72e2d2935751459c89cc4725e56bdfb3d2cd93281f \ + --hash=sha256:aab24a342642329de38139cb26f8492882ca0d8551bb87f6530bcc613945a0d0 \ + --hash=sha256:b4fb8ed6d29f2d6cf03ef99ffaad635bbc132a59be77013691392fe557e67144 \ + --hash=sha256:c4649af7f5d9553975ee66b6bfae20a84be779f13e163fa835e782961895e63c \ + --hash=sha256:ccd37165d7a3e93f460096a2eb62b7a9c1ebe5c424eaee42d8e92740d0c8f6bc \ + --hash=sha256:d5e033c04b416afcddd5231b3ff94a34fb5d26fba2416eb940e69b05f22cfd25 \ + --hash=sha256:d7b186183515ad6b8584ffe4bd820b72b00f6e7d121fb1c36294edeea9092313 \ + --hash=sha256:d8cc862a1ad30f94528d66cc6f95fb9e659005e568313e54a23550535b649573 \ + --hash=sha256:de51a0a71845b854f6a5967756c893c96bd03e37f39e5dce87b4f409dac36ee2 \ + --hash=sha256:e9abc03d67793b1bf33dc766caa69a3333f9db029869ba6e8fc6cd9c251c0080 \ + --hash=sha256:ecf1494cb695afead36995534f787761ee33fb9e116b23030113a37fe6057a83 \ + --hash=sha256:f06bb0753b7cecbff154b523cfb8f45dee2c31b0a4c72bed7da44c57f1cba113 \ + --hash=sha256:f336ad9be661d92fa45940e74e8ff3d78e67ebe9b4f7ea8774b2d680c17aeb6c \ + --hash=sha256:f6caf36e7752728329a28f93afec7c4ec9015fc1c6e4460bd1eb0f3737e1c55a + # via tavern (pyproject.toml) httplib2==0.21.0 \ --hash=sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01 \ --hash=sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34 @@ -683,6 +731,7 @@ protobuf==4.22.0 \ # googleapis-common-protos # grpcio-reflection # grpcio-status + # grpcio-tools py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 From 4cdd863ad9a2da87093a9ee199c2ced2c3975273 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 2 Apr 2023 14:10:57 +0100 Subject: [PATCH 19/72] Fix grpc and add debug flag --- constraints.txt | 4 ++ example/grpc/helloworld.proto | 4 +- example/grpc/helloworld_pb2.py | 14 +++---- example/grpc/helloworld_pb2_grpc.py | 6 +-- example/grpc/server.py | 2 +- pyproject.toml | 3 ++ requirements.txt | 6 +++ tavern/_core/pytest/hooks.py | 40 ++++++++++++++++++++ tavern/_core/pytest/util.py | 12 ++++++ tavern/_core/schema/tests.jsonschema.yaml | 42 +++++++++++++++++++++ tavern/_plugins/grpc/__init__.py | 12 ++++++ 
tavern/_plugins/grpc/client.py | 19 +++------- tavern/_plugins/grpc/jsonschema.yaml | 45 +++++++++++++++++++++++ tavern/_plugins/grpc/request.py | 4 +- tavern/_plugins/grpc/response.py | 26 ++++++------- tavern/_plugins/grpc/tavernhook.py | 16 +++----- tavern/response.py | 4 +- 17 files changed, 205 insertions(+), 54 deletions(-) create mode 100644 tavern/_plugins/grpc/jsonschema.yaml diff --git a/constraints.txt b/constraints.txt index 5eea5dacd..9c2a5f44a 100644 --- a/constraints.txt +++ b/constraints.txt @@ -199,6 +199,8 @@ pluggy==1.0.0 # tox pre-commit==2.20.0 # via tavern (pyproject.toml) +proto-plus==1.22.2 + # via tavern (pyproject.toml) protobuf==4.22.0 # via # google-api-core @@ -206,6 +208,8 @@ protobuf==4.22.0 # grpcio-reflection # grpcio-status # grpcio-tools + # proto-plus + # tavern (pyproject.toml) py==1.11.0 # via # tavern (pyproject.toml) diff --git a/example/grpc/helloworld.proto b/example/grpc/helloworld.proto index 3c7d77d46..eaa42167e 100644 --- a/example/grpc/helloworld.proto +++ b/example/grpc/helloworld.proto @@ -1,8 +1,10 @@ syntax = "proto3"; +package helloworld; + option py_generic_services = true; -message HelloRequest { +message HelloRequest { string name = 1; } diff --git a/example/grpc/helloworld_pb2.py b/example/grpc/helloworld_pb2.py index b1840443f..a8eca53bc 100644 --- a/example/grpc/helloworld_pb2.py +++ b/example/grpc/helloworld_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t23\n\x07Greeter\x12(\n\x08SayHello\x12\r.HelloRequest\x1a\x0b.HelloReply\"\x00\x42\x03\x90\x01\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x03\x90\x01\x01\x62\x06proto3') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals()) @@ -21,11 +21,11 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\220\001\001' - _HELLOREQUEST._serialized_start=20 - _HELLOREQUEST._serialized_end=48 - _HELLOREPLY._serialized_start=50 - _HELLOREPLY._serialized_end=79 - _GREETER._serialized_start=81 - _GREETER._serialized_end=132 + _HELLOREQUEST._serialized_start=32 + _HELLOREQUEST._serialized_end=60 + _HELLOREPLY._serialized_start=62 + _HELLOREPLY._serialized_end=91 + _GREETER._serialized_start=93 + _GREETER._serialized_end=166 _builder.BuildServices(DESCRIPTOR, 'helloworld_pb2', globals()) # @@protoc_insertion_point(module_scope) diff --git a/example/grpc/helloworld_pb2_grpc.py b/example/grpc/helloworld_pb2_grpc.py index 601ec741f..0efb0eb60 100644 --- a/example/grpc/helloworld_pb2_grpc.py +++ b/example/grpc/helloworld_pb2_grpc.py @@ -15,7 +15,7 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.SayHello = channel.unary_unary( - '/Greeter/SayHello', + '/helloworld.Greeter/SayHello', request_serializer=helloworld__pb2.HelloRequest.SerializeToString, response_deserializer=helloworld__pb2.HelloReply.FromString, ) @@ -40,7 +40,7 @@ def add_GreeterServicer_to_server(servicer, server): ), } generic_handler = grpc.method_handlers_generic_handler( - 'Greeter', rpc_method_handlers) + 'helloworld.Greeter', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) @@ -59,7 +59,7 @@ def SayHello(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/Greeter/SayHello', + return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello', helloworld__pb2.HelloRequest.SerializeToString, helloworld__pb2.HelloReply.FromString, options, channel_credentials, diff --git a/example/grpc/server.py b/example/grpc/server.py index f65904793..d18ed5128 100644 --- a/example/grpc/server.py +++ b/example/grpc/server.py @@ -31,5 +31,5 @@ def serve(): event.wait() if __name__ == '__main__': - logging.basicConfig() + logging.basicConfig(level=logging.INFO) serve() diff --git a/pyproject.toml b/pyproject.toml index 11b73b508..28100725b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,8 @@ dependencies = [ "grpcio-reflection", "grpcio-status", "google-api-python-client", + "protobuf", + "proto-plus" ] requires-python = ">=3.8" @@ -140,6 +142,7 @@ addopts = [ "--strict-markers", "-p", "no:logging", "--tb=short", + "--tavern-setup-init-logging", ] norecursedirs = [ ".git", diff --git a/requirements.txt b/requirements.txt index a2b70ad99..0967698a1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -712,6 +712,10 @@ pre-commit==2.20.0 \ --hash=sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7 \ --hash=sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959 # via tavern (pyproject.toml) +proto-plus==1.22.2 \ + --hash=sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165 \ + --hash=sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d + # via tavern (pyproject.toml) protobuf==4.22.0 \ --hash=sha256:1669cb7524221a8e2d9008d0842453dbefdd0fcdd64d67672f657244867635fb \ --hash=sha256:29288813aacaa302afa2381db1d6e0482165737b0afdf2811df5fa99185c457b \ @@ -732,6 +736,8 @@ protobuf==4.22.0 \ # grpcio-reflection # grpcio-status # grpcio-tools + # proto-plus + # tavern (pyproject.toml) py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 diff --git a/tavern/_core/pytest/hooks.py b/tavern/_core/pytest/hooks.py index c3e1c89b4..2ee77b3be 100644 --- a/tavern/_core/pytest/hooks.py +++ b/tavern/_core/pytest/hooks.py @@ -1,8 +1,12 @@ +import logging +import logging.config import os import pathlib import re +from textwrap import dedent import pytest +import yaml from tavern._core import exceptions @@ -22,6 +26,42 @@ def pytest_collect_file(parent, path: os.PathLike): if int(pytest.__version__.split(".", maxsplit=1)[0]) < 7: raise exceptions.TavernException("Only pytest >=7 is supported") + try: + setup_initial_logging = get_option_generic( + parent.config, "tavern-setup-init-logging", False + ) + except ValueError: + pass + else: + if setup_initial_logging: + cfg = dedent( + """ + --- + version: 1 + formatters: + default: + format: "%(asctime)s [%(levelname)s]: (%(name)s:%(lineno)d) %(message)s" + style: "%" + datefmt: "%X" + + 
handlers: + stderr: + class : logging.StreamHandler + level : DEBUG + formatter: default + stream : ext://sys.stderr + + loggers: + tavern: + handlers: + - stderr + level: DEBUG + """ + ) + + settings = yaml.load(cfg, Loader=yaml.SafeLoader) + logging.config.dictConfig(settings) + pattern = get_option_generic( parent.config, "tavern-file-path-regex", r".+\.tavern\.ya?ml$" ) diff --git a/tavern/_core/pytest/util.py b/tavern/_core/pytest/util.py index b972faa5a..8418b10c5 100644 --- a/tavern/_core/pytest/util.py +++ b/tavern/_core/pytest/util.py @@ -64,6 +64,12 @@ def add_parser_options(parser_addoption, with_defaults: bool = True) -> None: action="store", nargs=1, ) + parser_addoption( + "--tavern-setup-init-logging", + help="Set up a simple logger for tavern initialisation. Only for internal use and debugging, may be removed in future with no warning.", + default=False, + action="store_true", + ) def add_ini_options(parser: pytest.Parser) -> None: @@ -110,6 +116,12 @@ def add_ini_options(parser: pytest.Parser) -> None: default=r".+\.tavern\.ya?ml$", type="args", ) + parser.addini( + "tavern-setup-init-logging", + help="Set up a simple logger for tavern initialisation. Only for internal use and debugging, may be removed in future with no warning.", + type="bool", + default=False, + ) def load_global_cfg(pytest_config: pytest.Config) -> TestConfig: diff --git a/tavern/_core/schema/tests.jsonschema.yaml b/tavern/_core/schema/tests.jsonschema.yaml index ebd62f086..8887c5213 100644 --- a/tavern/_core/schema/tests.jsonschema.yaml +++ b/tavern/_core/schema/tests.jsonschema.yaml @@ -244,6 +244,42 @@ definitions: type: object description: Which objects to save from the response + grpc_request: + type: object + required: + - service + properties: + service: + type: string + + proto_body: + type: object + + retain: + type: boolean + + grpc_response: + type: object + properties: + status: + type: object # TODO: Add jsonschema validation + + details: + type: object + + proto_body: + type: object + + timeout: + type: number + + verify_response_with: + oneOf: + - $ref: "#/definitions/verify_block" + - type: array + items: + $ref: "#/definitions/verify_block" + http_response: type: object additionalProperties: false @@ -362,6 +398,12 @@ definitions: response: $ref: "#/definitions/http_response" + grpc_request: + $ref: "#/definitions/grpc_request" + + grpc_response: + $ref: "#/definitions/grpc_response" + ### type: object diff --git a/tavern/_plugins/grpc/__init__.py b/tavern/_plugins/grpc/__init__.py index e69de29bb..4ab9638a1 100644 --- a/tavern/_plugins/grpc/__init__.py +++ b/tavern/_plugins/grpc/__init__.py @@ -0,0 +1,12 @@ +import warnings + +# Shut up warnings caused by proto libraries +warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="pkg_resources", lineno=2804 +) +warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="pkg_resources", lineno=2309 +) +warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="pkg_resources", lineno=20 +) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index af41cfd36..7de3cfcfa 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -98,24 +98,15 @@ def __init__(self, **kwargs): _proto_args = kwargs.pop("proto", {}) check_expected_keys(expected_blocks["proto"], _proto_args) - host = "localhost" - port = "50051" + self.default_host = _connect_args["host"] + if port := _connect_args.get("port"): + self.default_host += ":{}".format(port) - if "host" in 
_connect_args: - host_arg = _connect_args["host"] - host_port = host_arg.split(":") - - if len(host_port) == 2: - host = host_port[0] - port = host_port[1] - elif len(host_port) == 1: - host = host_arg - - port = _connect_args.get("port", port) - self.default_host = "{}:{}".format(host, port) self.timeout = int(_connect_args.get("timeout", 5)) self.tls = bool(_connect_args.get("tls", False)) + logger.critical(self.default_host) + self.channels = {} self.sym_db = _symbol_database.Default() diff --git a/tavern/_plugins/grpc/jsonschema.yaml b/tavern/_plugins/grpc/jsonschema.yaml new file mode 100644 index 000000000..d41a9f564 --- /dev/null +++ b/tavern/_plugins/grpc/jsonschema.yaml @@ -0,0 +1,45 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" + +title: gRPC schema +description: Schema for Python gRPC connection + +type: object +additionalProperties: false +required: + - grpc + +properties: + grpc: + type: object + required: + - connect + properties: + connect: + type: object + required: + - host + - port + properties: + host: + type: string + port: + type: integer + timeout: + type: number + keepalive: + type: integer + tls: + type: object + # TODO: tls options + + metadata: + type: object + + proto: + type: object + properties: + source: + type: string + module: + type: string diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index 77bc6304e..dad7029d6 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -14,7 +14,7 @@ def get_grpc_args(rspec, test_block_config): """Format GRPC request args""" - fspec = format_keys(rspec, test_block_config["variables"]) + fspec = format_keys(rspec, test_block_config.variables) if "json" in rspec: if "body" in rspec: @@ -45,7 +45,7 @@ def __init__(self, client, rspec, test_block_config): # Need to do this here because get_publish_args will modify the original # input, which we might want to use to format. 
No error handling because # all the error handling is done in the previous call - self._original_publish_args = format_keys(rspec, test_block_config["variables"]) + self._original_publish_args = format_keys(rspec, test_block_config.variables) def run(self): try: diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 42410bbec..ce9359f55 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,16 +1,22 @@ import logging +from typing import Mapping from google.protobuf import json_format from grpc import StatusCode from tavern._core.exceptions import TestFailError +from tavern._core.pytest.config import TestConfig from tavern.response import BaseResponse logger = logging.getLogger(__name__) class GRPCResponse(BaseResponse): - def __init__(self, client, name, expected, test_block_config): + def __init__( + self, client, name: str, expected: Mapping, test_block_config: TestConfig + ): + logger.critical(expected) + super(GRPCResponse, self).__init__(name, expected, test_block_config) self._client = client @@ -23,12 +29,12 @@ def __str__(self): else: return "" - def _validate_block(self, blockname, block): + def _validate_block(self, blockname: str, block: Mapping): """Validate a block of the response Args: - blockname (str): which part of the response is being checked - block (dict): The actual part being checked + blockname: which part of the response is being checked + block: The actual part being checked """ try: expected_block = self.expected[blockname] or {} @@ -44,14 +50,8 @@ def _validate_block(self, blockname, block): logger.debug("Validating response %s against %s", blockname, expected_block) - # 'strict' could be a list, in which case we only want to enable strict - # key checking for that specific bit of the response - test_strictness = self.test_block_config["strict"] - if isinstance(test_strictness, list): - block_strictness = blockname in test_strictness - else: - block_strictness = test_strictness - + test_strictness = self.test_block_config.strict + block_strictness = test_strictness.option_for(blockname) self.recurse_check_key_match(expected_block, block, blockname, block_strictness) def verify(self, response): @@ -81,7 +81,7 @@ def verify(self, response): response.details(), ) - if "body" in self.expected: + if "proto_body" in self.expected: result = response.result() json_result = json_format.MessageToDict( diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py index 7ab4d5c42..f4049afb5 100644 --- a/tavern/_plugins/grpc/tavernhook.py +++ b/tavern/_plugins/grpc/tavernhook.py @@ -18,16 +18,10 @@ request_block_name = "grpc_request" -def get_expected_from_request(stage, test_block_config, session): - # pylint: disable=unused-argument - # grpc response is not required - grpc_expected = stage.get("grpc_response") - if grpc_expected: - # format so we can subscribe to the right topic - f_expected = format_keys(grpc_expected, test_block_config["variables"]) - expected = f_expected - else: - expected = {} +def get_expected_from_request(response_block, test_block_config, session): + # format so we can subscribe to the right topic + f_expected = format_keys(response_block, test_block_config.variables) + expected = f_expected return expected @@ -35,6 +29,6 @@ def get_expected_from_request(stage, test_block_config, session): verifier_type = GRPCResponse response_block_name = "grpc_response" -schema_path = join(abspath(dirname(__file__)), "schema.yaml") +schema_path = 
join(abspath(dirname(__file__)), "jsonschema.yaml") with open(schema_path, "r") as schema_file: schema = yaml.load(schema_file, Loader=yaml.SafeLoader) diff --git a/tavern/response.py b/tavern/response.py index 8bee7a6a1..16a77bb63 100644 --- a/tavern/response.py +++ b/tavern/response.py @@ -110,12 +110,12 @@ def recurse_check_key_match( except exceptions.KeyMismatchError as e: self._adderr(e.args[0], e=e) - def _check_for_validate_functions(self, response_block) -> None: + def _check_for_validate_functions(self, response_block: Mapping) -> None: """ See if there were any functions specified in the response block and save them for later use Args: - response_block (dict): block of external functions to call + response_block: block of external functions to call """ def check_ext_functions(verify_block): From 35916ef8cbe87f9a4a0d946faa30e0728cc35edb Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 2 Apr 2023 14:40:45 +0100 Subject: [PATCH 20/72] Fix validation --- example/grpc/test_grpc.tavern.yaml | 23 +++++++++++++++++++++++ tavern/_core/schema/jsonschema.py | 2 ++ tavern/_core/schema/tests.jsonschema.yaml | 4 +++- tavern/_plugins/grpc/response.py | 2 -- 4 files changed, 28 insertions(+), 3 deletions(-) diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index f47a9cc2a..cc6a10504 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -18,5 +18,28 @@ stages: body: name: "John" grpc_response: + status: "OK" body: message: "Hello, John!" + +--- + +test_name: Test cannot use invalid string status + +includes: + - !include common.yaml + +grpc: *grpc_spec + +_xfail: verify + +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "Jim" + grpc_response: + status: "GREETINGS" + body: + message: "Hello, Jim!" 
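The `_xfail: verify` test above relies on schema-level checking of the `status` value; the hunk that follows wires a `validate_grpc_status_is_valid_or_list_of_names` extension into the jsonschema verifier for `grpc_response.status`. The body of that extension is not part of this diff, so the following is only a minimal sketch of such a check — an assumption about its shape, not the actual Tavern implementation — comparing the value against `grpc.StatusCode`:

```python
# Illustrative sketch only: the real extension lives elsewhere in the tree and
# its exact signature/behaviour may differ. Assumes BadSchemaError is available
# from tavern._core.exceptions and that the hook uses a pykwalify-style signature.
import grpc

from tavern._core import exceptions


def validate_grpc_status_is_valid_or_list_of_names(value, rule_obj, path):
    """Accept a gRPC status name ("OK"), a numeric code (0), or a list of either."""
    names = {code.name for code in grpc.StatusCode}        # e.g. "OK", "NOT_FOUND"
    numbers = {code.value[0] for code in grpc.StatusCode}  # e.g. 0, 5

    statuses = value if isinstance(value, list) else [value]
    for status in statuses:
        if isinstance(status, str) and status.upper() in names:
            continue
        if isinstance(status, int) and status in numbers:
            continue
        raise exceptions.BadSchemaError(
            f"'{status}' is not a valid gRPC status name or code"
        )

    return True
```

With a check along these lines, an unknown name such as `"GREETINGS"` is rejected while the test file is being verified, before any stage runs — which is why the test above xfails at the `verify` step.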
diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py index 944dc0105..55627651d 100644 --- a/tavern/_core/schema/jsonschema.py +++ b/tavern/_core/schema/jsonschema.py @@ -21,6 +21,7 @@ check_strict_key, retry_variable, validate_file_spec, + validate_grpc_status_is_valid_or_list_of_names, validate_http_method, validate_json_with_ext, validate_request_json, @@ -173,6 +174,7 @@ def verify_jsonschema(to_verify, schema) -> None: "stages[*].request.data[]": validate_request_json, "stages[*].request.params[]": validate_request_json, "stages[*].request.headers[]": validate_request_json, + "stages[*].grpc_response.status[]": validate_grpc_status_is_valid_or_list_of_names, "stages[*].request.method[]": validate_http_method, "stages[*].request.save[]": validate_json_with_ext, "stages[*].request.files[]": validate_file_spec, diff --git a/tavern/_core/schema/tests.jsonschema.yaml b/tavern/_core/schema/tests.jsonschema.yaml index 8887c5213..81cbe52fb 100644 --- a/tavern/_core/schema/tests.jsonschema.yaml +++ b/tavern/_core/schema/tests.jsonschema.yaml @@ -262,7 +262,9 @@ definitions: type: object properties: status: - type: object # TODO: Add jsonschema validation + oneOf: + - type: string + - type: integer details: type: object diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index ce9359f55..c62dd6e78 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -21,8 +21,6 @@ def __init__( self._client = client - self.received_messages = [] - def __str__(self): if self.response: return self.response.payload From b2f2ad563292eecd38c797aa127d4307414c2eb0 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 8 Apr 2023 18:36:28 +0100 Subject: [PATCH 21/72] Clean up some code and add more exceptions and tests --- example/grpc/test_grpc.tavern.yaml | 54 +++++++++++ tavern/_core/exceptions.py | 4 + tavern/_core/schema/tests.jsonschema.yaml | 8 +- tavern/_plugins/grpc/__init__.py | 6 ++ tavern/_plugins/grpc/client.py | 104 +++++++++++++--------- tavern/_plugins/grpc/jsonschema.yaml | 9 +- tavern/_plugins/grpc/request.py | 9 +- 7 files changed, 142 insertions(+), 52 deletions(-) diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index cc6a10504..1b0a990bf 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -24,6 +24,60 @@ stages: --- +test_name: Test grpc message echo importing a module instead of compiling from source + +includes: + - !include common.yaml + +grpc: + connect: + host: "{grpc_host}" + port: !int "{grpc_port}" + timeout: 3 + proto: + module: helloworld_pb2_grpc + +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc message echo importing a module instead of compiling from source + +includes: + - !include common.yaml + +grpc: + connect: + host: "{grpc_host}" + port: !int "{grpc_port}" + timeout: 3 + proto: + module: cool_grpc_server + +_xfail: run + +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" 
+ +--- + test_name: Test cannot use invalid string status includes: diff --git a/tavern/_core/exceptions.py b/tavern/_core/exceptions.py index 992eabbbc..ba4fd1f82 100644 --- a/tavern/_core/exceptions.py +++ b/tavern/_core/exceptions.py @@ -58,6 +58,10 @@ class GRPCRequestException(TavernException): """Error making requests in GRPCRequest()""" +class GRPCServiceException(TavernException): + """Some kind of error when trying to get the gRPC service""" + + class ProtoCompilerException(TavernException): """Some kind of error using protoc""" diff --git a/tavern/_core/schema/tests.jsonschema.yaml b/tavern/_core/schema/tests.jsonschema.yaml index 81cbe52fb..d829db16b 100644 --- a/tavern/_core/schema/tests.jsonschema.yaml +++ b/tavern/_core/schema/tests.jsonschema.yaml @@ -249,10 +249,16 @@ definitions: required: - service properties: + host: + type: string + service: type: string - proto_body: + body: + type: object + + json: type: object retain: diff --git a/tavern/_plugins/grpc/__init__.py b/tavern/_plugins/grpc/__init__.py index 4ab9638a1..5c8edcd6f 100644 --- a/tavern/_plugins/grpc/__init__.py +++ b/tavern/_plugins/grpc/__init__.py @@ -7,6 +7,12 @@ warnings.filterwarnings( "ignore", category=DeprecationWarning, module="pkg_resources", lineno=2309 ) +warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="pkg_resources", lineno=2870 +) +warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="pkg_resources", lineno=2349 +) warnings.filterwarnings( "ignore", category=DeprecationWarning, module="pkg_resources", lineno=20 ) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 7de3cfcfa..e16928ad3 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,3 +1,4 @@ +import functools import logging import os import pkgutil @@ -6,6 +7,7 @@ import warnings from distutils.spawn import find_executable from importlib import import_module +from typing import Mapping, Optional import grpc from google.protobuf import descriptor_pb2, json_format @@ -23,6 +25,7 @@ warnings.warn("deprecated", DeprecationWarning) +@functools.lru_cache def find_protoc() -> str: # Find the Protocol Compiler. if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): @@ -31,10 +34,9 @@ def find_protoc() -> str: if protoc := find_executable("protoc"): return protoc - raise exceptions.ProtoCompilerException - - -protoc = find_protoc() + raise exceptions.ProtoCompilerException( + "Wanted to dynamically compile a proto source, but could not find protoc" + ) def _generate_proto_import(source, output): @@ -57,6 +59,8 @@ def _generate_proto_import(source, output): if child.rsplit(".", 1)[-1] == "proto" ] + protoc = find_protoc() + protoc_command = [protoc, "-I" + source, "--python_out=" + output] protoc_command.extend(protos) @@ -65,20 +69,20 @@ def _generate_proto_import(source, output): raise exceptions.ProtoCompilerException(call.stderr) -def _import_grpc_module(output): +def _import_grpc_module(output: str): output_path = [] if os.path.exists(output): output_path.append(output) else: - mod = __import__(output, fromlist=[""]) - output_path.extend(mod.__path__) + mod = import_module(output, output) + output_path.extend(mod.__name__) sys.path.extend(output_path) for _, name, _ in pkgutil.iter_modules(output_path): import_module("." 
+ name, package=output) -class GRPCClient(object): +class GRPCClient: def __init__(self, **kwargs): logger.debug("Initialising GRPC client with %s", kwargs) expected_blocks = { @@ -98,36 +102,41 @@ def __init__(self, **kwargs): _proto_args = kwargs.pop("proto", {}) check_expected_keys(expected_blocks["proto"], _proto_args) - self.default_host = _connect_args["host"] - if port := _connect_args.get("port"): - self.default_host += ":{}".format(port) + self._attempt_reflection = bool(kwargs.pop("attempt_reflection", False)) + + if default_host := _connect_args.get("host"): + self.default_host = default_host + if port := _connect_args.get("port"): + self.default_host += ":{}".format(port) self.timeout = int(_connect_args.get("timeout", 5)) self.tls = bool(_connect_args.get("tls", False)) - logger.critical(self.default_host) - self.channels = {} self.sym_db = _symbol_database.Default() proto_module = _proto_args.get("module", "proto") - if "source" in _proto_args: - proto_source = _proto_args["source"] + if proto_source := _proto_args.get("source"): _generate_proto_import(proto_source, proto_module) - _import_grpc_module(proto_module) + try: + _import_grpc_module(proto_module) + except ImportError as e: + raise exceptions.GRPCServiceException("error importing gRPC modules") from e def _register_file_descriptor(self, service_proto): - for i in range(len(service_proto.file_descriptor_proto)): - file_descriptor_proto = service_proto.file_descriptor_proto[ - len(service_proto.file_descriptor_proto) - i - 1 - ] + for d in service_proto.file_descriptor_proto: + file_descriptor_proto = service_proto.file_descriptor_proto[d] proto = descriptor_pb2.FileDescriptorProto() proto.ParseFromString(file_descriptor_proto) self.sym_db.pool.Add(proto) - def _get_reflection_info(self, channel, service_name=None, file_by_filename=None): - logger.debug("Geting GRPC protobuf for service %s", service_name) + def _get_reflection_info( + self, channel, service_name: Optional[str] = None, file_by_filename=None + ): + logger.debug( + "Getting GRPC protobuf for service %s from reflection", service_name + ) ref_request = reflection_pb2.ServerReflectionRequest( file_containing_symbol=service_name, file_by_filename=file_by_filename ) @@ -138,7 +147,7 @@ def _get_reflection_info(self, channel, service_name=None, file_by_filename=None for response in ref_response: self._register_file_descriptor(response.file_descriptor_response) - def _get_grpc_service(self, channel, service, method): + def _get_grpc_service(self, channel, service: str, method: str): full_service_name = "{}.{}".format(service, method) try: grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) @@ -156,7 +165,7 @@ def _get_grpc_service(self, channel, service, method): return grpc_method, input_type - def _make_call_request(self, host, full_service): + def _make_call_request(self, host: str, full_service: str): full_service = full_service.replace("/", ".") service_method = full_service.rsplit(".", 1) if len(service_method) != 2: @@ -185,33 +194,40 @@ def _make_call_request(self, host, full_service): channel = self.channels[host] grpc_method, input_type = self._get_grpc_service(channel, service, method) - if grpc_method is not None and input_type is not None: + if grpc_method and input_type: return grpc_method, input_type - try: - self._get_reflection_info(channel, service_name=service) - except ( - grpc.RpcError - ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. 
- logger.error("Call failure: %s", rpc_error) - status = rpc_status.from_call(rpc_error) - if status is None: - logger.warning("Error occurred %s", rpc_error) - else: - logger.warning( - "Unable get %s service reflection information code %s detail %s", - service, - status.code, - status.details, - ) - raise exceptions.GRPCRequestException from rpc_error + if self._attempt_reflection: + try: + self._get_reflection_info(channel, service_name=service) + except ( + grpc.RpcError + ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + status = rpc_status.from_call(rpc_error) + if status is None: + logger.warning("Unknown error occurred in RPC call", exc_info=True) + else: + logger.warning( + "Unable get %s service reflection information code %s detail %s", + service, + status.code, + status.details, + exc_info=True, + ) + raise exceptions.GRPCRequestException from rpc_error return self._get_grpc_service(channel, service, method) def __enter__(self): logger.debug("Connecting to GRPC") - def call(self, service, host=None, body=None, timeout=None): + def call( + self, + service: str, + host: Optional[str] = None, + body: Optional[Mapping] = None, + timeout: Optional[int] = None, + ): if host is None: host = self.default_host if timeout is None: @@ -219,7 +235,7 @@ def call(self, service, host=None, body=None, timeout=None): grpc_call, grpc_request = self._make_call_request(host, service) if grpc_call is None or grpc_request is None: - raise exceptions.GRPCRequestException( + raise exceptions.GRPCServiceException( "Service {} was not found on host {}".format(service, host) ) diff --git a/tavern/_plugins/grpc/jsonschema.yaml b/tavern/_plugins/grpc/jsonschema.yaml index d41a9f564..89c5a1126 100644 --- a/tavern/_plugins/grpc/jsonschema.yaml +++ b/tavern/_plugins/grpc/jsonschema.yaml @@ -17,9 +17,6 @@ properties: properties: connect: type: object - required: - - host - - port properties: host: type: string @@ -30,8 +27,10 @@ properties: keepalive: type: integer tls: - type: object - # TODO: tls options + type: boolean + + attempt_reflection: + type: boolean metadata: type: object diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index dad7029d6..98691f5c2 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -1,11 +1,14 @@ import functools import json import logging +from typing import Mapping from box import Box from tavern._core import exceptions from tavern._core.dict_util import check_expected_keys, format_keys +from tavern._core.pytest.config import TestConfig +from tavern._plugins.grpc.client import GRPCClient from tavern.request import BaseRequest logger = logging.getLogger(__name__) @@ -33,8 +36,10 @@ class GRPCRequest(BaseRequest): Similar to RestRequest, publishes a single message. 
""" - def __init__(self, client, rspec, test_block_config): - expected = {"host", "service", "body"} + def __init__( + self, client: GRPCClient, rspec: Mapping, test_block_config: TestConfig + ): + expected = {"host", "retain", "service", "body", "json"} check_expected_keys(expected, rspec) From 7436302ea9e3a48984484e1dca2b78b14828b4e4 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 17:45:00 +0100 Subject: [PATCH 22/72] Start writing docs and fix some warnings/errors/cleanup --- docs/source/grpc.md | 17 +++ docs/source/index.md | 1 + example/grpc/Dockerfile | 6 +- example/grpc/common.yaml | 1 + example/grpc/docker-compose.yaml | 1 + example/grpc/server.py | 35 ----- example/grpc/{ => server}/helloworld_pb2.py | 0 example/grpc/{ => server}/helloworld_pb2.pyi | 0 .../grpc/{ => server}/helloworld_pb2_grpc.py | 0 example/grpc/server/server.py | 43 ++++++ example/grpc/test_grpc.tavern.yaml | 122 +++++++++++++++++- pyproject.toml | 1 + tavern/_core/schema/jsonschema.py | 2 +- tavern/_plugins/grpc/client.py | 76 +++++++---- tavern/_plugins/grpc/jsonschema.yaml | 2 - tavern/_plugins/grpc/response.py | 2 - 16 files changed, 237 insertions(+), 72 deletions(-) create mode 100644 docs/source/grpc.md delete mode 100644 example/grpc/server.py rename example/grpc/{ => server}/helloworld_pb2.py (100%) rename example/grpc/{ => server}/helloworld_pb2.pyi (100%) rename example/grpc/{ => server}/helloworld_pb2_grpc.py (100%) create mode 100644 example/grpc/server/server.py diff --git a/docs/source/grpc.md b/docs/source/grpc.md new file mode 100644 index 000000000..1b79d36ba --- /dev/null +++ b/docs/source/grpc.md @@ -0,0 +1,17 @@ +# gRPC integration testing + +## Setting connection parameters + +Testing using gRPC is similar to (mqtt)[mqtt.md], + +There are 4 different types of service resolution: + +#### Specifying the proto definition + +#### Server reflection + +This is obviously the least useful method. If you don't specify a proto source or module, the client +will attempt to +use [gRPC reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) to +determine what is the appropriate message type for the message you're trying to send. This is not +reliable as the server you're trying to talk to might not have reflection turned on. diff --git a/docs/source/index.md b/docs/source/index.md index 584562e67..105814a87 100644 --- a/docs/source/index.md +++ b/docs/source/index.md @@ -32,6 +32,7 @@ Tavern is still in active development and is used by 100s of companies. 
* [Basic Concepts](basics.md) * [HTTP Integration testing](http.md) * [MQTT Integration testing](mqtt.md) +* [gRPC Integration testing](grpc.md) * [Plugins](plugins.md) * [Debugging Tests](debugging.md) * [Examples](examples.md) diff --git a/example/grpc/Dockerfile b/example/grpc/Dockerfile index 2b3bbe32e..04e2f69b3 100644 --- a/example/grpc/Dockerfile +++ b/example/grpc/Dockerfile @@ -2,8 +2,8 @@ FROM python:3.9-slim-buster RUN pip install grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 -COPY server.py / -COPY helloworld_pb2.py / -COPY helloworld_pb2_grpc.py / +COPY server/server.py / +COPY server/helloworld_pb2.py / +COPY server/helloworld_pb2_grpc.py / CMD ["python3", "/server.py"] diff --git a/example/grpc/common.yaml b/example/grpc/common.yaml index f5b0df6ca..cf010c48e 100644 --- a/example/grpc/common.yaml +++ b/example/grpc/common.yaml @@ -5,3 +5,4 @@ description: used for testing against local server variables: grpc_host: localhost grpc_port: 50051 + grpc_reflecting_port: 50052 diff --git a/example/grpc/docker-compose.yaml b/example/grpc/docker-compose.yaml index bb5f6869a..34b51a08b 100644 --- a/example/grpc/docker-compose.yaml +++ b/example/grpc/docker-compose.yaml @@ -8,3 +8,4 @@ services: dockerfile: Dockerfile ports: - "50051:50051" + - "50052:50052" diff --git a/example/grpc/server.py b/example/grpc/server.py deleted file mode 100644 index d18ed5128..000000000 --- a/example/grpc/server.py +++ /dev/null @@ -1,35 +0,0 @@ -from concurrent import futures -import logging -import threading - -import grpc -from grpc_reflection.v1alpha import reflection - -import helloworld_pb2 -import helloworld_pb2_grpc - - -class Greeter(helloworld_pb2_grpc.GreeterServicer): - - def SayHello(self, request, context): - return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name) - - -def serve(): - server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) - helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) - SERVICE_NAMES = ( - helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, - reflection.SERVICE_NAME, - ) - reflection.enable_server_reflection(SERVICE_NAMES, server) - server.add_insecure_port('[::]:50051') - logging.info("Starting...") - server.start() - - event = threading.Event() - event.wait() - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - serve() diff --git a/example/grpc/helloworld_pb2.py b/example/grpc/server/helloworld_pb2.py similarity index 100% rename from example/grpc/helloworld_pb2.py rename to example/grpc/server/helloworld_pb2.py diff --git a/example/grpc/helloworld_pb2.pyi b/example/grpc/server/helloworld_pb2.pyi similarity index 100% rename from example/grpc/helloworld_pb2.pyi rename to example/grpc/server/helloworld_pb2.pyi diff --git a/example/grpc/helloworld_pb2_grpc.py b/example/grpc/server/helloworld_pb2_grpc.py similarity index 100% rename from example/grpc/helloworld_pb2_grpc.py rename to example/grpc/server/helloworld_pb2_grpc.py diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py new file mode 100644 index 000000000..1e42c94ec --- /dev/null +++ b/example/grpc/server/server.py @@ -0,0 +1,43 @@ +from concurrent import futures +import logging +import threading + +import grpc +from grpc_reflection.v1alpha import reflection + +import helloworld_pb2 +import helloworld_pb2_grpc + + +class Greeter(helloworld_pb2_grpc.GreeterServicer): + + def SayHello(self, request, context): + return helloworld_pb2.HelloReply(message='Hello, %s!' 
% request.name) + + +def serve(): + for reflect in [True, False]: + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) + + if reflect: + service_names = ( + helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(service_names, server) + port = 50052 + else: + port = 50051 + + server.add_insecure_port(f'[::]:{port:d}') + logging.info("Starting...") + server.start() + + event = threading.Event() + event.wait() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + serve() diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index 1b0a990bf..8e64e91f3 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -10,6 +10,9 @@ grpc: &grpc_spec host: "{grpc_host}" port: !int "{grpc_port}" timeout: 3 + proto: + source: server + stages: - name: Echo text @@ -35,7 +38,7 @@ grpc: port: !int "{grpc_port}" timeout: 3 proto: - module: helloworld_pb2_grpc + module: server/helloworld_pb2_grpc stages: - name: Echo text @@ -50,7 +53,122 @@ stages: --- -test_name: Test grpc message echo importing a module instead of compiling from source +test_name: Test grpc connection without the 'connect' block + +includes: + - !include common.yaml + +grpc: + proto: + module: server/helloworld_pb2_grpc + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_port}" + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc connection without the 'connect' block, with a bad message + +includes: + - !include common.yaml + +grpc: + proto: + module: server/helloworld_pb2_grpc + +_xfail: run + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_port}" + service: helloworld.Greeter/SayHello + body: + aarg: wooo + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc connection without the 'connect' block, using server reflection + +includes: + - !include common.yaml + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_reflecting_port}" + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc connection without the 'connect' block, using server reflection, with a bad message + +includes: + - !include common.yaml + +_xfail: run + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_reflecting_port}" + service: helloworld.Greeter/SayHello + body: + aarg: wooo + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc compiling source + +includes: + - !include common.yaml + +grpc: + <<: *grpc_spec + proto: + source: . +_xfail: run + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_port}" + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" 
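The `_xfail: run` stages above that send unknown fields (`aarg`, `A`) fail locally, before any RPC is made: the request body is converted into a protobuf message with `json_format.ParseDict`, which rejects field names the message does not define (a later commit in this series wraps that `ParseError` in a `GRPCRequestException`). A standalone illustration using the example `helloworld` stubs:

```python
# Not part of the patch -- just demonstrates why bodies with unknown keys fail.
from google.protobuf import json_format

from helloworld_pb2 import HelloRequest  # generated stub from example/grpc/server

json_format.ParseDict({"name": "John"}, HelloRequest())  # parses fine

try:
    json_format.ParseDict({"aarg": "wooo", "name": "John"}, HelloRequest())
except json_format.ParseError as err:
    # e.g. Message type "helloworld.HelloRequest" has no field named "aarg"
    print(err)
```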
+ +--- + +test_name: Test grpc message echo importing a module that doesn't exist includes: - !include common.yaml diff --git a/pyproject.toml b/pyproject.toml index 2e6ed5a9f..332be72f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -148,6 +148,7 @@ norecursedirs = [ ".git", ".tox", "example", + "example/grpc/server" ] [tool.ruff] diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py index 55627651d..cc6063908 100644 --- a/tavern/_core/schema/jsonschema.py +++ b/tavern/_core/schema/jsonschema.py @@ -165,7 +165,7 @@ def verify_jsonschema(to_verify, schema) -> None: ) msg = "\n---\n" + "\n---\n".join([str(i) for i in real_context]) - raise BadSchemaError(msg) from None + raise BadSchemaError(msg) from e extra_checks = { "stages[*].mqtt_publish.json[]": validate_request_json, diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index e16928ad3..95d36abee 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -3,14 +3,15 @@ import os import pkgutil import subprocess -import sys import warnings from distutils.spawn import find_executable from importlib import import_module from typing import Mapping, Optional import grpc -from google.protobuf import descriptor_pb2, json_format +import grpc_reflection +import sys +from google.protobuf import descriptor_pb2, json_format, message_factory from google.protobuf import symbol_database as _symbol_database from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc from grpc_status import rpc_status @@ -66,7 +67,7 @@ def _generate_proto_import(source, output): call = subprocess.run(protoc_command) if call.returncode != 0: - raise exceptions.ProtoCompilerException(call.stderr) + raise exceptions.ProtoCompilerException(call.stdout) def _import_grpc_module(output: str): @@ -89,6 +90,7 @@ def __init__(self, **kwargs): "connect": {"host", "port", "options", "compression", "timeout", "tls"}, "proto": {"source", "module"}, "metadata": {}, + "attempt_reflection": {}, } # check main block first check_expected_keys(expected_blocks.keys(), kwargs) @@ -119,16 +121,19 @@ def __init__(self, **kwargs): if proto_source := _proto_args.get("source"): _generate_proto_import(proto_source, proto_module) - try: - _import_grpc_module(proto_module) - except ImportError as e: - raise exceptions.GRPCServiceException("error importing gRPC modules") from e - - def _register_file_descriptor(self, service_proto): - for d in service_proto.file_descriptor_proto: - file_descriptor_proto = service_proto.file_descriptor_proto[d] + if proto_module: + try: + _import_grpc_module(proto_module) + except ImportError as e: + raise exceptions.GRPCServiceException( + "error importing gRPC modules") from e + + def _register_file_descriptor(self, + service_proto: grpc_reflection.v1alpha.reflection_pb2.FileDescriptorResponse): + for file_descriptor_proto in service_proto.file_descriptor_proto: proto = descriptor_pb2.FileDescriptorProto() proto.ParseFromString(file_descriptor_proto) + logger.critical("ksdo") self.sym_db.pool.Add(proto) def _get_reflection_info( @@ -151,8 +156,8 @@ def _get_grpc_service(self, channel, service: str, method: str): full_service_name = "{}.{}".format(service, method) try: grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) - input_type = self.sym_db.GetPrototype(grpc_service.input_type) - output_type = self.sym_db.GetPrototype(grpc_service.output_type) + input_type = message_factory.GetMessageClass(grpc_service.input_type) + output_type = 
message_factory.GetMessageClass(grpc_service.output_type) except KeyError: return None, None @@ -197,24 +202,41 @@ def _make_call_request(self, host: str, full_service: str): if grpc_method and input_type: return grpc_method, input_type - if self._attempt_reflection: + if not self._attempt_reflection: + logger.error("could not find service and gRPC reflection disabled, cannot continue") + raise exceptions.GRPCServiceException( + "Service {} was not registered for host {}".format(service, host) + ) + + logger.info("service not registered, doing reflection from server") + try: + self._get_reflection_info(channel, service_name=service) + except ( + grpc.RpcError + ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + code = details = None try: - self._get_reflection_info(channel, service_name=service) - except ( - grpc.RpcError - ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + code = rpc_error.code() + details = rpc_error.details() + except AttributeError: status = rpc_status.from_call(rpc_error) if status is None: - logger.warning("Unknown error occurred in RPC call", exc_info=True) + logger.warning("Unknown error occurred in RPC call", + exc_info=True) else: - logger.warning( - "Unable get %s service reflection information code %s detail %s", - service, - status.code, - status.details, - exc_info=True, - ) - raise exceptions.GRPCRequestException from rpc_error + code = status.code + details = status.details + + if code and details: + logger.warning( + "Unable get %s service reflection information code %s detail %s", + service, + code, + details, + exc_info=True, + ) + + raise exceptions.GRPCRequestException from rpc_error return self._get_grpc_service(channel, service, method) diff --git a/tavern/_plugins/grpc/jsonschema.yaml b/tavern/_plugins/grpc/jsonschema.yaml index 89c5a1126..1934e5d33 100644 --- a/tavern/_plugins/grpc/jsonschema.yaml +++ b/tavern/_plugins/grpc/jsonschema.yaml @@ -12,8 +12,6 @@ required: properties: grpc: type: object - required: - - connect properties: connect: type: object diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index c62dd6e78..bbbea0702 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -15,8 +15,6 @@ class GRPCResponse(BaseResponse): def __init__( self, client, name: str, expected: Mapping, test_block_config: TestConfig ): - logger.critical(expected) - super(GRPCResponse, self).__init__(name, expected, test_block_config) self._client = client From 0c205434a43d43b28293f6b6a49e77624cd390e4 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:20:07 +0100 Subject: [PATCH 23/72] Fix some more issues with compiling protos and reflection --- .gitignore | 1 + constraints.txt | 3 ++ example/grpc/Dockerfile | 2 +- example/grpc/docker-compose.yaml | 1 + example/grpc/server/helloworld_pb2.py | 28 +++++----- example/grpc/server/helloworld_pb2.pyi | 1 - example/grpc/server/helloworld_pb2_grpc.py | 63 +++++++++++++--------- example/grpc/server/server.py | 38 ++++++++++--- example/grpc/test_grpc.tavern.yaml | 39 +++++++++++--- pyproject.toml | 1 + requirements.txt | 5 ++ tavern/_plugins/grpc/client.py | 57 ++++++++++++++------ 12 files changed, 169 insertions(+), 70 deletions(-) diff --git a/.gitignore b/.gitignore index 09a8a5b9f..fa0769df9 100644 --- a/.gitignore +++ b/.gitignore @@ -125,3 +125,4 @@ bazel-out bazel-tavern bazel-testlogs +example/grpc/proto diff 
--git a/constraints.txt b/constraints.txt index 9c2a5f44a..ad6ce2f8f 100644 --- a/constraints.txt +++ b/constraints.txt @@ -103,8 +103,11 @@ googleapis-common-protos==1.58.0 # via # google-api-core # grpcio-status +grpc-interceptor==0.15.1 + # via tavern (pyproject.toml) grpcio==1.51.1 # via + # grpc-interceptor # grpcio-reflection # grpcio-status # grpcio-tools diff --git a/example/grpc/Dockerfile b/example/grpc/Dockerfile index 04e2f69b3..de17e2783 100644 --- a/example/grpc/Dockerfile +++ b/example/grpc/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.9-slim-buster -RUN pip install grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 +RUN pip install grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 grpc-interceptor==0.15.1 COPY server/server.py / COPY server/helloworld_pb2.py / diff --git a/example/grpc/docker-compose.yaml b/example/grpc/docker-compose.yaml index 34b51a08b..590cfb27f 100644 --- a/example/grpc/docker-compose.yaml +++ b/example/grpc/docker-compose.yaml @@ -9,3 +9,4 @@ services: ports: - "50051:50051" - "50052:50052" + stop_grace_period: "1s" diff --git a/example/grpc/server/helloworld_pb2.py b/example/grpc/server/helloworld_pb2.py index a8eca53bc..f4a119480 100644 --- a/example/grpc/server/helloworld_pb2.py +++ b/example/grpc/server/helloworld_pb2.py @@ -6,26 +6,26 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x03\x90\x01\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x10helloworld.proto\x12\nhelloworld"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply"\x00\x42\x03\x90\x01\x01\x62\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "helloworld_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\220\001\001' - _HELLOREQUEST._serialized_start=32 - _HELLOREQUEST._serialized_end=60 - _HELLOREPLY._serialized_start=62 - _HELLOREPLY._serialized_end=91 - _GREETER._serialized_start=93 - _GREETER._serialized_end=166 -_builder.BuildServices(DESCRIPTOR, 'helloworld_pb2', globals()) + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\220\001\001" + _HELLOREQUEST._serialized_start = 32 + _HELLOREQUEST._serialized_end = 60 + _HELLOREPLY._serialized_start = 62 + _HELLOREPLY._serialized_end = 91 + _GREETER._serialized_start = 93 + _GREETER._serialized_end = 166 +_builder.BuildServices(DESCRIPTOR, "helloworld_pb2", globals()) # @@protoc_insertion_point(module_scope) diff --git a/example/grpc/server/helloworld_pb2.pyi b/example/grpc/server/helloworld_pb2.pyi index 28c1e554c..6007e6c5c 100644 --- a/example/grpc/server/helloworld_pb2.pyi +++ b/example/grpc/server/helloworld_pb2.pyi @@ -18,5 +18,4 @@ class 
HelloRequest(_message.Message): def __init__(self, name: _Optional[str] = ...) -> None: ... class Greeter(_service.service): ... - class Greeter_Stub(Greeter): ... diff --git a/example/grpc/server/helloworld_pb2_grpc.py b/example/grpc/server/helloworld_pb2_grpc.py index 0efb0eb60..b95dee929 100644 --- a/example/grpc/server/helloworld_pb2_grpc.py +++ b/example/grpc/server/helloworld_pb2_grpc.py @@ -15,10 +15,10 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.SayHello = channel.unary_unary( - '/helloworld.Greeter/SayHello', - request_serializer=helloworld__pb2.HelloRequest.SerializeToString, - response_deserializer=helloworld__pb2.HelloReply.FromString, - ) + "/helloworld.Greeter/SayHello", + request_serializer=helloworld__pb2.HelloRequest.SerializeToString, + response_deserializer=helloworld__pb2.HelloReply.FromString, + ) class GreeterServicer(object): @@ -27,40 +27,53 @@ class GreeterServicer(object): def SayHello(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_GreeterServicer_to_server(servicer, server): rpc_method_handlers = { - 'SayHello': grpc.unary_unary_rpc_method_handler( - servicer.SayHello, - request_deserializer=helloworld__pb2.HelloRequest.FromString, - response_serializer=helloworld__pb2.HelloReply.SerializeToString, - ), + "SayHello": grpc.unary_unary_rpc_method_handler( + servicer.SayHello, + request_deserializer=helloworld__pb2.HelloRequest.FromString, + response_serializer=helloworld__pb2.HelloReply.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'helloworld.Greeter', rpc_method_handlers) + "helloworld.Greeter", rpc_method_handlers + ) server.add_generic_rpc_handlers((generic_handler,)) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. 
class Greeter(object): """Missing associated documentation comment in .proto file.""" @staticmethod - def SayHello(request, + def SayHello( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello', + "/helloworld.Greeter/SayHello", helloworld__pb2.HelloRequest.SerializeToString, helloworld__pb2.HelloReply.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index 1e42c94ec..3c95dca6f 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -1,8 +1,11 @@ from concurrent import futures import logging import threading +from typing import Callable, Any import grpc +from grpc_interceptor import ServerInterceptor +from grpc_interceptor.exceptions import GrpcException from grpc_reflection.v1alpha import reflection import helloworld_pb2 @@ -10,19 +13,42 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer): - def SayHello(self, request, context): - return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name) + return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name) + + +class LoggingInterceptor(ServerInterceptor): + def intercept( + self, + method: Callable, + request_or_iterator: Any, + context: grpc.ServicerContext, + method_name: str, + ) -> Any: + logging.info(f"got request on {method_name}") + + try: + return method(request_or_iterator, context) + except GrpcException as e: + logging.exception(f"error processing request") + context.set_code(e.status_code) + context.set_details(e.details) + raise def serve(): + executor = futures.ThreadPoolExecutor(max_workers=10) + for reflect in [True, False]: - server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + server = grpc.server( + executor, + interceptors=[LoggingInterceptor()], + ) helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) if reflect: service_names = ( - helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name, + helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name, reflection.SERVICE_NAME, ) reflection.enable_server_reflection(service_names, server) @@ -30,7 +56,7 @@ def serve(): else: port = 50051 - server.add_insecure_port(f'[::]:{port:d}') + server.add_insecure_port(f"[::]:{port:d}") logging.info("Starting...") server.start() @@ -38,6 +64,6 @@ def serve(): event.wait() -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.INFO) serve() diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index 8e64e91f3..9817ced79 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -11,7 +11,7 @@ grpc: &grpc_spec port: !int "{grpc_port}" timeout: 3 proto: - source: server + source: . 
stages: @@ -53,6 +53,34 @@ stages: --- +test_name: Test grpc message echo importing a module but its a path to a file + +includes: + - !include common.yaml + +_xfail: run + +grpc: + connect: + host: "{grpc_host}" + port: !int "{grpc_port}" + timeout: 3 + proto: + module: server/helloworld_pb2_grpc.py + +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + test_name: Test grpc connection without the 'connect' block includes: @@ -143,15 +171,13 @@ stages: --- -test_name: Test grpc compiling source +test_name: Test grpc compiling source, with a bad message includes: - !include common.yaml -grpc: - <<: *grpc_spec - proto: - source: . +grpc: *grpc_spec + _xfail: run stages: @@ -161,6 +187,7 @@ stages: service: helloworld.Greeter/SayHello body: name: "John" + A: klk grpc_response: status: "OK" body: diff --git a/pyproject.toml b/pyproject.toml index 332be72f9..93c32d118 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,6 +88,7 @@ dev = [ "twine", "wheel", "grpcio-tools", + "grpc-interceptor", # This has to be installed separately, otherwise you can't upload to pypi # "tbump@https://github.com/michaelboulton/tbump/archive/714ba8957a3c84b625608ceca39811ebe56229dc.zip", ] diff --git a/requirements.txt b/requirements.txt index 0967698a1..9ebe4398d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -349,6 +349,10 @@ googleapis-common-protos==1.58.0 \ # via # google-api-core # grpcio-status +grpc-interceptor==0.15.1 \ + --hash=sha256:1cc52c34b0d7ff34512fb7780742ecda37bf3caa18ecc5f33f09b4f74e96b276 \ + --hash=sha256:3efadbc9aead272ac7a360c75c4bd96233094c9a5192dbb51c6156246bd64ba0 + # via tavern (pyproject.toml) grpcio==1.51.1 \ --hash=sha256:094e64236253590d9d4075665c77b329d707b6fca864dd62b144255e199b4f87 \ --hash=sha256:0dc5354e38e5adf2498312f7241b14c7ce3484eefa0082db4297189dcbe272e6 \ @@ -396,6 +400,7 @@ grpcio==1.51.1 \ --hash=sha256:f96ace1540223f26fbe7c4ebbf8a98e3929a6aa0290c8033d12526847b291c0f \ --hash=sha256:fbdbe9a849854fe484c00823f45b7baab159bdd4a46075302281998cb8719df5 # via + # grpc-interceptor # grpcio-reflection # grpcio-status # grpcio-tools diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 95d36abee..78507ab46 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -13,6 +13,7 @@ import sys from google.protobuf import descriptor_pb2, json_format, message_factory from google.protobuf import symbol_database as _symbol_database +from google.protobuf.json_format import ParseError from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc from grpc_status import rpc_status @@ -40,7 +41,8 @@ def find_protoc() -> str: ) -def _generate_proto_import(source, output): +@functools.lru_cache +def _generate_proto_import(source: str, output: str): """Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. 
Does nothing if the output already exists and is newer than the input.""" @@ -57,25 +59,35 @@ def _generate_proto_import(source, output): protos = [ os.path.join(source, child) for child in os.listdir(source) - if child.rsplit(".", 1)[-1] == "proto" + if (not os.path.isdir(child)) and child.endswith(".proto") ] + if not protos: + raise exceptions.ProtoCompilerException( + f"No protos defined in {os.path.abspath(source)}" + ) + protoc = find_protoc() protoc_command = [protoc, "-I" + source, "--python_out=" + output] protoc_command.extend(protos) - call = subprocess.run(protoc_command) + call = subprocess.run(protoc_command, capture_output=True) if call.returncode != 0: - raise exceptions.ProtoCompilerException(call.stdout) + logger.error(f"Error calling '{protoc_command}'") + raise exceptions.ProtoCompilerException(call.stderr.decode("utf8")) + + logger.info(f"Generated module from protos: {protos}") def _import_grpc_module(output: str): output_path = [] - if os.path.exists(output): - output_path.append(output) + + py_module = output + ".py" + if os.path.exists(py_module): + output_path.append(py_module) else: - mod = import_module(output, output) + mod = import_module(output) output_path.extend(mod.__name__) sys.path.extend(output_path) @@ -117,19 +129,26 @@ def __init__(self, **kwargs): self.channels = {} self.sym_db = _symbol_database.Default() - proto_module = _proto_args.get("module", "proto") if proto_source := _proto_args.get("source"): - _generate_proto_import(proto_source, proto_module) + # TODO: Use a temp dir instead? + _generate_proto_import(proto_source, "proto") + + if proto_module := _proto_args.get("module"): + if proto_module.endswith(".py"): + raise exceptions.GRPCServiceException( + f"grpc module definitions should not end with .py, but got {proto_module}") - if proto_module: try: _import_grpc_module(proto_module) except ImportError as e: raise exceptions.GRPCServiceException( - "error importing gRPC modules") from e + "error importing gRPC modules" + ) from e - def _register_file_descriptor(self, - service_proto: grpc_reflection.v1alpha.reflection_pb2.FileDescriptorResponse): + def _register_file_descriptor( + self, + service_proto: grpc_reflection.v1alpha.reflection_pb2.FileDescriptorResponse, + ): for file_descriptor_proto in service_proto.file_descriptor_proto: proto = descriptor_pb2.FileDescriptorProto() proto.ParseFromString(file_descriptor_proto) @@ -203,7 +222,9 @@ def _make_call_request(self, host: str, full_service: str): return grpc_method, input_type if not self._attempt_reflection: - logger.error("could not find service and gRPC reflection disabled, cannot continue") + logger.error( + "could not find service and gRPC reflection disabled, cannot continue" + ) raise exceptions.GRPCServiceException( "Service {} was not registered for host {}".format(service, host) ) @@ -221,8 +242,7 @@ def _make_call_request(self, host: str, full_service: str): except AttributeError: status = rpc_status.from_call(rpc_error) if status is None: - logger.warning("Unknown error occurred in RPC call", - exc_info=True) + logger.warning("Unknown error occurred in RPC call", exc_info=True) else: code = status.code details = status.details @@ -263,7 +283,10 @@ def call( request = grpc_request() if body is not None: - request = json_format.ParseDict(body, request) + try: + request = json_format.ParseDict(body, request) + except ParseError as e: + raise exceptions.GRPCRequestException("error creating request from json body") from e logger.debug("Send request %s", request) From 
b2f72cd24c2335513eaf504f2b0da9041c590952 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:30:39 +0100 Subject: [PATCH 24/72] Check for instance where there's no proto files --- example/grpc/conftest.py | 9 +++++++ example/grpc/test_grpc.tavern.yaml | 32 +++++++++++++++++++++++ tavern/_core/pytest/item.py | 18 ++++++++++--- tavern/_core/schema/tests.jsonschema.yaml | 15 ++++++++--- tavern/_plugins/grpc/client.py | 7 +++-- 5 files changed, 72 insertions(+), 9 deletions(-) create mode 100644 example/grpc/conftest.py diff --git a/example/grpc/conftest.py b/example/grpc/conftest.py new file mode 100644 index 000000000..ce185f103 --- /dev/null +++ b/example/grpc/conftest.py @@ -0,0 +1,9 @@ +import tempfile + +import pytest + + +@pytest.fixture() +def make_temp_dir(): + with tempfile.TemporaryDirectory() as d: + yield d diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index 9817ced79..d1096833b 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -14,6 +14,38 @@ grpc: &grpc_spec source: . +stages: + - name: Echo text + grpc_request: + service: helloworld.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" +--- + +test_name: Test trying to compile a folder with no protos in it + +includes: + - !include common.yaml + +marks: + - usefixtures: + - make_temp_dir + +_xfail: + run: "No protos defined in" + +grpc: + connect: + host: "{grpc_host}" + port: !int "{grpc_port}" + timeout: 3 + proto: + source: "{make_temp_dir}" + stages: - name: Echo text grpc_request: diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index 2baff2fe2..ba096e2f7 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -63,7 +63,9 @@ def initialise_fixture_attrs(self) -> None: # _get_direct_parametrize_args checks parametrize arguments in Python # functions, but we don't care about that in Tavern. 
- self.session._fixturemanager._get_direct_parametrize_args = lambda _: [] # type: ignore + self.session._fixturemanager._get_direct_parametrize_args = ( + lambda _: [] + ) # type: ignore fixtureinfo = self.session._fixturemanager.getfixtureinfo( self, self.obj, type(self), funcargs=False @@ -216,8 +218,18 @@ def runtest(self) -> None: logger.info("xfailing test while verifying schema") self.add_marker(pytest.mark.xfail, True) raise - except exceptions.TavernException: - if xfail == "run": + except exceptions.TavernException as e: + if isinstance(xfail, dict): + if msg := xfail.get("run"): + if msg not in str(e): + raise Exception( + f"error message did not match: expected '{msg}', got '{str(e)}'" + ) + logger.info("xfailing test when running") + self.add_marker(pytest.mark.xfail, True) + else: + logger.warning("internal error checking 'xfail'") + elif xfail == "run": logger.info("xfailing test when running") self.add_marker(pytest.mark.xfail, True) raise diff --git a/tavern/_core/schema/tests.jsonschema.yaml b/tavern/_core/schema/tests.jsonschema.yaml index d829db16b..2053cb4a9 100644 --- a/tavern/_core/schema/tests.jsonschema.yaml +++ b/tavern/_core/schema/tests.jsonschema.yaml @@ -426,10 +426,17 @@ properties: description: Name of test _xfail: - type: string - enum: - - verify - - run + oneOf: + - type: string + enum: + - verify + - run + - type: object + properties: + verify: + type: string + run: + type: string marks: type: array diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 78507ab46..36e29daf2 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -136,7 +136,8 @@ def __init__(self, **kwargs): if proto_module := _proto_args.get("module"): if proto_module.endswith(".py"): raise exceptions.GRPCServiceException( - f"grpc module definitions should not end with .py, but got {proto_module}") + f"grpc module definitions should not end with .py, but got {proto_module}" + ) try: _import_grpc_module(proto_module) @@ -286,7 +287,9 @@ def call( try: request = json_format.ParseDict(body, request) except ParseError as e: - raise exceptions.GRPCRequestException("error creating request from json body") from e + raise exceptions.GRPCRequestException( + "error creating request from json body" + ) from e logger.debug("Send request %s", request) From ebf542a77566e17e060e9044ef5249fc338479a2 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:44:37 +0100 Subject: [PATCH 25/72] More docs --- docs/source/grpc.md | 41 +++++++++++++++++++++++++++++---- tavern/_plugins/grpc/request.py | 2 ++ 2 files changed, 39 insertions(+), 4 deletions(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 1b79d36ba..b3a36718a 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -1,12 +1,45 @@ # gRPC integration testing -## Setting connection parameters +## Responses -Testing using gRPC is similar to (mqtt)[mqtt.md], +The gRPC status code should be a string matching +a [gRPC status code](https://grpc.github.io/grpc/core/md_doc_statuscodes.html), for +example `OK`, `NOT_FOUND`, etc. -There are 4 different types of service resolution: +## Loading protobuf definitions -#### Specifying the proto definition +There are 3 different ways Tavern will try to load the appropriate proto definitions: + +#### Specifying the proto module to use + +Example: + +```yaml +grpc: + proto: + module: server/helloworld_pb2_grpc +``` + +This will attempt to import the given module and register all the protos in it. 
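To make "register all the protos" concrete: importing a generated `*_pb2` module adds its message and service descriptors to the default descriptor pool, which is what lets the client resolve a stage's `service` name later. A rough sketch of that lookup (simplified; not Tavern's exact code):

```python
# Simplified sketch, assuming the generated helloworld_pb2 module is importable.
from google.protobuf import symbol_database

import helloworld_pb2  # noqa: F401  (importing it registers the descriptors)

pool = symbol_database.Default().pool
method = pool.FindMethodByName("helloworld.Greeter.SayHello")
print(method.input_type.full_name)   # helloworld.HelloRequest
print(method.output_type.full_name)  # helloworld.HelloReply
```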
+ +#### Specifying a folder with some protos in + +Example: + +```yaml +grpc: + proto: + source: path/to/protos +``` + +This will attempt to find all files ending in `.proto` in the given folder and compile them using +the protoc compiler. It first checks the value of the environment variable `PROTOC` and use that, +and if not defined it will then look for a binary called `protoc` in the path. proto files are +compiled into a folder called `proto` under the same folder that the Tavern yaml is in. + +This has a few drawbacks, especially that if it can't find the protoc compiler at runtime it will +fail, but it might be useful if you're talking to a Java/Go/other server and you don't want to keep +some compiled Python gRPC stubs in your repository. #### Server reflection diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index 98691f5c2..18a59ed1d 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -19,6 +19,8 @@ def get_grpc_args(rspec, test_block_config): fspec = format_keys(rspec, test_block_config.variables) + # FIXME: Clarify 'json' and 'body' for grpc requests + # FIXME 2: also it should allow proto text format. Maybe binary. if "json" in rspec: if "body" in rspec: raise exceptions.BadSchemaError( From d72c425fcf8d6c348968997050b351da26bd24dd Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:45:57 +0100 Subject: [PATCH 26/72] Fix lint --- tavern/_core/pytest/item.py | 2 +- tavern/_plugins/grpc/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index ba096e2f7..353507757 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -224,7 +224,7 @@ def runtest(self) -> None: if msg not in str(e): raise Exception( f"error message did not match: expected '{msg}', got '{str(e)}'" - ) + ) from e logger.info("xfailing test when running") self.add_marker(pytest.mark.xfail, True) else: diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 36e29daf2..a481877b4 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -3,6 +3,7 @@ import os import pkgutil import subprocess +import sys import warnings from distutils.spawn import find_executable from importlib import import_module @@ -10,7 +11,6 @@ import grpc import grpc_reflection -import sys from google.protobuf import descriptor_pb2, json_format, message_factory from google.protobuf import symbol_database as _symbol_database from google.protobuf.json_format import ParseError From 7412530183c23692ab10ffb7340724046e5d8499 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:50:38 +0100 Subject: [PATCH 27/72] Fix more lint --- tavern/_core/pytest/item.py | 4 ++-- tavern/_plugins/grpc/client.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index 353507757..c1013dab0 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -63,8 +63,8 @@ def initialise_fixture_attrs(self) -> None: # _get_direct_parametrize_args checks parametrize arguments in Python # functions, but we don't care about that in Tavern. 
- self.session._fixturemanager._get_direct_parametrize_args = ( - lambda _: [] + self.session._fixturemanager._get_direct_parametrize_args = ( # type: ignore + lambda _: [] # type: ignore ) # type: ignore fixtureinfo = self.session._fixturemanager.getfixtureinfo( diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index a481877b4..429110633 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -176,8 +176,8 @@ def _get_grpc_service(self, channel, service: str, method: str): full_service_name = "{}.{}".format(service, method) try: grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) - input_type = message_factory.GetMessageClass(grpc_service.input_type) - output_type = message_factory.GetMessageClass(grpc_service.output_type) + input_type = message_factory.GetMessageClass(grpc_service.input_type) # type: ignore + output_type = message_factory.GetMessageClass(grpc_service.output_type) # type: ignore except KeyError: return None, None From d69c7334bdf412045fc6c4af64749335d65f20d9 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 15 Apr 2023 18:54:03 +0100 Subject: [PATCH 28/72] Run grpc in CI --- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f66ea1ecf..16ba3804e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -53,6 +53,8 @@ jobs: TOXCFG: tox-integration.ini - TOXENV: py3-advanced TOXCFG: tox-integration.ini + - TOXENV: py3-grpc + TOXCFG: tox-integration.ini services: docker: From 12eddf230b462a47fa00e900491504f4122c6e13 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 12:31:13 +0000 Subject: [PATCH 29/72] Clarify importing grpc modules --- example/grpc/Dockerfile | 2 +- example/grpc/test_grpc.tavern.yaml | 6 +- tavern/_core/__init__.py | 0 tavern/_plugins/grpc/client.py | 120 ++++++++++++++++++++++------- 4 files changed, 95 insertions(+), 33 deletions(-) delete mode 100644 tavern/_core/__init__.py diff --git a/example/grpc/Dockerfile b/example/grpc/Dockerfile index de17e2783..89fdf59bd 100644 --- a/example/grpc/Dockerfile +++ b/example/grpc/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-slim-buster +FROM python:3.10-slim-buster@sha256:37aa274c2d001f09b14828450d903c55f821c90f225fdfdd80c5180fcca77b3f RUN pip install grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 grpc-interceptor==0.15.1 diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index d1096833b..3f0c398f9 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -98,7 +98,7 @@ grpc: port: !int "{grpc_port}" timeout: 3 proto: - module: server/helloworld_pb2_grpc.py + module: server.helloworld_pb2_grpc.py stages: - name: Echo text @@ -120,7 +120,7 @@ includes: grpc: proto: - module: server/helloworld_pb2_grpc + module: server.helloworld_pb2_grpc stages: - name: Echo text @@ -143,7 +143,7 @@ includes: grpc: proto: - module: server/helloworld_pb2_grpc + module: server.helloworld_pb2_grpc _xfail: run diff --git a/tavern/_core/__init__.py b/tavern/_core/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 1ca6f8401..1561c0af6 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,18 +1,25 @@ import functools +import hashlib +import importlib import logging import os -import pkgutil +import string import subprocess import 
sys +import tempfile import warnings from distutils.spawn import find_executable -from importlib import import_module +from importlib.util import spec_from_file_location from typing import Mapping, Optional import grpc import grpc_reflection -from google.protobuf import descriptor_pb2, json_format, message_factory -from google.protobuf import symbol_database as _symbol_database +from google.protobuf import ( + descriptor_pb2, + json_format, + message_factory, + symbol_database, +) from google.protobuf.json_format import ParseError from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc from grpc_status import rpc_status @@ -42,20 +49,19 @@ def find_protoc() -> str: @functools.lru_cache -def _generate_proto_import(source: str, output: str): +def _generate_proto_import(source: str): """Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. Does nothing if the output already exists and is newer than - the input.""" + the input. + """ if not os.path.exists(source): raise exceptions.ProtoCompilerException( "Can't find required file: {}".format(source) ) - if not os.path.exists(output): - os.makedirs(output) + logger.info("Generating protos from %s...", source) - logger.info("Generating %s...", output) protos = [ os.path.join(source, child) for child in os.listdir(source) @@ -67,6 +73,20 @@ def _generate_proto_import(source: str, output: str): f"No protos defined in {os.path.abspath(source)}" ) + def sanitise(s): + """Do basic sanitisation for """ + return "".join(c for c in s if c in string.ascii_letters) + + output = os.path.join( + tempfile.gettempdir(), + "tavern_proto", + sanitise(protos[0]), + hashlib.md5("".join(protos).encode("utf8")).hexdigest(), + ) + + if not os.path.exists(output): + os.makedirs(output) + protoc = find_protoc() protoc_command = [protoc, "-I" + source, "--python_out=" + output] @@ -79,20 +99,61 @@ def _generate_proto_import(source: str, output: str): logger.info(f"Generated module from protos: {protos}") + # Invalidate caches so the module can be loaded + sys.path.append(output) + importlib.invalidate_caches() + _import_grpc_module(output) -def _import_grpc_module(output: str): - output_path = [] - py_module = output + ".py" - if os.path.exists(py_module): - output_path.append(py_module) - else: - mod = import_module(output) - output_path.extend(mod.__name__) +def _import_grpc_module(python_module_name: str): + """takes an expected python module name and tries to import the relevant + file, adding service to the symbol database. + """ - sys.path.extend(output_path) - for _, name, _ in pkgutil.iter_modules(output_path): - import_module("." 
+ name, package=output) + logger.debug("attempting to import %s", python_module_name) + + if python_module_name.endswith(".py"): + raise exceptions.GRPCServiceException( + f"grpc module definitions should not end with .py, but got {python_module_name}" + ) + + if python_module_name.startswith("."): + raise exceptions.GRPCServiceException( + f"relative imports for Python grpc modules not allowed (got {python_module_name})" + ) + + import_specs = [] + + # Check if its already on the python path + if (spec := importlib.util.find_spec(python_module_name)) is not None: + logger.debug(f"{python_module_name} on sys path already") + import_specs.append(spec) + + # See if the file exists + module_path = python_module_name.replace(".", "/") + ".py" + if os.path.exists(module_path): + logger.debug(f"{python_module_name} found in file") + spec = importlib.util.spec_from_file_location(python_module_name, module_path) + import_specs.append(spec) + + if os.path.isdir(python_module_name): + for s in os.listdir(python_module_name): + s = os.path.join(python_module_name, s) + if s.endswith(".py"): + logger.debug(f"found py file {s}") + # Guess a package name + spec = importlib.util.spec_from_file_location(s[:-3], s) + import_specs.append(spec) + + if not import_specs: + raise exceptions.GRPCServiceException( + f"could not determine how to import {python_module_name}" + ) + + for spec in import_specs: + mod = importlib.util.module_from_spec(spec) + logger.debug(f"loading from {spec.name}") + spec.loader.exec_module(mod) class GRPCClient: @@ -127,21 +188,20 @@ def __init__(self, **kwargs): self.tls = bool(_connect_args.get("tls", False)) self.channels = {} - self.sym_db = _symbol_database.Default() + # Using the default symbol database is a bit undesirable because it means that things being imported from + # previous tests will affect later ones which can mask bugs. But there isn't a nice way to have a + # self-contained symbol database, because then you need to transitively import all dependencies of protos and + # add them to the database. + self.sym_db = symbol_database.Default() if proto_source := _proto_args.get("source"): - # TODO: Use a temp dir instead? 
- _generate_proto_import(proto_source, "proto") + _generate_proto_import(proto_source) if proto_module := _proto_args.get("module"): - if proto_module.endswith(".py"): - raise exceptions.GRPCServiceException( - f"grpc module definitions should not end with .py, but got {proto_module}" - ) - try: _import_grpc_module(proto_module) - except ImportError as e: + except (ImportError, ModuleNotFoundError) as e: + logger.exception(f"could not import {proto_module}") raise exceptions.GRPCServiceException( "error importing gRPC modules" ) from e @@ -181,6 +241,8 @@ def _get_grpc_service(self, channel, service: str, method: str): except KeyError: return None, None + logger.critical(f"reflected info for {service}: {full_service_name}") + service_url = "/{}/{}".format(service, method) grpc_method = channel.unary_unary( service_url, From 5beded52a43fbb57dec8829be05d145717a3a1aa Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 12:37:14 +0000 Subject: [PATCH 30/72] Cleanu --- example/grpc/server/server.py | 2 +- example/grpc/test_grpc.tavern.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index 3c95dca6f..24adf5170 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -57,9 +57,9 @@ def serve(): port = 50051 server.add_insecure_port(f"[::]:{port:d}") - logging.info("Starting...") server.start() + logging.info("Starting grpc server") event = threading.Event() event.wait() diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index 3f0c398f9..5c61758aa 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -13,7 +13,6 @@ grpc: &grpc_spec proto: source: . - stages: - name: Echo text grpc_request: @@ -24,6 +23,7 @@ stages: status: "OK" body: message: "Hello, John!" 
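For reference, the dynamic call construction in the client above (building the method callable directly from the channel once the request and reply types are known) is the stock `grpc` unary-unary pattern rather than anything Tavern-specific. A minimal standalone sketch, where the address, service path and generated `helloworld_pb2` module are illustrative assumptions:

```python
import grpc

import helloworld_pb2  # hypothetical protoc-generated module

channel = grpc.insecure_channel("localhost:50051")

# Build the callable for "/<package>.<Service>/<Method>" by hand, mirroring what
# the plugin does after resolving the input/output types from the descriptor pool.
say_hello = channel.unary_unary(
    "/helloworld.Greeter/SayHello",
    request_serializer=helloworld_pb2.HelloRequest.SerializeToString,
    response_deserializer=helloworld_pb2.HelloReply.FromString,
)

# .future() returns a grpc.Future, the same kind of object the response plugin verifies.
future = say_hello.future(helloworld_pb2.HelloRequest(name="John"), timeout=3)
print(future.result().message)  # "Hello, John!"
```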
+ --- test_name: Test trying to compile a folder with no protos in it From bc37b31aef464498aae238b0b671f1cc2856537f Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 13:00:21 +0000 Subject: [PATCH 31/72] fix mypy --- tavern/_plugins/grpc/client.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 1561c0af6..4978fd90e 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -9,8 +9,8 @@ import tempfile import warnings from distutils.spawn import find_executable -from importlib.util import spec_from_file_location -from typing import Mapping, Optional +from importlib.machinery import ModuleSpec +from typing import List, Mapping, Optional import grpc import grpc_reflection @@ -74,14 +74,14 @@ def _generate_proto_import(source: str): ) def sanitise(s): - """Do basic sanitisation for """ + """Do basic sanitisation for""" return "".join(c for c in s if c in string.ascii_letters) output = os.path.join( tempfile.gettempdir(), "tavern_proto", sanitise(protos[0]), - hashlib.md5("".join(protos).encode("utf8")).hexdigest(), + hashlib.new("sha3_224", "".join(protos).encode("utf8")).hexdigest(), ) if not os.path.exists(output): @@ -122,7 +122,7 @@ def _import_grpc_module(python_module_name: str): f"relative imports for Python grpc modules not allowed (got {python_module_name})" ) - import_specs = [] + import_specs: List[ModuleSpec] = [] # Check if its already on the python path if (spec := importlib.util.find_spec(python_module_name)) is not None: @@ -133,8 +133,12 @@ def _import_grpc_module(python_module_name: str): module_path = python_module_name.replace(".", "/") + ".py" if os.path.exists(module_path): logger.debug(f"{python_module_name} found in file") - spec = importlib.util.spec_from_file_location(python_module_name, module_path) - import_specs.append(spec) + if ( + spec := importlib.util.spec_from_file_location( + python_module_name, module_path + ) + ) is not None: + import_specs.append(spec) if os.path.isdir(python_module_name): for s in os.listdir(python_module_name): @@ -142,8 +146,10 @@ def _import_grpc_module(python_module_name: str): if s.endswith(".py"): logger.debug(f"found py file {s}") # Guess a package name - spec = importlib.util.spec_from_file_location(s[:-3], s) - import_specs.append(spec) + if ( + spec := importlib.util.spec_from_file_location(s[:-3], s) + ) is not None: + import_specs.append(spec) if not import_specs: raise exceptions.GRPCServiceException( @@ -153,7 +159,8 @@ def _import_grpc_module(python_module_name: str): for spec in import_specs: mod = importlib.util.module_from_spec(spec) logger.debug(f"loading from {spec.name}") - spec.loader.exec_module(mod) + if spec.loader: + spec.loader.exec_module(mod) class GRPCClient: From d1496b7f37248c0b1cd90cd36308c5930ae10f61 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 13:13:57 +0000 Subject: [PATCH 32/72] grpc as optional --- pyproject.toml | 16 +++++++++------- tavern/_core/plugins.py | 2 ++ tavern/_core/pytest/config.py | 12 +++++++++--- tavern/_core/pytest/file.py | 2 +- tavern/_core/pytest/hooks.py | 2 +- tavern/_core/pytest/item.py | 2 +- tavern/_core/pytest/util.py | 1 - tavern/_core/schema/extensions.py | 4 +++- tavern/_core/schema/files.py | 4 ++-- tavern/_core/schema/jsonschema.py | 10 +++++++++- tox-integration.ini | 2 ++ 11 files changed, 39 insertions(+), 18 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 
81ff4f964..222e58e21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,13 +34,6 @@ dependencies = [ "python-box>=6,<7", "requests>=2.22.0,<3", "stevedore>=4,<5", - - "grpcio", - "grpcio-reflection", - "grpcio-status", - "google-api-python-client", - "protobuf", - "proto-plus" ] requires-python = ">=3.8" @@ -61,6 +54,15 @@ Documentation = "https://tavern.readthedocs.io/en/latest/" Source = "https://github.com/taverntesting/tavern" [project.optional-dependencies] +grpc = [ + "grpcio", + "grpcio-reflection", + "grpcio-status", + "google-api-python-client", + "protobuf", + "proto-plus", +] + dev = [ "Faker", "allure-pytest", diff --git a/tavern/_core/plugins.py b/tavern/_core/plugins.py index bfe5fde40..8ac2dc0ad 100644 --- a/tavern/_core/plugins.py +++ b/tavern/_core/plugins.py @@ -105,6 +105,8 @@ def enabled(current_backend, ext): ) for backend in test_block_config.backends(): + logger.debug("loading backend for %s", backend) + namespace = "tavern_{}".format(backend) manager = stevedore.EnabledExtensionManager( diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py index 62d498a70..6f2407ed7 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -1,5 +1,6 @@ import copy import dataclasses +import importlib from typing import Any, List from tavern._core.strict_util import StrictLevel @@ -49,6 +50,11 @@ def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": @staticmethod def backends() -> List[str]: - # TODO: This is here in case in future we want to be able to turn some of these - # on or off - return ["http", "mqtt", "grpc"] + available_backends = ["http"] + + if importlib.util.find_spec("paho.mqtt"): + available_backends.append("mqtt") + if importlib.util.find_spec("grpc"): + available_backends.append("grpc") + + return available_backends diff --git a/tavern/_core/pytest/file.py b/tavern/_core/pytest/file.py index b014a58fd..0edc0ac7a 100644 --- a/tavern/_core/pytest/file.py +++ b/tavern/_core/pytest/file.py @@ -4,10 +4,10 @@ import logging from typing import Dict, Iterator, List, Mapping -import pytest import yaml from box import Box +import pytest from tavern._core import exceptions from tavern._core.dict_util import deep_dict_merge, format_keys, get_tavern_box from tavern._core.extfunctions import get_wrapped_create_function, is_ext_function diff --git a/tavern/_core/pytest/hooks.py b/tavern/_core/pytest/hooks.py index 9a83ee9e9..1405888ec 100644 --- a/tavern/_core/pytest/hooks.py +++ b/tavern/_core/pytest/hooks.py @@ -5,9 +5,9 @@ import re from textwrap import dedent -import pytest import yaml +import pytest from tavern._core import exceptions from .util import add_ini_options, add_parser_options, get_option_generic diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index e48f5c605..314bace30 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -3,11 +3,11 @@ from typing import Optional, Tuple import attr -import pytest import yaml from _pytest._code.code import ExceptionInfo from _pytest.nodes import Node +import pytest from tavern._core import exceptions from tavern._core.loader import error_on_empty_scalar from tavern._core.plugins import load_plugins diff --git a/tavern/_core/pytest/util.py b/tavern/_core/pytest/util.py index 8418b10c5..7448505dd 100644 --- a/tavern/_core/pytest/util.py +++ b/tavern/_core/pytest/util.py @@ -3,7 +3,6 @@ from typing import Any, Dict import pytest - from tavern._core.dict_util import format_keys, get_tavern_box from tavern._core.general import 
load_global_config from tavern._core.pytest.config import TavernInternalConfig, TestConfig diff --git a/tavern/_core/schema/extensions.py b/tavern/_core/schema/extensions.py index 7bc2756bf..61eee0e57 100644 --- a/tavern/_core/schema/extensions.py +++ b/tavern/_core/schema/extensions.py @@ -2,7 +2,6 @@ import re from typing import Union -from grpc import StatusCode from pykwalify.types import is_bool, is_float, is_int from tavern._core import exceptions @@ -151,6 +150,9 @@ def validate_grpc_status_is_valid_or_list_of_names(value, rule_obj, path): def is_grpc_status(value): value = value.upper() + + from grpc import StatusCode + for status in StatusCode: if status.name == value: return True diff --git a/tavern/_core/schema/files.py b/tavern/_core/schema/files.py index a5bb4af67..33913023c 100644 --- a/tavern/_core/schema/files.py +++ b/tavern/_core/schema/files.py @@ -3,7 +3,7 @@ import logging import os import tempfile -from typing import Dict +from typing import Dict, Mapping import pykwalify import yaml @@ -129,7 +129,7 @@ def wrapfile(to_wrap): os.remove(wrapped_tmp.name) -def verify_tests(test_spec, with_plugins: bool = True) -> None: +def verify_tests(test_spec: Mapping, with_plugins: bool = True) -> None: """Verify that a specific test block is correct Todo: diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py index 595178436..a1b3b6b00 100644 --- a/tavern/_core/schema/jsonschema.py +++ b/tavern/_core/schema/jsonschema.py @@ -1,10 +1,13 @@ +import importlib import logging import re +from typing import Mapping import jsonschema from jsonschema import Draft7Validator, ValidationError from jsonschema.validators import extend +from tavern._core import exceptions from tavern._core.dict_util import recurse_access_key from tavern._core.exceptions import BadSchemaError from tavern._core.loader import ( @@ -103,7 +106,7 @@ def oneOf(validator, oneOf, instance, schema): ) -def verify_jsonschema(to_verify, schema) -> None: +def verify_jsonschema(to_verify: Mapping, schema) -> None: """Verify a generic file against a given jsonschema Args: @@ -116,6 +119,11 @@ def verify_jsonschema(to_verify, schema) -> None: validator = CustomValidator(schema) + if "grpc" in to_verify and not importlib.util.find_spec("grpc"): + raise exceptions.BadSchemaError( + "Tried to use grpc connection string, but grpc was not installed. 
Reinstall Tavern with the grpc extra like `pip install tavern[grpc]`" + ) + try: validator.validate(to_verify) except jsonschema.ValidationError as e: diff --git a/tox-integration.ini b/tox-integration.ini index eeb490fdd..c0d847f7d 100644 --- a/tox-integration.ini +++ b/tox-integration.ini @@ -29,6 +29,8 @@ deps = pytest-cov colorlog mqtt: fluent-logger +extras = + grpc: grpc commands = ; docker compose stop ; docker compose build From abee27ab85e938a18eacbd4396808530cb60355c Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 13:21:49 +0000 Subject: [PATCH 33/72] Add warning --- scripts/smoke.bash | 8 +------- tavern/_core/pytest/item.py | 7 +++++-- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/scripts/smoke.bash b/scripts/smoke.bash index ea4be2e46..b702447ef 100755 --- a/scripts/smoke.bash +++ b/scripts/smoke.bash @@ -14,10 +14,4 @@ tox --parallel -c tox.ini \ -e py3mypy tox -c tox-integration.ini \ - -e py3-generic \ - -e py3-mqtt \ - -e py3-grpc \ - -e py3-advanced \ - -e py3-cookies \ - -e py3-components \ - -e py3-hooks + -e py3-generic,py3-grpc,py3-mqtt diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index 314bace30..7db3f01e0 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -1,6 +1,6 @@ import logging import pathlib -from typing import Optional, Tuple +from typing import MutableMapping, Optional, Tuple import attr import yaml @@ -39,8 +39,11 @@ class YamlItem(pytest.Item): _patched_yaml = False def __init__( - self, *, name: str, parent, spec, path: pathlib.Path, **kwargs + self, *, name: str, parent, spec: MutableMapping, path: pathlib.Path, **kwargs ) -> None: + if "grpc" in spec: + logger.warning("Tavern grpc support is in an experimental stage") + super().__init__(name, parent, **kwargs) self.path = path self.spec = spec From 987b628783d05cd56e33db7088b00d34f45ee607 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sun, 29 Oct 2023 14:05:51 +0000 Subject: [PATCH 34/72] Fix tests --- scripts/smoke.bash | 3 +-- tavern/_core/__init__.py | 0 tavern/_core/pytest/file.py | 2 +- tavern/_core/pytest/hooks.py | 2 +- tavern/_core/pytest/item.py | 2 +- tavern/_core/pytest/util.py | 1 + tox.ini | 1 + 7 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 tavern/_core/__init__.py diff --git a/scripts/smoke.bash b/scripts/smoke.bash index b702447ef..f68c6e0ba 100755 --- a/scripts/smoke.bash +++ b/scripts/smoke.bash @@ -10,8 +10,7 @@ tox --parallel -c tox.ini \ -e py3check tox --parallel -c tox.ini \ - -e py3 \ - -e py3mypy + -e py3,py3mypy tox -c tox-integration.ini \ -e py3-generic,py3-grpc,py3-mqtt diff --git a/tavern/_core/__init__.py b/tavern/_core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tavern/_core/pytest/file.py b/tavern/_core/pytest/file.py index 0edc0ac7a..b014a58fd 100644 --- a/tavern/_core/pytest/file.py +++ b/tavern/_core/pytest/file.py @@ -4,10 +4,10 @@ import logging from typing import Dict, Iterator, List, Mapping +import pytest import yaml from box import Box -import pytest from tavern._core import exceptions from tavern._core.dict_util import deep_dict_merge, format_keys, get_tavern_box from tavern._core.extfunctions import get_wrapped_create_function, is_ext_function diff --git a/tavern/_core/pytest/hooks.py b/tavern/_core/pytest/hooks.py index 1405888ec..9a83ee9e9 100644 --- a/tavern/_core/pytest/hooks.py +++ b/tavern/_core/pytest/hooks.py @@ -5,9 +5,9 @@ import re from textwrap import dedent +import pytest import yaml -import pytest from 
tavern._core import exceptions from .util import add_ini_options, add_parser_options, get_option_generic diff --git a/tavern/_core/pytest/item.py b/tavern/_core/pytest/item.py index 7db3f01e0..28636f4a9 100644 --- a/tavern/_core/pytest/item.py +++ b/tavern/_core/pytest/item.py @@ -3,11 +3,11 @@ from typing import MutableMapping, Optional, Tuple import attr +import pytest import yaml from _pytest._code.code import ExceptionInfo from _pytest.nodes import Node -import pytest from tavern._core import exceptions from tavern._core.loader import error_on_empty_scalar from tavern._core.plugins import load_plugins diff --git a/tavern/_core/pytest/util.py b/tavern/_core/pytest/util.py index 7448505dd..8418b10c5 100644 --- a/tavern/_core/pytest/util.py +++ b/tavern/_core/pytest/util.py @@ -3,6 +3,7 @@ from typing import Any, Dict import pytest + from tavern._core.dict_util import format_keys, get_tavern_box from tavern._core.general import load_global_config from tavern._core.pytest.config import TavernInternalConfig, TestConfig diff --git a/tox.ini b/tox.ini index 91a975e69..241bf93d2 100644 --- a/tox.ini +++ b/tox.ini @@ -11,6 +11,7 @@ allowlist_externals = install_command = python -m pip install {opts} {packages} -c constraints.txt extras = dev + grpc commands = {envbindir}/python -m pytest --cov-report term-missing --cov tavern {posargs} From cfdd1c377fde52cf73894aae23e9d72b4eb4812f Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 15:50:08 +0000 Subject: [PATCH 35/72] Fix missing --- tavern/_core/pytest/config.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py index 6f2407ed7..c4062881e 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -1,6 +1,6 @@ import copy import dataclasses -import importlib +from importlib.util import find_spec from typing import Any, List from tavern._core.strict_util import StrictLevel @@ -52,9 +52,17 @@ def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": def backends() -> List[str]: available_backends = ["http"] - if importlib.util.find_spec("paho.mqtt"): + def has_module(module): + try: + find_spec(module) + except ModuleNotFoundError: + return False + else: + return True + + if has_module("paho.mqtt"): available_backends.append("mqtt") - if importlib.util.find_spec("grpc"): + if has_module("grpc"): available_backends.append("grpc") return available_backends From 5a563681e2defeb7e858822a3d1b4ec949ef2624 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 15:53:36 +0000 Subject: [PATCH 36/72] f-string --- tavern/_plugins/grpc/client.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 4978fd90e..950b57687 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -56,9 +56,7 @@ def _generate_proto_import(source: str): """ if not os.path.exists(source): - raise exceptions.ProtoCompilerException( - "Can't find required file: {}".format(source) - ) + raise exceptions.ProtoCompilerException(f"Can't find required file: {source}") logger.info("Generating protos from %s...", source) From 17a83d3ff741907c855ad7f95050628c6c16d5ba Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:01:06 +0000 Subject: [PATCH 37/72] fix backend check --- tavern/_core/pytest/config.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tavern/_core/pytest/config.py 
b/tavern/_core/pytest/config.py index c4062881e..57f3386b8 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -1,10 +1,13 @@ import copy import dataclasses +import logging from importlib.util import find_spec from typing import Any, List from tavern._core.strict_util import StrictLevel +logger = logging.getLogger(__name__) + @dataclasses.dataclass(frozen=True) class TavernInternalConfig: @@ -54,15 +57,15 @@ def backends() -> List[str]: def has_module(module): try: - find_spec(module) + return find_spec(module) is not None except ModuleNotFoundError: return False - else: - return True if has_module("paho.mqtt"): available_backends.append("mqtt") if has_module("grpc"): available_backends.append("grpc") + logger.debug(f"available request backends: {available_backends}") + return available_backends From 522b1824d11baeb0e1058ab076b6b617e3c0668f Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:01:50 +0000 Subject: [PATCH 38/72] Annotate --- tavern/_core/pytest/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py index 57f3386b8..e3a39e1e2 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -55,7 +55,7 @@ def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": def backends() -> List[str]: available_backends = ["http"] - def has_module(module): + def has_module(module: str) -> bool: try: return find_spec(module) is not None except ModuleNotFoundError: From c21a55275e07f361b8da399a8fe8eabce54a01af Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:06:59 +0000 Subject: [PATCH 39/72] Add a couple more precommit hooks --- .pre-commit-config.yaml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b00190726..fa56b22c2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,20 @@ repos: + - repo: https://github.com/rhysd/actionlint + rev: v1.6.26 + hooks: + - id: actionlint + args: ["-shellcheck="] + - repo: https://github.com/hadialqattan/pycln + rev: v2.4.0 + hooks: + - id: pycln - repo: https://github.com/ambv/black - rev: 23.7.0 + rev: 23.12.1 hooks: - id: black files: "(tavern|tests)" - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.280" + rev: "v0.1.11" hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 5d8cf6ea9138b1d2b995421335ffcd40c188e18c Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:08:43 +0000 Subject: [PATCH 40/72] Fix new ruff warnings --- tavern/_plugins/grpc/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 950b57687..ddbe20e04 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -90,7 +90,7 @@ def sanitise(s): protoc_command = [protoc, "-I" + source, "--python_out=" + output] protoc_command.extend(protos) - call = subprocess.run(protoc_command, capture_output=True) # noqa: S603 + call = subprocess.run(protoc_command, capture_output=True, check=False) # noqa if call.returncode != 0: logger.error(f"Error calling '{protoc_command}'") raise exceptions.ProtoCompilerException(call.stderr.decode("utf8")) @@ -177,7 +177,7 @@ def __init__(self, **kwargs): check_expected_keys(expected_blocks["connect"], _connect_args) metadata = kwargs.pop("metadata", {}) - self._metadata = [(key, value) for key, value in 
metadata.items()] + self._metadata = list(metadata.items()) _proto_args = kwargs.pop("proto", {}) check_expected_keys(expected_blocks["proto"], _proto_args) From d61dc7f60a86ddc7cd8b634b452ff7a227f7b2a9 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:08:52 +0000 Subject: [PATCH 41/72] Setup protoc in grpc tests --- .github/workflows/main.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 53fddc11b..8170918f5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -125,6 +125,12 @@ jobs: with: python-version: "3.11" + - name: Install Protoc + if: ${{ contains(matrix.TOXENV, 'grpc') }} + uses: arduino/setup-protoc@v2 + with: + version: "23.x" + - name: install deps run: | pip install tox -c constraints.txt From 6d8dbe1873e788356932aa99661347e81300f1ca Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:19:12 +0000 Subject: [PATCH 42/72] Try updating dependencies --- example/grpc/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/grpc/Dockerfile b/example/grpc/Dockerfile index 89fdf59bd..af42899ef 100644 --- a/example/grpc/Dockerfile +++ b/example/grpc/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10-slim-buster@sha256:37aa274c2d001f09b14828450d903c55f821c90f225fdfdd80c5180fcca77b3f -RUN pip install grpcio==1.51.1 grpcio-reflection==1.51.1 grpcio-status==1.51.1 grpc-interceptor==0.15.1 +RUN pip install grpcio==1.59.0 grpcio-reflection==1.59.0 grpcio-status==1.59.0 grpc-interceptor==0.15.3 COPY server/server.py / COPY server/helloworld_pb2.py / From d33f2e4708d49c6b17084993f6b6f981fb63aece Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:36:06 +0000 Subject: [PATCH 43/72] Annotations --- tavern/_plugins/grpc/client.py | 17 +++++++++-------- tavern/_plugins/grpc/response.py | 5 +++-- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index ddbe20e04..5ec76e2c6 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -10,7 +10,7 @@ import warnings from distutils.spawn import find_executable from importlib.machinery import ModuleSpec -from typing import List, Mapping, Optional +from typing import Any, List, Mapping, Optional, Tuple, Union import grpc import grpc_reflection @@ -218,7 +218,6 @@ def _register_file_descriptor( for file_descriptor_proto in service_proto.file_descriptor_proto: proto = descriptor_pb2.FileDescriptorProto() proto.ParseFromString(file_descriptor_proto) - logger.critical("ksdo") self.sym_db.pool.Add(proto) def _get_reflection_info( @@ -237,8 +236,10 @@ def _get_reflection_info( for response in ref_response: self._register_file_descriptor(response.file_descriptor_response) - def _get_grpc_service(self, channel, service: str, method: str): - full_service_name = "{}.{}".format(service, method) + def _get_grpc_service( + self, channel: grpc.Channel, service: str, method: str + ) -> Union[Tuple[None, None], Tuple[Any, Any]]: + full_service_name = f"{service}.{method}" try: grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) input_type = message_factory.GetMessageClass(grpc_service.input_type) # type: ignore @@ -248,7 +249,7 @@ def _get_grpc_service(self, channel, service: str, method: str): logger.critical(f"reflected info for {service}: {full_service_name}") - service_url = "/{}/{}".format(service, method) + service_url = f"/{service}/{method}" grpc_method = 
channel.unary_unary( service_url, request_serializer=input_type.SerializeToString, @@ -294,7 +295,7 @@ def _make_call_request(self, host: str, full_service: str): "could not find service and gRPC reflection disabled, cannot continue" ) raise exceptions.GRPCServiceException( - "Service {} was not registered for host {}".format(service, host) + f"Service {service} was not registered for host {host}" ) logger.info("service not registered, doing reflection from server") @@ -337,7 +338,7 @@ def call( host: Optional[str] = None, body: Optional[Mapping] = None, timeout: Optional[int] = None, - ): + ) -> grpc.Future: if host is None: host = self.default_host if timeout is None: @@ -346,7 +347,7 @@ def call( grpc_call, grpc_request = self._make_call_request(host, service) if grpc_call is None or grpc_request is None: raise exceptions.GRPCServiceException( - "Service {} was not found on host {}".format(service, host) + f"Service {service} was not found on host {host}" ) request = grpc_request() diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index bbbea0702..c1de19a06 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,6 +1,7 @@ import logging -from typing import Mapping +from typing import Mapping, Union +import grpc from google.protobuf import json_format from grpc import StatusCode @@ -50,7 +51,7 @@ def _validate_block(self, blockname: str, block: Mapping): block_strictness = test_strictness.option_for(blockname) self.recurse_check_key_match(expected_block, block, blockname, block_strictness) - def verify(self, response): + def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: # Get any keys to save saved = {} verify_status = [StatusCode.OK.name] From d44a04383909b7ec30e7f42cdfcd5de022f4c802 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 6 Jan 2024 16:59:14 +0000 Subject: [PATCH 44/72] fix port for tests --- example/grpc/common.yaml | 2 +- example/grpc/server/server.py | 2 +- tavern/_plugins/grpc/client.py | 6 ++++++ tavern/_plugins/grpc/response.py | 3 +++ 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/example/grpc/common.yaml b/example/grpc/common.yaml index cf010c48e..91ec5f033 100644 --- a/example/grpc/common.yaml +++ b/example/grpc/common.yaml @@ -3,6 +3,6 @@ name: test includes description: used for testing against local server variables: - grpc_host: localhost + grpc_host: "[::1]" grpc_port: 50051 grpc_reflecting_port: 50052 diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index 24adf5170..f66b91e9c 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -56,7 +56,7 @@ def serve(): else: port = 50051 - server.add_insecure_port(f"[::]:{port:d}") + server.add_insecure_port(f"0.0.0.0:{port:d}") server.start() logging.info("Starting grpc server") diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 5ec76e2c6..095895ca4 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -340,7 +340,13 @@ def call( timeout: Optional[int] = None, ) -> grpc.Future: if host is None: + if getattr(self, "default_host", None) is None: + raise exceptions.GRPCRequestException( + "no host specified in request and no default host in settings" + ) + host = self.default_host + if timeout is None: timeout = self.timeout diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index c1de19a06..e1dcd06f9 100644 --- a/tavern/_plugins/grpc/response.py +++ 
b/tavern/_plugins/grpc/response.py @@ -52,6 +52,9 @@ def _validate_block(self, blockname: str, block: Mapping): self.recurse_check_key_match(expected_block, block, blockname, block_strictness) def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: + logger.debug(f"grpc status code: {response.code()}") + logger.debug(f"grpc details: {response.details()}") + # Get any keys to save saved = {} verify_status = [StatusCode.OK.name] From ce99cc529dc1ee78fceded5c0772ea20abac6288 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Mon, 8 Jan 2024 08:45:05 +0000 Subject: [PATCH 45/72] Missing host spec --- example/grpc/common.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/grpc/common.yaml b/example/grpc/common.yaml index 91ec5f033..cf010c48e 100644 --- a/example/grpc/common.yaml +++ b/example/grpc/common.yaml @@ -3,6 +3,6 @@ name: test includes description: used for testing against local server variables: - grpc_host: "[::1]" + grpc_host: localhost grpc_port: 50051 grpc_reflecting_port: 50052 From 1b2dd97eee511e9b6e43013f9479362391817d3a Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Tue, 16 Jan 2024 11:07:26 +0000 Subject: [PATCH 46/72] Format with ruff Add v2/v3 for other tests fix reflection Options --- .pre-commit-config.yaml | 6 +- example/advanced/server.py | 1 - example/cookies/server.py | 1 - example/grpc/Dockerfile | 21 +- example/grpc/conftest.py | 11 + example/grpc/docker-compose.yaml | 4 +- ....proto => helloworld_v1_precompiled.proto} | 4 +- example/grpc/helloworld_v1_precompiled_pb2.py | 32 +++ ....pyi => helloworld_v1_precompiled_pb2.pyi} | 14 +- ... => helloworld_v1_precompiled_pb2_grpc.py} | 20 +- example/grpc/helloworld_v2_compiled.proto | 15 ++ example/grpc/helloworld_v3_reflected.proto | 15 ++ example/grpc/regenerate.sh | 3 +- example/grpc/server/helloworld_pb2.py | 31 --- example/grpc/server/server.py | 71 ++++--- example/grpc/test_grpc.tavern.yaml | 198 +++++++++++++----- example/mqtt/server.py | 3 +- pyproject.toml | 4 + scripts/smoke.bash | 2 +- tavern/_core/dict_util.py | 12 +- tavern/_core/exceptions.py | 7 +- tavern/_plugins/grpc/client.py | 56 +++-- tavern/_plugins/grpc/jsonschema.yaml | 12 +- 23 files changed, 371 insertions(+), 172 deletions(-) rename example/grpc/{helloworld.proto => helloworld_v1_precompiled.proto} (76%) create mode 100644 example/grpc/helloworld_v1_precompiled_pb2.py rename example/grpc/{server/helloworld_pb2.pyi => helloworld_v1_precompiled_pb2.pyi} (82%) rename example/grpc/{server/helloworld_pb2_grpc.py => helloworld_v1_precompiled_pb2_grpc.py} (71%) create mode 100644 example/grpc/helloworld_v2_compiled.proto create mode 100644 example/grpc/helloworld_v3_reflected.proto delete mode 100644 example/grpc/server/helloworld_pb2.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa56b22c2..547242451 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,14 +8,10 @@ repos: rev: v2.4.0 hooks: - id: pycln - - repo: https://github.com/ambv/black - rev: 23.12.1 - hooks: - - id: black - files: "(tavern|tests)" - repo: https://github.com/charliermarsh/ruff-pre-commit rev: "v0.1.11" hooks: + - id: ruff-format - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/mirrors-prettier diff --git a/example/advanced/server.py b/example/advanced/server.py index 726cd99e2..f34756281 100644 --- a/example/advanced/server.py +++ b/example/advanced/server.py @@ -25,7 +25,6 @@ def get_db(): "CREATE TABLE numbers_table (name TEXT NOT NULL, number 
INTEGER NOT NULL)" ) - return db diff --git a/example/cookies/server.py b/example/cookies/server.py index 9026f1381..72613ea85 100644 --- a/example/cookies/server.py +++ b/example/cookies/server.py @@ -23,7 +23,6 @@ def get_db(): "CREATE TABLE numbers_table (name TEXT NOT NULL, number INTEGER NOT NULL)" ) - return db diff --git a/example/grpc/Dockerfile b/example/grpc/Dockerfile index af42899ef..f3f5d7806 100644 --- a/example/grpc/Dockerfile +++ b/example/grpc/Dockerfile @@ -1,9 +1,22 @@ -FROM python:3.10-slim-buster@sha256:37aa274c2d001f09b14828450d903c55f821c90f225fdfdd80c5180fcca77b3f +FROM python:3.12-slim-bookworm@sha256:ee9a59cfdad294560241c9a8c8e40034f165feb4af7088c1479c2cdd84aafbed AS base -RUN pip install grpcio==1.59.0 grpcio-reflection==1.59.0 grpcio-status==1.59.0 grpc-interceptor==0.15.3 +RUN pip install grpcio-tools==1.59.0 grpcio==1.59.0 grpcio-reflection==1.59.0 grpcio-status==1.59.0 grpc-interceptor==0.15.3 + + +FROM base AS protos + +RUN apt-get update && apt-get install protobuf-compiler --yes --no-install-recommends && apt-get clean + +COPY *.proto . + +RUN python3 -m grpc_tools.protoc --proto_path=$(pwd) --pyi_out=$(pwd) --python_out=$(pwd) --grpc_python_out=$(pwd) *.proto + + + +FROM base + +COPY --from=protos /*.py / COPY server/server.py / -COPY server/helloworld_pb2.py / -COPY server/helloworld_pb2_grpc.py / CMD ["python3", "/server.py"] diff --git a/example/grpc/conftest.py b/example/grpc/conftest.py index ce185f103..a53434e2b 100644 --- a/example/grpc/conftest.py +++ b/example/grpc/conftest.py @@ -1,3 +1,5 @@ +import os.path +import shutil import tempfile import pytest @@ -7,3 +9,12 @@ def make_temp_dir(): with tempfile.TemporaryDirectory() as d: yield d + + +@pytest.fixture(autouse=True, scope="session") +def single_compiled_proto_for_test(): + with tempfile.TemporaryDirectory() as d: + proto_filename = "helloworld_v2_compiled.proto" + dst = os.path.join(d, proto_filename) + shutil.copy(proto_filename, dst) + yield dst diff --git a/example/grpc/docker-compose.yaml b/example/grpc/docker-compose.yaml index 590cfb27f..6acebbd03 100644 --- a/example/grpc/docker-compose.yaml +++ b/example/grpc/docker-compose.yaml @@ -7,6 +7,6 @@ services: context: . dockerfile: Dockerfile ports: - - "50051:50051" - - "50052:50052" + - "127.0.0.1:50051:50051/tcp" + - "127.0.0.1:50052:50052/tcp" stop_grace_period: "1s" diff --git a/example/grpc/helloworld.proto b/example/grpc/helloworld_v1_precompiled.proto similarity index 76% rename from example/grpc/helloworld.proto rename to example/grpc/helloworld_v1_precompiled.proto index eaa42167e..55c6a96f7 100644 --- a/example/grpc/helloworld.proto +++ b/example/grpc/helloworld_v1_precompiled.proto @@ -1,8 +1,6 @@ syntax = "proto3"; -package helloworld; - -option py_generic_services = true; +package helloworld.v1; message HelloRequest { string name = 1; diff --git a/example/grpc/helloworld_v1_precompiled_pb2.py b/example/grpc/helloworld_v1_precompiled_pb2.py new file mode 100644 index 000000000..c892f3b45 --- /dev/null +++ b/example/grpc/helloworld_v1_precompiled_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: helloworld_v1_precompiled.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1fhelloworld_v1_precompiled.proto\x12\rhelloworld.v1"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2O\n\x07Greeter\x12\x44\n\x08SayHello\x12\x1b.helloworld.v1.HelloRequest\x1a\x19.helloworld.v1.HelloReply"\x00\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "helloworld_v1_precompiled_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals["_HELLOREQUEST"]._serialized_start = 50 + _globals["_HELLOREQUEST"]._serialized_end = 78 + _globals["_HELLOREPLY"]._serialized_start = 80 + _globals["_HELLOREPLY"]._serialized_end = 109 + _globals["_GREETER"]._serialized_start = 111 + _globals["_GREETER"]._serialized_end = 190 +# @@protoc_insertion_point(module_scope) diff --git a/example/grpc/server/helloworld_pb2.pyi b/example/grpc/helloworld_v1_precompiled_pb2.pyi similarity index 82% rename from example/grpc/server/helloworld_pb2.pyi rename to example/grpc/helloworld_v1_precompiled_pb2.pyi index 6007e6c5c..5632989e6 100644 --- a/example/grpc/server/helloworld_pb2.pyi +++ b/example/grpc/helloworld_v1_precompiled_pb2.pyi @@ -1,21 +1,17 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from google.protobuf import service as _service from typing import ClassVar as _ClassVar, Optional as _Optional DESCRIPTOR: _descriptor.FileDescriptor -class HelloReply(_message.Message): - __slots__ = ["message"] - MESSAGE_FIELD_NUMBER: _ClassVar[int] - message: str - def __init__(self, message: _Optional[str] = ...) -> None: ... - class HelloRequest(_message.Message): __slots__ = ["name"] NAME_FIELD_NUMBER: _ClassVar[int] name: str def __init__(self, name: _Optional[str] = ...) -> None: ... -class Greeter(_service.service): ... -class Greeter_Stub(Greeter): ... +class HelloReply(_message.Message): + __slots__ = ["message"] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + message: str + def __init__(self, message: _Optional[str] = ...) -> None: ... diff --git a/example/grpc/server/helloworld_pb2_grpc.py b/example/grpc/helloworld_v1_precompiled_pb2_grpc.py similarity index 71% rename from example/grpc/server/helloworld_pb2_grpc.py rename to example/grpc/helloworld_v1_precompiled_pb2_grpc.py index b95dee929..6f949ac87 100644 --- a/example/grpc/server/helloworld_pb2_grpc.py +++ b/example/grpc/helloworld_v1_precompiled_pb2_grpc.py @@ -2,7 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -import helloworld_pb2 as helloworld__pb2 +import helloworld_v1_precompiled_pb2 as helloworld__v1__precompiled__pb2 class GreeterStub(object): @@ -15,9 +15,9 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.SayHello = channel.unary_unary( - "/helloworld.Greeter/SayHello", - request_serializer=helloworld__pb2.HelloRequest.SerializeToString, - response_deserializer=helloworld__pb2.HelloReply.FromString, + "/helloworld.v1.Greeter/SayHello", + request_serializer=helloworld__v1__precompiled__pb2.HelloRequest.SerializeToString, + response_deserializer=helloworld__v1__precompiled__pb2.HelloReply.FromString, ) @@ -35,12 +35,12 @@ def add_GreeterServicer_to_server(servicer, server): rpc_method_handlers = { "SayHello": grpc.unary_unary_rpc_method_handler( servicer.SayHello, - request_deserializer=helloworld__pb2.HelloRequest.FromString, - response_serializer=helloworld__pb2.HelloReply.SerializeToString, + request_deserializer=helloworld__v1__precompiled__pb2.HelloRequest.FromString, + response_serializer=helloworld__v1__precompiled__pb2.HelloReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( - "helloworld.Greeter", rpc_method_handlers + "helloworld.v1.Greeter", rpc_method_handlers ) server.add_generic_rpc_handlers((generic_handler,)) @@ -65,9 +65,9 @@ def SayHello( return grpc.experimental.unary_unary( request, target, - "/helloworld.Greeter/SayHello", - helloworld__pb2.HelloRequest.SerializeToString, - helloworld__pb2.HelloReply.FromString, + "/helloworld.v1.Greeter/SayHello", + helloworld__v1__precompiled__pb2.HelloRequest.SerializeToString, + helloworld__v1__precompiled__pb2.HelloReply.FromString, options, channel_credentials, insecure, diff --git a/example/grpc/helloworld_v2_compiled.proto b/example/grpc/helloworld_v2_compiled.proto new file mode 100644 index 000000000..c4cd261d4 --- /dev/null +++ b/example/grpc/helloworld_v2_compiled.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package helloworld.v2; + +message HelloRequest { + string name = 1; +} + +message HelloReply { + string message = 1; +} + +service Greeter { + rpc SayHello (HelloRequest) returns (HelloReply) {} +} diff --git a/example/grpc/helloworld_v3_reflected.proto b/example/grpc/helloworld_v3_reflected.proto new file mode 100644 index 000000000..44eb6a4f5 --- /dev/null +++ b/example/grpc/helloworld_v3_reflected.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package helloworld.v3; + +message HelloRequest { + string name = 1; +} + +message HelloReply { + string message = 1; +} + +service Greeter { + rpc SayHello (HelloRequest) returns (HelloReply) {} +} diff --git a/example/grpc/regenerate.sh b/example/grpc/regenerate.sh index 9a9a54aef..3003cca5a 100755 --- a/example/grpc/regenerate.sh +++ b/example/grpc/regenerate.sh @@ -1,3 +1,4 @@ #!/usr/bin/env bash -python -m grpc_tools.protoc --proto_path=$(pwd) --pyi_out=$(pwd) --python_out=$(pwd) --grpc_python_out=$(pwd) helloworld.proto +python3 -m grpc_tools.protoc --proto_path=$(pwd) --pyi_out=$(pwd) --python_out=$(pwd) --grpc_python_out=$(pwd) helloworld_v1_precompiled.proto +black *pb2*py diff --git a/example/grpc/server/helloworld_pb2.py b/example/grpc/server/helloworld_pb2.py deleted file mode 100644 index f4a119480..000000000 --- a/example/grpc/server/helloworld_pb2.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: helloworld.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x10helloworld.proto\x12\nhelloworld"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply"\x00\x42\x03\x90\x01\x01\x62\x06proto3' -) - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "helloworld_pb2", globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b"\220\001\001" - _HELLOREQUEST._serialized_start = 32 - _HELLOREQUEST._serialized_end = 60 - _HELLOREPLY._serialized_start = 62 - _HELLOREPLY._serialized_end = 91 - _GREETER._serialized_start = 93 - _GREETER._serialized_end = 166 -_builder.BuildServices(DESCRIPTOR, "helloworld_pb2", globals()) -# @@protoc_insertion_point(module_scope) diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index f66b91e9c..58cef40b8 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -1,20 +1,33 @@ -from concurrent import futures import logging import threading -from typing import Callable, Any +from concurrent import futures +from typing import Any, Callable import grpc -from grpc_interceptor import ServerInterceptor +import helloworld_v1_precompiled_pb2 as helloworld_pb2_v1 +import helloworld_v1_precompiled_pb2_grpc as helloworld_pb2_grpc_v1 +import helloworld_v2_compiled_pb2 as helloworld_pb2_v2 +import helloworld_v2_compiled_pb2_grpc as helloworld_pb2_grpc_v2 +import helloworld_v3_reflected_pb2 as helloworld_pb2_v3 +import helloworld_v3_reflected_pb2_grpc as helloworld_pb2_grpc_v3 from grpc_interceptor.exceptions import GrpcException from grpc_reflection.v1alpha import reflection +from grpc_interceptor import ServerInterceptor + -import helloworld_pb2 -import helloworld_pb2_grpc +class GreeterV1(helloworld_pb2_grpc_v1.GreeterServicer): + def SayHello(self, request, context): + return helloworld_pb2_v1.HelloReply(message="Hello, %s!" % request.name) -class Greeter(helloworld_pb2_grpc.GreeterServicer): +class GreeterV2(helloworld_pb2_grpc_v2.GreeterServicer): def SayHello(self, request, context): - return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name) + return helloworld_pb2_v2.HelloReply(message="Hello, %s!" % request.name) + + +class GreeterV3(helloworld_pb2_grpc_v3.GreeterServicer): + def SayHello(self, request, context): + return helloworld_pb2_v3.HelloReply(message="Hello, %s!" 
% request.name) class LoggingInterceptor(ServerInterceptor): @@ -30,34 +43,38 @@ def intercept( try: return method(request_or_iterator, context) except GrpcException as e: - logging.exception(f"error processing request") + logging.exception("error processing request") context.set_code(e.status_code) context.set_details(e.details) raise def serve(): + interceptors = [LoggingInterceptor()] executor = futures.ThreadPoolExecutor(max_workers=10) - for reflect in [True, False]: - server = grpc.server( - executor, - interceptors=[LoggingInterceptor()], - ) - helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) - - if reflect: - service_names = ( - helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name, - reflection.SERVICE_NAME, - ) - reflection.enable_server_reflection(service_names, server) - port = 50052 - else: - port = 50051 - - server.add_insecure_port(f"0.0.0.0:{port:d}") - server.start() + server = grpc.server( + executor, + interceptors=interceptors, + ) + helloworld_pb2_grpc_v1.add_GreeterServicer_to_server(GreeterV1(), server) + helloworld_pb2_grpc_v2.add_GreeterServicer_to_server(GreeterV2(), server) + + server.add_insecure_port(f"0.0.0.0:50051") + server.start() + + reflecting_server = grpc.server( + executor, + interceptors=interceptors, + ) + helloworld_pb2_grpc_v3.add_GreeterServicer_to_server(GreeterV3(), reflecting_server) + service_names = ( + helloworld_pb2_v3.DESCRIPTOR.services_by_name["Greeter"].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(service_names, reflecting_server) + reflecting_server.add_insecure_port(f"0.0.0.0:50052") + reflecting_server.start() logging.info("Starting grpc server") event = threading.Event() diff --git a/example/grpc/test_grpc.tavern.yaml b/example/grpc/test_grpc.tavern.yaml index 5c61758aa..083823464 100644 --- a/example/grpc/test_grpc.tavern.yaml +++ b/example/grpc/test_grpc.tavern.yaml @@ -1,22 +1,22 @@ --- -test_name: Test grpc message echo +test_name: Test grpc message echo importing a module instead of compiling from source includes: - !include common.yaml -grpc: &grpc_spec - connect: +grpc: + connect: &grpc_connect host: "{grpc_host}" port: !int "{grpc_port}" timeout: 3 proto: - source: . + module: helloworld_v1_precompiled_pb2_grpc stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + service: helloworld.v1.Greeter/SayHello body: name: "John" grpc_response: @@ -26,30 +26,52 @@ stages: --- -test_name: Test trying to compile a folder with no protos in it +test_name: Test trying to connect using an invalid option includes: - !include common.yaml -marks: - - usefixtures: - - make_temp_dir +grpc: + connect: + <<: *grpc_connect + options: + woah: cool + proto: + module: helloworld_v1_precompiled_pb2_grpc _xfail: - run: "No protos defined in" + run: invalid grpc option 'woah' + +stages: + - name: Echo text + grpc_request: + service: helloworld.v1.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" 
+ +--- + +test_name: Test grpc message echo importing a module but its a path to a file + +includes: + - !include common.yaml + +_xfail: run grpc: connect: - host: "{grpc_host}" - port: !int "{grpc_port}" - timeout: 3 + <<: *grpc_connect proto: - source: "{make_temp_dir}" + module: helloworld_v1_precompiled_pb2_grpc.py stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + service: helloworld.v1.Greeter/SayHello body: name: "John" grpc_response: @@ -59,23 +81,20 @@ stages: --- -test_name: Test grpc message echo importing a module instead of compiling from source +test_name: Test grpc connection without the 'connect' block includes: - !include common.yaml grpc: - connect: - host: "{grpc_host}" - port: !int "{grpc_port}" - timeout: 3 proto: - module: server/helloworld_pb2_grpc + module: helloworld_v1_precompiled_pb2_grpc stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + host: "{grpc_host}:{grpc_port}" + service: helloworld.v1.Greeter/SayHello body: name: "John" grpc_response: @@ -85,25 +104,47 @@ stages: --- -test_name: Test grpc message echo importing a module but its a path to a file +test_name: Test grpc connection without the 'connect' block, with a bad message includes: - !include common.yaml +grpc: + proto: + module: helloworld_pb2_grpc + _xfail: run -grpc: +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_port}" + service: helloworld.v1.Greeter/SayHello + body: + aarg: wooo + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +test_name: Test grpc message echo compiling proto + +includes: + - !include common.yaml + +grpc: &grpc_spec connect: - host: "{grpc_host}" - port: !int "{grpc_port}" - timeout: 3 + <<: *grpc_connect proto: - module: server.helloworld_pb2_grpc.py + source: "{single_compiled_proto_for_test}" stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + service: helloworld.v2.Greeter/SayHello body: name: "John" grpc_response: @@ -113,20 +154,17 @@ stages: --- -test_name: Test grpc connection without the 'connect' block +test_name: Test grpc message echo compiling folder with multiple protos includes: - !include common.yaml -grpc: - proto: - module: server.helloworld_pb2_grpc +grpc: *grpc_spec stages: - name: Echo text grpc_request: - host: "{grpc_host}:{grpc_port}" - service: helloworld.Greeter/SayHello + service: helloworld.v2.Greeter/SayHello body: name: "John" grpc_response: @@ -136,24 +174,54 @@ stages: --- -test_name: Test grpc connection without the 'connect' block, with a bad message +test_name: Test trying to compile a folder with no protos in it includes: - !include common.yaml +marks: + - usefixtures: + - make_temp_dir + +_xfail: + run: "No protos defined in" + grpc: + connect: + <<: *grpc_connect proto: - module: server.helloworld_pb2_grpc + source: "{make_temp_dir}" -_xfail: run +stages: + - name: Echo text + grpc_request: + service: helloworld.v2.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" 
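The reflection tests that follow rely on the standard gRPC server-reflection flow: ask the server which file descriptors define a symbol, add them to a descriptor pool, then build message classes from the resolved method. A minimal client-side sketch of that flow, assuming a reflection-enabled server on an illustrative address (the `helloworld.v3` names mirror the example server):

```python
import grpc
from google.protobuf import descriptor_pb2, descriptor_pool, message_factory
from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc

channel = grpc.insecure_channel("localhost:50052")
stub = reflection_pb2_grpc.ServerReflectionStub(channel)

# Ask the server for the proto file (plus dependencies) that defines the symbol.
request = reflection_pb2.ServerReflectionRequest(
    file_containing_symbol="helloworld.v3.Greeter"
)

pool = descriptor_pool.DescriptorPool()
for response in stub.ServerReflectionInfo(iter([request])):
    for serialized in response.file_descriptor_response.file_descriptor_proto:
        proto = descriptor_pb2.FileDescriptorProto()
        proto.ParseFromString(serialized)
        pool.Add(proto)

# The reflected descriptors can now be turned into concrete message classes.
method = pool.FindMethodByName("helloworld.v3.Greeter.SayHello")
request_cls = message_factory.GetMessageClass(method.input_type)
print(request_cls(name="John"))
```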
+ +--- + +grpc: + attempt_reflection: True + connect: + host: "{grpc_host}" + port: !int "{grpc_reflecting_port}" + timeout: 3 + +test_name: Test server reflection + +includes: + - !include common.yaml stages: - name: Echo text grpc_request: - host: "{grpc_host}:{grpc_port}" - service: helloworld.Greeter/SayHello + service: helloworld.v3.Greeter/SayHello body: - aarg: wooo name: "John" grpc_response: status: "OK" @@ -162,6 +230,9 @@ stages: --- +grpc: + attempt_reflection: True + test_name: Test grpc connection without the 'connect' block, using server reflection includes: @@ -171,7 +242,32 @@ stages: - name: Echo text grpc_request: host: "{grpc_host}:{grpc_reflecting_port}" - service: helloworld.Greeter/SayHello + service: helloworld.v3.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" + body: + message: "Hello, John!" + +--- + +grpc: + attempt_reflection: True + +test_name: Tried to use grpc reflection but the service did not expose it + +_xfail: + run: "Service coolservice.v9/SayGoodbye was not found on host" + +includes: + - !include common.yaml + +stages: + - name: Echo text + grpc_request: + host: "{grpc_host}:{grpc_reflecting_port}" + service: coolservice.v9/SayGoodbye body: name: "John" grpc_response: @@ -186,13 +282,14 @@ test_name: Test grpc connection without the 'connect' block, using server reflec includes: - !include common.yaml -_xfail: run +_xfail: + run: error creating request from json body stages: - name: Echo text grpc_request: host: "{grpc_host}:{grpc_reflecting_port}" - service: helloworld.Greeter/SayHello + service: helloworld.v3.Greeter/SayHello body: aarg: wooo name: "John" @@ -210,13 +307,14 @@ includes: grpc: *grpc_spec -_xfail: run +_xfail: + run: error creating request from json body stages: - name: Echo text grpc_request: host: "{grpc_host}:{grpc_port}" - service: helloworld.Greeter/SayHello + service: helloworld.v2.Greeter/SayHello body: name: "John" A: klk @@ -234,9 +332,7 @@ includes: grpc: connect: - host: "{grpc_host}" - port: !int "{grpc_port}" - timeout: 3 + <<: *grpc_connect proto: module: cool_grpc_server @@ -245,7 +341,7 @@ _xfail: run stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + service: helloworld.v1.Greeter/SayHello body: name: "John" grpc_response: @@ -267,7 +363,7 @@ _xfail: verify stages: - name: Echo text grpc_request: - service: helloworld.Greeter/SayHello + service: helloworld.v1.Greeter/SayHello body: name: "Jim" grpc_response: diff --git a/example/mqtt/server.py b/example/mqtt/server.py index bc73c910d..e31a79c58 100644 --- a/example/mqtt/server.py +++ b/example/mqtt/server.py @@ -157,7 +157,7 @@ def create_device(): try: r["clean"] - except (TypeError): + except TypeError: return jsonify({"error": "checking for clean key"}), 500 except KeyError: try: @@ -196,7 +196,6 @@ def attempt(query): with contextlib.suppress(Exception): db.execute(query) - attempt("DELETE FROM devices_table") attempt( "CREATE TABLE devices_table (device_id TEXT NOT NULL, lights_on INTEGER NOT NULL)" diff --git a/pyproject.toml b/pyproject.toml index 3da1940c4..49117fb24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -179,6 +179,10 @@ target-version = "py38" [tool.ruff.isort] known-first-party = ["tavern"] +[tool.ruff.format] +exclude = ["*_pb2.py", "*_pb2_grpc.py", "*_pb2.pyi"] +docstring-code-format = true + [tool.tbump.version] current = "2.7.1" diff --git a/scripts/smoke.bash b/scripts/smoke.bash index f68c6e0ba..e8d7818c5 100755 --- a/scripts/smoke.bash +++ b/scripts/smoke.bash @@ -3,7 +3,7 @@ set 
-ex pre-commit run ruff --all-files -pre-commit run black --all-files +pre-commit run ruff-format --all-files # Separate as isort can interfere with other testenvs tox --parallel -c tox.ini \ diff --git a/tavern/_core/dict_util.py b/tavern/_core/dict_util.py index fd0ab1c7a..435a10533 100644 --- a/tavern/_core/dict_util.py +++ b/tavern/_core/dict_util.py @@ -162,9 +162,9 @@ def recurse_access_key(data, query: str): Example: - >>> recurse_access_key({'a': 'b'}, 'a') + >>> recurse_access_key({"a": "b"}, "a") 'b' - >>> recurse_access_key({'a': {'b': ['c', 'd']}}, 'a.b[0]') + >>> recurse_access_key({"a": {"b": ["c", "d"]}}, "a.b[0]") 'c' Args: @@ -203,9 +203,9 @@ def _deprecated_recurse_access_key(current_val, keys): Example: - >>> _deprecated_recurse_access_key({'a': 'b'}, ['a']) + >>> _deprecated_recurse_access_key({"a": "b"}, ["a"]) 'b' - >>> _deprecated_recurse_access_key({'a': {'b': ['c', 'd']}}, ['a', 'b', '0']) + >>> _deprecated_recurse_access_key({"a": {"b": ["c", "d"]}}, ["a", "b", "0"]) 'c' Args: @@ -351,7 +351,9 @@ def check_keys_match_recursive( >>> check_keys_match_recursive({"a": {"b": "c"}}, {"a": {"b": "c"}}, []) is None True - >>> check_keys_match_recursive({"a": {"b": "c"}}, {"a": {"b": "d"}}, []) # doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_keys_match_recursive( + ... {"a": {"b": "c"}}, {"a": {"b": "d"}}, [] + ... ) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): File "/home/michael/code/tavern/tavern/tavern/_core.util/dict_util.py", line 223, in check_keys_match_recursive tavern._core.exceptions.KeyMismatchError: Key mismatch: (expected["a"]["b"] = 'c', actual["a"]["b"] = 'd') diff --git a/tavern/_core/exceptions.py b/tavern/_core/exceptions.py index dcdc7fbc9..9c825007c 100644 --- a/tavern/_core/exceptions.py +++ b/tavern/_core/exceptions.py @@ -7,7 +7,12 @@ class TavernException(Exception): """Base exception - Fields are internal and might change in future + Fields are internal and might change in future without warning + + Attributes: + is_final: whether this exception came from a 'finally' block + stage: stage that caused this issue + test_block_config: config for stage """ stage: Optional[Dict] diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 095895ca4..06dc54940 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,6 +1,6 @@ import functools import hashlib -import importlib +import importlib.util import logging import os import string @@ -10,7 +10,7 @@ import warnings from distutils.spawn import find_executable from importlib.machinery import ModuleSpec -from typing import Any, List, Mapping, Optional, Tuple, Union +from typing import Any, Dict, List, Mapping, Optional, Tuple, Union import grpc import grpc_reflection @@ -60,17 +60,30 @@ def _generate_proto_import(source: str): logger.info("Generating protos from %s...", source) - protos = [ - os.path.join(source, child) - for child in os.listdir(source) - if (not os.path.isdir(child)) and child.endswith(".proto") - ] + if not os.path.isdir(source): + if not source.endswith(".proto"): + raise exceptions.ProtoCompilerException( + f"invalid proto source file {source}" + ) + protos = [source] + include_path = os.path.dirname(source) + else: + protos = [ + os.path.join(source, child) + for child in os.listdir(source) + if (not os.path.isdir(child)) and child.endswith(".proto") + ] + include_path = source if not protos: raise exceptions.ProtoCompilerException( f"No protos defined in {os.path.abspath(source)}" ) + for p in protos: + 
if not os.path.exists(p): + raise exceptions.ProtoCompilerException(f"{p} does not exist") + def sanitise(s): """Do basic sanitisation for""" return "".join(c for c in s if c in string.ascii_letters) @@ -87,7 +100,7 @@ def sanitise(s): protoc = find_protoc() - protoc_command = [protoc, "-I" + source, "--python_out=" + output] + protoc_command = [protoc, "-I" + include_path, "--python_out=" + output] protoc_command.extend(protos) call = subprocess.run(protoc_command, capture_output=True, check=False) # noqa @@ -165,7 +178,7 @@ class GRPCClient: def __init__(self, **kwargs): logger.debug("Initialising GRPC client with %s", kwargs) expected_blocks = { - "connect": {"host", "port", "options", "compression", "timeout", "tls"}, + "connect": {"host", "port", "options", "compression", "timeout", "secure"}, "proto": {"source", "module"}, "metadata": {}, "attempt_reflection": {}, @@ -190,9 +203,17 @@ def __init__(self, **kwargs): self.default_host += ":{}".format(port) self.timeout = int(_connect_args.get("timeout", 5)) - self.tls = bool(_connect_args.get("tls", False)) + self.secure = bool(_connect_args.get("secure", False)) - self.channels = {} + self._options: List[Tuple[str, Any]] = [] + for key, value in _connect_args.pop("options", {}).items(): + if not key.startswith("grpc."): + raise exceptions.GRPCServiceException( + f"invalid grpc option '{key}' - must be in the form 'grpc.option_name'" + ) + self._options.append((key, value)) + + self.channels: Dict[str, grpc.Channel] = {} # Using the default symbol database is a bit undesirable because it means that things being imported from # previous tests will affect later ones which can mask bugs. But there isn't a nice way to have a # self-contained symbol database, because then you need to transitively import all dependencies of protos and @@ -271,17 +292,17 @@ def _make_call_request(self, host: str, full_service: str): ) if host not in self.channels: - if self.tls: + if self.secure: credentials = grpc.ssl_channel_credentials() self.channels[host] = grpc.secure_channel( host, credentials, - options=[("grpc.max_receive_message_length", 10 * 1024 * 1024)], + options=self._options, ) else: self.channels[host] = grpc.insecure_channel( host, - options=[("grpc.max_receive_message_length", 10 * 1024 * 1024)], + options=self._options, ) channel = self.channels[host] @@ -301,9 +322,7 @@ def _make_call_request(self, host: str, full_service: str): logger.info("service not registered, doing reflection from server") try: self._get_reflection_info(channel, service_name=service) - except ( - grpc.RpcError - ) as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. + except grpc.RpcError as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. code = details = None try: code = rpc_error.code() @@ -371,3 +390,6 @@ def call( def __exit__(self, *args): logger.debug("Disconnecting from GRPC") + for v in self.channels.values(): + v.close() + self.channels = {} diff --git a/tavern/_plugins/grpc/jsonschema.yaml b/tavern/_plugins/grpc/jsonschema.yaml index 1934e5d33..f18c1131d 100644 --- a/tavern/_plugins/grpc/jsonschema.yaml +++ b/tavern/_plugins/grpc/jsonschema.yaml @@ -24,19 +24,29 @@ properties: type: number keepalive: type: integer - tls: + secure: type: boolean + description: use a secure channel using the system default ssl certs + options: + description: connection options, in map format + type: object + + # TODO + # tls: ... 
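As the client changes above show, entries under `connect.options` are handed to the channel unchanged as `(name, value)` tuples, which is why each key must be a full gRPC argument name of the form `grpc.<arg>`. The equivalent raw grpc-python call is roughly as follows (host and value are illustrative):

```python
import grpc

# The previously hard-coded receive limit now arrives as a user-supplied option;
# option names must use the canonical "grpc.<arg_name>" form.
options = [("grpc.max_receive_message_length", 10 * 1024 * 1024)]
channel = grpc.insecure_channel("localhost:50051", options=options)
```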
attempt_reflection: + description: If a gRPC definition could not be found for a service, try to use server reflection to create the gRPC call instead. This can be useful if you do not have the compiled proto definition on hand but you know what the schema is. type: boolean metadata: + description: gRPC metadata to send to the server type: object proto: type: object properties: source: + description: path to a folder containing proto definitions type: string module: type: string From 1beb3c93945c4b9766e44a978dfa676721cc853c Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 13:57:06 +0000 Subject: [PATCH 47/72] pb2 files generated --- .gitattributes | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..bcdece610 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +**/*_pb2.py linguist-generated +**/*_pb2_grpc.py linguist-generated +**/*_pb2.pyi linguist-generated From 17401528a24ae807f29ff41940b0ed73ac380370 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 13:58:37 +0000 Subject: [PATCH 48/72] document files --- example/grpc/helloworld_v1_precompiled.proto | 2 ++ example/grpc/helloworld_v2_compiled.proto | 2 ++ example/grpc/helloworld_v3_reflected.proto | 2 ++ 3 files changed, 6 insertions(+) diff --git a/example/grpc/helloworld_v1_precompiled.proto b/example/grpc/helloworld_v1_precompiled.proto index 55c6a96f7..886f3df97 100644 --- a/example/grpc/helloworld_v1_precompiled.proto +++ b/example/grpc/helloworld_v1_precompiled.proto @@ -1,3 +1,5 @@ +// Pre compiled and checked into the repo so it can be imported by Tavern at runtime + syntax = "proto3"; package helloworld.v1; diff --git a/example/grpc/helloworld_v2_compiled.proto b/example/grpc/helloworld_v2_compiled.proto index c4cd261d4..09e68ab7e 100644 --- a/example/grpc/helloworld_v2_compiled.proto +++ b/example/grpc/helloworld_v2_compiled.proto @@ -1,3 +1,5 @@ +// Not compiled, but compiled at runtime by Tavern + syntax = "proto3"; package helloworld.v2; diff --git a/example/grpc/helloworld_v3_reflected.proto b/example/grpc/helloworld_v3_reflected.proto index 44eb6a4f5..aa983ec32 100644 --- a/example/grpc/helloworld_v3_reflected.proto +++ b/example/grpc/helloworld_v3_reflected.proto @@ -1,3 +1,5 @@ +// Not compiled, Tavern uses server side reflection to determine the schema + syntax = "proto3"; package helloworld.v3; From 3c0c2b73c675355caefc14544474519e9020bf10 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 13:59:26 +0000 Subject: [PATCH 49/72] docs --- example/grpc/server/server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index 58cef40b8..54a9b0f5d 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -53,6 +53,7 @@ def serve(): interceptors = [LoggingInterceptor()] executor = futures.ThreadPoolExecutor(max_workers=10) + # One server which exposes these two server = grpc.server( executor, interceptors=interceptors, @@ -63,6 +64,7 @@ def serve(): server.add_insecure_port(f"0.0.0.0:50051") server.start() + # One server which exposes the V3 API and has reflection turned on reflecting_server = grpc.server( executor, interceptors=interceptors, From 21cdacbbc954e8b66670dae9a6dd7ab2138a79a6 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:04:16 +0000 Subject: [PATCH 50/72] fix import err --- tavern/_core/pytest/config.py | 13 +++++++------ 
tavern/_core/schema/jsonschema.py | 8 ++++++-- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/tavern/_core/pytest/config.py b/tavern/_core/pytest/config.py index e3a39e1e2..2e33620e5 100644 --- a/tavern/_core/pytest/config.py +++ b/tavern/_core/pytest/config.py @@ -55,12 +55,6 @@ def with_strictness(self, new_strict: StrictLevel) -> "TestConfig": def backends() -> List[str]: available_backends = ["http"] - def has_module(module: str) -> bool: - try: - return find_spec(module) is not None - except ModuleNotFoundError: - return False - if has_module("paho.mqtt"): available_backends.append("mqtt") if has_module("grpc"): @@ -69,3 +63,10 @@ def has_module(module: str) -> bool: logger.debug(f"available request backends: {available_backends}") return available_backends + + +def has_module(module: str) -> bool: + try: + return find_spec(module) is not None + except ModuleNotFoundError: + return False diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py index 330a5d346..339ef6e2a 100644 --- a/tavern/_core/schema/jsonschema.py +++ b/tavern/_core/schema/jsonschema.py @@ -1,4 +1,3 @@ -import importlib import logging import re from typing import Mapping @@ -19,6 +18,7 @@ TypeConvertToken, TypeSentinel, ) +from tavern._core.pytest.config import has_module from tavern._core.schema.extensions import ( check_parametrize_marks, check_strict_key, @@ -121,10 +121,14 @@ def verify_jsonschema(to_verify: Mapping, schema: Mapping) -> None: validator = CustomValidator(schema) - if "grpc" in to_verify and not importlib.util.find_spec("grpc"): + if "grpc" in to_verify and not has_module("grpc"): raise exceptions.BadSchemaError( "Tried to use grpc connection string, but grpc was not installed. Reinstall Tavern with the grpc extra like `pip install tavern[grpc]`" ) + if "mqtt" in to_verify and not has_module("paho.mqtt"): + raise exceptions.BadSchemaError( + "Tried to use mqtt connection string, but mqtt was not installed. 
Reinstall Tavern with the mqtt extra like `pip install tavern[mqtt]`" + ) try: validator.validate(to_verify) From dbc2a25e3198ead72327471a29df7848331d6000 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:04:54 +0000 Subject: [PATCH 51/72] dont include whole errror --- tavern/_core/schema/jsonschema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tavern/_core/schema/jsonschema.py b/tavern/_core/schema/jsonschema.py index 339ef6e2a..fcea74928 100644 --- a/tavern/_core/schema/jsonschema.py +++ b/tavern/_core/schema/jsonschema.py @@ -181,7 +181,7 @@ def verify_jsonschema(to_verify: Mapping, schema: Mapping) -> None: logger.debug("original exception from jsonschema: %s", e) msg = "\n---\n" + "\n---\n".join([str(i) for i in real_context]) - raise BadSchemaError(msg) from e + raise BadSchemaError(msg) from None extra_checks = { "stages[*].mqtt_publish.json[]": validate_request_json, From 48b684a0104c2557ca3bd17d0eaebe74210fee58 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:22:40 +0000 Subject: [PATCH 52/72] Add unit test --- pyproject.toml | 1 - tavern/_core/schema/extensions.py | 39 ++++++++++++++++++++----------- tests/unit/test_extensions.py | 21 +++++++++++++++++ tox-integration.ini | 2 +- 4 files changed, 48 insertions(+), 15 deletions(-) create mode 100644 tests/unit/test_extensions.py diff --git a/pyproject.toml b/pyproject.toml index dbb549f1d..d03d66139 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,7 +149,6 @@ addopts = [ "-p", "no:logging", "--tb=short", "--color=yes", - "--tavern-setup-init-logging", ] norecursedirs = [ ".git", diff --git a/tavern/_core/schema/extensions.py b/tavern/_core/schema/extensions.py index 61eee0e57..7a7bfcc43 100644 --- a/tavern/_core/schema/extensions.py +++ b/tavern/_core/schema/extensions.py @@ -1,6 +1,6 @@ import os import re -from typing import Union +from typing import List, Union from pykwalify.types import is_bool, is_float, is_int @@ -133,29 +133,42 @@ def check_usefixtures(value, rule_obj, path) -> bool: return True -def validate_grpc_status_is_valid_or_list_of_names(value, rule_obj, path): +def validate_grpc_status_is_valid_or_list_of_names( + value: Union[List[str], str, int], rule_obj, path +): """Validate GRPC statuses https://github.com/grpc/grpc/blob/master/doc/statuscodes.md""" # pylint: disable=unused-argument - err_msg = "status has to be an valid grpc status name (got {})".format(value) - - if not isinstance(value, list) and not is_grpc_status(value): - raise BadSchemaError(err_msg) + err_msg = ( + "status has to be an valid grpc status code, name, or list (got {})".format( + value + ) + ) - if isinstance(value, list): + if isinstance(value, (str, int)): + if not is_grpc_status(value): + raise BadSchemaError(err_msg) + elif isinstance(value, list): if not all(is_grpc_status(i) for i in value): raise BadSchemaError(err_msg) + else: + raise BadSchemaError(err_msg) return True -def is_grpc_status(value): - value = value.upper() - +def is_grpc_status(value: Union[str, int]): from grpc import StatusCode - for status in StatusCode: - if status.name == value: - return True + if isinstance(value, str): + value = value.upper() + for status in StatusCode: + if status.name == value: + return True + elif isinstance(value, int): + for status in StatusCode: + if status.value[0] == value: + return True + return False diff --git a/tests/unit/test_extensions.py b/tests/unit/test_extensions.py new file mode 100644 index 000000000..24ba08025 --- /dev/null +++ 
b/tests/unit/test_extensions.py @@ -0,0 +1,21 @@ +import pytest + +from tavern._core import exceptions +from tavern._core.schema.extensions import ( + validate_grpc_status_is_valid_or_list_of_names as validate_grpc, +) + + +class TestGrpcCodes: + @pytest.mark.parametrize("code", ("UNAVAILABLE", "unavailable", "ok", 14, 0)) + def test_validate_grpc_valid_status(self, code): + assert True is validate_grpc(code, None, None) + assert True is validate_grpc([code], None, None) + + @pytest.mark.parametrize("code", (-1, "fo", "J", {"status": "OK"})) + def test_validate_grpc_invalid_status(self, code): + with pytest.raises(exceptions.BadSchemaError): + assert False is validate_grpc(code, None, None) + + with pytest.raises(exceptions.BadSchemaError): + assert False is validate_grpc([code], None, None) diff --git a/tox-integration.ini b/tox-integration.ini index 518eb47fe..9a7b7abcd 100644 --- a/tox-integration.ini +++ b/tox-integration.ini @@ -36,7 +36,7 @@ commands = ; docker compose build docker compose up --build -d python -m pytest --collect-only - python -m pytest --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --cov tavern {posargs} + python -m pytest --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml --cov tavern {posargs} --tavern-setup-init-logging generic: py.test --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml -n 3 generic: tavern-ci --stdout . --tavern-global-cfg={toxinidir}/tests/integration/global_cfg.yaml From da5361603c5a2b03d97830a7ab3f090728817c3d Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:24:09 +0000 Subject: [PATCH 53/72] Clean up tavernhook --- tavern/_plugins/grpc/tavernhook.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tavern/_plugins/grpc/tavernhook.py b/tavern/_plugins/grpc/tavernhook.py index f4049afb5..58b9c1206 100644 --- a/tavern/_plugins/grpc/tavernhook.py +++ b/tavern/_plugins/grpc/tavernhook.py @@ -4,6 +4,7 @@ import yaml from tavern._core.dict_util import format_keys +from tavern._core.pytest.config import TestConfig from .client import GRPCClient from .request import GRPCRequest @@ -18,8 +19,7 @@ request_block_name = "grpc_request" -def get_expected_from_request(response_block, test_block_config, session): - # format so we can subscribe to the right topic +def get_expected_from_request(response_block, test_block_config: TestConfig, session): f_expected = format_keys(response_block, test_block_config.variables) expected = f_expected From 2b89dff1630f7751326fde803cb28ea56f9db9a8 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:34:01 +0000 Subject: [PATCH 54/72] More docs --- docs/source/grpc.md | 54 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 51 insertions(+), 3 deletions(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index b3a36718a..1a2898cae 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -1,10 +1,52 @@ # gRPC integration testing +## Connection + +There are 2 ways of specifying the grpc connection, in the `grpc` block at the top of the test similarly to an mqtt +connection block, or in the test stage itself. + +In the `grpc.connect` block: + +```yaml +grpc: + connect: + host: localhost + port: 50052 +``` + +In the test stage itself: + +```yaml +stages: + - name: Do a thing + grpc_request: + host: "localhost: 50052" + service: my.cool.service/Waoh + body: + ... 
+``` + +## Requests + +The `grpc_request` block requires, at minimum, the name of the service to send the request to + +```yaml +stages: + - name: Say hello + grpc_request: + service: helloworld.v3.Greeter/SayHello + body: + name: "John" +``` + +The 'body' block will be reflected into the protobuf message type expected for the service, if the schema is invalid +then an exception will be raised. + ## Responses The gRPC status code should be a string matching a [gRPC status code](https://grpc.github.io/grpc/core/md_doc_statuscodes.html), for -example `OK`, `NOT_FOUND`, etc. +example `OK`, `NOT_FOUND`, etc. or the numerical value of the code. It can also be a list of codes. ## Loading protobuf definitions @@ -44,7 +86,13 @@ some compiled Python gRPC stubs in your repository. #### Server reflection This is obviously the least useful method. If you don't specify a proto source or module, the client -will attempt to +can attempt to use [gRPC reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) to determine what is the appropriate message type for the message you're trying to send. This is not -reliable as the server you're trying to talk to might not have reflection turned on. +reliable as the server you're trying to talk to might not have reflection turned on. This needs to be specified in +the `grpc` block: + +```yaml +grpc: + attempt_reflection: true +``` From 1d474255afc6d5686100d8cd9916f54c6571aee0 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:50:54 +0000 Subject: [PATCH 55/72] More docs --- docs/source/grpc.md | 47 ++++++++++++++++++++++++++++++++-- tavern/_plugins/grpc/client.py | 4 +-- 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 1a2898cae..115000ff0 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -1,5 +1,10 @@ # gRPC integration testing +## Current limitations / future plans + +- Custom TLS like rest/mqtt +- Better syntax around importing modules + ## Connection There are 2 ways of specifying the grpc connection, in the `grpc` block at the top of the test similarly to an mqtt @@ -26,6 +31,43 @@ stages: ... ``` +The connection will be established at the beginning of the test and dropped when it finishes. + +### SSL connection + +Tavern currently _defaults to an insecure connection_ when connecting to grpc, to enable SSL connections add +the `secure` key in the `connect` block: + +```yaml +grpc: + connect: + secure: true +``` + +### Metadata + +Generic metadata can be passed on every message using the `metadata` key: + +```yaml +grpc: + metadata: + my-extra-info: something +``` + +### Advanced: connection options + +Generic connection options can be passed as key:value pairs under the `options` block: + +```yaml +grpc: + connect: + options: + grpc.max_send_message_length: 10000000 +``` + +See [the gRPC documentation](https://grpc.github.io/grpc/core/group__grpc__arg__keys.html) for a list of possible +options, note that some of these may not be implemented in Python. + ## Requests The `grpc_request` block requires, at minimum, the name of the service to send the request to @@ -54,7 +96,7 @@ There are 3 different ways Tavern will try to load the appropriate proto definit #### Specifying the proto module to use -Example: +If you already have all the Python gRPC stubs in your repository. Example: ```yaml grpc: @@ -62,7 +104,8 @@ grpc: module: server/helloworld_pb2_grpc ``` -This will attempt to import the given module and register all the protos in it. 
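The note above that the request `body` is reflected into the protobuf message type corresponds to a plain `json_format.ParseDict` call on the resolved request class. A sketch assuming the pre-compiled v1 stubs from the examples:

```python
from google.protobuf import json_format

import helloworld_v1_precompiled_pb2 as pb2  # assumed generated module name

# Unknown or mistyped fields raise json_format.ParseError, which the plugin
# surfaces as "error creating request from json body".
request = json_format.ParseDict({"name": "John"}, pb2.HelloRequest())
```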
+This will attempt to import the given module (it should not be a Python file, but the path to the module containing the +existing stubs) and register all the protos in it. #### Specifying a folder with some protos in diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 06dc54940..b6fe8a334 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -178,7 +178,7 @@ class GRPCClient: def __init__(self, **kwargs): logger.debug("Initialising GRPC client with %s", kwargs) expected_blocks = { - "connect": {"host", "port", "options", "compression", "timeout", "secure"}, + "connect": {"host", "port", "options", "timeout", "secure"}, "proto": {"source", "module"}, "metadata": {}, "attempt_reflection": {}, @@ -384,7 +384,7 @@ def call( "error creating request from json body" ) from e - logger.debug("Send request %s", request) + logger.debug("Sending request %s", request) return grpc_call.future(request, metadata=self._metadata, timeout=timeout) From 9d3d28c0f0716583707a129a3796b4a7669cd41a Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 14:56:02 +0000 Subject: [PATCH 56/72] Add more limitations/plans --- docs/source/grpc.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 115000ff0..9570a8faf 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -2,8 +2,14 @@ ## Current limitations / future plans -- Custom TLS like rest/mqtt +- Should be able to specify channel credentials. +- Currently there is no way of doing custom TLS options (like with rest/mqtt) - Better syntax around importing modules +- Some way of representing streaming RPCs? This is pretty niche and Tavern is built around a core of only making 1 + request which doesn't work well with streaming request RPCs, but streaming response RPCs could be handled like + multiple MQTT responses. 
+- Much like the tavern-flask plugin it wouldn't be too difficult to write a plugin which started a Python gRPC server + in-process and ran tests against that instead of having to use a remote server ## Connection From e85a3ee02bd432d18a4d2b529a479d222e475d5e Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 15:13:19 +0000 Subject: [PATCH 57/72] Add initial unit test code for grpc --- pyproject.toml | 6 ++ tests/unit/tavern_grpc/__init__.py | 0 tests/unit/tavern_grpc/test_grpc.py | 43 ++++++++ tests/unit/tavern_grpc/test_services.proto | 18 ++++ tests/unit/tavern_grpc/test_services_pb2.py | 30 ++++++ tests/unit/tavern_grpc/test_services_pb2.pyi | 19 ++++ .../tavern_grpc/test_services_pb2_grpc.py | 99 +++++++++++++++++++ 7 files changed, 215 insertions(+) create mode 100644 tests/unit/tavern_grpc/__init__.py create mode 100644 tests/unit/tavern_grpc/test_grpc.py create mode 100644 tests/unit/tavern_grpc/test_services.proto create mode 100644 tests/unit/tavern_grpc/test_services_pb2.py create mode 100644 tests/unit/tavern_grpc/test_services_pb2.pyi create mode 100644 tests/unit/tavern_grpc/test_services_pb2_grpc.py diff --git a/pyproject.toml b/pyproject.toml index d03d66139..fe8f0ea04 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -169,9 +169,15 @@ ignore = [ select = ["E", "F", "B", "W", "I", "S", "C4", "ICN", "T20", "PLE", "RUF", "SIM105", "PL"] # Look at: UP target-version = "py38" +extend-exclude = [ + "tests/unit/tavern_grpc/test_services_pb2.py", + "tests/unit/tavern_grpc/test_services_pb2.pyi", + "tests/unit/tavern_grpc/test_services_pb2_grpc.py", +] [tool.ruff.per-file-ignores] "tests/*" = ["S", "RUF"] +"tests/unit/tavern_grpc/test_grpc.py" = ["E402"] [tool.ruff.isort] known-first-party = ["tavern"] diff --git a/tests/unit/tavern_grpc/__init__.py b/tests/unit/tavern_grpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py new file mode 100644 index 000000000..602b41a04 --- /dev/null +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -0,0 +1,43 @@ +import os.path +import random +import sys +from concurrent import futures + +import grpc +import pytest +from google.protobuf.empty_pb2 import Empty + +sys.path.append(os.path.dirname(__file__)) + +from . 
import test_services_pb2, test_services_pb2_grpc + + +class ServiceImpl(test_services_pb2_grpc.DummyServiceServicer): + def Empty(self, request: Empty, context) -> Empty: + return Empty() + + def SimpleTest( + self, request: test_services_pb2.DummyRequest, context: grpc.ServicerContext + ) -> test_services_pb2.DummyResponse: + if request.id > 1000: + context.abort(grpc.StatusCode.FAILED_PRECONDITION, "number too big!") + return test_services_pb2.DummyResponse(id=request.id) + + +@pytest.fixture(scope="session") +def service(): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) + service_impl = ServiceImpl() + test_services_pb2_grpc.add_DummyServiceServicer_to_server(service_impl, server) + port = random.randint(10000, 40000) + address = f"127.0.0.1:{port}" + server.add_insecure_port(address) + server.start() + + yield address + + server.stop(1) + + +def test_server_empty(service): + pass diff --git a/tests/unit/tavern_grpc/test_services.proto b/tests/unit/tavern_grpc/test_services.proto new file mode 100644 index 000000000..1adaee7a2 --- /dev/null +++ b/tests/unit/tavern_grpc/test_services.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package tavern.tests.v1; + +import "google/protobuf/empty.proto"; + +service DummyService { + rpc Empty(google.protobuf.Empty) returns (google.protobuf.Empty); + rpc SimpleTest(DummyRequest) returns (DummyResponse); +} + +message DummyRequest { + int32 id = 1; +} + +message DummyResponse { + int32 id = 1; +} diff --git a/tests/unit/tavern_grpc/test_services_pb2.py b/tests/unit/tavern_grpc/test_services_pb2.py new file mode 100644 index 000000000..8b11b37b5 --- /dev/null +++ b/tests/unit/tavern_grpc/test_services_pb2.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
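Before the generated stubs below, it may help to see what these unit tests exercise at the raw gRPC level; a sketch only, with a placeholder port (the `service` fixture picks a random one):

```python
import grpc

import test_services_pb2
import test_services_pb2_grpc

# Direct call against the in-process test server; ids above 1000 are rejected
# with FAILED_PRECONDITION by ServiceImpl.SimpleTest above.
channel = grpc.insecure_channel("127.0.0.1:12345")  # placeholder port
stub = test_services_pb2_grpc.DummyServiceStub(channel)
reply = stub.SimpleTest(test_services_pb2.DummyRequest(id=2))
assert reply.id == 2
```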
+# source: test_services.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13test_services.proto\x12\x0ftavern.tests.v1\x1a\x1bgoogle/protobuf/empty.proto\"\x1a\n\x0c\x44ummyRequest\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1b\n\rDummyResponse\x12\n\n\x02id\x18\x01 \x01(\x05\x32\x94\x01\n\x0c\x44ummyService\x12\x37\n\x05\x45mpty\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12K\n\nSimpleTest\x12\x1d.tavern.tests.v1.DummyRequest\x1a\x1e.tavern.tests.v1.DummyResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'test_services_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_DUMMYREQUEST']._serialized_start=69 + _globals['_DUMMYREQUEST']._serialized_end=95 + _globals['_DUMMYRESPONSE']._serialized_start=97 + _globals['_DUMMYRESPONSE']._serialized_end=124 + _globals['_DUMMYSERVICE']._serialized_start=127 + _globals['_DUMMYSERVICE']._serialized_end=275 +# @@protoc_insertion_point(module_scope) diff --git a/tests/unit/tavern_grpc/test_services_pb2.pyi b/tests/unit/tavern_grpc/test_services_pb2.pyi new file mode 100644 index 000000000..7226c9079 --- /dev/null +++ b/tests/unit/tavern_grpc/test_services_pb2.pyi @@ -0,0 +1,19 @@ +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message + +DESCRIPTOR: _descriptor.FileDescriptor + +class DummyRequest(_message.Message): + __slots__ = ["id"] + ID_FIELD_NUMBER: _ClassVar[int] + id: int + def __init__(self, id: _Optional[int] = ...) -> None: ... + +class DummyResponse(_message.Message): + __slots__ = ["id"] + ID_FIELD_NUMBER: _ClassVar[int] + id: int + def __init__(self, id: _Optional[int] = ...) -> None: ... diff --git a/tests/unit/tavern_grpc/test_services_pb2_grpc.py b/tests/unit/tavern_grpc/test_services_pb2_grpc.py new file mode 100644 index 000000000..30cefb324 --- /dev/null +++ b/tests/unit/tavern_grpc/test_services_pb2_grpc.py @@ -0,0 +1,99 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import test_services_pb2 as test__services__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class DummyServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Empty = channel.unary_unary( + '/tavern.tests.v1.DummyService/Empty', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.SimpleTest = channel.unary_unary( + '/tavern.tests.v1.DummyService/SimpleTest', + request_serializer=test__services__pb2.DummyRequest.SerializeToString, + response_deserializer=test__services__pb2.DummyResponse.FromString, + ) + + +class DummyServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Empty(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SimpleTest(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DummyServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Empty': grpc.unary_unary_rpc_method_handler( + servicer.Empty, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'SimpleTest': grpc.unary_unary_rpc_method_handler( + servicer.SimpleTest, + request_deserializer=test__services__pb2.DummyRequest.FromString, + response_serializer=test__services__pb2.DummyResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'tavern.tests.v1.DummyService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class DummyService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Empty(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/tavern.tests.v1.DummyService/Empty', + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SimpleTest(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/tavern.tests.v1.DummyService/SimpleTest', + test__services__pb2.DummyRequest.SerializeToString, + test__services__pb2.DummyResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) From c2b8e1eb77e0198806c1ddb98cfafad035d03f78 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 15:32:30 +0000 Subject: [PATCH 58/72] Initial 'empty' test --- tavern/_plugins/grpc/client.py | 4 ++- tavern/_plugins/grpc/request.py | 14 ++++---- tavern/_plugins/grpc/response.py | 7 +++- tavern/response.py | 1 + tests/unit/tavern_grpc/test_grpc.py | 53 +++++++++++++++++++++++++---- 5 files changed, 65 insertions(+), 14 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index b6fe8a334..a4d98732c 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -283,7 +283,9 @@ def _make_call_request(self, host: str, full_service: str): full_service = full_service.replace("/", ".") service_method = full_service.rsplit(".", 1) if len(service_method) != 2: - raise exceptions.GRPCRequestException("Could not find method name") + raise exceptions.GRPCRequestException( + f"Invalid service/method name {full_service}" + ) service = service_method[0] method = service_method[1] diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index 18a59ed1d..3e34b776d 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -39,20 +39,22 @@ class GRPCRequest(BaseRequest): """ def __init__( - self, client: GRPCClient, rspec: Mapping, test_block_config: TestConfig + self, client: GRPCClient, request_spec: Mapping, test_block_config: TestConfig ): - expected = {"host", "retain", "service", "body", "json"} + expected = {"host", "service", "body"} - check_expected_keys(expected, rspec) + check_expected_keys(expected, request_spec) - grpc_args = get_grpc_args(rspec, test_block_config) + grpc_args = get_grpc_args(request_spec, test_block_config) self._prepared = functools.partial(client.call, **grpc_args) # Need to do this here because get_publish_args will modify the original # input, which we might want to use to format. 
No error handling because # all the error handling is done in the previous call - self._original_publish_args = format_keys(rspec, test_block_config.variables) + self._original_request_vars = format_keys( + request_spec, test_block_config.variables + ) def run(self): try: @@ -63,4 +65,4 @@ def run(self): @property def request_vars(self): - return Box(self._original_publish_args) + return Box(self._original_request_vars) diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index e1dcd06f9..1900c7be9 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -7,6 +7,7 @@ from tavern._core.exceptions import TestFailError from tavern._core.pytest.config import TestConfig +from tavern._plugins.grpc.client import GRPCClient from tavern.response import BaseResponse logger = logging.getLogger(__name__) @@ -14,7 +15,11 @@ class GRPCResponse(BaseResponse): def __init__( - self, client, name: str, expected: Mapping, test_block_config: TestConfig + self, + client: GRPCClient, + name: str, + expected: Mapping, + test_block_config: TestConfig, ): super(GRPCResponse, self).__init__(name, expected, test_block_config) diff --git a/tavern/response.py b/tavern/response.py index a840d4eb4..b9ffb86f5 100644 --- a/tavern/response.py +++ b/tavern/response.py @@ -22,6 +22,7 @@ def indent_err_text(err: str) -> str: class BaseResponse: def __init__(self, name: str, expected, test_block_config: TestConfig) -> None: + # Stage name self.name = name # all errors in this response diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 602b41a04..57929d0ce 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -6,6 +6,13 @@ import grpc import pytest from google.protobuf.empty_pb2 import Empty +from google.protobuf.json_format import MessageToDict +from grpc_reflection.v1alpha import reflection + +from tavern._core.pytest.config import TestConfig +from tavern._plugins.grpc.client import GRPCClient +from tavern._plugins.grpc.request import GRPCRequest +from tavern._plugins.grpc.response import GRPCResponse sys.path.append(os.path.dirname(__file__)) @@ -25,19 +32,53 @@ def SimpleTest( @pytest.fixture(scope="session") -def service(): +def service() -> int: server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) service_impl = ServiceImpl() test_services_pb2_grpc.add_DummyServiceServicer_to_server(service_impl, server) + + service_names = ( + test_services_pb2.DESCRIPTOR.services_by_name["DummyService"].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(service_names, server) + port = random.randint(10000, 40000) - address = f"127.0.0.1:{port}" - server.add_insecure_port(address) + server.add_insecure_port(f"127.0.0.1:{port}") server.start() - yield address + yield port server.stop(1) -def test_server_empty(service): - pass +@pytest.fixture() +def grpc_client(service: int) -> GRPCClient: + opts = { + "connect": {"host": "localhost", "port": service, "secure": False}, + "attempt_reflection": True, + } + + return GRPCClient(**opts) + + +def wrap_make_request( + client: GRPCClient, service_name: str, req, resp, test_block_config: TestConfig +): + request = GRPCRequest( + client, {"service": service_name, "body": MessageToDict(req)}, test_block_config + ) + + future = request.run() + + resp_as_dict = MessageToDict(resp) + + resp = GRPCResponse(client, "test", resp_as_dict, test_block_config) + + resp.verify(future) + + +def test_server_empty(grpc_client, 
includes): + wrap_make_request( + grpc_client, "tavern.tests.v1.DummyService/Empty", Empty(), Empty(), includes + ) From f57f5af1da64ccda8b00f120b46cdebf3fa47417 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 15:44:11 +0000 Subject: [PATCH 59/72] Parametrize tests --- tests/unit/tavern_grpc/test_grpc.py | 41 ++++++++++++++++++++++------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 57929d0ce..af8395314 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -1,10 +1,14 @@ +import dataclasses import os.path import random import sys from concurrent import futures +from typing import Mapping import grpc +import proto import pytest +from _pytest.mark import MarkGenerator from google.protobuf.empty_pb2 import Empty from google.protobuf.json_format import MessageToDict from grpc_reflection.v1alpha import reflection @@ -62,23 +66,40 @@ def grpc_client(service: int) -> GRPCClient: return GRPCClient(**opts) -def wrap_make_request( - client: GRPCClient, service_name: str, req, resp, test_block_config: TestConfig -): +@dataclasses.dataclass +class GRPCTestSpec: + method: str + req: proto.message.Message + resp: proto.message.Message + service: str = "tavern.tests.v1.DummyService" + + def service_method(self): + return f"{self.service}/{self.method}" + + def request(self) -> Mapping: + return MessageToDict(self.req) + + def expected(self) -> Mapping: + return MessageToDict(self.resp) + + +def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTestSpec): request = GRPCRequest( - client, {"service": service_name, "body": MessageToDict(req)}, test_block_config + grpc_client, + {"service": test_spec.service_method(), "body": test_spec.request()}, + includes, ) future = request.run() - resp_as_dict = MessageToDict(resp) + resp_as_dict = test_spec.expected() - resp = GRPCResponse(client, "test", resp_as_dict, test_block_config) + resp = GRPCResponse(grpc_client, "test", resp_as_dict, includes) resp.verify(future) -def test_server_empty(grpc_client, includes): - wrap_make_request( - grpc_client, "tavern.tests.v1.DummyService/Empty", Empty(), Empty(), includes - ) +def pytest_generate_tests(metafunc: MarkGenerator): + if "test_spec" in metafunc.fixturenames: + tests = [GRPCTestSpec(method="Empty", req=Empty(), resp=Empty())] + metafunc.parametrize("test_spec", tests) From 7428215bcb33e89ef7ebbbadc07b7b380a4342cc Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 16:03:54 +0000 Subject: [PATCH 60/72] cleanup + annotate --- docs/source/grpc.md | 15 ++++++++++- tavern/_core/schema/extensions.py | 21 ++++++++-------- tavern/_plugins/grpc/response.py | 39 +++++++++++++++++++++-------- tests/unit/tavern_grpc/test_grpc.py | 14 +++++++---- 4 files changed, 62 insertions(+), 27 deletions(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 9570a8faf..550f00f23 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -92,10 +92,23 @@ then an exception will be raised. ## Responses -The gRPC status code should be a string matching +If no response is specified, Tavern will assume that _any_ response with an `OK` status code to be successful. + +Other status codes are specified using the `status` key. The gRPC status code should be a string matching a [gRPC status code](https://grpc.github.io/grpc/core/md_doc_statuscodes.html), for example `OK`, `NOT_FOUND`, etc. or the numerical value of the code. 
It can also be a list of codes. +```yaml +stages: + - name: Echo text + grpc_request: + service: helloworld.v1.Greeter/SayHello + body: + name: "John" + grpc_response: + status: "OK" # Also the default +``` + ## Loading protobuf definitions There are 3 different ways Tavern will try to load the appropriate proto definitions: diff --git a/tavern/_core/schema/extensions.py b/tavern/_core/schema/extensions.py index 7a7bfcc43..1cf10ea74 100644 --- a/tavern/_core/schema/extensions.py +++ b/tavern/_core/schema/extensions.py @@ -1,6 +1,6 @@ import os import re -from typing import List, Union +from typing import TYPE_CHECKING, Union from pykwalify.types import is_bool, is_float, is_int @@ -15,6 +15,9 @@ from tavern._core.loader import ApproxScalar, BoolToken, FloatToken, IntToken from tavern._core.strict_util import StrictLevel +if TYPE_CHECKING: + from tavern._plugins.grpc.response import GRPCCode + # To extend pykwalify's type validation, extend its internal functions # These return boolean values @@ -133,9 +136,7 @@ def check_usefixtures(value, rule_obj, path) -> bool: return True -def validate_grpc_status_is_valid_or_list_of_names( - value: Union[List[str], str, int], rule_obj, path -): +def validate_grpc_status_is_valid_or_list_of_names(value: "GRPCCode", rule_obj, path): """Validate GRPC statuses https://github.com/grpc/grpc/blob/master/doc/statuscodes.md""" # pylint: disable=unused-argument err_msg = ( @@ -145,10 +146,10 @@ def validate_grpc_status_is_valid_or_list_of_names( ) if isinstance(value, (str, int)): - if not is_grpc_status(value): + if not to_grpc_status(value): raise BadSchemaError(err_msg) elif isinstance(value, list): - if not all(is_grpc_status(i) for i in value): + if not all(to_grpc_status(i) for i in value): raise BadSchemaError(err_msg) else: raise BadSchemaError(err_msg) @@ -156,20 +157,20 @@ def validate_grpc_status_is_valid_or_list_of_names( return True -def is_grpc_status(value: Union[str, int]): +def to_grpc_status(value: Union[str, int]): from grpc import StatusCode if isinstance(value, str): value = value.upper() for status in StatusCode: if status.name == value: - return True + return status.name elif isinstance(value, int): for status in StatusCode: if status.value[0] == value: - return True + return status.name - return False + return None def verify_oneof_id_name(value, rule_obj, path) -> bool: diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 1900c7be9..e60e48648 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,26 +1,47 @@ import logging -from typing import Mapping, Union +from typing import Any, List, Mapping, TypedDict, Union import grpc from google.protobuf import json_format from grpc import StatusCode +from tavern._core.dict_util import check_expected_keys from tavern._core.exceptions import TestFailError from tavern._core.pytest.config import TestConfig +from tavern._core.schema.extensions import to_grpc_status from tavern._plugins.grpc.client import GRPCClient from tavern.response import BaseResponse logger = logging.getLogger(__name__) +GRPCCode = Union[str, int, List[str], List[int]] + + +def _to_grpc_name(status: GRPCCode) -> Union[str, List[str]]: + if isinstance(status, list): + return [_to_grpc_name(s) for s in status] + + return to_grpc_status(status).upper() + + +class _GRPCExpected(TypedDict): + """What the 'expected' block for a grpc response should contain""" + + status: GRPCCode + details: Any + body: Mapping + + class GRPCResponse(BaseResponse): def __init__( 
self, client: GRPCClient, name: str, - expected: Mapping, + expected: Union[_GRPCExpected | Mapping], test_block_config: TestConfig, ): + check_expected_keys({"body", "status", "details"}, expected) super(GRPCResponse, self).__init__(name, expected, test_block_config) self._client = client @@ -63,12 +84,8 @@ def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: # Get any keys to save saved = {} verify_status = [StatusCode.OK.name] - if "status" in self.expected: - status = self.expected["status"] - if isinstance(status, list): - verify_status = [name.upper() for name in status] - else: - verify_status = [status.upper()] + if status := self.expected.get("status", None): + verify_status = _to_grpc_name(status) if response.code().name not in verify_status: self._adderr( @@ -86,7 +103,7 @@ def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: response.details(), ) - if "proto_body" in self.expected: + if "body" in self.expected: result = response.result() json_result = json_format.MessageToDict( @@ -95,7 +112,7 @@ def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: preserving_proto_field_name=True, ) - self._validate_block("body", json_result) + self._validate_block("json", json_result) self._maybe_run_validate_functions(json_result) saved.update( @@ -107,7 +124,7 @@ def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: if self.errors: raise TestFailError( - "Test '{:s}' failed:\n{:s}".format(self.name, self._str_errors()), + f"Test '{self.name:s}' failed:\n{self._str_errors():s}", failures=self.errors, ) diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index af8395314..824fb316f 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -16,7 +16,7 @@ from tavern._core.pytest.config import TestConfig from tavern._plugins.grpc.client import GRPCClient from tavern._plugins.grpc.request import GRPCRequest -from tavern._plugins.grpc.response import GRPCResponse +from tavern._plugins.grpc.response import GRPCCode, GRPCResponse sys.path.append(os.path.dirname(__file__)) @@ -71,6 +71,7 @@ class GRPCTestSpec: method: str req: proto.message.Message resp: proto.message.Message + code: GRPCCode = grpc.StatusCode.OK.value[0] service: str = "tavern.tests.v1.DummyService" def service_method(self): @@ -79,7 +80,7 @@ def service_method(self): def request(self) -> Mapping: return MessageToDict(self.req) - def expected(self) -> Mapping: + def body(self) -> Mapping: return MessageToDict(self.resp) @@ -92,14 +93,17 @@ def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTest future = request.run() - resp_as_dict = test_spec.expected() + expected = {"body": test_spec.body(), "status": test_spec.code} - resp = GRPCResponse(grpc_client, "test", resp_as_dict, includes) + resp = GRPCResponse(grpc_client, "test", expected, includes) resp.verify(future) def pytest_generate_tests(metafunc: MarkGenerator): if "test_spec" in metafunc.fixturenames: - tests = [GRPCTestSpec(method="Empty", req=Empty(), resp=Empty())] + tests = [ + GRPCTestSpec(method="Empty", req=Empty(), resp=Empty()), + GRPCTestSpec(method="Empty", req=Empty(), resp=Empty(), code=0), + ] metafunc.parametrize("test_spec", tests) From ac0f60eabd203422f229f8d4cb11ee3f7691cfdc Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 16:15:16 +0000 Subject: [PATCH 61/72] Clean up values from grpc call method --- tavern/_plugins/grpc/client.py | 37 +++++++++++++++++++---------- 
tests/unit/tavern_grpc/test_grpc.py | 5 ++++ 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index a4d98732c..a0824584b 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,3 +1,4 @@ +import dataclasses import functools import hashlib import importlib.util @@ -7,13 +8,15 @@ import subprocess import sys import tempfile +import typing import warnings from distutils.spawn import find_executable from importlib.machinery import ModuleSpec -from typing import Any, Dict, List, Mapping, Optional, Tuple, Union +from typing import Any, Dict, List, Mapping, Optional, Tuple import grpc import grpc_reflection +import proto.message from google.protobuf import ( descriptor_pb2, json_format, @@ -174,6 +177,13 @@ def _import_grpc_module(python_module_name: str): spec.loader.exec_module(mod) +@dataclasses.dataclass +class _ChannelVals: + channel: grpc.UnaryUnaryMultiCallable + input_type: typing.Type[proto.message.Message] + output_type: typing.Type[proto.message.Message] + + class GRPCClient: def __init__(self, **kwargs): logger.debug("Initialising GRPC client with %s", kwargs) @@ -259,14 +269,14 @@ def _get_reflection_info( def _get_grpc_service( self, channel: grpc.Channel, service: str, method: str - ) -> Union[Tuple[None, None], Tuple[Any, Any]]: + ) -> Optional[_ChannelVals]: full_service_name = f"{service}.{method}" try: grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) input_type = message_factory.GetMessageClass(grpc_service.input_type) # type: ignore output_type = message_factory.GetMessageClass(grpc_service.output_type) # type: ignore except KeyError: - return None, None + return None logger.critical(f"reflected info for {service}: {full_service_name}") @@ -277,9 +287,9 @@ def _get_grpc_service( response_deserializer=output_type.FromString, ) - return grpc_method, input_type + return _ChannelVals(grpc_method, input_type, output_type) - def _make_call_request(self, host: str, full_service: str): + def _make_call_request(self, host: str, full_service: str) -> _ChannelVals: full_service = full_service.replace("/", ".") service_method = full_service.rsplit(".", 1) if len(service_method) != 2: @@ -309,9 +319,8 @@ def _make_call_request(self, host: str, full_service: str): channel = self.channels[host] - grpc_method, input_type = self._get_grpc_service(channel, service, method) - if grpc_method and input_type: - return grpc_method, input_type + if channel_vals := self._get_grpc_service(channel, service, method): + return channel_vals if not self._attempt_reflection: logger.error( @@ -324,7 +333,7 @@ def _make_call_request(self, host: str, full_service: str): logger.info("service not registered, doing reflection from server") try: self._get_reflection_info(channel, service_name=service) - except grpc.RpcError as rpc_error: # Since this object is guaranteed to be a grpc.Call, might as well include that in its name. 
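Stripped of the plugin plumbing, the callable that `_get_grpc_service` returns is built with `channel.unary_unary` plus the message serializers; a sketch using the example v1 types rather than the plugin's exact code path:

```python
import grpc

import helloworld_v1_precompiled_pb2 as pb2  # assumed generated module name

channel = grpc.insecure_channel("localhost:50051")
say_hello = channel.unary_unary(
    "/helloworld.v1.Greeter/SayHello",
    request_serializer=pb2.HelloRequest.SerializeToString,
    response_deserializer=pb2.HelloReply.FromString,
)

# The plugin uses .future(...) so the response can be verified later
future = say_hello.future(pb2.HelloRequest(name="John"), timeout=3)
print(future.result().message)
```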
+ except grpc.RpcError as rpc_error: code = details = None try: code = rpc_error.code() @@ -371,13 +380,13 @@ def call( if timeout is None: timeout = self.timeout - grpc_call, grpc_request = self._make_call_request(host, service) - if grpc_call is None or grpc_request is None: + channel_vals = self._make_call_request(host, service) + if _ChannelVals is None: raise exceptions.GRPCServiceException( f"Service {service} was not found on host {host}" ) - request = grpc_request() + request = channel_vals.input_type() if body is not None: try: request = json_format.ParseDict(body, request) @@ -388,7 +397,9 @@ def call( logger.debug("Sending request %s", request) - return grpc_call.future(request, metadata=self._metadata, timeout=timeout) + return channel_vals.channel.future( + request, metadata=self._metadata, timeout=timeout + ) def __exit__(self, *args): logger.debug("Disconnecting from GRPC") diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 824fb316f..bcbe6f20b 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -104,6 +104,11 @@ def pytest_generate_tests(metafunc: MarkGenerator): if "test_spec" in metafunc.fixturenames: tests = [ GRPCTestSpec(method="Empty", req=Empty(), resp=Empty()), + GRPCTestSpec( + method="SimpleTest", + req=test_services_pb2.DummyRequest(id=2), + resp=Empty(), + ), GRPCTestSpec(method="Empty", req=Empty(), resp=Empty(), code=0), ] metafunc.parametrize("test_spec", tests) From 331667515cce47711b67d7c65bb8707ed87c4ab5 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 16:57:37 +0000 Subject: [PATCH 62/72] Fix method name servic elookup --- tavern/_plugins/grpc/client.py | 50 ++++++++++++++++++++++------- tavern/_plugins/grpc/request.py | 18 +++++++++-- tavern/_plugins/grpc/response.py | 33 +++++++++++++------ tests/unit/tavern_grpc/test_grpc.py | 2 +- 4 files changed, 78 insertions(+), 25 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index a0824584b..1f5f514c6 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -17,6 +17,7 @@ import grpc import grpc_reflection import proto.message +from google._upb._message import DescriptorPool from google.protobuf import ( descriptor_pb2, json_format, @@ -177,11 +178,14 @@ def _import_grpc_module(python_module_name: str): spec.loader.exec_module(mod) +_ProtoMessageType = typing.Type[proto.message.Message] + + @dataclasses.dataclass class _ChannelVals: channel: grpc.UnaryUnaryMultiCallable - input_type: typing.Type[proto.message.Message] - output_type: typing.Type[proto.message.Message] + input_type: _ProtoMessageType + output_type: _ProtoMessageType class GRPCClient: @@ -270,25 +274,49 @@ def _get_reflection_info( def _get_grpc_service( self, channel: grpc.Channel, service: str, method: str ) -> Optional[_ChannelVals]: - full_service_name = f"{service}.{method}" + full_service_name = f"{service}/{method}" try: - grpc_service = self.sym_db.pool.FindMethodByName(full_service_name) - input_type = message_factory.GetMessageClass(grpc_service.input_type) # type: ignore - output_type = message_factory.GetMessageClass(grpc_service.output_type) # type: ignore - except KeyError: + input_type, output_type = self.get_method_types(full_service_name) + except KeyError as e: + logger.debug(f"could not find types: {e}") return None - logger.critical(f"reflected info for {service}: {full_service_name}") + logger.info(f"reflected info for {service}: {full_service_name}") - 
service_url = f"/{service}/{method}" grpc_method = channel.unary_unary( - service_url, + "/" + full_service_name, request_serializer=input_type.SerializeToString, response_deserializer=output_type.FromString, ) return _ChannelVals(grpc_method, input_type, output_type) + def get_method_types( + self, full_method_name: str + ) -> Tuple[_ProtoMessageType, _ProtoMessageType]: + """Uses the builtin symbol pool to try and find the input and output types for the given method + + Args: + full_method_name: full RPC name in the form 'pkg.ServiceName/Method' + + Returns: + input and output types (class objects) for the RPC + + Raises: + KeyError: If the types are not registered. Should ideally never happen? + """ + logger.debug(f"looking up types for {full_method_name}") + + service, method = full_method_name.split("/") + + pool: DescriptorPool = self.sym_db.pool + grpc_service = pool.FindServiceByName(service) + method = grpc_service.FindMethodByName(method) + input_type = message_factory.GetMessageClass(method.input_type) # type: ignore + output_type = message_factory.GetMessageClass(method.output_type) # type: ignore + + return input_type, output_type + def _make_call_request(self, host: str, full_service: str) -> _ChannelVals: full_service = full_service.replace("/", ".") service_method = full_service.rsplit(".", 1) @@ -381,7 +409,7 @@ def call( timeout = self.timeout channel_vals = self._make_call_request(host, service) - if _ChannelVals is None: + if channel_vals is None: raise exceptions.GRPCServiceException( f"Service {service} was not found on host {host}" ) diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index 3e34b776d..6ff671eec 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -1,8 +1,10 @@ +import dataclasses import functools import json import logging -from typing import Mapping +from typing import Mapping, Union +import grpc from box import Box from tavern._core import exceptions @@ -32,6 +34,12 @@ def get_grpc_args(rspec, test_block_config): return fspec +@dataclasses.dataclass +class WrappedFuture: + response: Union[grpc.Call, grpc.Future] + service_name: str + + class GRPCRequest(BaseRequest): """Wrapper for a single GRPC request on a client @@ -49,6 +57,8 @@ def __init__( self._prepared = functools.partial(client.call, **grpc_args) + self._service_name = grpc_args.get("service", None) + # Need to do this here because get_publish_args will modify the original # input, which we might want to use to format. 
No error handling because # all the error handling is done in the previous call @@ -56,9 +66,11 @@ def __init__( request_spec, test_block_config.variables ) - def run(self): + def run(self) -> WrappedFuture: try: - return self._prepared() + return WrappedFuture( + response=self._prepared(), service_name=self._service_name + ) except ValueError as e: logger.exception("Error executing request") raise exceptions.GRPCRequestException from e diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index e60e48648..4bd73d6f1 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -1,7 +1,7 @@ import logging -from typing import Any, List, Mapping, TypedDict, Union +from typing import TYPE_CHECKING, Any, List, Mapping, TypedDict, Union -import grpc +import proto.message from google.protobuf import json_format from grpc import StatusCode @@ -12,6 +12,9 @@ from tavern._plugins.grpc.client import GRPCClient from tavern.response import BaseResponse +if TYPE_CHECKING: + from tavern._plugins.grpc.request import WrappedFuture + logger = logging.getLogger(__name__) @@ -77,9 +80,11 @@ def _validate_block(self, blockname: str, block: Mapping): block_strictness = test_strictness.option_for(blockname) self.recurse_check_key_match(expected_block, block, blockname, block_strictness) - def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: - logger.debug(f"grpc status code: {response.code()}") - logger.debug(f"grpc details: {response.details()}") + def verify(self, response: "WrappedFuture") -> Mapping: + grpc_response = response.response + + logger.debug(f"grpc status code: {grpc_response.code()}") + logger.debug(f"grpc details: {grpc_response.details()}") # Get any keys to save saved = {} @@ -87,24 +92,32 @@ def verify(self, response: Union[grpc.Call, grpc.Future]) -> Mapping: if status := self.expected.get("status", None): verify_status = _to_grpc_name(status) - if response.code().name not in verify_status: + if grpc_response.code().name not in verify_status: self._adderr( "expected status %s, but the actual response '%s'", verify_status, - response.code().name, + grpc_response.code().name, ) if "details" in self.expected: verify_details = self.expected["details"] - if verify_details != response.details(): + if verify_details != grpc_response.details(): self._adderr( "expected details '%s', but the actual response '%s'", verify_details, - response.details(), + grpc_response.details(), ) if "body" in self.expected: - result = response.result() + _, output_type = self._client.get_method_types(response.service_name) + expected_parsed = output_type() + json_format.ParseDict(self.expected["body"], expected_parsed) + + result: proto.message.Message = grpc_response.result() + + if not isinstance(result, output_type): + logger.warning("ijifdi") + raise Exception("k") json_result = json_format.MessageToDict( result, diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index bcbe6f20b..fb9cf401e 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -60,7 +60,7 @@ def service() -> int: def grpc_client(service: int) -> GRPCClient: opts = { "connect": {"host": "localhost", "port": service, "secure": False}, - "attempt_reflection": True, + "attempt_reflection": False, } return GRPCClient(**opts) From 1146947a5caa0aef93904e53e5bb7b3097167823 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:19:36 +0000 Subject: [PATCH 63/72] Fix serialisation, tests, check 
type, etc --- docs/source/grpc.md | 7 ++++ tavern/_plugins/grpc/response.py | 12 ++++-- tests/unit/tavern_grpc/test_grpc.py | 38 ++++++++++++++----- tests/unit/tavern_grpc/test_services.proto | 4 +- tests/unit/tavern_grpc/test_services_pb2.py | 14 +++---- tests/unit/tavern_grpc/test_services_pb2.pyi | 21 +++++----- .../tavern_grpc/test_services_pb2_grpc.py | 3 +- 7 files changed, 64 insertions(+), 35 deletions(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 550f00f23..18e56bfd7 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -10,6 +10,13 @@ multiple MQTT responses. - Much like the tavern-flask plugin it wouldn't be too difficult to write a plugin which started a Python gRPC server in-process and ran tests against that instead of having to use a remote server +- Fix comparing results - currently it serialises with + + including_default_value_fields=True, + preserving_proto_field_name=True, + + Which formats a field like `my_field_name` as `my_field_name` and not `myFieldName` which is what protojson in Go + converts it to for example, need to provide a way to allow people to write tests using either one ## Connection diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 4bd73d6f1..0d212124d 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -63,7 +63,7 @@ def _validate_block(self, blockname: str, block: Mapping): block: The actual part being checked """ try: - expected_block = self.expected[blockname] or {} + expected_block = self.expected["body"] or {} except KeyError: expected_block = {} @@ -111,13 +111,17 @@ def verify(self, response: "WrappedFuture") -> Mapping: if "body" in self.expected: _, output_type = self._client.get_method_types(response.service_name) expected_parsed = output_type() - json_format.ParseDict(self.expected["body"], expected_parsed) + try: + json_format.ParseDict(self.expected["body"], expected_parsed) + except json_format.ParseError as e: + self._adderr(f"response body was not in the right format: {e}", e=e) result: proto.message.Message = grpc_response.result() if not isinstance(result, output_type): - logger.warning("ijifdi") - raise Exception("k") + self._adderr( + f"response from server ({type(response)}) was not the same type as expected from the registered definition ({output_type})" + ) json_result = json_format.MessageToDict( result, diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index fb9cf401e..96a02896c 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -9,8 +9,8 @@ import proto import pytest from _pytest.mark import MarkGenerator +from google.protobuf import json_format from google.protobuf.empty_pb2 import Empty -from google.protobuf.json_format import MessageToDict from grpc_reflection.v1alpha import reflection from tavern._core.pytest.config import TestConfig @@ -30,9 +30,9 @@ def Empty(self, request: Empty, context) -> Empty: def SimpleTest( self, request: test_services_pb2.DummyRequest, context: grpc.ServicerContext ) -> test_services_pb2.DummyResponse: - if request.id > 1000: + if request.request_id > 1000: context.abort(grpc.StatusCode.FAILED_PRECONDITION, "number too big!") - return test_services_pb2.DummyResponse(id=request.id) + return test_services_pb2.DummyResponse(response_id=request.request_id + 1) @pytest.fixture(scope="session") @@ -68,6 +68,7 @@ def grpc_client(service: int) -> GRPCClient: @dataclasses.dataclass class GRPCTestSpec: + test_name: str method: 
str req: proto.message.Message resp: proto.message.Message @@ -78,10 +79,18 @@ def service_method(self): return f"{self.service}/{self.method}" def request(self) -> Mapping: - return MessageToDict(self.req) + return json_format.MessageToDict( + self.req, + including_default_value_fields=True, + preserving_proto_field_name=True, + ) def body(self) -> Mapping: - return MessageToDict(self.resp) + return json_format.MessageToDict( + self.resp, + including_default_value_fields=True, + preserving_proto_field_name=True, + ) def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTestSpec): @@ -103,12 +112,21 @@ def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTest def pytest_generate_tests(metafunc: MarkGenerator): if "test_spec" in metafunc.fixturenames: tests = [ - GRPCTestSpec(method="Empty", req=Empty(), resp=Empty()), GRPCTestSpec( - method="SimpleTest", - req=test_services_pb2.DummyRequest(id=2), + test_name="basic empty", method="Empty", req=Empty(), resp=Empty() + ), + GRPCTestSpec( + test_name="empty with numeric status code", + method="Empty", + req=Empty(), resp=Empty(), + code=0, + ), + GRPCTestSpec( + test_name="Simple service", + method="SimpleTest", + req=test_services_pb2.DummyRequest(request_id=2), + resp=test_services_pb2.DummyResponse(response_id=3), ), - GRPCTestSpec(method="Empty", req=Empty(), resp=Empty(), code=0), ] - metafunc.parametrize("test_spec", tests) + metafunc.parametrize("test_spec", tests, ids=[g.test_name for g in tests]) diff --git a/tests/unit/tavern_grpc/test_services.proto b/tests/unit/tavern_grpc/test_services.proto index 1adaee7a2..0a60edccd 100644 --- a/tests/unit/tavern_grpc/test_services.proto +++ b/tests/unit/tavern_grpc/test_services.proto @@ -10,9 +10,9 @@ service DummyService { } message DummyRequest { - int32 id = 1; + int32 request_id = 1; } message DummyResponse { - int32 id = 1; + int32 response_id = 1; } diff --git a/tests/unit/tavern_grpc/test_services_pb2.py b/tests/unit/tavern_grpc/test_services_pb2.py index 8b11b37b5..1209f0e9c 100644 --- a/tests/unit/tavern_grpc/test_services_pb2.py +++ b/tests/unit/tavern_grpc/test_services_pb2.py @@ -6,15 +6,15 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13test_services.proto\x12\x0ftavern.tests.v1\x1a\x1bgoogle/protobuf/empty.proto\"\x1a\n\x0c\x44ummyRequest\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1b\n\rDummyResponse\x12\n\n\x02id\x18\x01 \x01(\x05\x32\x94\x01\n\x0c\x44ummyService\x12\x37\n\x05\x45mpty\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12K\n\nSimpleTest\x12\x1d.tavern.tests.v1.DummyRequest\x1a\x1e.tavern.tests.v1.DummyResponseb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13test_services.proto\x12\x0ftavern.tests.v1\x1a\x1bgoogle/protobuf/empty.proto\"\"\n\x0c\x44ummyRequest\x12\x12\n\nrequest_id\x18\x01 \x01(\x05\"$\n\rDummyResponse\x12\x13\n\x0bresponse_id\x18\x01 \x01(\x05\x32\x94\x01\n\x0c\x44ummyService\x12\x37\n\x05\x45mpty\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12K\n\nSimpleTest\x12\x1d.tavern.tests.v1.DummyRequest\x1a\x1e.tavern.tests.v1.DummyResponseb\x06proto3') _globals = globals() 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -22,9 +22,9 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _globals['_DUMMYREQUEST']._serialized_start=69 - _globals['_DUMMYREQUEST']._serialized_end=95 - _globals['_DUMMYRESPONSE']._serialized_start=97 - _globals['_DUMMYRESPONSE']._serialized_end=124 - _globals['_DUMMYSERVICE']._serialized_start=127 - _globals['_DUMMYSERVICE']._serialized_end=275 + _globals['_DUMMYREQUEST']._serialized_end=103 + _globals['_DUMMYRESPONSE']._serialized_start=105 + _globals['_DUMMYRESPONSE']._serialized_end=141 + _globals['_DUMMYSERVICE']._serialized_start=144 + _globals['_DUMMYSERVICE']._serialized_end=292 # @@protoc_insertion_point(module_scope) diff --git a/tests/unit/tavern_grpc/test_services_pb2.pyi b/tests/unit/tavern_grpc/test_services_pb2.pyi index 7226c9079..b5e5af86c 100644 --- a/tests/unit/tavern_grpc/test_services_pb2.pyi +++ b/tests/unit/tavern_grpc/test_services_pb2.pyi @@ -1,19 +1,18 @@ -from typing import ClassVar as _ClassVar -from typing import Optional as _Optional - +from google.protobuf import empty_pb2 as _empty_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Optional as _Optional DESCRIPTOR: _descriptor.FileDescriptor class DummyRequest(_message.Message): - __slots__ = ["id"] - ID_FIELD_NUMBER: _ClassVar[int] - id: int - def __init__(self, id: _Optional[int] = ...) -> None: ... + __slots__ = ["request_id"] + REQUEST_ID_FIELD_NUMBER: _ClassVar[int] + request_id: int + def __init__(self, request_id: _Optional[int] = ...) -> None: ... class DummyResponse(_message.Message): - __slots__ = ["id"] - ID_FIELD_NUMBER: _ClassVar[int] - id: int - def __init__(self, id: _Optional[int] = ...) -> None: ... + __slots__ = ["response_id"] + RESPONSE_ID_FIELD_NUMBER: _ClassVar[int] + response_id: int + def __init__(self, response_id: _Optional[int] = ...) -> None: ... diff --git a/tests/unit/tavern_grpc/test_services_pb2_grpc.py b/tests/unit/tavern_grpc/test_services_pb2_grpc.py index 30cefb324..3401b73d6 100644 --- a/tests/unit/tavern_grpc/test_services_pb2_grpc.py +++ b/tests/unit/tavern_grpc/test_services_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc -import test_services_pb2 as test__services__pb2 + from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +import test_services_pb2 as test__services__pb2 class DummyServiceStub(object): From 5cdb541fbb59e420bd1f37f9ac0230037c6cda11 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:26:24 +0000 Subject: [PATCH 64/72] More test --- docs/source/conf.py | 1 - .../grpc/helloworld_v1_precompiled_pb2.pyi | 4 +- .../helloworld_v1_precompiled_pb2_grpc.py | 1 - example/grpc/server/server.py | 6 +- tests/unit/tavern_grpc/test_grpc.py | 55 +++++++++++++++++-- 5 files changed, 55 insertions(+), 12 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index e6a226973..08e5a0a78 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -29,7 +29,6 @@ # needs_sphinx = '1.0' import sphinx_rtd_theme -import recommonmark from recommonmark.transform import AutoStructify # Add any Sphinx extension module names here, as strings. 
They can be diff --git a/example/grpc/helloworld_v1_precompiled_pb2.pyi b/example/grpc/helloworld_v1_precompiled_pb2.pyi index 5632989e6..fa5ff4364 100644 --- a/example/grpc/helloworld_v1_precompiled_pb2.pyi +++ b/example/grpc/helloworld_v1_precompiled_pb2.pyi @@ -1,6 +1,8 @@ +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Optional as _Optional DESCRIPTOR: _descriptor.FileDescriptor diff --git a/example/grpc/helloworld_v1_precompiled_pb2_grpc.py b/example/grpc/helloworld_v1_precompiled_pb2_grpc.py index 6f949ac87..34c0d6f31 100644 --- a/example/grpc/helloworld_v1_precompiled_pb2_grpc.py +++ b/example/grpc/helloworld_v1_precompiled_pb2_grpc.py @@ -1,7 +1,6 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc - import helloworld_v1_precompiled_pb2 as helloworld__v1__precompiled__pb2 diff --git a/example/grpc/server/server.py b/example/grpc/server/server.py index 54a9b0f5d..e1ec25179 100644 --- a/example/grpc/server/server.py +++ b/example/grpc/server/server.py @@ -10,9 +10,9 @@ import helloworld_v2_compiled_pb2_grpc as helloworld_pb2_grpc_v2 import helloworld_v3_reflected_pb2 as helloworld_pb2_v3 import helloworld_v3_reflected_pb2_grpc as helloworld_pb2_grpc_v3 +from grpc_interceptor import ServerInterceptor from grpc_interceptor.exceptions import GrpcException from grpc_reflection.v1alpha import reflection -from grpc_interceptor import ServerInterceptor class GreeterV1(helloworld_pb2_grpc_v1.GreeterServicer): @@ -61,7 +61,7 @@ def serve(): helloworld_pb2_grpc_v1.add_GreeterServicer_to_server(GreeterV1(), server) helloworld_pb2_grpc_v2.add_GreeterServicer_to_server(GreeterV2(), server) - server.add_insecure_port(f"0.0.0.0:50051") + server.add_insecure_port("0.0.0.0:50051") server.start() # One server which exposes the V3 API and has reflection turned on @@ -75,7 +75,7 @@ def serve(): reflection.SERVICE_NAME, ) reflection.enable_server_reflection(service_names, reflecting_server) - reflecting_server.add_insecure_port(f"0.0.0.0:50052") + reflecting_server.add_insecure_port("0.0.0.0:50052") reflecting_server.start() logging.info("Starting grpc server") diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 96a02896c..db4cdf514 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -3,10 +3,9 @@ import random import sys from concurrent import futures -from typing import Mapping +from typing import Any, Mapping import grpc -import proto import pytest from _pytest.mark import MarkGenerator from google.protobuf import json_format @@ -70,8 +69,10 @@ def grpc_client(service: int) -> GRPCClient: class GRPCTestSpec: test_name: str method: str - req: proto.message.Message - resp: proto.message.Message + req: Any + resp: Any + + xfail: bool = False code: GRPCCode = grpc.StatusCode.OK.value[0] service: str = "tavern.tests.v1.DummyService" @@ -100,12 +101,15 @@ def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTest includes, ) - future = request.run() - expected = {"body": test_spec.body(), "status": test_spec.code} resp = GRPCResponse(grpc_client, "test", expected, includes) + if test_spec.xfail: + pytest.xfail() + + future = request.run() + resp.verify(future) @@ -122,11 +126,50 @@ def pytest_generate_tests(metafunc: 
MarkGenerator): resp=Empty(), code=0, ), + GRPCTestSpec( + test_name="empty with wrong status code", + method="Empty", + req=Empty(), + resp=Empty(), + code="ABORTED", + xfail=True, + ), + GRPCTestSpec( + test_name="empty with the wrong request type", + method="Empty", + req=test_services_pb2.DummyRequest(), + resp=Empty(), + code=0, + xfail=True, + ), + GRPCTestSpec( + test_name="empty with the wrong response type", + method="Empty", + req=Empty(), + resp=test_services_pb2.DummyResponse(), + code=0, + xfail=True, + ), GRPCTestSpec( test_name="Simple service", method="SimpleTest", req=test_services_pb2.DummyRequest(request_id=2), resp=test_services_pb2.DummyResponse(response_id=3), ), + GRPCTestSpec( + test_name="Simple service with wrong request type", + method="SimpleTest", + req=Empty(), + resp=test_services_pb2.DummyResponse(response_id=3), + xfail=True, + ), + GRPCTestSpec( + test_name="Simple service with wrong response type", + method="SimpleTest", + req=test_services_pb2.DummyRequest(request_id=2), + resp=Empty(), + xfail=True, + ), ] + metafunc.parametrize("test_spec", tests, ids=[g.test_name for g in tests]) From acbd45222e45f08f40295dbf1933bd382912b373 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:31:05 +0000 Subject: [PATCH 65/72] fix some type annotations --- tavern/_plugins/grpc/client.py | 4 +++- tavern/_plugins/grpc/response.py | 8 +++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 1f5f514c6..d530cc958 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -317,7 +317,9 @@ def get_method_types( return input_type, output_type - def _make_call_request(self, host: str, full_service: str) -> _ChannelVals: + def _make_call_request( + self, host: str, full_service: str + ) -> Optional[_ChannelVals]: full_service = full_service.replace("/", ".") service_method = full_service.rsplit(".", 1) if len(service_method) != 2: diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 0d212124d..485464401 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -23,7 +23,7 @@ def _to_grpc_name(status: GRPCCode) -> Union[str, List[str]]: if isinstance(status, list): - return [_to_grpc_name(s) for s in status] + return [_to_grpc_name(s) for s in status] # type:ignore return to_grpc_status(status).upper() @@ -41,7 +41,7 @@ def __init__( self, client: GRPCClient, name: str, - expected: Union[_GRPCExpected | Mapping], + expected: Union[_GRPCExpected, Mapping], test_block_config: TestConfig, ): check_expected_keys({"body", "status", "details"}, expected) @@ -90,7 +90,9 @@ def verify(self, response: "WrappedFuture") -> Mapping: saved = {} verify_status = [StatusCode.OK.name] if status := self.expected.get("status", None): - verify_status = _to_grpc_name(status) + verify_status = _to_grpc_name(status) # type: ignore + if not isinstance(verify_status, list): + verify_status = [verify_status] if grpc_response.code().name not in verify_status: self._adderr( From 10316497c9f4a225615c62645f28566e3841e456 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:40:40 +0000 Subject: [PATCH 66/72] Fix bad body error --- tavern/_plugins/grpc/response.py | 55 ++++++++++++++++------------- tests/unit/tavern_grpc/test_grpc.py | 12 ++++++- 2 files changed, 41 insertions(+), 26 deletions(-) diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index 
485464401..fdd691ee4 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -111,35 +111,40 @@ def verify(self, response: "WrappedFuture") -> Mapping: ) if "body" in self.expected: - _, output_type = self._client.get_method_types(response.service_name) - expected_parsed = output_type() - try: - json_format.ParseDict(self.expected["body"], expected_parsed) - except json_format.ParseError as e: - self._adderr(f"response body was not in the right format: {e}", e=e) - - result: proto.message.Message = grpc_response.result() - - if not isinstance(result, output_type): + if verify_status != ["OK"]: self._adderr( - f"response from server ({type(response)}) was not the same type as expected from the registered definition ({output_type})" + "'body' was specified in response, but expected status code was not 'OK'" + ) + else: + _, output_type = self._client.get_method_types(response.service_name) + expected_parsed = output_type() + try: + json_format.ParseDict(self.expected["body"], expected_parsed) + except json_format.ParseError as e: + self._adderr(f"response body was not in the right format: {e}", e=e) + + result: proto.message.Message = grpc_response.result() + + if not isinstance(result, output_type): + self._adderr( + f"response from server ({type(response)}) was not the same type as expected from the registered definition ({output_type})" + ) + + json_result = json_format.MessageToDict( + result, + including_default_value_fields=True, + preserving_proto_field_name=True, ) - json_result = json_format.MessageToDict( - result, - including_default_value_fields=True, - preserving_proto_field_name=True, - ) - - self._validate_block("json", json_result) - self._maybe_run_validate_functions(json_result) + self._validate_block("json", json_result) + self._maybe_run_validate_functions(json_result) - saved.update( - self.maybe_get_save_values_from_save_block("body", json_result) - ) - saved.update( - self.maybe_get_save_values_from_ext(json_result, self.expected) - ) + saved.update( + self.maybe_get_save_values_from_save_block("body", json_result) + ) + saved.update( + self.maybe_get_save_values_from_ext(json_result, self.expected) + ) if self.errors: raise TestFailError( diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index db4cdf514..040cfc401 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -13,6 +13,7 @@ from grpc_reflection.v1alpha import reflection from tavern._core.pytest.config import TestConfig +from tavern._core.schema.extensions import to_grpc_status from tavern._plugins.grpc.client import GRPCClient from tavern._plugins.grpc.request import GRPCRequest from tavern._plugins.grpc.response import GRPCCode, GRPCResponse @@ -101,7 +102,9 @@ def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTest includes, ) - expected = {"body": test_spec.body(), "status": test_spec.code} + expected = {"status": test_spec.code} + if to_grpc_status(test_spec.code) == "OK": + expected["body"] = test_spec.body() resp = GRPCResponse(grpc_client, "test", expected, includes) @@ -156,6 +159,13 @@ def pytest_generate_tests(metafunc: MarkGenerator): req=test_services_pb2.DummyRequest(request_id=2), resp=test_services_pb2.DummyResponse(response_id=3), ), + GRPCTestSpec( + test_name="Simple service with error", + method="SimpleTest", + req=test_services_pb2.DummyRequest(request_id=10000), + resp=test_services_pb2.DummyResponse(response_id=3), + code="FAILED_PRECONDITION", + ), 
GRPCTestSpec( test_name="Simple service with wrong request type", method="SimpleTest", From 641a3e0c833c374b4b9a76bbc52d88ce972695b4 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:43:03 +0000 Subject: [PATCH 67/72] Extra test --- tests/unit/tavern_grpc/test_grpc.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 040cfc401..362f631e9 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -122,6 +122,13 @@ def pytest_generate_tests(metafunc: MarkGenerator): GRPCTestSpec( test_name="basic empty", method="Empty", req=Empty(), resp=Empty() ), + GRPCTestSpec( + test_name="nonexistent method", + method="Wek", + req=Empty(), + resp=Empty(), + xfail=True, + ), GRPCTestSpec( test_name="empty with numeric status code", method="Empty", From a7c1d1d78683f08e3127b4f27c7ff072e40d8ba0 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:46:36 +0000 Subject: [PATCH 68/72] Move file --- tavern/_plugins/grpc/client.py | 153 +------------------------------- tavern/_plugins/grpc/protos.py | 156 +++++++++++++++++++++++++++++++++ 2 files changed, 158 insertions(+), 151 deletions(-) create mode 100644 tavern/_plugins/grpc/protos.py diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index d530cc958..595a8f680 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -1,17 +1,7 @@ import dataclasses -import functools -import hashlib -import importlib.util import logging -import os -import string -import subprocess -import sys -import tempfile import typing import warnings -from distutils.spawn import find_executable -from importlib.machinery import ModuleSpec from typing import Any, Dict, List, Mapping, Optional, Tuple import grpc @@ -30,6 +20,7 @@ from tavern._core import exceptions from tavern._core.dict_util import check_expected_keys +from tavern._plugins.grpc.protos import _generate_proto_import, _import_grpc_module logger = logging.getLogger(__name__) @@ -37,147 +28,6 @@ warnings.simplefilter("ignore") warnings.warn("deprecated", DeprecationWarning) # noqa: B028 - -@functools.lru_cache -def find_protoc() -> str: - # Find the Protocol Compiler. - if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): - return os.environ["PROTOC"] - - if protoc := find_executable("protoc"): - return protoc - - raise exceptions.ProtoCompilerException( - "Wanted to dynamically compile a proto source, but could not find protoc" - ) - - -@functools.lru_cache -def _generate_proto_import(source: str): - """Invokes the Protocol Compiler to generate a _pb2.py from the given - .proto file. Does nothing if the output already exists and is newer than - the input. 
- """ - - if not os.path.exists(source): - raise exceptions.ProtoCompilerException(f"Can't find required file: {source}") - - logger.info("Generating protos from %s...", source) - - if not os.path.isdir(source): - if not source.endswith(".proto"): - raise exceptions.ProtoCompilerException( - f"invalid proto source file {source}" - ) - protos = [source] - include_path = os.path.dirname(source) - else: - protos = [ - os.path.join(source, child) - for child in os.listdir(source) - if (not os.path.isdir(child)) and child.endswith(".proto") - ] - include_path = source - - if not protos: - raise exceptions.ProtoCompilerException( - f"No protos defined in {os.path.abspath(source)}" - ) - - for p in protos: - if not os.path.exists(p): - raise exceptions.ProtoCompilerException(f"{p} does not exist") - - def sanitise(s): - """Do basic sanitisation for""" - return "".join(c for c in s if c in string.ascii_letters) - - output = os.path.join( - tempfile.gettempdir(), - "tavern_proto", - sanitise(protos[0]), - hashlib.new("sha3_224", "".join(protos).encode("utf8")).hexdigest(), - ) - - if not os.path.exists(output): - os.makedirs(output) - - protoc = find_protoc() - - protoc_command = [protoc, "-I" + include_path, "--python_out=" + output] - protoc_command.extend(protos) - - call = subprocess.run(protoc_command, capture_output=True, check=False) # noqa - if call.returncode != 0: - logger.error(f"Error calling '{protoc_command}'") - raise exceptions.ProtoCompilerException(call.stderr.decode("utf8")) - - logger.info(f"Generated module from protos: {protos}") - - # Invalidate caches so the module can be loaded - sys.path.append(output) - importlib.invalidate_caches() - _import_grpc_module(output) - - -def _import_grpc_module(python_module_name: str): - """takes an expected python module name and tries to import the relevant - file, adding service to the symbol database. 
- """ - - logger.debug("attempting to import %s", python_module_name) - - if python_module_name.endswith(".py"): - raise exceptions.GRPCServiceException( - f"grpc module definitions should not end with .py, but got {python_module_name}" - ) - - if python_module_name.startswith("."): - raise exceptions.GRPCServiceException( - f"relative imports for Python grpc modules not allowed (got {python_module_name})" - ) - - import_specs: List[ModuleSpec] = [] - - # Check if its already on the python path - if (spec := importlib.util.find_spec(python_module_name)) is not None: - logger.debug(f"{python_module_name} on sys path already") - import_specs.append(spec) - - # See if the file exists - module_path = python_module_name.replace(".", "/") + ".py" - if os.path.exists(module_path): - logger.debug(f"{python_module_name} found in file") - if ( - spec := importlib.util.spec_from_file_location( - python_module_name, module_path - ) - ) is not None: - import_specs.append(spec) - - if os.path.isdir(python_module_name): - for s in os.listdir(python_module_name): - s = os.path.join(python_module_name, s) - if s.endswith(".py"): - logger.debug(f"found py file {s}") - # Guess a package name - if ( - spec := importlib.util.spec_from_file_location(s[:-3], s) - ) is not None: - import_specs.append(spec) - - if not import_specs: - raise exceptions.GRPCServiceException( - f"could not determine how to import {python_module_name}" - ) - - for spec in import_specs: - mod = importlib.util.module_from_spec(spec) - logger.debug(f"loading from {spec.name}") - if spec.loader: - spec.loader.exec_module(mod) - - _ProtoMessageType = typing.Type[proto.message.Message] @@ -399,6 +249,7 @@ def call( body: Optional[Mapping] = None, timeout: Optional[int] = None, ) -> grpc.Future: + """Makes the request and returns a future with the response.""" if host is None: if getattr(self, "default_host", None) is None: raise exceptions.GRPCRequestException( diff --git a/tavern/_plugins/grpc/protos.py b/tavern/_plugins/grpc/protos.py new file mode 100644 index 000000000..c5b70620e --- /dev/null +++ b/tavern/_plugins/grpc/protos.py @@ -0,0 +1,156 @@ +import functools +import hashlib +import importlib.util +import logging +import os +import string +import subprocess +import sys +import tempfile +from distutils.spawn import find_executable +from importlib.machinery import ModuleSpec +from typing import List + +from tavern._core import exceptions + +logger = logging.getLogger(__name__) + + +@functools.lru_cache +def find_protoc() -> str: + # Find the Protocol Compiler. + if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]): + return os.environ["PROTOC"] + + if protoc := find_executable("protoc"): + return protoc + + raise exceptions.ProtoCompilerException( + "Wanted to dynamically compile a proto source, but could not find protoc" + ) + + +@functools.lru_cache +def _generate_proto_import(source: str): + """Invokes the Protocol Compiler to generate a _pb2.py from the given + .proto file. Does nothing if the output already exists and is newer than + the input. 
+ """ + + if not os.path.exists(source): + raise exceptions.ProtoCompilerException(f"Can't find required file: {source}") + + logger.info("Generating protos from %s...", source) + + if not os.path.isdir(source): + if not source.endswith(".proto"): + raise exceptions.ProtoCompilerException( + f"invalid proto source file {source}" + ) + protos = [source] + include_path = os.path.dirname(source) + else: + protos = [ + os.path.join(source, child) + for child in os.listdir(source) + if (not os.path.isdir(child)) and child.endswith(".proto") + ] + include_path = source + + if not protos: + raise exceptions.ProtoCompilerException( + f"No protos defined in {os.path.abspath(source)}" + ) + + for p in protos: + if not os.path.exists(p): + raise exceptions.ProtoCompilerException(f"{p} does not exist") + + def sanitise(s): + """Do basic sanitisation for""" + return "".join(c for c in s if c in string.ascii_letters) + + output = os.path.join( + tempfile.gettempdir(), + "tavern_proto", + sanitise(protos[0]), + hashlib.new("sha3_224", "".join(protos).encode("utf8")).hexdigest(), + ) + + if not os.path.exists(output): + os.makedirs(output) + + protoc = find_protoc() + + protoc_command = [protoc, "-I" + include_path, "--python_out=" + output] + protoc_command.extend(protos) + + call = subprocess.run(protoc_command, capture_output=True, check=False) # noqa + if call.returncode != 0: + logger.error(f"Error calling '{protoc_command}'") + raise exceptions.ProtoCompilerException(call.stderr.decode("utf8")) + + logger.info(f"Generated module from protos: {protos}") + + # Invalidate caches so the module can be loaded + sys.path.append(output) + importlib.invalidate_caches() + _import_grpc_module(output) + + +def _import_grpc_module(python_module_name: str): + """takes an expected python module name and tries to import the relevant + file, adding service to the symbol database. 
+ """ + + logger.debug("attempting to import %s", python_module_name) + + if python_module_name.endswith(".py"): + raise exceptions.GRPCServiceException( + f"grpc module definitions should not end with .py, but got {python_module_name}" + ) + + if python_module_name.startswith("."): + raise exceptions.GRPCServiceException( + f"relative imports for Python grpc modules not allowed (got {python_module_name})" + ) + + import_specs: List[ModuleSpec] = [] + + # Check if its already on the python path + if (spec := importlib.util.find_spec(python_module_name)) is not None: + logger.debug(f"{python_module_name} on sys path already") + import_specs.append(spec) + + # See if the file exists + module_path = python_module_name.replace(".", "/") + ".py" + if os.path.exists(module_path): + logger.debug(f"{python_module_name} found in file") + if ( + spec := importlib.util.spec_from_file_location( + python_module_name, module_path + ) + ) is not None: + import_specs.append(spec) + + if os.path.isdir(python_module_name): + for s in os.listdir(python_module_name): + s = os.path.join(python_module_name, s) + if s.endswith(".py"): + logger.debug(f"found py file {s}") + # Guess a package name + if ( + spec := importlib.util.spec_from_file_location(s[:-3], s) + ) is not None: + import_specs.append(spec) + + if not import_specs: + raise exceptions.GRPCServiceException( + f"could not determine how to import {python_module_name}" + ) + + for spec in import_specs: + mod = importlib.util.module_from_spec(spec) + logger.debug(f"loading from {spec.name}") + if spec.loader: + spec.loader.exec_module(mod) From fdb361412a2ec28be68b854f7f0810b64f310561 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Thu, 18 Jan 2024 17:51:05 +0000 Subject: [PATCH 69/72] More docs --- docs/source/grpc.md | 1 + tavern/_plugins/grpc/protos.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/source/grpc.md b/docs/source/grpc.md index 18e56bfd7..13ba49628 100644 --- a/docs/source/grpc.md +++ b/docs/source/grpc.md @@ -17,6 +17,7 @@ Which formats a field like `my_field_name` as `my_field_name` and not `myFieldName` which is what protojson in Go converts it to for example, need to provide a way to allow people to write tests using either one +- protos are compiled into a folder based on `tempfile.gettempdir()`, this could be configurable ## Connection diff --git a/tavern/_plugins/grpc/protos.py b/tavern/_plugins/grpc/protos.py index c5b70620e..b70aa2388 100644 --- a/tavern/_plugins/grpc/protos.py +++ b/tavern/_plugins/grpc/protos.py @@ -42,6 +42,7 @@ def _generate_proto_import(source: str): logger.info("Generating protos from %s...", source) + # If its a dir, compile them all if not os.path.isdir(source): if not source.endswith(".proto"): raise exceptions.ProtoCompilerException( @@ -67,9 +68,11 @@ def _generate_proto_import(source: str): raise exceptions.ProtoCompilerException(f"{p} does not exist") def sanitise(s): - """Do basic sanitisation for""" + """Do basic sanitisation for creating a temporary directory based on + the name of the input proto file""" return "".join(c for c in s if c in string.ascii_letters) + # Create a temporary directory to put the generated protobuf files in output = os.path.join( tempfile.gettempdir(), "tavern_proto", @@ -133,6 +136,7 @@ def _import_grpc_module(python_module_name: str): ) is not None: import_specs.append(spec) + # If its a dir then load files in the dir instead if os.path.isdir(python_module_name): for s in os.listdir(python_module_name): s = 
os.path.join(python_module_name, s) @@ -149,6 +153,7 @@ def _import_grpc_module(python_module_name: str): f"could not determine how to import {python_module_name}" ) + # Actually import them to register them in the symbol db for spec in import_specs: mod = importlib.util.module_from_spec(spec) logger.debug(f"loading from {spec.name}") From 0bd7f28e8ba0e37330d3f9a4e180936ea9c687e3 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Fri, 19 Jan 2024 08:56:48 +0000 Subject: [PATCH 70/72] Fix response checking --- tavern/_plugins/grpc/client.py | 6 +++--- tavern/_plugins/grpc/response.py | 11 ++++++++++- tests/unit/tavern_grpc/test_grpc.py | 14 ++++++++++---- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/tavern/_plugins/grpc/client.py b/tavern/_plugins/grpc/client.py index 595a8f680..5fd262cba 100644 --- a/tavern/_plugins/grpc/client.py +++ b/tavern/_plugins/grpc/client.py @@ -101,9 +101,9 @@ def _register_file_descriptor( service_proto: grpc_reflection.v1alpha.reflection_pb2.FileDescriptorResponse, ): for file_descriptor_proto in service_proto.file_descriptor_proto: - proto = descriptor_pb2.FileDescriptorProto() - proto.ParseFromString(file_descriptor_proto) - self.sym_db.pool.Add(proto) + descriptor = descriptor_pb2.FileDescriptorProto() + descriptor.ParseFromString(file_descriptor_proto) + self.sym_db.pool.Add(descriptor) def _get_reflection_info( self, channel, service_name: Optional[str] = None, file_by_filename=None diff --git a/tavern/_plugins/grpc/response.py b/tavern/_plugins/grpc/response.py index fdd691ee4..3eee4ba93 100644 --- a/tavern/_plugins/grpc/response.py +++ b/tavern/_plugins/grpc/response.py @@ -5,6 +5,7 @@ from google.protobuf import json_format from grpc import StatusCode +from tavern._core import exceptions from tavern._core.dict_util import check_expected_keys from tavern._core.exceptions import TestFailError from tavern._core.pytest.config import TestConfig @@ -25,7 +26,11 @@ def _to_grpc_name(status: GRPCCode) -> Union[str, List[str]]: if isinstance(status, list): return [_to_grpc_name(s) for s in status] # type:ignore - return to_grpc_status(status).upper() + if status_name := to_grpc_status(status): + return status_name.upper() + + # This should have been verified before this + raise exceptions.GRPCServiceException(f"unknown status code '{status}'") class _GRPCExpected(TypedDict): @@ -115,6 +120,10 @@ def verify(self, response: "WrappedFuture") -> Mapping: self._adderr( "'body' was specified in response, but expected status code was not 'OK'" ) + elif grpc_response.code().name != "OK": + logger.info( + f"skipping body checking due to {grpc_response.code()} response" + ) else: _, output_type = self._client.get_method_types(response.service_name) expected_parsed = output_type() diff --git a/tests/unit/tavern_grpc/test_grpc.py b/tests/unit/tavern_grpc/test_grpc.py index 362f631e9..5e9903e0c 100644 --- a/tests/unit/tavern_grpc/test_grpc.py +++ b/tests/unit/tavern_grpc/test_grpc.py @@ -3,7 +3,7 @@ import random import sys from concurrent import futures -from typing import Any, Mapping +from typing import Any, Mapping, Optional import grpc import pytest @@ -13,7 +13,6 @@ from grpc_reflection.v1alpha import reflection from tavern._core.pytest.config import TestConfig -from tavern._core.schema.extensions import to_grpc_status from tavern._plugins.grpc.client import GRPCClient from tavern._plugins.grpc.request import GRPCRequest from tavern._plugins.grpc.response import GRPCCode, GRPCResponse @@ -71,8 +70,8 @@ class GRPCTestSpec: test_name: str method: str 
req: Any - resp: Any + resp: Optional[Any] = None xfail: bool = False code: GRPCCode = grpc.StatusCode.OK.value[0] service: str = "tavern.tests.v1.DummyService" @@ -103,7 +102,7 @@ def test_grpc(grpc_client: GRPCClient, includes: TestConfig, test_spec: GRPCTest ) expected = {"status": test_spec.code} - if to_grpc_status(test_spec.code) == "OK": + if test_spec.resp: expected["body"] = test_spec.body() resp = GRPCResponse(grpc_client, "test", expected, includes) @@ -170,8 +169,15 @@ def pytest_generate_tests(metafunc: MarkGenerator): test_name="Simple service with error", method="SimpleTest", req=test_services_pb2.DummyRequest(request_id=10000), + code="FAILED_PRECONDITION", + ), + GRPCTestSpec( + test_name="Simple service with error code but also a response", + method="SimpleTest", + req=test_services_pb2.DummyRequest(request_id=10000), resp=test_services_pb2.DummyResponse(response_id=3), code="FAILED_PRECONDITION", + xfail=True, ), GRPCTestSpec( test_name="Simple service with wrong request type", From efc30e2e7152bbb9528411ca1dfd81d4fde0a49a Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 20 Jan 2024 12:25:31 +0000 Subject: [PATCH 71/72] Update deps --- CONTRIBUTING.md | 4 +- constraints.txt | 176 +++-- pyproject.toml | 2 + requirements.txt | 1694 +++++++++++++++++++++++----------------------- 4 files changed, 941 insertions(+), 935 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 34a7a330f..13d44fd41 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -24,8 +24,8 @@ If on Windows, you should be able to just run the 'tox' commands in that file. 1. Update requirements files (BOTH of them) ```shell - pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file requirements.txt --reuse-hashes --generate-hashes - pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file constraints.txt --strip-extras +pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file requirements.txt --reuse-hashes --generate-hashes -U +pip-compile --all-extras --resolver=backtracking pyproject.toml --output-file constraints.txt --strip-extras -U ``` 1. 
Run tests as above diff --git a/constraints.txt b/constraints.txt index 6f0e8cb36..cf884fc03 100644 --- a/constraints.txt +++ b/constraints.txt @@ -4,61 +4,59 @@ # # pip-compile --all-extras --output-file=constraints.txt --strip-extras pyproject.toml # -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx allure-pytest==2.13.2 # via tavern (pyproject.toml) allure-python-commons==2.13.2 # via allure-pytest -attrs==23.1.0 +attrs==23.2.0 # via # allure-python-commons # jsonschema # pytest # referencing -babel==2.12.1 +babel==2.14.0 # via sphinx -bleach==6.0.0 - # via readme-renderer -blinker==1.6.2 +blinker==1.7.0 # via flask -build==0.10.0 +build==1.0.3 # via pip-tools bump2version==1.0.1 # via tavern (pyproject.toml) -cachetools==5.3.1 +cachetools==5.3.2 # via # google-auth # tox -certifi==2023.7.22 +certifi==2023.11.17 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit -chardet==5.1.0 +chardet==5.2.0 # via tox -charset-normalizer==3.1.0 +charset-normalizer==3.3.2 # via requests -click==8.1.3 +click==8.1.7 # via # flask # pip-tools colorama==0.4.6 # via tox -colorlog==6.7.0 +colorlog==6.8.0 # via tavern (pyproject.toml) commonmark==0.9.1 # via # recommonmark # tavern (pyproject.toml) -coverage==7.2.7 +coverage==7.4.0 # via # pytest-cov # tavern (pyproject.toml) -cryptography==41.0.4 +cryptography==41.0.7 # via secretstorage -distlib==0.3.6 +distlib==0.3.8 # via virtualenv docopt==0.6.2 # via pykwalify @@ -68,16 +66,17 @@ docutils==0.20.1 # readme-renderer # recommonmark # sphinx + # sphinx-rtd-theme # tavern (pyproject.toml) -execnet==1.9.0 +execnet==2.0.2 # via pytest-xdist -faker==18.10.1 +faker==22.4.0 # via tavern (pyproject.toml) -filelock==3.12.0 +filelock==3.13.1 # via # tox # virtualenv -flask==2.3.2 +flask==3.0.1 # via tavern (pyproject.toml) flit==3.9.0 # via tavern (pyproject.toml) @@ -85,47 +84,47 @@ flit-core==3.9.0 # via flit fluent-logger==0.10.0 # via tavern (pyproject.toml) -google-api-core==2.12.0 +google-api-core==2.15.0 # via google-api-python-client -google-api-python-client==2.105.0 +google-api-python-client==2.114.0 # via tavern (pyproject.toml) -google-auth==2.23.3 +google-auth==2.26.2 # via # google-api-core # google-api-python-client # google-auth-httplib2 -google-auth-httplib2==0.1.1 +google-auth-httplib2==0.2.0 # via google-api-python-client -googleapis-common-protos==1.61.0 +googleapis-common-protos==1.62.0 # via # google-api-core # grpcio-status -grpc-interceptor==0.15.3 +grpc-interceptor==0.15.4 # via tavern (pyproject.toml) -grpcio==1.59.0 +grpcio==1.60.0 # via # grpc-interceptor # grpcio-reflection # grpcio-status # grpcio-tools # tavern (pyproject.toml) -grpcio-reflection==1.59.0 +grpcio-reflection==1.60.0 # via tavern (pyproject.toml) -grpcio-status==1.59.0 +grpcio-status==1.60.0 # via tavern (pyproject.toml) -grpcio-tools==1.59.0 +grpcio-tools==1.60.0 # via tavern (pyproject.toml) httplib2==0.22.0 # via # google-api-python-client # google-auth-httplib2 -identify==2.5.24 +identify==2.5.33 # via pre-commit -idna==3.4 +idna==3.6 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.6.0 +importlib-metadata==7.0.1 # via # keyring # twine @@ -135,47 +134,49 @@ itsdangerous==2.1.2 # via # flask # tavern (pyproject.toml) -jaraco-classes==3.2.3 +jaraco-classes==3.3.0 # via keyring jeepney==0.8.0 # via # keyring # secretstorage -jinja2==3.1.2 +jinja2==3.1.3 # via # flask # sphinx jmespath==1.0.1 # via tavern (pyproject.toml) -jsonschema==4.20.0 +jsonschema==4.21.1 # via tavern (pyproject.toml) 
jsonschema-specifications==2023.12.1 # via jsonschema -keyring==23.13.1 +keyring==24.3.0 # via twine -markdown==3.4.4 +markdown==3.5.2 # via sphinx-markdown-tables -markdown-it-py==2.2.0 +markdown-it-py==3.0.0 # via rich -markupsafe==2.1.3 +markupsafe==2.1.4 # via # jinja2 # werkzeug mdurl==0.1.2 # via markdown-it-py -more-itertools==9.1.0 +more-itertools==10.2.0 # via jaraco-classes -msgpack==1.0.5 +msgpack==1.0.7 # via fluent-logger -mypy==1.3.0 +mypy==1.8.0 # via tavern (pyproject.toml) mypy-extensions==1.0.0 # via # mypy # tavern (pyproject.toml) +nh3==0.2.15 + # via readme-renderer nodeenv==1.8.0 # via pre-commit -packaging==23.1 +packaging==23.2 # via # build # pyproject-api @@ -184,26 +185,26 @@ packaging==23.1 # tox paho-mqtt==1.6.1 # via tavern (pyproject.toml) -pbr==5.11.1 +pbr==6.0.0 # via stevedore -pip-tools==6.13.0 +pip-tools==7.3.0 # via tavern (pyproject.toml) pkginfo==1.9.6 # via twine -platformdirs==3.5.1 +platformdirs==4.1.0 # via # tox # virtualenv -pluggy==1.0.0 +pluggy==1.3.0 # via # allure-python-commons # pytest # tox -pre-commit==3.3.2 +pre-commit==3.6.0 # via tavern (pyproject.toml) -proto-plus==1.22.3 +proto-plus==1.23.0 # via tavern (pyproject.toml) -protobuf==4.24.4 +protobuf==4.25.2 # via # google-api-core # googleapis-common-protos @@ -214,7 +215,7 @@ protobuf==4.24.4 # tavern (pyproject.toml) py==1.11.0 # via tavern (pyproject.toml) -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa @@ -222,19 +223,19 @@ pyasn1-modules==0.3.0 # via google-auth pycparser==2.21 # via cffi -pygments==2.15.1 +pygments==2.17.2 # via # readme-renderer # rich # sphinx # tavern (pyproject.toml) -pyjwt==2.7.0 +pyjwt==2.8.0 # via tavern (pyproject.toml) pykwalify==1.8.0 # via tavern (pyproject.toml) pyparsing==3.1.1 # via httplib2 -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build @@ -246,7 +247,7 @@ pytest==7.2.2 # tavern (pyproject.toml) pytest-cov==4.1.0 # via tavern (pyproject.toml) -pytest-xdist==3.3.1 +pytest-xdist==3.5.0 # via tavern (pyproject.toml) python-box==6.1.0 # via tavern (pyproject.toml) @@ -258,7 +259,7 @@ pyyaml==6.0.1 # via # pre-commit # tavern (pyproject.toml) -readme-renderer==37.3 +readme-renderer==42.0 # via twine recommonmark==0.7.1 # via tavern (pyproject.toml) @@ -278,91 +279,84 @@ requests-toolbelt==1.0.0 # via twine rfc3986==2.0.0 # via twine -rich==13.4.1 +rich==13.7.0 # via twine -rpds-py==0.16.2 +rpds-py==0.17.1 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.31 +ruamel-yaml==0.18.5 # via pykwalify -ruamel-yaml-clib==0.2.7 +ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.1.13 +ruff==0.1.14 # via tavern (pyproject.toml) secretstorage==3.3.3 # via keyring six==1.16.0 - # via - # bleach - # python-dateutil + # via python-dateutil snowballstemmer==2.2.0 # via sphinx sphinx==7.2.6 # via # recommonmark # sphinx-rtd-theme - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml + # sphinxcontrib-jquery # tavern (pyproject.toml) sphinx-markdown-tables==0.0.17 # via tavern (pyproject.toml) -sphinx-rtd-theme==0.5.1 +sphinx-rtd-theme==2.0.0 # via tavern (pyproject.toml) -sphinxcontrib-applehelp==1.0.7 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx +sphinxcontrib-jquery==4.1 + # via sphinx-rtd-theme sphinxcontrib-jsmath==1.0.1 # via sphinx 
-sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx stevedore==4.1.1 # via tavern (pyproject.toml) tomli-w==1.0.0 # via flit -tox==4.6.0 +tox==4.12.1 # via tavern (pyproject.toml) twine==4.0.2 # via tavern (pyproject.toml) -types-pyyaml==6.0.12.10 +types-pyyaml==6.0.12.12 # via tavern (pyproject.toml) -types-requests==2.31.0.1 +types-requests==2.31.0.20240106 # via tavern (pyproject.toml) -types-setuptools==67.8.0.0 +types-setuptools==69.0.0.20240115 # via tavern (pyproject.toml) -types-urllib3==1.26.25.13 - # via types-requests -typing-extensions==4.6.3 +typing-extensions==4.9.0 # via mypy uritemplate==4.1.1 # via google-api-python-client -urllib3==2.0.7 +urllib3==2.1.0 # via # requests # twine -virtualenv==20.23.0 + # types-requests +virtualenv==20.25.0 # via # pre-commit # tox -webencodings==0.5.1 - # via bleach werkzeug==3.0.1 # via flask -wheel==0.40.0 +wheel==0.42.0 # via # pip-tools # tavern (pyproject.toml) -zipp==3.15.0 +zipp==3.17.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/pyproject.toml b/pyproject.toml index fe8f0ea04..32332fa13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,8 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Utilities", "Topic :: Software Development :: Testing", "License :: OSI Approved :: MIT License", diff --git a/requirements.txt b/requirements.txt index ef9a3ef51..b90e9332c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --all-extras --generate-hashes --output-file=requirements.txt pyproject.toml # -alabaster==0.7.13 \ - --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ - --hash=sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2 +alabaster==0.7.16 \ + --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ + --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 # via sphinx allure-pytest==2.13.2 \ --hash=sha256:17de9dbee7f61c8e66a5b5e818b00e419dbcea44cb55c24319401ba813220690 \ @@ -16,21 +16,21 @@ allure-python-commons==2.13.2 \ --hash=sha256:2bb3646ec3fbf5b36d178a5e735002bc130ae9f9ba80f080af97d368ba375051 \ --hash=sha256:8a03681330231b1deadd86b97ff68841c6591320114ae638570f1ed60d7a2033 # via allure-pytest -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via # allure-python-commons # jsonschema # pytest # referencing -babel==2.12.1 \ - --hash=sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610 \ - --hash=sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455 +babel==2.14.0 \ + --hash=sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363 \ + --hash=sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287 # via sphinx -blinker==1.6.2 \ - --hash=sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213 \ - 
--hash=sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0 +blinker==1.7.0 \ + --hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \ + --hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182 # via flask build==1.0.3 \ --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ @@ -40,81 +40,69 @@ bump2version==1.0.1 \ --hash=sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410 \ --hash=sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6 # via tavern (pyproject.toml) -cachetools==5.3.1 \ - --hash=sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590 \ - --hash=sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via # google-auth # tox -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2023.11.17 \ + --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \ + --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - 
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + 
--hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + 
--hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography cfgv==3.4.0 \ --hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \ @@ -124,82 +112,97 @@ chardet==5.2.0 \ --hash=sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7 \ --hash=sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970 # via tox -charset-normalizer==3.1.0 \ - --hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ - --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ - --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ - --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ - --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ - --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ - --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ - --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ - --hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ - --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ - --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ - --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ - --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ - --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ - --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ - --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ - --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ - --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ - --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ - --hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ - --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ - --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ - --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ - --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ - --hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ - --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ - --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ - --hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ - --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ - --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ - --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ - 
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ - --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ - --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ - --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ - --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ - --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ - --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ - --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ - --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ - --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ - --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ - --hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ - --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ - --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ - --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ - --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ - --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ - --hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ - --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ - --hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ - --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ - --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ - --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ - --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ - --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ - --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ - --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ - --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ - --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ - --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ - --hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ - --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ - --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ - --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ - --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ - --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ - --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ - --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ - --hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ - --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ - --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ - --hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ - 
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ - --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab +charset-normalizer==3.3.2 \ + --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ + --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ + --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ + --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ + --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ + --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ + --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ + --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ + --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ + --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ + --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ + --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ + --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ + --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ + --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ + --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ + --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ + --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ + --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ + --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ + --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ + --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ + --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ + --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ + --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ + --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ + --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ + --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ + --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ + --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ + --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ + --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ + --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ + --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ + --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ + --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ + --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ + --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ + --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ + --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ + 
--hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ + --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ + --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ + --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ + --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ + --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ + --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ + --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ + --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ + --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ + --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ + --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ + --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ + --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ + --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ + --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ + --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ + --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ + --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ + --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ + --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ + --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ + --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ + --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ + --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ + --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ + --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ + --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ + --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ + --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ + --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ + --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ + --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ + --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ + --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ + --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ + --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ + --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ + --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ + --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ + --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ + --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ + 
--hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ + --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ + --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ + --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ + --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ + --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ + --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ + --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -211,9 +214,9 @@ colorama==0.4.6 \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 # via tox -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.0 \ + --hash=sha256:4ed23b05a1154294ac99f511fabe8c1d6d4364ec1f7fc989c7fb515ccc29d375 \ + --hash=sha256:fbb6fdf9d5685f2517f388fb29bb27d54e8654dd31f58bc2a3b217e967a95ca6 # via tavern (pyproject.toml) commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ @@ -221,97 +224,97 @@ commonmark==0.9.1 \ # via # recommonmark # tavern (pyproject.toml) -coverage[toml]==7.3.1 \ - --hash=sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375 \ - --hash=sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344 \ - --hash=sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e \ - --hash=sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745 \ - --hash=sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f \ - --hash=sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194 \ - --hash=sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a \ - --hash=sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f \ - --hash=sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760 \ - --hash=sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8 \ - --hash=sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392 \ - --hash=sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d \ - --hash=sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc \ - --hash=sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40 \ - --hash=sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981 \ - --hash=sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0 \ - --hash=sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92 \ - --hash=sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3 \ - --hash=sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0 \ - --hash=sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086 \ - --hash=sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7 \ - --hash=sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465 \ - --hash=sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140 \ - 
--hash=sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952 \ - --hash=sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3 \ - --hash=sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8 \ - --hash=sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f \ - --hash=sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593 \ - --hash=sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0 \ - --hash=sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204 \ - --hash=sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037 \ - --hash=sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276 \ - --hash=sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9 \ - --hash=sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26 \ - --hash=sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce \ - --hash=sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7 \ - --hash=sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136 \ - --hash=sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a \ - --hash=sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4 \ - --hash=sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c \ - --hash=sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f \ - --hash=sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832 \ - --hash=sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3 \ - --hash=sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969 \ - --hash=sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520 \ - --hash=sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887 \ - --hash=sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3 \ - --hash=sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6 \ - --hash=sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1 \ - --hash=sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff \ - --hash=sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981 \ - --hash=sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e +coverage[toml]==7.4.0 \ + --hash=sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca \ + --hash=sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471 \ + --hash=sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a \ + --hash=sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058 \ + --hash=sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85 \ + --hash=sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143 \ + --hash=sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446 \ + --hash=sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590 \ + --hash=sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a \ + --hash=sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105 \ + --hash=sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9 \ + --hash=sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a \ + --hash=sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac \ + 
--hash=sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25 \ + --hash=sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2 \ + --hash=sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450 \ + --hash=sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932 \ + --hash=sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba \ + --hash=sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137 \ + --hash=sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae \ + --hash=sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614 \ + --hash=sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70 \ + --hash=sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e \ + --hash=sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505 \ + --hash=sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870 \ + --hash=sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc \ + --hash=sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451 \ + --hash=sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7 \ + --hash=sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e \ + --hash=sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566 \ + --hash=sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5 \ + --hash=sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26 \ + --hash=sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2 \ + --hash=sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42 \ + --hash=sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555 \ + --hash=sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43 \ + --hash=sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed \ + --hash=sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa \ + --hash=sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516 \ + --hash=sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952 \ + --hash=sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd \ + --hash=sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09 \ + --hash=sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c \ + --hash=sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f \ + --hash=sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6 \ + --hash=sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1 \ + --hash=sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0 \ + --hash=sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e \ + --hash=sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9 \ + --hash=sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9 \ + --hash=sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e \ + --hash=sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06 # via # pytest-cov # tavern (pyproject.toml) -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - 
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.7 \ + --hash=sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960 \ + --hash=sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a \ + --hash=sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc \ + --hash=sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a \ + --hash=sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf \ + --hash=sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1 \ + --hash=sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39 \ + --hash=sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406 \ + --hash=sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a \ + --hash=sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a \ + --hash=sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c \ + --hash=sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be \ + --hash=sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15 \ + --hash=sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2 \ + --hash=sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d \ + --hash=sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157 \ + --hash=sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003 \ + --hash=sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248 \ + --hash=sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a \ + --hash=sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec \ + --hash=sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309 \ + --hash=sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7 \ + 
--hash=sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d # via secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv docopt==0.6.2 \ --hash=sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491 # via pykwalify -docutils==0.18.1 \ - --hash=sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c \ - --hash=sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06 +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via # flit # readme-renderer @@ -323,19 +326,19 @@ execnet==2.0.2 \ --hash=sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41 \ --hash=sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af # via pytest-xdist -faker==19.6.1 \ - --hash=sha256:5d6b7880b3bea708075ddf91938424453f07053a59f8fa0453c1870df6ff3292 \ - --hash=sha256:64c8513c53c3a809075ee527b323a0ba61517814123f3137e4912f5d43350139 +faker==22.4.0 \ + --hash=sha256:9abc6decb78dde54cccbad4432431b3caba796bd06950225da158e86c55855d3 \ + --hash=sha256:b649d7b9b03e9e8283506411a56ecef124c8cd8d2bd300d8d7c858fa42350c4e # via tavern (pyproject.toml) -filelock==3.12.4 \ - --hash=sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4 \ - --hash=sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via # tox # virtualenv -flask==2.3.3 \ - --hash=sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc \ - --hash=sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b +flask==3.0.1 \ + --hash=sha256:6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403 \ + --hash=sha256:ca631a507f6dfe6c278ae20112cea3ff54ff2216390bf8880f6b035a5354af13 # via tavern (pyproject.toml) flit==3.9.0 \ --hash=sha256:076c3aaba5ac24cf0ad3251f910900d95a08218e6bcb26f21fef1036cc4679ca \ @@ -349,159 +352,159 @@ fluent-logger==0.10.0 \ --hash=sha256:543637e5e62ec3fc3c92b44e5a4e148a3cea88a0f8ca4fae26c7e60fda7564c1 \ --hash=sha256:678bda90c513ff0393964b64544ce41ef25669d2089ce6c3b63d9a18554b9bfa # via tavern (pyproject.toml) -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.15.0 \ + --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ + --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca # via google-api-python-client -google-api-python-client==2.104.0 \ - --hash=sha256:867061526aa6dc6c1481d118e913a8a38a02a01eed589413968397ebd77df71d \ - --hash=sha256:bbc66520e7fe9417b93fd113f2a0a1afa789d686de9009b6e94e48fdea50a60f +google-api-python-client==2.114.0 \ + --hash=sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a \ + --hash=sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed # via tavern 
(pyproject.toml) -google-auth==2.23.3 \ - --hash=sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3 \ - --hash=sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda +google-auth==2.26.2 \ + --hash=sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424 \ + --hash=sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81 # via # google-api-core # google-api-python-client # google-auth-httplib2 -google-auth-httplib2==0.1.1 \ - --hash=sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c \ - --hash=sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29 +google-auth-httplib2==0.2.0 \ + --hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \ + --hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d # via google-api-python-client -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.62.0 \ + --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ + --hash=sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277 # via # google-api-core # grpcio-status -grpc-interceptor==0.15.3 \ - --hash=sha256:33592cb9d8c00fceed5755c71029f75aef55b273496dbced06f1d48f2571fcc3 \ - --hash=sha256:96be2043b7e49f9deb444f18b61c373ea28d22d81c90cd3b82127a4744eb9247 +grpc-interceptor==0.15.4 \ + --hash=sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d \ + --hash=sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926 # via tavern (pyproject.toml) -grpcio==1.59.0 \ - --hash=sha256:0ae444221b2c16d8211b55326f8ba173ba8f8c76349bfc1768198ba592b58f74 \ - --hash=sha256:0b84445fa94d59e6806c10266b977f92fa997db3585f125d6b751af02ff8b9fe \ - --hash=sha256:14890da86a0c0e9dc1ea8e90101d7a3e0e7b1e71f4487fab36e2bfd2ecadd13c \ - --hash=sha256:15f03bd714f987d48ae57fe092cf81960ae36da4e520e729392a59a75cda4f29 \ - --hash=sha256:1a839ba86764cc48226f50b924216000c79779c563a301586a107bda9cbe9dcf \ - --hash=sha256:225e5fa61c35eeaebb4e7491cd2d768cd8eb6ed00f2664fa83a58f29418b39fd \ - --hash=sha256:228b91ce454876d7eed74041aff24a8f04c0306b7250a2da99d35dd25e2a1211 \ - --hash=sha256:2ea95cd6abbe20138b8df965b4a8674ec312aaef3147c0f46a0bac661f09e8d0 \ - --hash=sha256:2f120d27051e4c59db2f267b71b833796770d3ea36ca712befa8c5fff5da6ebd \ - --hash=sha256:34341d9e81a4b669a5f5dca3b2a760b6798e95cdda2b173e65d29d0b16692857 \ - --hash=sha256:3859917de234a0a2a52132489c4425a73669de9c458b01c9a83687f1f31b5b10 \ - --hash=sha256:38823bd088c69f59966f594d087d3a929d1ef310506bee9e3648317660d65b81 \ - --hash=sha256:38da5310ef84e16d638ad89550b5b9424df508fd5c7b968b90eb9629ca9be4b9 \ - --hash=sha256:3b8ff795d35a93d1df6531f31c1502673d1cebeeba93d0f9bd74617381507e3f \ - --hash=sha256:50eff97397e29eeee5df106ea1afce3ee134d567aa2c8e04fabab05c79d791a7 \ - --hash=sha256:5711c51e204dc52065f4a3327dca46e69636a0b76d3e98c2c28c4ccef9b04c52 \ - --hash=sha256:598f3530231cf10ae03f4ab92d48c3be1fee0c52213a1d5958df1a90957e6a88 \ - --hash=sha256:611d9aa0017fa386809bddcb76653a5ab18c264faf4d9ff35cb904d44745f575 \ - --hash=sha256:61bc72a00ecc2b79d9695220b4d02e8ba53b702b42411397e831c9b0589f08a3 \ - --hash=sha256:63982150a7d598281fa1d7ffead6096e543ff8be189d3235dd2b5604f2c553e5 \ - --hash=sha256:6c4b1cc3a9dc1924d2eb26eec8792fedd4b3fcd10111e26c1d551f2e4eda79ce \ - 
--hash=sha256:81d86a096ccd24a57fa5772a544c9e566218bc4de49e8c909882dae9d73392df \ - --hash=sha256:849c47ef42424c86af069a9c5e691a765e304079755d5c29eff511263fad9c2a \ - --hash=sha256:871371ce0c0055d3db2a86fdebd1e1d647cf21a8912acc30052660297a5a6901 \ - --hash=sha256:8cd2d38c2d52f607d75a74143113174c36d8a416d9472415eab834f837580cf7 \ - --hash=sha256:936b2e04663660c600d5173bc2cc84e15adbad9c8f71946eb833b0afc205b996 \ - --hash=sha256:93e9cb546e610829e462147ce724a9cb108e61647a3454500438a6deef610be1 \ - --hash=sha256:956f0b7cb465a65de1bd90d5a7475b4dc55089b25042fe0f6c870707e9aabb1d \ - --hash=sha256:986de4aa75646e963466b386a8c5055c8b23a26a36a6c99052385d6fe8aaf180 \ - --hash=sha256:aca8a24fef80bef73f83eb8153f5f5a0134d9539b4c436a716256b311dda90a6 \ - --hash=sha256:acf70a63cf09dd494000007b798aff88a436e1c03b394995ce450be437b8e54f \ - --hash=sha256:b34c7a4c31841a2ea27246a05eed8a80c319bfc0d3e644412ec9ce437105ff6c \ - --hash=sha256:b95ec8ecc4f703f5caaa8d96e93e40c7f589bad299a2617bdb8becbcce525539 \ - --hash=sha256:ba0ca727a173ee093f49ead932c051af463258b4b493b956a2c099696f38aa66 \ - --hash=sha256:c041a91712bf23b2a910f61e16565a05869e505dc5a5c025d429ca6de5de842c \ - --hash=sha256:c0488c2b0528e6072010182075615620071371701733c63ab5be49140ed8f7f0 \ - --hash=sha256:c173a87d622ea074ce79be33b952f0b424fa92182063c3bda8625c11d3585d09 \ - --hash=sha256:c251d22de8f9f5cca9ee47e4bade7c5c853e6e40743f47f5cc02288ee7a87252 \ - --hash=sha256:c4dfdb49f4997dc664f30116af2d34751b91aa031f8c8ee251ce4dcfc11277b0 \ - --hash=sha256:ca87ee6183421b7cea3544190061f6c1c3dfc959e0b57a5286b108511fd34ff4 \ - --hash=sha256:ceb1e68135788c3fce2211de86a7597591f0b9a0d2bb80e8401fd1d915991bac \ - --hash=sha256:d09bd2a4e9f5a44d36bb8684f284835c14d30c22d8ec92ce796655af12163588 \ - --hash=sha256:d0fcf53df684fcc0154b1e61f6b4a8c4cf5f49d98a63511e3f30966feff39cd0 \ - --hash=sha256:d74f7d2d7c242a6af9d4d069552ec3669965b74fed6b92946e0e13b4168374f9 \ - --hash=sha256:de2599985b7c1b4ce7526e15c969d66b93687571aa008ca749d6235d056b7205 \ - --hash=sha256:e5378785dce2b91eb2e5b857ec7602305a3b5cf78311767146464bfa365fc897 \ - --hash=sha256:ec78aebb9b6771d6a1de7b6ca2f779a2f6113b9108d486e904bde323d51f5589 \ - --hash=sha256:f1feb034321ae2f718172d86b8276c03599846dc7bb1792ae370af02718f91c5 \ - --hash=sha256:f21917aa50b40842b51aff2de6ebf9e2f6af3fe0971c31960ad6a3a2b24988f4 \ - --hash=sha256:f367e4b524cb319e50acbdea57bb63c3b717c5d561974ace0b065a648bb3bad3 \ - --hash=sha256:f6cfe44a5d7c7d5f1017a7da1c8160304091ca5dc64a0f85bca0d63008c3137a \ - --hash=sha256:fa66cac32861500f280bb60fe7d5b3e22d68c51e18e65367e38f8669b78cea3b \ - --hash=sha256:fc8bf2e7bc725e76c0c11e474634a08c8f24bcf7426c0c6d60c8f9c6e70e4d4a \ - --hash=sha256:fe976910de34d21057bcb53b2c5e667843588b48bf11339da2a75f5c4c5b4055 +grpcio==1.60.0 \ + --hash=sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6 \ + --hash=sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328 \ + --hash=sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead \ + --hash=sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5 \ + --hash=sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491 \ + --hash=sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96 \ + --hash=sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444 \ + --hash=sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951 \ + --hash=sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf \ + 
--hash=sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253 \ + --hash=sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629 \ + --hash=sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae \ + --hash=sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43 \ + --hash=sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b \ + --hash=sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14 \ + --hash=sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab \ + --hash=sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390 \ + --hash=sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2 \ + --hash=sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0 \ + --hash=sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590 \ + --hash=sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508 \ + --hash=sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b \ + --hash=sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08 \ + --hash=sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13 \ + --hash=sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca \ + --hash=sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03 \ + --hash=sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748 \ + --hash=sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860 \ + --hash=sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d \ + --hash=sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353 \ + --hash=sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e \ + --hash=sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c \ + --hash=sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134 \ + --hash=sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415 \ + --hash=sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320 \ + --hash=sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179 \ + --hash=sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324 \ + --hash=sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18 \ + --hash=sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df \ + --hash=sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e \ + --hash=sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b \ + --hash=sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6 \ + --hash=sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d \ + --hash=sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff \ + --hash=sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968 \ + --hash=sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619 \ + --hash=sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139 \ + --hash=sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55 \ + --hash=sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454 \ + --hash=sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65 \ + --hash=sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a \ + 
--hash=sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19 \ + --hash=sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b \ + --hash=sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd # via # grpc-interceptor # grpcio-reflection # grpcio-status # grpcio-tools # tavern (pyproject.toml) -grpcio-reflection==1.59.0 \ - --hash=sha256:1fe8f0dd6c180fdcf4e12ced2a8f784d9c741ccbc0b198585b1df024b7f8f3f2 \ - --hash=sha256:bf4efc7e2e8162e5be9736f4d0a0b324c9bf0c04ad597a9d78fcaf1fbdf818ec +grpcio-reflection==1.60.0 \ + --hash=sha256:3f6c0c73ba8f20d1420c5e72fc4dd0389fac346ed8fb32a28e6e1967b44fff35 \ + --hash=sha256:f7a347ebd6cecf347fc836fd520fd1f0b3411912981649c7fb34d62a3a15aa4e # via tavern (pyproject.toml) -grpcio-status==1.59.0 \ - --hash=sha256:cb5a222b14a80ee050bff9676623822e953bff0c50d2d29180de723652fdf10d \ - --hash=sha256:f93b9c33e0a26162ef8431bfcffcc3e1fb217ccd8d7b5b3061b6e9f813e698b5 +grpcio-status==1.60.0 \ + --hash=sha256:7d383fa36e59c1e61d380d91350badd4d12ac56e4de2c2b831b050362c3c572e \ + --hash=sha256:f10e0b6db3adc0fdc244b71962814ee982996ef06186446b5695b9fa635aa1ab # via tavern (pyproject.toml) -grpcio-tools==1.59.0 \ - --hash=sha256:0548e901894399886ff4a4cd808cb850b60c021feb4a8977a0751f14dd7e55d9 \ - --hash=sha256:05bf7b3ed01c8a562bb7e840f864c58acedbd6924eb616367c0bd0a760bdf483 \ - --hash=sha256:1d551ff42962c7c333c3da5c70d5e617a87dee581fa2e2c5ae2d5137c8886779 \ - --hash=sha256:1df755951f204e65bf9232a9cac5afe7d6b8e4c87ac084d3ecd738fdc7aa4174 \ - --hash=sha256:204e08f807b1d83f5f0efea30c4e680afe26a43dec8ba614a45fa698a7ef0a19 \ - --hash=sha256:240a7a3c2c54f77f1f66085a635bca72003d02f56a670e7db19aec531eda8f78 \ - --hash=sha256:26eb2eebf150a33ebf088e67c1acf37eb2ac4133d9bfccbaa011ad2148c08b42 \ - --hash=sha256:27a7f226b741b2ebf7e2d0779d2c9b17f446d1b839d59886c1619e62cc2ae472 \ - --hash=sha256:2d970aa26854f535ffb94ea098aa8b43de020d9a14682e4a15dcdaeac7801b27 \ - --hash=sha256:2ee960904dde12a7fa48e1591a5b3eeae054bdce57bacf9fd26685a98138f5bf \ - --hash=sha256:335e2f355a0c544a88854e2c053aff8a3f398b84a263a96fa19d063ca1fe513a \ - --hash=sha256:387662bee8e4c0b52cc0f61eaaca0ca583f5b227103f685b76083a3590a71a3e \ - --hash=sha256:40cbf712769242c2ba237745285ef789114d7fcfe8865fc4817d87f20015e99a \ - --hash=sha256:4499d4bc5aa9c7b645018d8b0db4bebd663d427aabcd7bee7777046cb1bcbca7 \ - --hash=sha256:498e7be0b14385980efa681444ba481349c131fc5ec88003819f5d929646947c \ - --hash=sha256:4a10e59cca462208b489478340b52a96d64e8b8b6f1ac097f3e8cb211d3f66c0 \ - --hash=sha256:4ee443abcd241a5befb05629013fbf2eac637faa94aaa3056351aded8a31c1bc \ - --hash=sha256:51d9595629998d8b519126c5a610f15deb0327cd6325ed10796b47d1d292e70b \ - --hash=sha256:520c0c83ea79d14b0679ba43e19c64ca31d30926b26ad2ca7db37cbd89c167e2 \ - --hash=sha256:5b2d6da553980c590487f2e7fd3ec9c1ad8805ff2ec77977b92faa7e3ca14e1f \ - --hash=sha256:6119f62c462d119c63227b9534210f0f13506a888151b9bf586f71e7edf5088b \ - --hash=sha256:6aec8a4ed3808b7dfc1276fe51e3e24bec0eeaf610d395bcd42934647cf902a3 \ - --hash=sha256:71cc6db1d66da3bc3730d9937bddc320f7b1f1dfdff6342bcb5741515fe4110b \ - --hash=sha256:784aa52965916fec5afa1a28eeee6f0073bb43a2a1d7fedf963393898843077a \ - --hash=sha256:821dba464d84ebbcffd9d420302404db2fa7a40c7ff4c4c4c93726f72bfa2769 \ - --hash=sha256:868892ad9e00651a38dace3e4924bae82fc4fd4df2c65d37b74381570ee8deb1 \ - --hash=sha256:882b809b42b5464bee55288f4e60837297f9618e53e69ae3eea6d61b05ce48fa \ - --hash=sha256:8c4634b3589efa156a8d5860c0a2547315bd5c9e52d14c960d716fe86e0927be \ - 
--hash=sha256:8f0da5861ee276ca68493b217daef358960e8527cc63c7cb292ca1c9c54939af \ - --hash=sha256:962d1a3067129152cee3e172213486cb218a6bad703836991f46f216caefcf00 \ - --hash=sha256:99b3bde646720bbfb77f263f5ba3e1a0de50632d43c38d405a0ef9c7e94373cd \ - --hash=sha256:9af7e138baa9b2895cf1f3eb718ac96fc5ae2f8e31fca405e21e0e5cd1643c52 \ - --hash=sha256:9ed05197c5ab071e91bcef28901e97ca168c4ae94510cb67a14cb4931b94255a \ - --hash=sha256:9fc02a6e517c34dcf885ff3b57260b646551083903e3d2c780b4971ce7d4ab7c \ - --hash=sha256:a4f6cae381f21fee1ef0a5cbbbb146680164311157ae618edf3061742d844383 \ - --hash=sha256:aa4018f2d8662ac4d9830445d3d253a11b3e096e8afe20865547137aa1160e93 \ - --hash=sha256:b519f2ecde9a579cad2f4a7057d5bb4e040ad17caab8b5e691ed7a13b9db0be9 \ - --hash=sha256:b8e95d921cc2a1521d4750eedefec9f16031457920a6677edebe9d1b2ad6ae60 \ - --hash=sha256:bb87158dbbb9e5a79effe78d54837599caa16df52d8d35366e06a91723b587ae \ - --hash=sha256:bfa4b2b7d21c5634b62e5f03462243bd705adc1a21806b5356b8ce06d902e160 \ - --hash=sha256:c683be38a9bf4024c223929b4cd2f0a0858c94e9dc8b36d7eaa5a48ce9323a6f \ - --hash=sha256:cb63055739808144b541986291679d643bae58755d0eb082157c4d4c04443905 \ - --hash=sha256:d0f0806de1161c7f248e4c183633ee7a58dfe45c2b77ddf0136e2e7ad0650b1b \ - --hash=sha256:db030140d0da2368319e2f23655df3baec278c7e0078ecbe051eaf609a69382c \ - --hash=sha256:de156c18b0c638aaee3be6ad650c8ba7dec94ed4bac26403aec3dce95ffe9407 \ - --hash=sha256:df85096fcac7cea8aa5bd84b7a39c4cdbf556b93669bb4772eb96aacd3222a4e \ - --hash=sha256:e312ddc2d8bec1a23306a661ad52734f984c9aad5d8f126ebb222a778d95407d \ - --hash=sha256:eeed386971bb8afc3ec45593df6a1154d680d87be1209ef8e782e44f85f47e64 \ - --hash=sha256:ef3e8aca2261f7f07436d4e2111556c1fb9bf1f9cfcdf35262743ccdee1b6ce9 \ - --hash=sha256:f14a6e4f700dfd30ff8f0e6695f944affc16ae5a1e738666b3fae4e44b65637e \ - --hash=sha256:f1c684c0d9226d04cadafced620a46ab38c346d0780eaac7448da96bf12066a3 \ - --hash=sha256:f381ae3ad6a5eb27aad8d810438937d8228977067c54e0bd456fce7e11fdbf3d \ - --hash=sha256:f6263b85261b62471cb97b7505df72d72b8b62e5e22d8184924871a6155b4dbf \ - --hash=sha256:f965707da2b48a33128615bcfebedd215a3a30e346447e885bb3da37a143177a +grpcio-tools==1.60.0 \ + --hash=sha256:081336d8258f1a56542aa8a7a5dec99a2b38d902e19fbdd744594783301b0210 \ + --hash=sha256:1748893efd05cf4a59a175d7fa1e4fbb652f4d84ccaa2109f7869a2be48ed25e \ + --hash=sha256:17a32b3da4fc0798cdcec0a9c974ac2a1e98298f151517bf9148294a3b1a5742 \ + --hash=sha256:18976684a931ca4bcba65c78afa778683aefaae310f353e198b1823bf09775a0 \ + --hash=sha256:1b93ae8ffd18e9af9a965ebca5fa521e89066267de7abdde20721edc04e42721 \ + --hash=sha256:1fbb9554466d560472f07d906bfc8dcaf52f365c2a407015185993e30372a886 \ + --hash=sha256:24c4ead4a03037beaeb8ef2c90d13d70101e35c9fae057337ed1a9144ef10b53 \ + --hash=sha256:2a8a758701f3ac07ed85f5a4284c6a9ddefcab7913a8e552497f919349e72438 \ + --hash=sha256:2dd01257e4feff986d256fa0bac9f56de59dc735eceeeb83de1c126e2e91f653 \ + --hash=sha256:2e00de389729ca8d8d1a63c2038703078a887ff738dc31be640b7da9c26d0d4f \ + --hash=sha256:2fb4cf74bfe1e707cf10bc9dd38a1ebaa145179453d150febb121c7e9cd749bf \ + --hash=sha256:2fd1671c52f96e79a2302c8b1c1f78b8a561664b8b3d6946f20d8f1cc6b4225a \ + --hash=sha256:321b18f42a70813545e416ddcb8bf20defa407a8114906711c9710a69596ceda \ + --hash=sha256:3456df087ea61a0972a5bc165aed132ed6ddcc63f5749e572f9fff84540bdbad \ + --hash=sha256:4041538f55aad5b3ae7e25ab314d7995d689e968bfc8aa169d939a3160b1e4c6 \ + --hash=sha256:559ce714fe212aaf4abbe1493c5bb8920def00cc77ce0d45266f4fd9d8b3166f \ + 
--hash=sha256:5a907a4f1ffba86501b2cdb8682346249ea032b922fc69a92f082ba045cca548 \ + --hash=sha256:5ce6bbd4936977ec1114f2903eb4342781960d521b0d82f73afedb9335251f6f \ + --hash=sha256:6170873b1e5b6580ebb99e87fb6e4ea4c48785b910bd7af838cc6e44b2bccb04 \ + --hash=sha256:6192184b1f99372ff1d9594bd4b12264e3ff26440daba7eb043726785200ff77 \ + --hash=sha256:6807b7a3f3e6e594566100bd7fe04a2c42ce6d5792652677f1aaf5aa5adaef3d \ + --hash=sha256:687f576d7ff6ce483bc9a196d1ceac45144e8733b953620a026daed8e450bc38 \ + --hash=sha256:74025fdd6d1cb7ba4b5d087995339e9a09f0c16cf15dfe56368b23e41ffeaf7a \ + --hash=sha256:7a5263a0f2ddb7b1cfb2349e392cfc4f318722e0f48f886393e06946875d40f3 \ + --hash=sha256:7a6fe752205caae534f29fba907e2f59ff79aa42c6205ce9a467e9406cbac68c \ + --hash=sha256:7c1cde49631732356cb916ee1710507967f19913565ed5f9991e6c9cb37e3887 \ + --hash=sha256:811abb9c4fb6679e0058dfa123fb065d97b158b71959c0e048e7972bbb82ba0f \ + --hash=sha256:857c5351e9dc33a019700e171163f94fcc7e3ae0f6d2b026b10fda1e3c008ef1 \ + --hash=sha256:87cf439178f3eb45c1a889b2e4a17cbb4c450230d92c18d9c57e11271e239c55 \ + --hash=sha256:9970d384fb0c084b00945ef57d98d57a8d32be106d8f0bd31387f7cbfe411b5b \ + --hash=sha256:9ee35234f1da8fba7ddbc544856ff588243f1128ea778d7a1da3039be829a134 \ + --hash=sha256:addc9b23d6ff729d9f83d4a2846292d4c84f5eb2ec38f08489a6a0d66ac2b91e \ + --hash=sha256:b22b1299b666eebd5752ba7719da536075eae3053abcf2898b65f763c314d9da \ + --hash=sha256:b8f7a5094adb49e85db13ea3df5d99a976c2bdfd83b0ba26af20ebb742ac6786 \ + --hash=sha256:b96981f3a31b85074b73d97c8234a5ed9053d65a36b18f4a9c45a2120a5b7a0a \ + --hash=sha256:bbf0ed772d2ae7e8e5d7281fcc00123923ab130b94f7a843eee9af405918f924 \ + --hash=sha256:bd2a17b0193fbe4793c215d63ce1e01ae00a8183d81d7c04e77e1dfafc4b2b8a \ + --hash=sha256:c771b19dce2bfe06899247168c077d7ab4e273f6655d8174834f9a6034415096 \ + --hash=sha256:d941749bd8dc3f8be58fe37183143412a27bec3df8482d5abd6b4ec3f1ac2924 \ + --hash=sha256:dba6e32c87b4af29b5f475fb2f470f7ee3140bfc128644f17c6c59ddeb670680 \ + --hash=sha256:dd1e68c232fe01dd5312a8dbe52c50ecd2b5991d517d7f7446af4ba6334ba872 \ + --hash=sha256:e5614cf0960456d21d8a0f4902e3e5e3bcacc4e400bf22f196e5dd8aabb978b7 \ + --hash=sha256:e5c519a0d4ba1ab44a004fa144089738c59278233e2010b2cf4527dc667ff297 \ + --hash=sha256:e68dc4474f30cad11a965f0eb5d37720a032b4720afa0ec19dbcea2de73b5aae \ + --hash=sha256:e70d867c120d9849093b0ac24d861e378bc88af2552e743d83b9f642d2caa7c2 \ + --hash=sha256:e87cabac7969bdde309575edc2456357667a1b28262b2c1f12580ef48315b19d \ + --hash=sha256:eae27f9b16238e2aaee84c77b5923c6924d6dccb0bdd18435bf42acc8473ae1a \ + --hash=sha256:ec0e401e9a43d927d216d5169b03c61163fb52b665c5af2fed851357b15aef88 \ + --hash=sha256:ed30499340228d733ff69fcf4a66590ed7921f94eb5a2bf692258b1280b9dac7 \ + --hash=sha256:f10ef47460ce3c6fd400f05fe757b90df63486c9b84d1ecad42dcc5f80c8ac14 \ + --hash=sha256:f3d916606dcf5610d4367918245b3d9d8cd0d2ec0b7043d1bbb8c50fe9815c3a \ + --hash=sha256:f610384dee4b1ca705e8da66c5b5fe89a2de3d165c5282c3d1ddf40cb18924e4 \ + --hash=sha256:fb4df80868b3e397d5fbccc004c789d2668b622b51a9d2387b4c89c80d31e2c5 \ + --hash=sha256:fc01bc1079279ec342f0f1b6a107b3f5dc3169c33369cf96ada6e2e171f74e86 # via tavern (pyproject.toml) httplib2==0.22.0 \ --hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \ @@ -509,21 +512,21 @@ httplib2==0.22.0 \ # via # google-api-python-client # google-auth-httplib2 -identify==2.5.29 \ - --hash=sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b \ - 
--hash=sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5 +identify==2.5.33 \ + --hash=sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d \ + --hash=sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34 # via pre-commit -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.6 \ + --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ + --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f # via requests imagesize==1.4.1 \ --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a # via sphinx -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==7.0.1 \ + --hash=sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e \ + --hash=sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc # via # keyring # twine @@ -547,9 +550,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via # flask # sphinx @@ -557,87 +560,87 @@ jmespath==1.0.1 \ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe # via tavern (pyproject.toml) -jsonschema==4.20.0 \ - --hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \ - --hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3 +jsonschema==4.21.1 \ + --hash=sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f \ + --hash=sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5 # via tavern (pyproject.toml) jsonschema-specifications==2023.12.1 \ --hash=sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc \ --hash=sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c # via jsonschema -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==24.3.0 \ + --hash=sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836 \ + --hash=sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25 # via twine -markdown==3.4.4 \ - --hash=sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6 \ - --hash=sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941 +markdown==3.5.2 \ + --hash=sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd \ + --hash=sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8 # via sphinx-markdown-tables markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - 
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - 
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.4 \ + --hash=sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69 \ + --hash=sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0 \ + --hash=sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d \ + --hash=sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec \ + --hash=sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5 \ + --hash=sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411 \ + --hash=sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3 \ + --hash=sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74 \ + --hash=sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0 \ + --hash=sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949 \ + --hash=sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d \ + --hash=sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279 \ + --hash=sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f \ + --hash=sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6 \ + --hash=sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc \ + --hash=sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e \ + --hash=sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954 \ + --hash=sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656 \ + --hash=sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc \ + --hash=sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518 \ + --hash=sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56 \ + --hash=sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc \ + --hash=sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa \ + --hash=sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565 \ + 
--hash=sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4 \ + --hash=sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb \ + --hash=sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250 \ + --hash=sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4 \ + --hash=sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959 \ + --hash=sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc \ + --hash=sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474 \ + --hash=sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863 \ + --hash=sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8 \ + --hash=sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f \ + --hash=sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2 \ + --hash=sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e \ + --hash=sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e \ + --hash=sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb \ + --hash=sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f \ + --hash=sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a \ + --hash=sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26 \ + --hash=sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d \ + --hash=sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2 \ + --hash=sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131 \ + --hash=sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789 \ + --hash=sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6 \ + --hash=sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a \ + --hash=sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858 \ + --hash=sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e \ + --hash=sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb \ + --hash=sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e \ + --hash=sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84 \ + --hash=sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7 \ + --hash=sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea \ + --hash=sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b \ + --hash=sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6 \ + --hash=sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475 \ + --hash=sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74 \ + --hash=sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a \ + --hash=sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00 # via # jinja2 # werkzeug @@ -645,103 +648,96 @@ mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 +more-itertools==10.2.0 \ + --hash=sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684 \ + 
--hash=sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1 # via jaraco-classes -msgpack==1.0.5 \ - --hash=sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164 \ - --hash=sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b \ - --hash=sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c \ - --hash=sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf \ - --hash=sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd \ - --hash=sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d \ - --hash=sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c \ - --hash=sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a \ - --hash=sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e \ - --hash=sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd \ - --hash=sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025 \ - --hash=sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5 \ - --hash=sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705 \ - --hash=sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a \ - --hash=sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d \ - --hash=sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb \ - --hash=sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11 \ - --hash=sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f \ - --hash=sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c \ - --hash=sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d \ - --hash=sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea \ - --hash=sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba \ - --hash=sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87 \ - --hash=sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a \ - --hash=sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c \ - --hash=sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080 \ - --hash=sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198 \ - --hash=sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9 \ - --hash=sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a \ - --hash=sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b \ - --hash=sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f \ - --hash=sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437 \ - --hash=sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f \ - --hash=sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7 \ - --hash=sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2 \ - --hash=sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0 \ - --hash=sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48 \ - --hash=sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898 \ - --hash=sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0 \ - --hash=sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57 \ - --hash=sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8 \ - 
--hash=sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282 \ - --hash=sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1 \ - --hash=sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82 \ - --hash=sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc \ - --hash=sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb \ - --hash=sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6 \ - --hash=sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7 \ - --hash=sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9 \ - --hash=sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c \ - --hash=sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1 \ - --hash=sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed \ - --hash=sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c \ - --hash=sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c \ - --hash=sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77 \ - --hash=sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81 \ - --hash=sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a \ - --hash=sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3 \ - --hash=sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086 \ - --hash=sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9 \ - --hash=sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f \ - --hash=sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b \ - --hash=sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d +msgpack==1.0.7 \ + --hash=sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862 \ + --hash=sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d \ + --hash=sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3 \ + --hash=sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672 \ + --hash=sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0 \ + --hash=sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9 \ + --hash=sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee \ + --hash=sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46 \ + --hash=sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524 \ + --hash=sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819 \ + --hash=sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc \ + --hash=sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc \ + --hash=sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1 \ + --hash=sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82 \ + --hash=sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81 \ + --hash=sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6 \ + --hash=sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d \ + --hash=sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2 \ + --hash=sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c \ + --hash=sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87 \ + 
--hash=sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84 \ + --hash=sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e \ + --hash=sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95 \ + --hash=sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f \ + --hash=sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b \ + --hash=sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93 \ + --hash=sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf \ + --hash=sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61 \ + --hash=sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c \ + --hash=sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8 \ + --hash=sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d \ + --hash=sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c \ + --hash=sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4 \ + --hash=sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba \ + --hash=sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415 \ + --hash=sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee \ + --hash=sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d \ + --hash=sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9 \ + --hash=sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075 \ + --hash=sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f \ + --hash=sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7 \ + --hash=sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681 \ + --hash=sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329 \ + --hash=sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1 \ + --hash=sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf \ + --hash=sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c \ + --hash=sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5 \ + --hash=sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b \ + --hash=sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5 \ + --hash=sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e \ + --hash=sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b \ + --hash=sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad \ + --hash=sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd \ + --hash=sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7 \ + --hash=sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002 \ + --hash=sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc # via fluent-logger -mypy==1.5.1 \ - --hash=sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315 \ - --hash=sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0 \ - --hash=sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373 \ - --hash=sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a \ - --hash=sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161 \ - --hash=sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275 \ - 
--hash=sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693 \ - --hash=sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb \ - --hash=sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65 \ - --hash=sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4 \ - --hash=sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb \ - --hash=sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243 \ - --hash=sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14 \ - --hash=sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4 \ - --hash=sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1 \ - --hash=sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a \ - --hash=sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160 \ - --hash=sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25 \ - --hash=sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12 \ - --hash=sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d \ - --hash=sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92 \ - --hash=sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770 \ - --hash=sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2 \ - --hash=sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70 \ - --hash=sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb \ - --hash=sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5 \ - --hash=sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f +mypy==1.8.0 \ + --hash=sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6 \ + --hash=sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d \ + --hash=sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02 \ + --hash=sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d \ + --hash=sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3 \ + --hash=sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3 \ + --hash=sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3 \ + --hash=sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66 \ + --hash=sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259 \ + --hash=sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835 \ + --hash=sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd \ + --hash=sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d \ + --hash=sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8 \ + --hash=sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07 \ + --hash=sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b \ + --hash=sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e \ + --hash=sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6 \ + --hash=sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae \ + --hash=sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9 \ + --hash=sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d \ + --hash=sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a \ + 
--hash=sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592 \ + --hash=sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218 \ + --hash=sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817 \ + --hash=sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4 \ + --hash=sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410 \ + --hash=sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55 # via tavern (pyproject.toml) mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ @@ -749,31 +745,31 @@ mypy-extensions==1.0.0 \ # via # mypy # tavern (pyproject.toml) -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +nh3==0.2.15 \ + --hash=sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770 \ + --hash=sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf \ + --hash=sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305 \ + --hash=sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601 \ + --hash=sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28 \ + --hash=sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7 \ + --hash=sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3 \ + --hash=sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911 \ + --hash=sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf \ + --hash=sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0 \ + --hash=sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5 \ + --hash=sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97 \ + --hash=sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d \ + --hash=sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e \ + --hash=sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3 \ + --hash=sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6 # via readme-renderer nodeenv==1.8.0 \ --hash=sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2 \ 
--hash=sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec # via pre-commit -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # build # pyproject-api @@ -783,9 +779,9 @@ packaging==23.1 \ paho-mqtt==1.6.1 \ --hash=sha256:2a8291c81623aec00372b5a85558a372c747cbca8e9934dfe218638b8eefc26f # via tavern (pyproject.toml) -pbr==5.11.1 \ - --hash=sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b \ - --hash=sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3 +pbr==6.0.0 \ + --hash=sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda \ + --hash=sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9 # via stevedore pip-tools==7.3.0 \ --hash=sha256:8717693288720a8c6ebd07149c93ab0be1fced0b5191df9e9decd3263e20d85e \ @@ -795,41 +791,39 @@ pkginfo==1.9.6 \ --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==3.10.0 \ - --hash=sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d \ - --hash=sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d +platformdirs==4.1.0 \ + --hash=sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380 \ + --hash=sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420 # via # tox # virtualenv -pluggy==1.0.0 \ - --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ - --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 +pluggy==1.3.0 \ + --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ + --hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 # via # allure-python-commons # pytest # tox -pre-commit==3.4.0 \ - --hash=sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522 \ - --hash=sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945 +pre-commit==3.6.0 \ + --hash=sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376 \ + --hash=sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d # via tavern (pyproject.toml) -proto-plus==1.22.3 \ - --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ - --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b +proto-plus==1.23.0 \ + --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ + --hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c # via tavern (pyproject.toml) -protobuf==4.24.4 \ - --hash=sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe \ - --hash=sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085 \ - --hash=sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b \ - --hash=sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667 \ - --hash=sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37 \ - --hash=sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92 \ - --hash=sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4 \ - 
--hash=sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b \ - --hash=sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9 \ - --hash=sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd \ - --hash=sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e \ - --hash=sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46 \ - --hash=sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb +protobuf==4.25.2 \ + --hash=sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62 \ + --hash=sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d \ + --hash=sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61 \ + --hash=sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62 \ + --hash=sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3 \ + --hash=sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9 \ + --hash=sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830 \ + --hash=sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6 \ + --hash=sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0 \ + --hash=sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020 \ + --hash=sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e # via # google-api-core # googleapis-common-protos @@ -842,9 +836,9 @@ py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # via tavern (pyproject.toml) -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde +pyasn1==0.5.1 \ + --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ + --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c # via # pyasn1-modules # rsa @@ -856,17 +850,17 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.17.2 \ + --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \ + --hash=sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367 # via # readme-renderer # rich # sphinx # tavern (pyproject.toml) -pyjwt==2.7.0 \ - --hash=sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1 \ - --hash=sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via tavern (pyproject.toml) pykwalify==1.8.0 \ --hash=sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651 \ @@ -896,9 +890,9 @@ pytest-cov==4.1.0 \ --hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \ --hash=sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a # via tavern (pyproject.toml) -pytest-xdist==3.3.1 \ - 
--hash=sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93 \ - --hash=sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2 +pytest-xdist==3.5.0 \ + --hash=sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a \ + --hash=sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24 # via tavern (pyproject.toml) python-box==6.1.0 \ --hash=sha256:11cbe62f0dace8a6e2a10d210a5e87b99ad1a1286865568862516794c923a988 \ @@ -926,7 +920,9 @@ python-dateutil==2.8.2 \ # faker # pykwalify pyyaml==6.0.1 \ + --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ + --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \ --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ @@ -934,7 +930,10 @@ pyyaml==6.0.1 \ --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ + --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \ + --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \ --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ + --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \ --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ @@ -942,11 +941,15 @@ pyyaml==6.0.1 \ --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ + --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \ --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ + --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \ + --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \ --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ + --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \ --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ @@ -959,7 +962,9 @@ pyyaml==6.0.1 \ --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ + --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \ 
--hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ + --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \ --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ @@ -1001,110 +1006,110 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.5.3 \ - --hash=sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6 \ - --hash=sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9 +rich==13.7.0 \ + --hash=sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa \ + --hash=sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235 # via twine -rpds-py==0.16.2 \ - --hash=sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9 \ - --hash=sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60 \ - --hash=sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6 \ - --hash=sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe \ - --hash=sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734 \ - --hash=sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb \ - --hash=sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04 \ - --hash=sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772 \ - --hash=sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c \ - --hash=sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0 \ - --hash=sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2 \ - --hash=sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e \ - --hash=sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365 \ - --hash=sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef \ - --hash=sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9 \ - --hash=sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc \ - --hash=sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa \ - --hash=sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773 \ - --hash=sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53 \ - --hash=sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f \ - --hash=sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e \ - --hash=sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57 \ - --hash=sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da \ - --hash=sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac \ - --hash=sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e \ - --hash=sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba \ - --hash=sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe \ - --hash=sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391 \ - --hash=sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d \ - --hash=sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e \ - 
--hash=sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b \ - --hash=sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0 \ - --hash=sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f \ - --hash=sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73 \ - --hash=sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3 \ - --hash=sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9 \ - --hash=sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d \ - --hash=sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261 \ - --hash=sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e \ - --hash=sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2 \ - --hash=sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0 \ - --hash=sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44 \ - --hash=sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c \ - --hash=sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d \ - --hash=sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108 \ - --hash=sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44 \ - --hash=sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b \ - --hash=sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d \ - --hash=sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82 \ - --hash=sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff \ - --hash=sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3 \ - --hash=sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b \ - --hash=sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850 \ - --hash=sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d \ - --hash=sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6 \ - --hash=sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5 \ - --hash=sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00 \ - --hash=sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8 \ - --hash=sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8 \ - --hash=sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322 \ - --hash=sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed \ - --hash=sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3 \ - --hash=sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc \ - --hash=sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808 \ - --hash=sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4 \ - --hash=sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63 \ - --hash=sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb \ - --hash=sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133 \ - --hash=sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1 \ - --hash=sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2 \ - --hash=sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f \ - --hash=sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b \ - 
--hash=sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3 \ - --hash=sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0 \ - --hash=sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7 \ - --hash=sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0 \ - --hash=sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad \ - --hash=sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d \ - --hash=sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c \ - --hash=sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45 \ - --hash=sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58 \ - --hash=sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5 \ - --hash=sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748 \ - --hash=sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b \ - --hash=sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d \ - --hash=sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733 \ - --hash=sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239 \ - --hash=sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc \ - --hash=sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b \ - --hash=sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb \ - --hash=sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018 \ - --hash=sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97 \ - --hash=sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1 \ - --hash=sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19 \ - --hash=sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a \ - --hash=sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698 \ - --hash=sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7 \ - --hash=sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851 \ - --hash=sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc +rpds-py==0.17.1 \ + --hash=sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147 \ + --hash=sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7 \ + --hash=sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2 \ + --hash=sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68 \ + --hash=sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1 \ + --hash=sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382 \ + --hash=sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d \ + --hash=sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921 \ + --hash=sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38 \ + --hash=sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4 \ + --hash=sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a \ + --hash=sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d \ + --hash=sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518 \ + --hash=sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e \ + --hash=sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d \ + 
--hash=sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf \ + --hash=sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5 \ + --hash=sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba \ + --hash=sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6 \ + --hash=sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59 \ + --hash=sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253 \ + --hash=sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6 \ + --hash=sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f \ + --hash=sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3 \ + --hash=sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea \ + --hash=sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1 \ + --hash=sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76 \ + --hash=sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93 \ + --hash=sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad \ + --hash=sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad \ + --hash=sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc \ + --hash=sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049 \ + --hash=sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d \ + --hash=sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90 \ + --hash=sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d \ + --hash=sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd \ + --hash=sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25 \ + --hash=sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2 \ + --hash=sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f \ + --hash=sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6 \ + --hash=sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4 \ + --hash=sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c \ + --hash=sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8 \ + --hash=sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d \ + --hash=sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b \ + --hash=sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19 \ + --hash=sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453 \ + --hash=sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9 \ + --hash=sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde \ + --hash=sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296 \ + --hash=sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58 \ + --hash=sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec \ + --hash=sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99 \ + --hash=sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a \ + --hash=sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb \ + --hash=sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383 \ + --hash=sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d \ + 
--hash=sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896 \ + --hash=sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc \ + --hash=sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6 \ + --hash=sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b \ + --hash=sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7 \ + --hash=sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22 \ + --hash=sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf \ + --hash=sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394 \ + --hash=sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0 \ + --hash=sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57 \ + --hash=sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74 \ + --hash=sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83 \ + --hash=sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29 \ + --hash=sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9 \ + --hash=sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f \ + --hash=sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745 \ + --hash=sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb \ + --hash=sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811 \ + --hash=sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55 \ + --hash=sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342 \ + --hash=sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23 \ + --hash=sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82 \ + --hash=sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041 \ + --hash=sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb \ + --hash=sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066 \ + --hash=sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55 \ + --hash=sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6 \ + --hash=sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a \ + --hash=sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140 \ + --hash=sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b \ + --hash=sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9 \ + --hash=sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256 \ + --hash=sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c \ + --hash=sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772 \ + --hash=sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4 \ + --hash=sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae \ + --hash=sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920 \ + --hash=sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a \ + --hash=sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b \ + --hash=sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361 \ + --hash=sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8 \ + --hash=sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a # via # jsonschema # referencing @@ -1112,67 +1117,80 @@ rsa==4.9 \ 
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -ruamel-yaml==0.17.31 \ - --hash=sha256:098ed1eb6d338a684891a72380277c1e6fc4d4ae0e120de9a447275056dda335 \ - --hash=sha256:3cf153f0047ced526e723097ac615d3009371779432e304dbd5596b6f3a4c777 +ruamel-yaml==0.18.5 \ + --hash=sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e \ + --hash=sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada # via pykwalify -ruamel-yaml-clib==0.2.7 \ - --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ - --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ - --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ - --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ - --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ - --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ - --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ - --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ - --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ - --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ - --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ - --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ - --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ - --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ - --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ - --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ - --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ - --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ - --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ - --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ - --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ - --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ - --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ - --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ - --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ - --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ - --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ - --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ - --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ - --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ - --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ - --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ - --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ - --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ - --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ - 
--hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ - --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 +ruamel-yaml-clib==0.2.8 \ + --hash=sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d \ + --hash=sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001 \ + --hash=sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462 \ + --hash=sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9 \ + --hash=sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe \ + --hash=sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b \ + --hash=sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b \ + --hash=sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615 \ + --hash=sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62 \ + --hash=sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15 \ + --hash=sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b \ + --hash=sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1 \ + --hash=sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9 \ + --hash=sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675 \ + --hash=sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899 \ + --hash=sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7 \ + --hash=sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7 \ + --hash=sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312 \ + --hash=sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa \ + --hash=sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91 \ + --hash=sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b \ + --hash=sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6 \ + --hash=sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3 \ + --hash=sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334 \ + --hash=sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5 \ + --hash=sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3 \ + --hash=sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe \ + --hash=sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c \ + --hash=sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed \ + --hash=sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337 \ + --hash=sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880 \ + --hash=sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f \ + --hash=sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d \ + --hash=sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248 \ + --hash=sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d \ + --hash=sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf \ + --hash=sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512 \ + --hash=sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069 \ + --hash=sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb \ + --hash=sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942 \ + 
--hash=sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d \ + --hash=sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31 \ + --hash=sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92 \ + --hash=sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5 \ + --hash=sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28 \ + --hash=sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d \ + --hash=sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1 \ + --hash=sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2 \ + --hash=sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875 \ + --hash=sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412 # via ruamel-yaml -ruff==0.1.13 \ - --hash=sha256:226b517f42d59a543d6383cfe03cccf0091e3e0ed1b856c6824be03d2a75d3b6 \ - --hash=sha256:2f59bcf5217c661254bd6bc42d65a6fd1a8b80c48763cb5c2293295babd945dd \ - --hash=sha256:5f0312ba1061e9b8c724e9a702d3c8621e3c6e6c2c9bd862550ab2951ac75c16 \ - --hash=sha256:6bbbc3042075871ec17f28864808540a26f0f79a4478c357d3e3d2284e832998 \ - --hash=sha256:7a36fa90eb12208272a858475ec43ac811ac37e91ef868759770b71bdabe27b6 \ - --hash=sha256:9a1600942485c6e66119da294c6294856b5c86fd6df591ce293e4a4cc8e72989 \ - --hash=sha256:9ebb40442f7b531e136d334ef0851412410061e65d61ca8ce90d894a094feb22 \ - --hash=sha256:9fb6b3b86450d4ec6a6732f9f60c4406061b6851c4b29f944f8c9d91c3611c7a \ - --hash=sha256:a623349a505ff768dad6bd57087e2461be8db58305ebd5577bd0e98631f9ae69 \ - --hash=sha256:b13ba5d7156daaf3fd08b6b993360a96060500aca7e307d95ecbc5bb47a69296 \ - --hash=sha256:dcaab50e278ff497ee4d1fe69b29ca0a9a47cd954bb17963628fa417933c6eb1 \ - --hash=sha256:e261f1baed6291f434ffb1d5c6bd8051d1c2a26958072d38dfbec39b3dda7352 \ - --hash=sha256:e3fd36e0d48aeac672aa850045e784673449ce619afc12823ea7868fcc41d8ba \ - --hash=sha256:e6894b00495e00c27b6ba61af1fc666f17de6140345e5ef27dd6e08fb987259d \ - --hash=sha256:ee3febce7863e231a467f90e681d3d89210b900d49ce88723ce052c8761be8c7 \ - --hash=sha256:f57de973de4edef3ad3044d6a50c02ad9fc2dff0d88587f25f1a48e3f72edf5e \ - --hash=sha256:f988746e3c3982bea7f824c8fa318ce7f538c4dfefec99cd09c8770bd33e6539 +ruff==0.1.14 \ + --hash=sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f \ + --hash=sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a \ + --hash=sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67 \ + --hash=sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488 \ + --hash=sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae \ + --hash=sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5 \ + --hash=sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab \ + --hash=sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab \ + --hash=sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa \ + --hash=sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf \ + --hash=sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb \ + --hash=sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea \ + --hash=sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9 \ + --hash=sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3 \ + --hash=sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b \ + 
--hash=sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb \ + --hash=sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99 # via tavern (pyproject.toml) secretstorage==3.3.3 \ --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ @@ -1192,32 +1210,27 @@ sphinx==7.2.6 \ # via # recommonmark # sphinx-rtd-theme - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp # sphinxcontrib-jquery - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml # tavern (pyproject.toml) sphinx-markdown-tables==0.0.17 \ --hash=sha256:2bd0c30779653e4dd120300cbd9ca412c480738cc2241f6dea477a883f299e04 \ --hash=sha256:6bc6d3d400eaccfeebd288446bc08dd83083367c58b85d40fe6c12d77ef592f1 # via tavern (pyproject.toml) -sphinx-rtd-theme==1.3.0 \ - --hash=sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0 \ - --hash=sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931 +sphinx-rtd-theme==2.0.0 \ + --hash=sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b \ + --hash=sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586 # via tavern (pyproject.toml) -sphinxcontrib-applehelp==1.0.7 \ - --hash=sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d \ - --hash=sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa +sphinxcontrib-applehelp==1.0.8 \ + --hash=sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619 \ + --hash=sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4 # via sphinx -sphinxcontrib-devhelp==1.0.5 \ - --hash=sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212 \ - --hash=sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f +sphinxcontrib-devhelp==1.0.6 \ + --hash=sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f \ + --hash=sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 \ - --hash=sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a \ - --hash=sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9 +sphinxcontrib-htmlhelp==2.0.5 \ + --hash=sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015 \ + --hash=sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04 # via sphinx sphinxcontrib-jquery==4.1 \ --hash=sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a \ @@ -1227,13 +1240,13 @@ sphinxcontrib-jsmath==1.0.1 \ --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 # via sphinx -sphinxcontrib-qthelp==1.0.6 \ - --hash=sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d \ - --hash=sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4 +sphinxcontrib-qthelp==1.0.7 \ + --hash=sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6 \ + --hash=sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 \ - --hash=sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54 \ - --hash=sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1 +sphinxcontrib-serializinghtml==1.1.10 \ + --hash=sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7 \ + 
--hash=sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f # via sphinx stevedore==4.1.1 \ --hash=sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a \ @@ -1243,47 +1256,44 @@ tomli-w==1.0.0 \ --hash=sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463 \ --hash=sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9 # via flit -tox==4.6.3 \ - --hash=sha256:2946a0bb38924c3a9f9575c7fb4ca1f4c11a7c69c61592f176778892155cb50c \ - --hash=sha256:9e2c5091a117d03b583c57c4c40aecd068099c17d40520e7b165e85c19334534 +tox==4.12.1 \ + --hash=sha256:61aafbeff1bd8a5af84e54ef6e8402f53c6a6066d0782336171ddfbf5362122e \ + --hash=sha256:c07ea797880a44f3c4f200ad88ad92b446b83079d4ccef89585df64cc574375c # via tavern (pyproject.toml) twine==4.0.2 \ --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via tavern (pyproject.toml) -types-pyyaml==6.0.12.11 \ - --hash=sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b \ - --hash=sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d +types-pyyaml==6.0.12.12 \ + --hash=sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062 \ + --hash=sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24 # via tavern (pyproject.toml) -types-requests==2.31.0.2 \ - --hash=sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a \ - --hash=sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40 +types-requests==2.31.0.20240106 \ + --hash=sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612 \ + --hash=sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354 # via tavern (pyproject.toml) -types-setuptools==68.2.0.0 \ - --hash=sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b \ - --hash=sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85 +types-setuptools==69.0.0.20240115 \ + --hash=sha256:1a9c863899f40cbe2053d0cd1d00ddef0330b492335467d018f73c1fec9462a3 \ + --hash=sha256:7409e774c69e1810cb45052dbaed839fc30302e86a3ff945172ef2a2e7ab46f8 # via tavern (pyproject.toml) -types-urllib3==1.26.25.14 \ - --hash=sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f \ - --hash=sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e - # via types-requests -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.9.0 \ + --hash=sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783 \ + --hash=sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd # via mypy uritemplate==4.1.1 \ --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \ --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e # via google-api-python-client -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.1.0 \ + --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \ + --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54 # via # requests # twine -virtualenv==20.24.5 \ - 
--hash=sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b \ - --hash=sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752 + # types-requests +virtualenv==20.25.0 \ + --hash=sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3 \ + --hash=sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b # via # pre-commit # tox @@ -1291,15 +1301,15 @@ werkzeug==3.0.1 \ --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \ --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10 # via flask -wheel==0.41.2 \ - --hash=sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985 \ - --hash=sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8 +wheel==0.42.0 \ + --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ + --hash=sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8 # via # pip-tools # tavern (pyproject.toml) -zipp==3.16.2 \ - --hash=sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0 \ - --hash=sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # WARNING: The following packages were not pinned, but pip requires them to be From ff23c763cdb0652212a1bc2b2a71841b5562c5e6 Mon Sep 17 00:00:00 2001 From: Michael Boulton Date: Sat, 20 Jan 2024 12:48:31 +0000 Subject: [PATCH 72/72] Add warning --- tavern/_plugins/grpc/request.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tavern/_plugins/grpc/request.py b/tavern/_plugins/grpc/request.py index 6ff671eec..6fe311bae 100644 --- a/tavern/_plugins/grpc/request.py +++ b/tavern/_plugins/grpc/request.py @@ -2,6 +2,7 @@ import functools import json import logging +import warnings from typing import Mapping, Union import grpc @@ -46,9 +47,19 @@ class GRPCRequest(BaseRequest): Similar to RestRequest, publishes a single message. """ + _warned = False + def __init__( self, client: GRPCClient, request_spec: Mapping, test_block_config: TestConfig ): + if not self._warned: + warnings.warn( + "Tavern gRPC support is experimental and will be updated in a future release.", + RuntimeWarning, + stacklevel=0, + ) + GRPCRequest._warned = True + expected = {"host", "service", "body"} check_expected_keys(expected, request_spec)
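
Note on the final hunk above: it gates the experimental-support notice behind a class-level _warned flag shared by all GRPCRequest instances, so the warning is emitted at most once per process. A minimal standalone sketch of the same warn-once pattern follows; the class name, message, and stacklevel choice here are illustrative only and are not part of Tavern's API.

import warnings


class ExperimentalFeature:
    # Class attribute shared by every instance: once flipped, no further warnings.
    _warned = False

    def __init__(self) -> None:
        if not ExperimentalFeature._warned:
            warnings.warn(
                "This feature is experimental and may change in a future release.",
                RuntimeWarning,
                stacklevel=2,  # attribute the warning to the caller rather than __init__
            )
            ExperimentalFeature._warned = True


ExperimentalFeature()  # emits the RuntimeWarning once
ExperimentalFeature()  # silent on subsequent constructions

A test suite that wants to silence such a notice can install an ordinary warnings filter, e.g. warnings.filterwarnings("ignore", category=RuntimeWarning, message=".*experimental"), or an equivalent filterwarnings entry in its pytest configuration.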