
Commit

fixup precommit reformat
jiridanek committed Jan 17, 2025
1 parent 67271a2 commit 004d870
Showing 3 changed files with 58 additions and 42 deletions.
79 changes: 48 additions & 31 deletions tests/workbenches/conftest.py
@@ -118,10 +118,12 @@ class OdhConstants:
# public static final String KNATIVE_SERVING_NAMESPACE = "knative-serving";
# public static final String ISTIO_SYSTEM_NAMESPACE = "istio-system";
#
CONTROLLERS_NAMESPACE: str = getOdhOrRhoai("CONTROLLERS_NAMESPACE", ODH_CONTROLLERS_NAMESPACE,
RHOAI_CONTROLLERS_NAMESPACE)
DASHBOARD_ROUTE_NAME: str = getOdhOrRhoai("DASHBOARD_ROUTE_NAME", ODH_DASHBOARD_ROUTE_NAME,
RHOAI_DASHBOARD_ROUTE_NAME)
CONTROLLERS_NAMESPACE: str = getOdhOrRhoai(
"CONTROLLERS_NAMESPACE", ODH_CONTROLLERS_NAMESPACE, RHOAI_CONTROLLERS_NAMESPACE
)
DASHBOARD_ROUTE_NAME: str = getOdhOrRhoai(
"DASHBOARD_ROUTE_NAME", ODH_DASHBOARD_ROUTE_NAME, RHOAI_DASHBOARD_ROUTE_NAME
)
# public static final String DASHBOARD_CONTROLLER = getOdhOrRhoai("DASHBOARD_CONTROLLER", ODH_DASHBOARD_CONTROLLER, RHOAI_DASHBOARD_CONTROLLER);
# public static final String NOTEBOOKS_NAMESPACE = getOdhOrRhoai("NOTEBOOKS_NAMESPACE", ODH_CONTROLLERS_NAMESPACE, RHOAI_NOTEBOOKS_NAMESPACE);
# public static final String BUNDLE_OPERATOR_NAMESPACE = getOdhOrRhoai("BUNDLE_OPERATOR_NAMESPACE", ODH_BUNDLE_OPERATOR_NAME, RHOAI_OLM_OPERATOR_NAME);
@@ -150,6 +152,7 @@ def waitForPodsReady(client: DynamicClient, namespaceName: str, label_selector:
:param label_selector:
:param expectPodsCount:
"""

# it's a dynamic client with the `resource` parameter already filled in
class ResourceType(kubernetes.dynamic.Resource, kubernetes.dynamic.DynamicClient):
pass
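
The ResourceType helper above exists only for type annotations: it describes a dynamic client that already has a concrete resource (here, Pods) bound to it, which is what the resource object queried in ready() below is. For orientation, such an object is typically obtained roughly as in this sketch, which uses the standard kubernetes dynamic-client API rather than the exact code from this file:

import kubernetes
from kubernetes.dynamic import DynamicClient

# Build a dynamic client from the local kubeconfig (assumed setup, for illustration only).
api_client = kubernetes.config.new_client_from_config()
client = DynamicClient(api_client)

# Bind the client to the core/v1 Pod resource; the result supports get(), create(), delete(), etc.
pod_resource = client.resources.get(api_version="v1", kind="Pod")

# List pods matching a label selector, the same call pattern ready() uses below.
pods = pod_resource.get(namespace="default", label_selector="app=example").items
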
@@ -162,48 +165,58 @@ class ResourceType(kubernetes.dynamic.Resource, kubernetes.dynamic.DynamicClient
def ready() -> bool:
pods = resource.get(namespace=namespaceName, label_selector=label_selector).items
if not pods and expectPodsCount == 0:
logging.debug("All expected Pods {} in Namespace {} are ready", label_selector, namespaceName);
logging.debug("All expected Pods {} in Namespace {} are ready", label_selector, namespaceName)
return True
if not pods:
logging.debug("Pods matching {}/{} are not ready", namespaceName, label_selector);
logging.debug("Pods matching {}/{} are not ready", namespaceName, label_selector)
return False
if len(pods) != expectPodsCount:
logging.debug("Expected Pods {}/{} are not ready", namespaceName, label_selector);
logging.debug("Expected Pods {}/{} are not ready", namespaceName, label_selector)
return False
for pod in pods:
if not Readiness.isPodReady(pod) and not Readiness.isPodSucceeded(pod):
logging.debug("Pod is not ready: {}/{}", namespaceName, pod.getMetadata().getName());
logging.debug("Pod is not ready: {}/{}", namespaceName, pod.getMetadata().getName())
return False
else:
# check all containers in pods are ready
for cs in pod.status.containerStatuses:
if not (cs.ready or cs.state.get('terminated', {}).get('reason', '') == "Completed"):
if not (cs.ready or cs.state.get("terminated", {}).get("reason", "") == "Completed"):
logging.debug(
f"Container {cs.getName()} of Pod {namespaceName}/{pod.getMetadata().getName()} not ready")
f"Container {cs.getName()} of Pod {namespaceName}/{pod.getMetadata().getName()} not ready"
)
return False
logging.info("Pods matching {}/{} are ready", namespaceName, label_selector)
return True

Wait.until(f"readiness of all Pods matching {label_selector} in Namespace {namespaceName}",
TestFrameConstants.GLOBAL_POLL_INTERVAL_MEDIUM, PodUtils.READINESS_TIMEOUT, ready)
Wait.until(
f"readiness of all Pods matching {label_selector} in Namespace {namespaceName}",
TestFrameConstants.GLOBAL_POLL_INTERVAL_MEDIUM,
PodUtils.READINESS_TIMEOUT,
ready,
)


class Wait:
@staticmethod
def until(description: str, pollInterval: float, timeout: float, ready: Callable[[], bool],
onTimeout: Callable | None = None):
def until(
description: str,
pollInterval: float,
timeout: float,
ready: Callable[[], bool],
onTimeout: Callable | None = None,
):
"""or every poll (happening once each {@code pollIntervalMs}) checks if supplier {@code ready} is true.
# If yes, the wait is closed. Otherwise, waits another {@code pollIntervalMs} and tries again.
# Once the wait timeout (specified by {@code timeoutMs} is reached and supplier wasn't true until that time,
# runs the {@code onTimeout} (f.e. print of logs, showing the actual value that was checked inside {@code ready}),
# and finally throws {@link WaitException}.
# @param description information about on what we are waiting
# @param pollIntervalMs poll interval in milliseconds
# @param timeoutMs timeout specified in milliseconds
# @param ready {@link BooleanSupplier} containing code, which should be executed each poll,
# verifying readiness of the particular thing
# @param onTimeout {@link Runnable} executed once timeout is reached and
# before the {@link WaitException} is thrown."""
# If yes, the wait is closed. Otherwise, waits another {@code pollIntervalMs} and tries again.
# Once the wait timeout (specified by {@code timeoutMs} is reached and supplier wasn't true until that time,
# runs the {@code onTimeout} (f.e. print of logs, showing the actual value that was checked inside {@code ready}),
# and finally throws {@link WaitException}.
# @param description information about on what we are waiting
# @param pollIntervalMs poll interval in milliseconds
# @param timeoutMs timeout specified in milliseconds
# @param ready {@link BooleanSupplier} containing code, which should be executed each poll,
# verifying readiness of the particular thing
# @param onTimeout {@link Runnable} executed once timeout is reached and
# before the {@link WaitException} is thrown."""
logging.info("Waiting for: {}", description)
deadline = time.monotonic() + timeout

@@ -226,15 +239,19 @@ def until(description: str, pollInterval: float, timeout: float, ready: Callable

exceptionCount += 1
newExceptionAppearance += 1
if (exceptionCount == exceptionAppearanceCount
and exceptionMessage is not None
and exceptionMessage == previousExceptionMessage):
if (
exceptionCount == exceptionAppearanceCount
and exceptionMessage is not None
and exceptionMessage == previousExceptionMessage
):
logging.info(f"While waiting for: {description} exception occurred: {exceptionMessage}")
# log the stacktrace
stackTraceError = traceback.format_exc()
elif (exceptionMessage is not None
and exceptionMessage != previousExceptionMessage
and newExceptionAppearance == 2):
elif (
exceptionMessage is not None
and exceptionMessage != previousExceptionMessage
and newExceptionAppearance == 2
):
previousExceptionMessage = exceptionMessage

result = False
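
The body of Wait.until is mostly truncated in the hunks above. As a rough mental model, a deadline-based polling loop of the kind its docstring describes could look like the following sketch; the signature and the WaitException name follow the visible fragments, while the rest is assumed rather than taken from the file:

import logging
import time
from typing import Callable


class WaitException(Exception):
    """Raised when the awaited condition never becomes true before the timeout."""


def until(description: str, pollInterval: float, timeout: float,
          ready: Callable[[], bool], onTimeout: Callable | None = None) -> None:
    # Poll `ready` every pollInterval seconds until it returns True or the deadline passes.
    logging.info("Waiting for: %s", description)
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if ready():
                return
        except Exception as e:
            # Remember the failure and keep polling; the real helper also counts
            # repeated exception messages before logging a full stack trace.
            logging.debug("Exception while waiting for %s: %s", description, e)
        time.sleep(pollInterval)
    if onTimeout is not None:
        onTimeout()
    raise WaitException(f"Timeout after {timeout}s waiting for: {description}")
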
20 changes: 10 additions & 10 deletions tests/workbenches/docs.py
@@ -9,30 +9,30 @@ def Desc(value: str) -> str:


def Step(
value: str,
expected: str,
value: str,
expected: str,
) -> tuple[str, str]:
return value, expected


def SuiteDoc(
description: str,
beforeTestSteps: set[Step],
afterTestSteps: set[Step],
description: str,
beforeTestSteps: set[Step],
afterTestSteps: set[Step],
) -> Callable[[T], T]:
return lambda x: x


def Contact(
name: str,
email: str,
name: str,
email: str,
) -> tuple[str, str]:
return name, email


def TestDoc(
description: str,
contact: str,
steps: set[Step],
description: str,
contact: str,
steps: set[Step],
) -> Callable[[T], T]:
return lambda x: x
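
These helpers are lightweight documentation stubs: SuiteDoc and TestDoc return identity decorators, while Desc, Step, and Contact simply echo their arguments back. A hypothetical usage on a test class could look like the sketch below; the import path, names, and values are made up for illustration:

from tests.workbenches.docs import Contact, Desc, Step, SuiteDoc, TestDoc  # assumed import path


@SuiteDoc(
    description=Desc("Workbench spawning"),
    beforeTestSteps={Step(value="Create a test namespace", expected="Namespace exists")},
    afterTestSteps={Step(value="Delete the test namespace", expected="Namespace is removed")},
)
class TestExampleNotebook:
    @TestDoc(
        description=Desc("Create a simple Notebook"),
        contact=Contact(name="Jane Doe", email="jdoe@example.com"),
        steps={Step(value="Apply the Notebook custom resource", expected="Notebook pod becomes ready")},
    )
    def test_create_simple_notebook(self):
        ...  # the decorators are no-ops, so the test body runs unchanged
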
1 change: 0 additions & 1 deletion tests/workbenches/notebook-controller/test_spawning.py
@@ -112,7 +112,6 @@ def testCreateSimpleNotebook(self, function_resource_manager, admin_client, unpr
function_resource_manager.createResourceWithoutWait(unprivileged_client, notebook)

with allure.step("Wait for Notebook pod readiness"):

lblSelector: str = f"app={self.NTB_NAME}"
PodUtils.waitForPodsReady(admin_client, self.NTB_NAMESPACE, lblSelector, 1)
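
The readiness wait used here ultimately relies on per-pod checks such as Readiness.isPodReady and Readiness.isPodSucceeded referenced in conftest.py, whose bodies are not part of this diff. A minimal sketch of equivalent checks, assuming the standard Kubernetes pod status fields exposed by the dynamic client, could be:

def is_pod_succeeded(pod) -> bool:
    # A pod that ran to completion reports phase "Succeeded".
    return pod.status is not None and pod.status.phase == "Succeeded"


def is_pod_ready(pod) -> bool:
    # A running pod is ready when its "Ready" condition has status "True".
    if pod.status is None or pod.status.conditions is None:
        return False
    return any(c.type == "Ready" and c.status == "True" for c in pod.status.conditions)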

