From 7c6a47e3636d6c192657260247b215b96a122384 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Sun, 19 Apr 2020 13:21:26 -0400 Subject: [PATCH 01/55] Refactor compute cluster logic to prepare for automatic updates/publishing --- .gitignore | 5 +- distributed/api/celery_app/__init__.py | 148 -------- distributed/api/endpoints.py | 73 +--- distributed/api/tests/test_celery.py | 15 - distributed/celery_io.sh | 4 - distributed/cs-dask-sim/cs_dask_sim.py | 18 +- distributed/cs_cluster.py | 343 +++--------------- distributed/dockerfiles/Dockerfile | 16 +- distributed/dockerfiles/Dockerfile.celerybase | 5 - distributed/dockerfiles/Dockerfile.flask | 19 +- .../dockerfiles/Dockerfile.outputs_processor | 24 ++ distributed/dockerfiles/Dockerfile.tasks | 51 --- distributed/kubernetes/flower-deployment.yaml | 40 -- distributed/outputs_processor.py | 57 +++ distributed/requirements.txt | 4 +- distributed/tasks_writer.py | 48 --- .../dask/scheduler-deployment.template.yaml | 41 --- .../dask/scheduler-service.template.yaml | 18 - .../dask/worker-deployment.template.yaml | 45 --- .../templates/flask-deployment.template.yaml | 24 +- ...outputs-processor-deployment.template.yaml | 39 ++ .../templates/sc-deployment.template.yaml | 45 --- distributed/templates/secret.template.yaml | 11 + distributed/templates/tasks_template.py | 58 --- distributed/worker_config.dev.yaml | 37 -- 25 files changed, 213 insertions(+), 975 deletions(-) delete mode 100644 distributed/api/celery_app/__init__.py delete mode 100644 distributed/api/tests/test_celery.py delete mode 100755 distributed/celery_io.sh delete mode 100755 distributed/dockerfiles/Dockerfile.celerybase create mode 100755 distributed/dockerfiles/Dockerfile.outputs_processor delete mode 100755 distributed/dockerfiles/Dockerfile.tasks delete mode 100644 distributed/kubernetes/flower-deployment.yaml create mode 100644 distributed/outputs_processor.py delete mode 100644 distributed/tasks_writer.py delete mode 100644 distributed/templates/dask/scheduler-deployment.template.yaml delete mode 100644 distributed/templates/dask/scheduler-service.template.yaml delete mode 100644 distributed/templates/dask/worker-deployment.template.yaml create mode 100755 distributed/templates/outputs-processor-deployment.template.yaml delete mode 100755 distributed/templates/sc-deployment.template.yaml create mode 100644 distributed/templates/secret.template.yaml delete mode 100644 distributed/templates/tasks_template.py delete mode 100644 distributed/worker_config.dev.yaml diff --git a/.gitignore b/.gitignore index 46a54b99..f9d5ba31 100755 --- a/.gitignore +++ b/.gitignore @@ -37,6 +37,7 @@ secret-docker-compose.yml *docker-compose-apps* distributed/kubernetes/apps/* -dev-secret.yaml +secret.yaml -*flask-deployment.yaml \ No newline at end of file +*flask-deployment.yaml +*outputs-processor-deployment.yaml \ No newline at end of file diff --git a/distributed/api/celery_app/__init__.py b/distributed/api/celery_app/__init__.py deleted file mode 100644 index 1c2c7882..00000000 --- a/distributed/api/celery_app/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -import os -import time -import functools -import re -import traceback - -import requests -from celery import Celery -from celery.signals import task_postrun -from celery.result import AsyncResult - -import cs_storage - - -try: - from cs_config import functions -except ImportError as ie: - if os.environ.get("IS_FLASK", "False") == "True": - functions = None - else: - raise ie - - -COMP_URL = os.environ.get("COMP_URL") -COMP_API_TOKEN = 
os.environ.get("COMP_API_TOKEN") - -CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379") -CELERY_RESULT_BACKEND = os.environ.get( - "CELERY_RESULT_BACKEND", "redis://localhost:6379" -) - -OUTPUTS_VERSION = os.environ.get("OUTPUTS_VERSION") - - -def get_task_routes(): - def clean(name): - return re.sub("[^0-9a-zA-Z]+", "", name).lower() - - print(f"getting config from: {COMP_URL}/publish/api/") - resp = requests.get(f"{COMP_URL}/publish/api/") - if resp.status_code != 200: - raise Exception(f"Response status code: {resp.status_code}") - data = resp.json() - task_routes = {} - for project in data: - owner = clean(project["owner"]) - title = clean(project["title"]) - model = f"{owner}_{title}" - - # all apps use celery workers for handling their inputs. - routes = { - f"{model}_tasks.inputs_get": {"queue": f"{model}_inputs_queue"}, - f"{model}_tasks.inputs_parse": {"queue": f"{model}_inputs_queue"}, - f"{model}_tasks.inputs_version": {"queue": f"{model}_inputs_queue"}, - } - - # only add sim routes for models that use celery workers. - if project["cluster_type"] == "single-core": - routes[f"{model}_tasks.sim"] = {"queue": f"{model}_queue"} - - task_routes.update(routes) - return task_routes - - -task_routes = get_task_routes() - - -celery_app = Celery( - "celery_app", broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND -) -celery_app.conf.update( - task_serializer="json", - accept_content=["msgpack", "json"], - task_routes=task_routes, - worker_prefetch_multiplier=1, - task_acks_late=True, -) - - -def task_wrapper(func): - @functools.wraps(func) - def f(*args, **kwargs): - task = args[0] - task_id = task.request.id - start = time.time() - traceback_str = None - res = {} - try: - outputs = func(*args, **kwargs) - if task.name.endswith("sim"): - version = outputs.pop("version", OUTPUTS_VERSION) - if version == "v0": - res["model_version"] = "NA" - res.update(dict(outputs, **{"version": version})) - else: - outputs = cs_storage.write(task_id, outputs) - res.update( - { - "model_version": functions.get_version(), - "outputs": outputs, - "version": version, - } - ) - else: - res.update(outputs) - except Exception: - traceback_str = traceback.format_exc() - finish = time.time() - if "meta" not in res: - res["meta"] = {} - res["meta"]["task_times"] = [finish - start] - if traceback_str is None: - res["status"] = "SUCCESS" - else: - res["status"] = "FAIL" - res["traceback"] = traceback_str - return res - - return f - - -@task_postrun.connect -def post_results(sender=None, headers=None, body=None, **kwargs): - print(f'task_id: {kwargs["task_id"]}') - print(f'task: {kwargs["task"]} {kwargs["task"].name}') - print(f'is sim: {kwargs["task"].name.endswith("sim")}') - print(f'state: {kwargs["state"]}') - kwargs["retval"]["job_id"] = kwargs["task_id"] - if kwargs["task"].name.endswith("sim"): - print(f"posting data to {COMP_URL}/outputs/api/") - resp = requests.put( - f"{COMP_URL}/outputs/api/", - json=kwargs["retval"], - headers={"Authorization": f"Token {COMP_API_TOKEN}"}, - ) - print("resp", resp.status_code) - if resp.status_code == 400: - print("errors", resp.json()) - if kwargs["task"].name.endswith("parse"): - print(f"posting data to {COMP_URL}/inputs/api/") - resp = requests.put( - f"{COMP_URL}/inputs/api/", - json=kwargs["retval"], - headers={"Authorization": f"Token {COMP_API_TOKEN}"}, - ) - print("resp", resp.status_code) - if resp.status_code == 400: - print("errors", resp.json()) diff --git a/distributed/api/endpoints.py b/distributed/api/endpoints.py index 
206fc96a..7c183cbf 100644 --- a/distributed/api/endpoints.py +++ b/distributed/api/endpoints.py @@ -10,16 +10,14 @@ from flask import Blueprint, request, make_response from celery.result import AsyncResult from celery import chord -from distributed import Client, Future, fire_and_forget import redis import requests -from api.celery_app import celery_app -from cs_dask_sim import dask_sim, done_callback +from cs_publish.app import app as celery_app -COMP_URL = os.environ.get("COMP_URL") -COMP_API_TOKEN = os.environ.get("COMP_API_TOKEN") +CS_URL = os.environ.get("CS_URL") +CS_API_TOKEN = os.environ.get("CS_API_TOKEN") bp = Blueprint("endpoints", __name__) @@ -34,8 +32,8 @@ def clean(word): def get_cs_config(): - print(f"getting config from: {COMP_URL}/publish/api/") - resp = requests.get(f"{COMP_URL}/publish/api/") + print(f"getting config from: {CS_URL}/publish/api/") + resp = requests.get(f"{CS_URL}/publish/api/") if resp.status_code != 200: raise Exception(f"Response status code: {resp.status_code}") data = resp.json() @@ -66,11 +64,6 @@ def get_time_out(owner, app_name): return CONFIG[model_id]["time_out"] -def dask_scheduler_address(owner, app_name): - owner, app_name = clean(owner), clean(app_name) - return f"{owner}-{app_name}-dask-scheduler:8786" - - def async_endpoint(owner, app_name, compute_task): print(f"async endpoint {compute_task}") data = request.get_data() @@ -97,37 +90,6 @@ def sync_endpoint(owner, app_name, compute_task): return json.dumps(result) -def dask_endpoint(owner, app_name, action): - """ - Route dask simulation to appropriate dask scheduluer. - """ - print(f"dask endpoint: {owner}/{app_name}/{action}") - data = request.get_data() - inputs = json.loads(data) - print("inputs", inputs) - addr = dask_scheduler_address(owner, app_name) - job_id = str(uuid.uuid4()) - - # Worker needs the job_id to push the results back to the - # webapp. - # The url and api token are passed as args insted of env - # variables so that the wrapper has access to them - # but the model does not. 
- inputs.update( - { - "job_id": job_id, - "comp_url": os.environ.get("COMP_URL"), - "comp_api_token": os.environ.get("COMP_API_TOKEN"), - "timeout": get_time_out(owner, app_name), - } - ) - - with Client(addr) as c: - fut = c.submit(dask_sim, **inputs) - fire_and_forget(fut) - return {"job_id": job_id, "qlength": 1} - - def route_to_task(owner, app_name, endpoint, action): owner, app_name = clean(owner), clean(app_name) print("getting...", owner, app_name, endpoint, action) @@ -169,8 +131,6 @@ def endpoint_sim(owner, app_name): print(f"cluster type is {cluster_type}") if cluster_type == "single-core": return route_to_task(owner, app_name, async_endpoint, action) - elif cluster_type == "dask": - return dask_endpoint(owner, app_name, action) else: return json.dumps({"error": "model does not exist."}), 404 @@ -189,18 +149,6 @@ def results(owner, app_name, job_id): ) else: return make_response("not ready", 202) - elif cluster_type == "dask": - addr = dask_scheduler_address(owner, app_name) - with Client(addr) as client: - fut = Future(job_id, client=client) - if fut.done() and fut.status != "error": - return fut.result() - elif fut.done() and fut.status in ("error", "cancelled"): - return json.dumps( - {"status": "WORKER_FAILURE", "traceback": fut.traceback()} - ) - else: - return make_response("not ready", 202) else: return json.dumps({"error": "model does not exist."}), 404 @@ -218,17 +166,6 @@ def query_results(owner, app_name, job_id): return "FAIL" else: return "NO" - elif cluster_type == "dask": - addr = dask_scheduler_address(owner, app_name) - with Client(addr) as client: - fut = Future(job_id, client=client) - print("dask result", fut.status) - if fut.done() and fut.status != "error": - return "YES" - elif fut.done() and fut.status in ("error", "cancelled"): - return "FAIL" - else: - return "NO" else: return json.dumps({"error": "model does not exist."}), 404 diff --git a/distributed/api/tests/test_celery.py b/distributed/api/tests/test_celery.py deleted file mode 100644 index 42b6f298..00000000 --- a/distributed/api/tests/test_celery.py +++ /dev/null @@ -1,15 +0,0 @@ -# import pytest -# from celery import chord - -# @pytest.fixture(scope='session') -# def celery_config(): -# return { -# 'broker_url': 'redis://redis:6379/0', -# 'result_backend': 'redis://redis:6379/0', -# 'task_serializer': 'json', -# 'accept_content': ['msgpack', 'json']} - - -# def test_project_endpoint(celery_worker): -# # celery tests here. 
-# pass diff --git a/distributed/celery_io.sh b/distributed/celery_io.sh deleted file mode 100755 index 02d308d6..00000000 --- a/distributed/celery_io.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -SAFEOWNER=$(python -c "import re, os; print(re.sub('[^0-9a-zA-Z]+', '', \"$1\").lower())") -SAFETITLE=$(python -c "import re, os; print(re.sub('[^0-9a-zA-Z]+', '', \"$2\").lower())") -celery -A celery_app.${SAFEOWNER}_${SAFETITLE}_tasks worker --loglevel=info --concurrency=1 -Q ${SAFEOWNER}_${SAFETITLE}_inputs_queue -n ${SAFEOWNER}_${SAFETITLE}_inputs@%h \ No newline at end of file diff --git a/distributed/cs-dask-sim/cs_dask_sim.py b/distributed/cs-dask-sim/cs_dask_sim.py index 2f60ff35..223a6ff6 100644 --- a/distributed/cs-dask-sim/cs_dask_sim.py +++ b/distributed/cs-dask-sim/cs_dask_sim.py @@ -13,15 +13,15 @@ functions = None -def done_callback(future, job_id, comp_url, comp_api_token, start_time): +def done_callback(future, job_id, cs_url, cs_api_token, start_time): """ This should be called like: callback = functools.partial( done_callback, job_id=job_id, - comp_url=os.environ.get("COMP_URL"), - comp_api_token=os.environ.get("comp_api_token"), + cs_url=os.environ.get("CS_URL"), + cs_api_token=os.environ.get("CS_API_TOKEN"), start_time=time.time() ) @@ -61,18 +61,18 @@ def done_callback(future, job_id, comp_url, comp_api_token, start_time): res["job_id"] = job_id print("got result", res) - print(f"posting data to {comp_url}/outputs/api/") + print(f"posting data to {cs_url}/outputs/api/") resp = requests.put( - f"{comp_url}/outputs/api/", + f"{cs_url}/outputs/api/", json=res, - headers={"Authorization": f"Token {comp_api_token}"}, + headers={"Authorization": f"Token {cs_api_token}"}, ) print("resp", resp.status_code) if resp.status_code == 400: print("errors", resp.json()) -def dask_sim(meta_param_dict, adjustment, job_id, comp_url, comp_api_token, timeout): +def dask_sim(meta_param_dict, adjustment, job_id, cs_url, cs_api_token, timeout): """ Wraps the functions.run_model function with a dask future and adds a callback for pushing the results back to the webapp. 
The callback is @@ -88,8 +88,8 @@ def dask_sim(meta_param_dict, adjustment, job_id, comp_url, comp_api_token, time partialled_cb = partial( done_callback, job_id=job_id, - comp_url=comp_url, - comp_api_token=comp_api_token, + cs_url=cs_url, + cs_api_token=cs_api_token, start_time=start_time, ) with worker_client() as c: diff --git a/distributed/cs_cluster.py b/distributed/cs_cluster.py index fff26a60..0d1293e7 100644 --- a/distributed/cs_cluster.py +++ b/distributed/cs_cluster.py @@ -48,55 +48,22 @@ class Cluster: """ k8s_target = "kubernetes/" - k8s_app_target = "kubernetes/apps" cr = "gcr.io" - def __init__(self, config, tag, project, models=None): + def __init__(self, tag, project): self.tag = tag self.project = project - self.models = models if models and models[0] else None - - with open(config, "r") as f: - self.config = yaml.safe_load(f.read()) with open("templates/flask-deployment.template.yaml", "r") as f: self.flask_template = yaml.safe_load(f.read()) - with open("templates/sc-deployment.template.yaml", "r") as f: - self.sc_template = yaml.safe_load(f.read()) - - with open("templates/dask/scheduler-deployment.template.yaml", "r") as f: - self.dask_scheduler_template = yaml.safe_load(f.read()) - - with open("templates/dask/scheduler-service.template.yaml", "r") as f: - self.dask_scheduler_service_template = yaml.safe_load(f.read()) + with open("templates/outputs-processor-deployment.template.yaml", "r") as f: + self.outputs_processor_template = yaml.safe_load(f.read()) - with open("templates/dask/worker-deployment.template.yaml", "r") as f: - self.dask_worker_template = yaml.safe_load(f.read()) + with open("templates/secret.template.yaml", "r") as f: + self.secret_template = yaml.safe_load(f.read()) def build(self): - """ - Wrap all methods that build, tag, and push the images as well as - write the k8s config fiels. - """ - self.build_base_images() - self.write_flask_deployment() - self.build_apps() - - def apply(self): - """ - Experimental. Apply k8s config files to existing k8s cluster. - """ - run(f"kubectl apply -f {self.k8s_target}") - run(f"kubectl apply -f {self.k8s_app_target}") - - def dry_run(self): - self.write_flask_deployment() - for app in self.config: - for action in ["io", "sim"]: - self.write_app_deployment(app, action) - - def build_base_images(self): """ Build, tag, and push base images for the flask app and modeling apps. @@ -104,297 +71,85 @@ def build_base_images(self): pull from either distributed:latest or celerybase:latest. 
""" run("docker build -t distributed:latest -f dockerfiles/Dockerfile ./") - run("docker build -t celerybase:latest -f dockerfiles/Dockerfile.celerybase ./") + run( + f"docker build -t outputs_processor:{self.tag} -f dockerfiles/Dockerfile.outputs_processor ./" + ) run(f"docker build -t flask:{self.tag} -f dockerfiles/Dockerfile.flask ./") - for img_name in ["distributed", "celerybase"]: - run(f"docker tag {img_name} {self.cr}/{self.project}/{img_name}:latest") - run(f"docker push {self.cr}/{self.project}/{img_name}:latest") + run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") + run(f"docker push {self.cr}/{self.project}/distributed:latest") + + run( + f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" + ) + run(f"docker push {self.cr}/{self.project}/outputs_processor:{self.tag}") run(f"docker tag flask:{self.tag} {self.cr}/{self.project}/flask:{self.tag}") run(f"docker push {self.cr}/{self.project}/flask:{self.tag}") + def make_config(self): + self.write_flask_deployment() + self.write_outputs_processor_deployment() + self.write_secret() + def write_flask_deployment(self): """ Write flask deployment file. Only step is filling in the image uri. """ - flask_deployment = copy.deepcopy(self.flask_template) - flask_deployment["spec"]["template"]["spec"]["containers"][0][ + deployment = copy.deepcopy(self.flask_template) + deployment["spec"]["template"]["spec"]["containers"][0][ "image" ] = f"gcr.io/{self.project}/flask:{self.tag}" with open(f"{self.k8s_target}/flask-deployment.yaml", "w") as f: - f.write(yaml.dump(flask_deployment)) + f.write(yaml.dump(deployment)) - return flask_deployment + return deployment - def build_apps(self): - """ - Build, tag, and push images and write k8s config files - for all apps in config. Filters out those not in models - list, if applicable. - """ - # ensure clean path. - path = Path(self.k8s_app_target) - path.mkdir(exist_ok=True) - stale_files = path.glob("*yaml") - _ = [sf.unlink() for sf in stale_files] - - for app in self.config: - if self.models and app["title"] not in self.models[0]: - continue - try: - self.build_app_image(app) - except Exception as e: - print( - f"There was an error building: " - f"{app['title']}/{app['owner']}:{self.tag}" - ) - print(e) - continue - - for action in ["io", "sim"]: - self.write_app_deployment(app, action) - - def build_app_image(self, app): + def write_outputs_processor_deployment(self): """ - Build, tag, and pus the image for a single app. + Write outputs processor deployment file. Only step is filling + in the image uri. 
""" - safeowner = clean(app["owner"]) - safetitle = clean(app["title"]) - img_name = f"{safeowner}_{safetitle}_tasks" - - reg_url = "https://github.com" - raw_url = "https://raw.githubusercontent.com" - - buildargs = dict( - OWNER=app["owner"], - TITLE=app["title"], - BRANCH=app["branch"], - SAFEOWNER=safeowner, - SAFETITLE=safetitle, - SIM_TIME_LIMIT=app["sim_time_limit"], - REPO_URL=app["repo_url"], - RAW_REPO_URL=app["repo_url"].replace(reg_url, raw_url), - **app["env"], - ) + deployment = copy.deepcopy(self.outputs_processor_template) + deployment["spec"]["template"]["spec"]["containers"][0][ + "image" + ] = f"gcr.io/{self.project}/outputs_processor:{self.tag}" - buildargs_str = " ".join( - [f"--build-arg {arg}={value}" for arg, value in buildargs.items()] - ) - cmd = ( - f"docker build {buildargs_str} -t {img_name}:{self.tag} " - f"-f dockerfiles/Dockerfile.tasks ./" - ) - run(cmd) + with open(f"{self.k8s_target}/outputs-processor-deployment.yaml", "w") as f: + f.write(yaml.dump(deployment)) - run( - f"docker tag {img_name}:{self.tag} {self.cr}/{self.project}/{img_name}:{self.tag}" - ) - run(f"docker push {self.cr}/{self.project}/{img_name}:{self.tag}") + return deployment - def write_app_deployment(self, app, action): - """ - Write k8s config file for an app. + def write_secret(self): + secrets = copy.deepcopy(self.secret_template) + secrets["stringData"]["CS_API_TOKEN"] = self._get_secret("CS_API_TOKEN") - Note: Dask uses a dot notation for specifying paths - in their config. It could be helpful for us to - do that, too. + with open(f"{self.k8s_target}/secret.yaml", "w") as f: + f.write(yaml.dump(secrets)) - Also, all io (inputs) apps are deployed as a - single-core cluster. - """ - if action == "io": - self.write_sc_app(app, action) - elif app["cluster_type"] == "dask": - self.write_dask_app(app, action) - elif app["cluster_type"] == "single-core": - self.write_sc_app(app, action) - else: - raise RuntimeError(f"Cluster type {app['cluster_type']} unknown.") - - def write_dask_app(self, app, action): - self._write_dask_worker_app(app) - self._write_dask_scheduler_app(app) - self._write_dask_scheduler_service(app) - - def _write_dask_worker_app(self, app): - app_deployment = copy.deepcopy(self.dask_worker_template) - safeowner = clean(app["owner"]) - safetitle = clean(app["title"]) - name = f"{safeowner}-{safetitle}-dask-worker" - image = f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{self.tag}" - - app_deployment["metadata"]["name"] = name - app_deployment["metadata"]["labels"]["app"] = name - app_deployment["spec"]["replicas"] = app.get("replicas", 1) - app_deployment["spec"]["selector"]["matchLabels"]["app"] = name - app_deployment["spec"]["template"]["metadata"]["labels"]["app"] = name - if "affinity" in app: - affinity_exp = {"key": "model", "operator": "In", "values": [app["affinity"]["model"]]} - app_deployment["spec"]["template"]["spec"]["affinity"] = { - "nodeAffinity": { - "requiredDuringSchedulingIgnoredDuringExecution": { - "nodeSelectorTerms": [{"matchExpressions": [affinity_exp]}] - } - } - } - - container_config = app_deployment["spec"]["template"]["spec"]["containers"][0] - - resources, _ = self._resources(app, action="sim") - container_config.update( - { - "name": name, - "image": image, - "args": [ - "dask-worker", - f"{safeowner}-{safetitle}-dask-scheduler:8786", - "--nthreads", - str(resources["limits"]["cpu"]), - "--memory-limit", - str(resources["limits"]["memory"]), - "--no-bokeh", - ], - "resources": resources, - } - ) - 
container_config["env"].append( - { - "name": "DASK_SCHEDULER_ADDRESS", - "value": f"{safeowner}-{safetitle}-dask-scheduler:8786", - } - ) + def _get_secret(self, secret_name): + from google.cloud import secretmanager - self._set_secrets(app, container_config) - - with open(f"{self.k8s_app_target}/{name}-deployment.yaml", "w") as f: - f.write(yaml.dump(app_deployment)) - - return app_deployment - - def _write_dask_scheduler_app(self, app): - app_deployment = copy.deepcopy(self.dask_scheduler_template) - safeowner = clean(app["owner"]) - safetitle = clean(app["title"]) - name = f"{safeowner}-{safetitle}-dask-scheduler" - - app_deployment["metadata"]["name"] = name - app_deployment["metadata"]["labels"]["app"] = name - app_deployment["spec"]["selector"]["matchLabels"]["app"] = name - app_deployment["spec"]["template"]["metadata"]["labels"]["app"] = name - app_deployment["spec"]["template"]["spec"]["containers"][0]["name"] = name - - with open(f"{self.k8s_app_target}/{name}-deployment.yaml", "w") as f: - f.write(yaml.dump(app_deployment)) - - return app_deployment - - def _write_dask_scheduler_service(self, app): - app_service = copy.deepcopy(self.dask_scheduler_service_template) - safeowner = clean(app["owner"]) - safetitle = clean(app["title"]) - name = f"{safeowner}-{safetitle}-dask-scheduler" - - app_service["metadata"]["name"] = name - app_service["metadata"]["labels"]["app"] = name - app_service["spec"]["selector"]["app"] = name - - app_service["spec"]["ports"][0]["name"] = name - app_service["spec"]["ports"][1]["name"] = f"{safeowner}-{safetitle}-dask-webui" - - with open(f"{self.k8s_app_target}/{name}-service.yaml", "w") as f: - f.write(yaml.dump(app_service)) - - return app_service - - def write_sc_app(self, app, action): - app_deployment = copy.deepcopy(self.sc_template) - safeowner = clean(app["owner"]) - safetitle = clean(app["title"]) - name = f"{safeowner}-{safetitle}-{action}" - - resources, affinity_size = self._resources(app, action) - - if not isinstance(affinity_size, list): - affinity_size = [affinity_size] - - app_deployment["metadata"]["name"] = name - app_deployment["spec"]["selector"]["matchLabels"]["app"] = name - app_deployment["spec"]["template"]["metadata"]["labels"]["app"] = name - if "affinity" in app and action == "sim": - affinity_exp = {"key": "size", "operator": "In", "values": affinity_size} - app_deployment["spec"]["template"]["spec"]["affinity"] = { - "nodeAffinity": { - "requiredDuringSchedulingIgnoredDuringExecution": { - "nodeSelectorTerms": [{"matchExpressions": [affinity_exp]}] - } - } - } - - container_config = app_deployment["spec"]["template"]["spec"]["containers"][0] - - container_config.update( - { - "name": name, - "image": f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{self.tag}", - "command": [f"./celery_{action}.sh"], - "args": [ - app["owner"], - app["title"], - ], # TODO: pass safe names to docker file at build and run time - "resources": resources, - } + client = secretmanager.SecretManagerServiceClient() + response = client.access_secret_version( + f"projects/{self.project}/secrets/{secret_name}/versions/latest" ) - container_config["env"].append({"name": "TITLE", "value": app["title"]}) - container_config["env"].append({"name": "OWNER", "value": app["owner"]}) - - self._set_secrets(app, container_config) - - with open(f"{self.k8s_app_target}/{name}-deployment.yaml", "w") as f: - f.write(yaml.dump(app_deployment)) - - return app_deployment - - def _resources(self, app, action): - if action == "io": - resources = { - "requests": 
{"cpu": 0.7, "memory": "0.25G"}, - "limits": {"cpu": 1, "memory": "0.7G"}, - } - affinity_size = ["small", "medium"] - else: - resources = {"requests": {"memory": "1G", "cpu": 1}} - resources = dict(resources, **copy.deepcopy(app["resources"])) - affinity_size = app.get("affinity", {}).get("size", ["small", "medium"]) - return resources, affinity_size - - def _set_secrets(self, app, config): - # TODO: write secrets to secret config files instead of env. - if app.get("secret"): - for var, val in app["secret"].items(): - config["env"].append({"name": var.upper(), "value": val}) + return response.payload.data.decode("utf-8") if __name__ == "__main__": parser = argparse.ArgumentParser(description="Deploy C/S compute cluster.") - parser.add_argument("--config", required=True) parser.add_argument("--tag", required=False, default=TAG) parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--models", nargs="+", type=str, required=False, default=None) parser.add_argument("--build", action="store_true") - parser.add_argument("--dry-run", action="store_true") - parser.add_argument("--build-base-only", action="store_true") + parser.add_argument("--make-config", action="store_true") args = parser.parse_args() - cluster = Cluster( - config=args.config, tag=args.tag, project=args.project, models=args.models - ) + cluster = Cluster(tag=args.tag, project=args.project) if args.build: cluster.build() - elif args.dry_run: - cluster.dry_run() - elif args.build_base_only: - cluster.build_base_images() + if args.make_config: + cluster.make_config() diff --git a/distributed/dockerfiles/Dockerfile b/distributed/dockerfiles/Dockerfile index b165247f..9b62226e 100755 --- a/distributed/dockerfiles/Dockerfile +++ b/distributed/dockerfiles/Dockerfile @@ -3,22 +3,10 @@ FROM continuumio/miniconda3 USER root RUN apt-get update && apt install libgl1-mesa-glx --yes -# install packages for chromium -RUN apt-get update && \ - apt-get install -yq --no-install-recommends \ - libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \ - libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \ - libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \ - libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \ - libnss3 - -RUN mkdir /home/distributed -RUN mkdir /home/distributed/api - RUN conda update conda RUN conda config --append channels conda-forge RUN conda install "python>=3.7" pip -COPY requirements.txt home/distributed +COPY requirements.txt /home -WORKDIR /home/distributed \ No newline at end of file +WORKDIR /home \ No newline at end of file diff --git a/distributed/dockerfiles/Dockerfile.celerybase b/distributed/dockerfiles/Dockerfile.celerybase deleted file mode 100755 index 58d26d1f..00000000 --- a/distributed/dockerfiles/Dockerfile.celerybase +++ /dev/null @@ -1,5 +0,0 @@ -ARG TAG -FROM distributed - -ENV CELERY_BROKER_URL redis://redis-master/0 -ENV CELERY_RESULT_BACKEND redis://redis-master/0 diff --git a/distributed/dockerfiles/Dockerfile.flask b/distributed/dockerfiles/Dockerfile.flask index 4c567fb9..ff88b93b 100755 --- a/distributed/dockerfiles/Dockerfile.flask +++ b/distributed/dockerfiles/Dockerfile.flask @@ -1,10 +1,6 @@ ARG TAG FROM distributed -LABEL build="flask" date="2018-06-13" - -USER root - ENV CELERY_BROKER_URL redis://redis-master/0 ENV CELERY_RESULT_BACKEND redis://redis-master/0 @@ -18,19 +14,14 @@ ENV IS_FLASK True EXPOSE 80 EXPOSE 5050 -RUN conda install -c 
conda-forge dask distributed RUN pip install -r requirements.txt -COPY ./cs-dask-sim /home/distributed/cs-dask-sim -RUN cd /home/distributed/cs-dask-sim && pip install -e . - -COPY ./api /home/distributed/api -COPY ./api/endpoints.py /home/distributed/api -COPY ./api/celery_app/__init__.py /home/distributed/api/celery_app/__init__.py -COPY ./setup.py /home/distributed -RUN cd /home/distributed && pip install -e . +COPY ./api /home/api +COPY ./api/endpoints.py /home/api +COPY ./setup.py /home +RUN cd /home && pip install -e . -WORKDIR /home/distributed/api +WORKDIR /home/api # run the app server CMD ["gunicorn", "--bind", "0.0.0.0:5050", "api:app", "--access-logfile", "-"] diff --git a/distributed/dockerfiles/Dockerfile.outputs_processor b/distributed/dockerfiles/Dockerfile.outputs_processor new file mode 100755 index 00000000..5a0e8e6d --- /dev/null +++ b/distributed/dockerfiles/Dockerfile.outputs_processor @@ -0,0 +1,24 @@ +ARG TAG +FROM distributed + +# install packages for chromium +RUN apt-get update && \ + apt-get install -yq --no-install-recommends \ + libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \ + libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \ + libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \ + libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \ + libnss3 + + +RUN pip install -r requirements.txt +RUN conda install -c conda-forge jinja2 bokeh pyppeteer "pyee<6" && pyppeteer-install + +ENV CELERY_BROKER_URL redis://redis-master/0 +ENV CELERY_RESULT_BACKEND redis://redis-master/0 + +COPY outputs_processor.py /home + +WORKDIR /home + +CMD ["celery", "-A", "outputs_processor", "worker", "--loglevel=info", "--concurrency=1", "-n", "outputs_processor@%h"] \ No newline at end of file diff --git a/distributed/dockerfiles/Dockerfile.tasks b/distributed/dockerfiles/Dockerfile.tasks deleted file mode 100755 index d89fb713..00000000 --- a/distributed/dockerfiles/Dockerfile.tasks +++ /dev/null @@ -1,51 +0,0 @@ -ARG TAG -FROM celerybase - -# install packages here -# install packages necessary for celery and creating screenshots -RUN pip install -r requirements.txt -RUN conda install -c conda-forge lz4 -RUN conda install -c conda-forge jinja2 pyppeteer && pyppeteer-install - -ARG TITLE -ARG OWNER -ARG REPO_URL -ARG RAW_REPO_URL -ARG BRANCH=master - -# Install necessary packages, copying files, etc. -###################### -# Bump to trigger build -ARG BUILD_NUM=0 - -ADD ${RAW_REPO_URL}/${BRANCH}/cs-config/install.sh /home -RUN cat /home/install.sh -RUN bash /home/install.sh - -# Bump to trigger re-install of source, without re-installing dependencies. 
-ARG INSTALL_NUM=0 -RUN pip install "git+${REPO_URL}.git@${BRANCH}#egg=cs-config&subdirectory=cs-config" -ADD ${RAW_REPO_URL}/${BRANCH}/cs-config/cs_config/tests/test_functions.py /home -RUN pip install cs-kit -RUN py.test /home/test_functions.py -v -s -###################### - -ARG SIM_TIME_LIMIT -COPY templates/tasks_template.py tasks_template.py -COPY tasks_writer.py tasks_writer.py -RUN mkdir /home/distributed/api/celery_app -RUN python tasks_writer.py --owner ${OWNER} --title ${TITLE} --sim-time-limit ${SIM_TIME_LIMIT} --out /home/distributed/api/celery_app - -# copy over necessary files for this project's celery app -COPY ./api/__init__.py /home/distributed/api/__init__.py -COPY ./api/celery_app/__init__.py /home/distributed/api/celery_app/__init__.py -COPY ./setup.py /home/distributed -RUN cd /home/distributed && pip install -e . - -WORKDIR /home/distributed/api - -COPY celery_sim.sh /home/distributed/api/celery_sim.sh -COPY celery_io.sh /home/distributed/api/celery_io.sh - -COPY ./cs-dask-sim /home/distributed/cs-dask-sim -RUN cd /home/distributed/cs-dask-sim && pip install -e . diff --git a/distributed/kubernetes/flower-deployment.yaml b/distributed/kubernetes/flower-deployment.yaml deleted file mode 100644 index 9ea42beb..00000000 --- a/distributed/kubernetes/flower-deployment.yaml +++ /dev/null @@ -1,40 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: flower-monitor -spec: - replicas: 1 - selector: - matchLabels: - app: flower-monitor - template: - metadata: - labels: - app: flower-monitor - spec: - containers: - - name: flower-monitor - image: mher/flower - imagePullPolicy: Always - command: ["flower"] - args: ["--broker=redis://redis-master/0", "--port=8888"] - env: - - name: GET_HOSTS_FROM - value: dns - - name: FLOWER_PORT - value: "8888" - ports: - - containerPort: 8888 - resources: - requests: - memory: "95Mi" - cpu: "75m" - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: size - operator: In - values: - - small diff --git a/distributed/outputs_processor.py b/distributed/outputs_processor.py new file mode 100644 index 00000000..d0c64884 --- /dev/null +++ b/distributed/outputs_processor.py @@ -0,0 +1,57 @@ +import os + +import requests +from celery import Celery + +import cs_storage + + +CS_URL = os.environ.get("CS_URL") +CS_API_TOKEN = os.environ.get("CS_API_TOKEN") + +CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379") +CELERY_RESULT_BACKEND = os.environ.get( + "CELERY_RESULT_BACKEND", "redis://localhost:6379" +) + +app = Celery( + "outputs_processor", broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND +) +app.conf.update( + task_serializer="json", + accept_content=["json"], + worker_prefetch_multiplier=1, + task_acks_late=True, +) + + +@app.task(name="outputs_processor.write_to_storage") +def write(task_id, outputs): + outputs = cs_storage.deserialize_from_json(outputs) + res = cs_storage.write(task_id, outputs) + print(res) + return res + + +@app.task(name="outputs_processor.push_to_cs") +def push(task_type, payload): + if task_type == "sim": + print(f"posting data to {CS_URL}/outputs/api/") + resp = requests.put( + f"{CS_URL}/outputs/api/", + json=payload, + headers={"Authorization": f"Token {CS_API_TOKEN}"}, + ) + print("resp", resp.status_code) + if resp.status_code == 400: + print("errors", resp.json()) + if task_type == "parse": + print(f"posting data to {CS_URL}/inputs/api/") + resp = requests.put( + f"{CS_URL}/inputs/api/", + 
json=payload, + headers={"Authorization": f"Token {CS_API_TOKEN}"}, + ) + print("resp", resp.status_code) + if resp.status_code == 400: + print("errors", resp.json()) diff --git a/distributed/requirements.txt b/distributed/requirements.txt index dc486c9d..aa722c64 100755 --- a/distributed/requirements.txt +++ b/distributed/requirements.txt @@ -7,4 +7,6 @@ flask toolz gunicorn boto3 -cs-storage>=1.8.1 \ No newline at end of file +pyyaml +cs-storage>=1.10.1 +git+https://github.com/compute-tooling/compute-studio-publish.git \ No newline at end of file diff --git a/distributed/tasks_writer.py b/distributed/tasks_writer.py deleted file mode 100644 index 914a8bcc..00000000 --- a/distributed/tasks_writer.py +++ /dev/null @@ -1,48 +0,0 @@ -import argparse -import yaml -import re -import os - -from jinja2 import Template - - -def clean(word): - return re.sub("[^0-9a-zA-Z]+", "", word).lower() - - -def template(owner, title, sim_time_limit, out): - owner = clean(owner) - title = clean(title) - print(owner, title) - with open("tasks_template.py") as f: - text = f.read() - - t = Template(text) - - r = t.render(APP_NAME=f"{owner}_{title}_tasks", SIM_TIME_LIMIT=sim_time_limit) - - with open(os.path.join(out, f"{owner}_{title}_tasks.py"), "w") as f: - f.write(r) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Write tasks modules from template.") - parser.add_argument("--owner") - parser.add_argument("--title") - parser.add_argument("--sim-time-limit", dest="sim_time_limit", type=int) - parser.add_argument("--config") - parser.add_argument("--out", "-o", default="api/celery_app") - parser.add_argument("--models", nargs="+", type=str, required=False, default=None) - args = parser.parse_args() - models = args.models if args.models and args.models[0] else None - if args.config: - with open(args.config) as f: - config = yaml.safe_load(f.read()) - for obj in config: - if models and obj["title"] not in models: - continue - template(obj["owner"], obj["title"], obj["sim_time_limit"], args.out) - elif args.owner and args.title and args.sim_time_limit: - template(args.owner, args.title, args.sim_time_limit, args.out) - else: - print("No arguments received.") diff --git a/distributed/templates/dask/scheduler-deployment.template.yaml b/distributed/templates/dask/scheduler-deployment.template.yaml deleted file mode 100644 index 91903ac9..00000000 --- a/distributed/templates/dask/scheduler-deployment.template.yaml +++ /dev/null @@ -1,41 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: # [owner]-[title]-dask-scheduler - labels: - app: # [owner]-[title]-dask-scheduler - component: scheduler -spec: - replicas: 1 - selector: - matchLabels: - app: # [owner]-[title]-dask-scheduler - component: scheduler - strategy: - type: RollingUpdate - template: - metadata: - labels: - app: # [owner]-[title]-dask-scheduler - component: scheduler - spec: - containers: - - name: # [owner]-[title]-dask-scheduler - image: daskdev/dask:latest - imagePullPolicy: IfNotPresent - args: - - dask-scheduler - - --port - - "8786" - - --bokeh-port - - "8787" - ports: - - containerPort: 8786 - - containerPort: 8787 - resources: - requests: - cpu: 0.5 - memory: 2G - limits: - cpu: 1.8 - memory: 6G diff --git a/distributed/templates/dask/scheduler-service.template.yaml b/distributed/templates/dask/scheduler-service.template.yaml deleted file mode 100644 index ca6e92a5..00000000 --- a/distributed/templates/dask/scheduler-service.template.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: Service 
-metadata: - name: # [owner]-[title]-dask-scheduler - labels: - app: # [owner]-[title]-dask-scheduler - component: scheduler -spec: - ports: - - name: # [owner]-[title]-dask-scheduler - port: 8786 - targetPort: 8786 - - name: # [owner]-[title]-dask-webui - port: 8787 - targetPort: 8787 - selector: - app: # [owner]-[title]-dask-scheduler - component: scheduler diff --git a/distributed/templates/dask/worker-deployment.template.yaml b/distributed/templates/dask/worker-deployment.template.yaml deleted file mode 100644 index 91349d38..00000000 --- a/distributed/templates/dask/worker-deployment.template.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: # [owner]-[title]-dask-worker - labels: - app: # [owner]-[title]-dask-worker - component: worker -spec: - replicas: 1 - selector: - matchLabels: - app: # [owner]-[title]-dask-worker - component: worker - strategy: - type: RollingUpdate - template: - metadata: - labels: - app: # [owner]-[title]-worker - component: worker - spec: - containers: - - name: # [owner]-[tutle]-dask-worker - image: # gcr.io/[project]/[owner]_[title]_tasks:[tag] - imagePullPolicy: Always - args: - - dask-worker - - # [owner]-[title]-dask-scheduler:8786 - - --nthreads - - # cpus - - --memory-limit - - # memory limit - - --no-bokeh - ports: - - containerPort: 8789 - resources: - env: - - name: DASK_DISTRIBUTED__DAEMON - value: "false" - - name: BUCKET - valueFrom: - secretKeyRef: - name: worker-secret - key: BUCKET - # affinity: diff --git a/distributed/templates/flask-deployment.template.yaml b/distributed/templates/flask-deployment.template.yaml index de0be4ef..7e113ff1 100755 --- a/distributed/templates/flask-deployment.template.yaml +++ b/distributed/templates/flask-deployment.template.yaml @@ -22,27 +22,15 @@ spec: ports: - containerPort: 5050 env: - - name: COMP_URL + - name: CS_URL valueFrom: secretKeyRef: name: worker-secret - key: COMP_URL - - name: COMP_API_TOKEN + key: CS_URL + - name: CS_API_TOKEN valueFrom: secretKeyRef: name: worker-secret - key: COMP_API_TOKEN - - name: BUCKET - valueFrom: - secretKeyRef: - name: worker-secret - key: BUCKET - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: size - operator: In - values: - - small + key: CS_API_TOKEN + nodeSelector: + component: api \ No newline at end of file diff --git a/distributed/templates/outputs-processor-deployment.template.yaml b/distributed/templates/outputs-processor-deployment.template.yaml new file mode 100755 index 00000000..baa6de6c --- /dev/null +++ b/distributed/templates/outputs-processor-deployment.template.yaml @@ -0,0 +1,39 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: outputs-processor +spec: + replicas: 1 + selector: + matchLabels: + app: outputs-processor + template: + metadata: + labels: + app: outputs-processor + spec: + containers: + - name: outputs-processor + image: + imagePullPolicy: Always + env: + - name: GET_HOSTS_FROM + value: dns + env: + - name: BUCKET + valueFrom: + secretKeyRef: + name: worker-secret + key: BUCKET + - name: CS_URL + valueFrom: + secretKeyRef: + name: worker-secret + key: CS_URL + - name: CS_API_TOKEN + valueFrom: + secretKeyRef: + name: worker-secret + key: CS_API_TOKEN + nodeSelector: + component: api \ No newline at end of file diff --git a/distributed/templates/sc-deployment.template.yaml b/distributed/templates/sc-deployment.template.yaml deleted file mode 100755 index 3ce428a4..00000000 --- 
a/distributed/templates/sc-deployment.template.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: # [owner]-[title]-[action] -spec: - replicas: 1 - selector: - matchLabels: - app: # [owner]-[title]-[action] - template: - metadata: - labels: - app: # [owner]-[title]-[action] - spec: - containers: - - name: # [owner]-[title]-[action] - image: # gcr.io/[project]/[owner]_[title]_tasks:[tag] - imagePullPolicy: Always - command: [] # ["./celery_[action].sh"] - args: [] # ["[owner]", "[title"] - resources: - requests: - memory: - cpu: - limits: - memory: - cpu: - env: - - name: OUTPUTS_VERSION - value: v1 - - name: COMP_URL - valueFrom: - secretKeyRef: - name: worker-secret - key: COMP_URL - - name: COMP_API_TOKEN - valueFrom: - secretKeyRef: - name: worker-secret - key: COMP_API_TOKEN - - name: BUCKET - valueFrom: - secretKeyRef: - name: worker-secret - key: BUCKET diff --git a/distributed/templates/secret.template.yaml b/distributed/templates/secret.template.yaml new file mode 100644 index 00000000..84d058f4 --- /dev/null +++ b/distributed/templates/secret.template.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Secret +metadata: + name: worker-secret +type: Opaque +stringData: + CS_URL: https://dev.compute.studio + BUCKET: cs-outputs-dev + CS_API_TOKEN: + + OUTPUTS_VERSION: "v1" diff --git a/distributed/templates/tasks_template.py b/distributed/templates/tasks_template.py deleted file mode 100644 index 66cc7fa1..00000000 --- a/distributed/templates/tasks_template.py +++ /dev/null @@ -1,58 +0,0 @@ -import time -import os - -from api.celery_app import celery_app, task_wrapper - -try: - from cs_config import functions -except ImportError as ie: - if os.environ.get("IS_FLASK", "False") == "True": - functions = None - else: - raise ie - - -@celery_app.task( - name="{{APP_NAME}}.inputs_version", soft_time_limit=10, bind=True, acks_late=True -) -@task_wrapper -def inputs_version(self): - return {"version": functions.get_version()} - - -@celery_app.task( - name="{{APP_NAME}}.inputs_get", soft_time_limit=10, bind=True, acks_late=True -) -@task_wrapper -def inputs_get(self, meta_param_dict): - return functions.get_inputs(meta_param_dict) - - -@celery_app.task( - name="{{APP_NAME}}.inputs_parse", soft_time_limit=10, bind=True, acks_late=True -) -@task_wrapper -def inputs_parse(self, meta_param_dict, adjustment, errors_warnings): - return functions.validate_inputs(meta_param_dict, adjustment, errors_warnings) - - -@celery_app.task( - name="{{APP_NAME}}.sim", - soft_time_limit={{SIM_TIME_LIMIT}}, - bind=True, - acks_late=True, -) -@task_wrapper -def sim(self, meta_param_dict, adjustment): - if os.environ.get("DASK_SCHEDULER_ADDRESS") is not None: - from distributed import Client - from dask import delayed - - print("submitting data") - with Client() as c: - print("c", c) - fut = c.submit(functions.run_model, meta_param_dict, adjustment) - print("waiting on result", fut) - return fut.result() - - return functions.run_model(meta_param_dict, adjustment) diff --git a/distributed/worker_config.dev.yaml b/distributed/worker_config.dev.yaml deleted file mode 100644 index ee0d5f97..00000000 --- a/distributed/worker_config.dev.yaml +++ /dev/null @@ -1,37 +0,0 @@ -- owner: hdoupe - title: Matchups - branch: master - repo_url: https://github.com/hdoupe/Matchups - cluster_type: single-core - env: - BUILD_NUM: 0 - affinity: - size: small - secrets: - some_secret: hello world - resources: - limits: - cpu: 1000m - memory: 4000Mi - requests: - cpu: 500m - memory: 300Mi - 
sim_time_limit: 60 -- owner: PSLmodels - title: OG-USA - sim_time_limit: 10000 - repo_url: https://github.com/PSLmodels/OG-USA - branch: master - cluster_type: dask - replicas: 2 - resources: - requests: - memory: 7G - cpu: 2 - limits: - memory: 7G - cpu: 2 - env: - {} - # BUILD_NUM: 2 - # INSTALL_NUM: 0 From bd232ffe569525baa2409985080084f4e421c69d Mon Sep 17 00:00:00 2001 From: hdoupe Date: Wed, 6 May 2020 10:51:57 -0400 Subject: [PATCH 02/55] Fix how model resources are specified and expose config for git tag --- src/Publish/index.tsx | 51 +++++++++++++++++-- webapp/apps/billing/tests/test_models.py | 1 - .../apps/comp/tests/test_model_parameters.py | 1 - webapp/apps/conftest.py | 1 - webapp/apps/publish/serializers.py | 8 ++- webapp/apps/publish/tests/test_views.py | 12 +++-- .../migrations/0011_auto_20200506_0928.py | 26 ++++++++++ .../users/migrations/0012_project_repo_tag.py | 16 ++++++ webapp/apps/users/models.py | 7 +-- 9 files changed, 108 insertions(+), 15 deletions(-) create mode 100755 webapp/apps/users/migrations/0011_auto_20200506_0928.py create mode 100755 webapp/apps/users/migrations/0012_project_repo_tag.py diff --git a/src/Publish/index.tsx b/src/Publish/index.tsx index e1ab5710..b9f91e3d 100755 --- a/src/Publish/index.tsx +++ b/src/Publish/index.tsx @@ -48,7 +48,9 @@ interface PublishValues { description: string; oneliner: string; repo_url: string; - server_size: [number, number]; + repo_tag: string; + cpu: number; + memory: number; exp_task_time: number; listed: boolean; } @@ -58,7 +60,9 @@ const initialValues: PublishValues = { description: "", oneliner: "", repo_url: "", - server_size: [4, 2], + repo_tag: "master", + cpu: 2, + memory: 6, exp_task_time: 0, listed: true }; @@ -214,6 +218,21 @@ class PublishForm extends React.Component { /> } />
[JSX hunk body unrecoverable: the added markup provides a repo_tag input and, under a "Resource Requirements" heading, CPU and memory inputs, while removing the old server_size field.]
{specialRequests} diff --git a/webapp/apps/billing/tests/test_models.py b/webapp/apps/billing/tests/test_models.py index 4bc825f9..39805865 100644 --- a/webapp/apps/billing/tests/test_models.py +++ b/webapp/apps/billing/tests/test_models.py @@ -162,7 +162,6 @@ def test_customer_sync_subscriptions(self, db, client): "oneliner": "one liner", "description": "**Super** new!", "repo_url": "https://github.com/compute-tooling/compute-studio", - "server_size": [4, 8], } resp = client.post("/publish/api/", post_data) assert resp.status_code == 200 diff --git a/webapp/apps/comp/tests/test_model_parameters.py b/webapp/apps/comp/tests/test_model_parameters.py index 1324b419..4e5351ec 100755 --- a/webapp/apps/comp/tests/test_model_parameters.py +++ b/webapp/apps/comp/tests/test_model_parameters.py @@ -66,7 +66,6 @@ def mock_project(db, worker_url): description="", oneliner="oneliner", repo_url="https://repo.com/test", - server_size=["8,2"], exp_task_time=10, server_cost=0.1, listed=True, diff --git a/webapp/apps/conftest.py b/webapp/apps/conftest.py index b86e31e0..e27bf9dc 100755 --- a/webapp/apps/conftest.py +++ b/webapp/apps/conftest.py @@ -66,7 +66,6 @@ def django_db_setup(django_db_setup, django_db_blocker): "description": "[Matchups](https://github.com/hdoupe/Matchups) provides pitch data on pitcher and batter matchups.. Select a date range using the format YYYY-MM-DD. Keep in mind that Matchups only provides data on matchups going back to 2008. Two datasets are offered to run this model: one that only has the most recent season, 2018, and one that contains data on every single pitch going back to 2008. Next, select your favorite pitcher and some batters who he's faced in the past. Click submit to start analyzing the selected matchups!", "oneliner": "oneliner", "repo_url": "https://github.com/hdoupe/Matchups", - "server_size": ["8,2"], "exp_task_time": 10, "owner": modeler.profile, "server_cost": 0.1, diff --git a/webapp/apps/publish/serializers.py b/webapp/apps/publish/serializers.py index 560ca758..9e9d64d2 100755 --- a/webapp/apps/publish/serializers.py +++ b/webapp/apps/publish/serializers.py @@ -30,9 +30,11 @@ class Meta: "oneliner", "description", "repo_url", - "server_size", + "repo_tag", "exp_task_time", "server_cost", + "cpu", + "memory", "listed", "owner", "cluster_type", @@ -71,9 +73,11 @@ class Meta: "oneliner", "description", "repo_url", - "server_size", + "repo_tag", "exp_task_time", "server_cost", + "cpu", + "memory", "listed", "owner", "cluster_type", diff --git a/webapp/apps/publish/tests/test_views.py b/webapp/apps/publish/tests/test_views.py index 0ea711b5..814fb25c 100755 --- a/webapp/apps/publish/tests/test_views.py +++ b/webapp/apps/publish/tests/test_views.py @@ -24,7 +24,9 @@ def test_post(self, client): "oneliner": "oneliner", "description": "**Super** new!", "repo_url": "https://github.com/compute-tooling/compute-studio", - "server_size": [4, 8], + "repo_tag": "dev", + "cpu": 3, + "memory": 9, "listed": True, } resp = client.post("/publish/api/", post_data) @@ -46,7 +48,9 @@ def test_get_detail_api(self, api_client, client, test_models): "oneliner": "oneliner", "description": "desc", "repo_url": "https://github.com/compute-tooling/compute-studio", - "server_size": ["4", "2"], + "repo_tag": "master", + "cpu": 2, + "memory": 6, "exp_task_time": 20, "server_cost": Decimal("0.1"), "listed": True, @@ -77,7 +81,9 @@ def test_put_detail_api(self, client, test_models, profile, password): "oneliner": "oneliner", "description": "hello world!", "repo_url": 
"https://github.com/compute-tooling/compute-studio", - "server_size": [2, 4], + "repo_tag": "dev", + "cpu": 2, + "memory": 6, } # not logged in --> not authorized resp = client.put( diff --git a/webapp/apps/users/migrations/0011_auto_20200506_0928.py b/webapp/apps/users/migrations/0011_auto_20200506_0928.py new file mode 100755 index 00000000..188d59f1 --- /dev/null +++ b/webapp/apps/users/migrations/0011_auto_20200506_0928.py @@ -0,0 +1,26 @@ +# Generated by Django 3.0.3 on 2020-05-06 14:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("users", "0010_auto_20200319_0854")] + + operations = [ + migrations.RemoveField(model_name="project", name="server_size"), + migrations.AddField( + model_name="project", + name="cpu", + field=models.DecimalField( + decimal_places=1, default=2, max_digits=5, null=True + ), + ), + migrations.AddField( + model_name="project", + name="memory", + field=models.DecimalField( + decimal_places=1, default=6, max_digits=5, null=True + ), + ), + ] diff --git a/webapp/apps/users/migrations/0012_project_repo_tag.py b/webapp/apps/users/migrations/0012_project_repo_tag.py new file mode 100755 index 00000000..9a5e4766 --- /dev/null +++ b/webapp/apps/users/migrations/0012_project_repo_tag.py @@ -0,0 +1,16 @@ +# Generated by Django 3.0.3 on 2020-05-06 14:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("users", "0011_auto_20200506_0928")] + + operations = [ + migrations.AddField( + model_name="project", + name="repo_tag", + field=models.CharField(default="master", max_length=32), + ) + ] diff --git a/webapp/apps/users/models.py b/webapp/apps/users/models.py index 5f2cf360..9f9957ec 100755 --- a/webapp/apps/users/models.py +++ b/webapp/apps/users/models.py @@ -122,6 +122,7 @@ class Project(models.Model): oneliner = models.CharField(max_length=10000) description = models.CharField(max_length=10000) repo_url = models.URLField() + repo_tag = models.CharField(default="master", max_length=32) owner = models.ForeignKey( Profile, null=True, related_name="projects", on_delete=models.CASCADE ) @@ -148,9 +149,9 @@ class Project(models.Model): def callabledefault(): return [4, 2] - server_size = ArrayField( - models.CharField(max_length=5), default=callabledefault, size=2 - ) + cpu = models.DecimalField(max_digits=5, decimal_places=1, null=True, default=2) + memory = models.DecimalField(max_digits=5, decimal_places=1, null=True, default=6) + exp_task_time = models.IntegerField(null=True) exp_num_tasks = models.IntegerField(null=True) From b61c745992124dd369e353d927260d47db02bdca Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 7 May 2020 14:46:50 -0400 Subject: [PATCH 03/55] Set redis config in one place --- distributed/api/endpoints.py | 10 +++++----- distributed/dockerfiles/Dockerfile.flask | 3 --- distributed/requirements.txt | 2 +- distributed/templates/flask-deployment.template.yaml | 6 ++++++ distributed/templates/secret.template.yaml | 1 + 5 files changed, 13 insertions(+), 9 deletions(-) diff --git a/distributed/api/endpoints.py b/distributed/api/endpoints.py index 7c183cbf..601624c5 100644 --- a/distributed/api/endpoints.py +++ b/distributed/api/endpoints.py @@ -13,7 +13,10 @@ import redis import requests -from cs_publish.app import app as celery_app +from cs_publish.executors.celery import get_app + + +celery_app = get_app() CS_URL = os.environ.get("CS_URL") @@ -21,10 +24,7 @@ bp = Blueprint("endpoints", __name__) -queue_name = "celery" -client = 
redis.Redis.from_url( - os.environ.get("CELERY_BROKER_URL", "redis://redis-master/0") -) +client = redis.Redis.from_url(os.environ.get("REDIS", "redis://redis-master/0")) def clean(word): diff --git a/distributed/dockerfiles/Dockerfile.flask b/distributed/dockerfiles/Dockerfile.flask index ff88b93b..6c2364b3 100755 --- a/distributed/dockerfiles/Dockerfile.flask +++ b/distributed/dockerfiles/Dockerfile.flask @@ -1,9 +1,6 @@ ARG TAG FROM distributed -ENV CELERY_BROKER_URL redis://redis-master/0 -ENV CELERY_RESULT_BACKEND redis://redis-master/0 - ENV HOST 0.0.0.0 ENV PORT 5050 ENV DEBUG true diff --git a/distributed/requirements.txt b/distributed/requirements.txt index aa722c64..adcf3c84 100755 --- a/distributed/requirements.txt +++ b/distributed/requirements.txt @@ -9,4 +9,4 @@ gunicorn boto3 pyyaml cs-storage>=1.10.1 -git+https://github.com/compute-tooling/compute-studio-publish.git \ No newline at end of file +git+https://github.com/compute-tooling/compute-studio-publish.git@855ec5bdcd66e2755408235b5d6ce4934317b9b5 \ No newline at end of file diff --git a/distributed/templates/flask-deployment.template.yaml b/distributed/templates/flask-deployment.template.yaml index 7e113ff1..9c037dee 100755 --- a/distributed/templates/flask-deployment.template.yaml +++ b/distributed/templates/flask-deployment.template.yaml @@ -32,5 +32,11 @@ spec: secretKeyRef: name: worker-secret key: CS_API_TOKEN + - name: REDIS + valueFrom: + secretKeyRef: + name: worker-secret + key: REDIS + nodeSelector: component: api \ No newline at end of file diff --git a/distributed/templates/secret.template.yaml b/distributed/templates/secret.template.yaml index 84d058f4..2d5a27f0 100644 --- a/distributed/templates/secret.template.yaml +++ b/distributed/templates/secret.template.yaml @@ -9,3 +9,4 @@ stringData: CS_API_TOKEN: OUTPUTS_VERSION: "v1" + REDIS: redis://redis-master/0 From d586c0edae73e7d3686537f6fc4cef1976cda9fe Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 7 May 2020 14:48:13 -0400 Subject: [PATCH 04/55] Convert outputs processor from celery app to tornado api --- .../dockerfiles/Dockerfile.outputs_processor | 8 +- distributed/outputs_processor.py | 91 ++++++++++--------- ...outputs-processor-deployment.template.yaml | 7 +- 3 files changed, 54 insertions(+), 52 deletions(-) diff --git a/distributed/dockerfiles/Dockerfile.outputs_processor b/distributed/dockerfiles/Dockerfile.outputs_processor index 5a0e8e6d..9564eb9e 100755 --- a/distributed/dockerfiles/Dockerfile.outputs_processor +++ b/distributed/dockerfiles/Dockerfile.outputs_processor @@ -12,13 +12,11 @@ RUN apt-get update && \ RUN pip install -r requirements.txt -RUN conda install -c conda-forge jinja2 bokeh pyppeteer "pyee<6" && pyppeteer-install - -ENV CELERY_BROKER_URL redis://redis-master/0 -ENV CELERY_RESULT_BACKEND redis://redis-master/0 +RUN pip install httpx pyppeteer2 +RUN conda install -c conda-forge jinja2 bokeh tornado dask && pyppeteer-install COPY outputs_processor.py /home WORKDIR /home -CMD ["celery", "-A", "outputs_processor", "worker", "--loglevel=info", "--concurrency=1", "-n", "outputs_processor@%h"] \ No newline at end of file +CMD ["python", "outputs_processor.py"] \ No newline at end of file diff --git a/distributed/outputs_processor.py b/distributed/outputs_processor.py index d0c64884..0940f5e5 100644 --- a/distributed/outputs_processor.py +++ b/distributed/outputs_processor.py @@ -1,7 +1,10 @@ +import json import os -import requests -from celery import Celery +import httpx +import tornado.ioloop +import tornado.web +from 
dask.distributed import Client import cs_storage @@ -9,49 +12,51 @@ CS_URL = os.environ.get("CS_URL") CS_API_TOKEN = os.environ.get("CS_API_TOKEN") -CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379") -CELERY_RESULT_BACKEND = os.environ.get( - "CELERY_RESULT_BACKEND", "redis://localhost:6379" -) -app = Celery( - "outputs_processor", broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND -) -app.conf.update( - task_serializer="json", - accept_content=["json"], - worker_prefetch_multiplier=1, - task_acks_late=True, -) - - -@app.task(name="outputs_processor.write_to_storage") -def write(task_id, outputs): +async def write(task_id, outputs): + client = await Client(asynchronous=True, processes=False) outputs = cs_storage.deserialize_from_json(outputs) - res = cs_storage.write(task_id, outputs) - print(res) + res = await client.submit(cs_storage.write, task_id, outputs) return res -@app.task(name="outputs_processor.push_to_cs") -def push(task_type, payload): - if task_type == "sim": - print(f"posting data to {CS_URL}/outputs/api/") - resp = requests.put( - f"{CS_URL}/outputs/api/", - json=payload, - headers={"Authorization": f"Token {CS_API_TOKEN}"}, - ) - print("resp", resp.status_code) - if resp.status_code == 400: - print("errors", resp.json()) - if task_type == "parse": - print(f"posting data to {CS_URL}/inputs/api/") - resp = requests.put( - f"{CS_URL}/inputs/api/", - json=payload, - headers={"Authorization": f"Token {CS_API_TOKEN}"}, - ) - print("resp", resp.status_code) - if resp.status_code == 400: - print("errors", resp.json()) +async def push(task_type, result): + async with httpx.AsyncClient( + headers={"Authorization": f"Token {CS_API_TOKEN}"} + ) as client: + if task_type == "sim": + print(f"posting data to {CS_URL}/outputs/api/") + return client.put(f"{CS_URL}/outputs/api/", json=result) + if task_type == "parse": + print(f"posting data to {CS_URL}/inputs/api/") + return client.put(f"{CS_URL}/inputs/api/", json=result) + else: + raise ValueError(f"Unknown task type: {task_type}.") + + +class Write(tornado.web.RequestHandler): + async def post(self): + print("POST -- /write/") + payload = json.loads(self.request.body.decode("utf-8")) + result = await write(**payload) + self.write(result) + + +class Push(tornado.web.RequestHandler): + async def post(self): + print("POST -- /push/") + payload = json.loads(self.request.body.decode("utf-8")) + await push(**payload) + self.set_status(200) + + +def make_app(): + return tornado.web.Application( + [(r"/write/", Write), (r"/push/", Push)], debug=True, autoreload=True + ) + + +if __name__ == "__main__": + app = make_app() + app.listen(8888) + tornado.ioloop.IOLoop.current().start() diff --git a/distributed/templates/outputs-processor-deployment.template.yaml b/distributed/templates/outputs-processor-deployment.template.yaml index baa6de6c..1a63d0d3 100755 --- a/distributed/templates/outputs-processor-deployment.template.yaml +++ b/distributed/templates/outputs-processor-deployment.template.yaml @@ -16,9 +16,8 @@ spec: - name: outputs-processor image: imagePullPolicy: Always - env: - - name: GET_HOSTS_FROM - value: dns + ports: + - containerPort: 6379 env: - name: BUCKET valueFrom: @@ -36,4 +35,4 @@ spec: name: worker-secret key: CS_API_TOKEN nodeSelector: - component: api \ No newline at end of file + component: api From f55f3c6c9ef9055e57c9fac485d4ddb24673a5a1 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Sat, 9 May 2020 12:10:32 -0400 Subject: [PATCH 05/55] Use redis acl users and add std out to cli --- 
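A minimal sketch of how a service is expected to authenticate against the ACL
users that redis_init.py creates (admin, scheduler, executor), assuming a redis
server >= 6 with ACL support, redis-py >= 3.4, and the REDIS_HOST / REDIS_PORT /
REDIS_SCHEDULER_PW values injected from worker-secret as in the templates below;
the example calls are illustrative only and are not part of this patch.

import os

import redis

# Connect as the "scheduler" ACL user created by redis_init.py.
scheduler = redis.Redis(
    host=os.environ.get("REDIS_HOST", "redis-master"),
    port=int(os.environ.get("REDIS_PORT", "6379")),
    username="scheduler",
    password=os.environ["REDIS_SCHEDULER_PW"],
)

# redis_init.py grants the scheduler user +acl|whoami, so this round-trips.
assert scheduler.acl_whoami() == "scheduler"

# Anything outside the granted command/key rules is rejected by the server
# with a NOPERM response, which redis-py surfaces as a ResponseError subclass.
try:
    scheduler.keys("*")
except redis.exceptions.ResponseError as exc:
    print("rejected as expected:", exc)

The executor user follows the same pattern but is read-only (+get) on job keys,
and the admin password is whatever redis_acl_genpass() generates and Secret
Manager stores at deploy time.
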
distributed/cs_cluster.py | 145 +++++++++++++++-- distributed/dockerfiles/Dockerfile.redis | 147 ++++++++++++++++++ distributed/kubernetes/flask-service.yaml | 6 +- .../kubernetes/redis-master-deployment.yaml | 45 ++++-- .../kubernetes/redis-master-service.yaml | 3 +- distributed/redis_init.py | 78 ++++++++++ .../redis-master-deployment.template.yaml | 36 +++++ distributed/templates/secret.template.yaml | 2 +- 8 files changed, 434 insertions(+), 28 deletions(-) create mode 100644 distributed/dockerfiles/Dockerfile.redis create mode 100644 distributed/redis_init.py create mode 100644 distributed/templates/redis-master-deployment.template.yaml diff --git a/distributed/cs_cluster.py b/distributed/cs_cluster.py index 0d1293e7..24d08896 100644 --- a/distributed/cs_cluster.py +++ b/distributed/cs_cluster.py @@ -5,6 +5,7 @@ import re import shutil import subprocess +import sys import time from pathlib import Path @@ -26,6 +27,20 @@ def run(cmd): return res +def redis_acl_genpass(): + """ + Redis recommends using ACL GENPASS to generate passwords + for ACL users. This function attempts to use a local + redis installation to generate this password automatically. + """ + import redis + + with redis.Redis(host="localhost", port=6379) as c: + value = c.acl_genpass() + + return value + + class Cluster: """ Deploy and manage Compute Studio compute cluster: @@ -47,22 +62,32 @@ class Cluster: """ - k8s_target = "kubernetes/" + kubernetes_target = "kubernetes/" cr = "gcr.io" - def __init__(self, tag, project): + def __init__(self, tag, project, kubernetes_target="kubernetes/"): self.tag = tag self.project = project + if kubernetes_target is None: + self.kubernetes_target = Cluster.kubernetes_target + else: + self.kubernetes_target = kubernetes_target + with open("templates/flask-deployment.template.yaml", "r") as f: self.flask_template = yaml.safe_load(f.read()) with open("templates/outputs-processor-deployment.template.yaml", "r") as f: self.outputs_processor_template = yaml.safe_load(f.read()) + with open("templates/redis-master-deployment.template.yaml", "r") as f: + self.redis_master_template = yaml.safe_load(f.read()) + with open("templates/secret.template.yaml", "r") as f: self.secret_template = yaml.safe_load(f.read()) + self._redis_secrets = None + def build(self): """ Build, tag, and push base images for the flask app and modeling apps. @@ -71,26 +96,50 @@ def build(self): pull from either distributed:latest or celerybase:latest. 
""" run("docker build -t distributed:latest -f dockerfiles/Dockerfile ./") + run("docker build -t redis-python:latest -f dockerfiles/Dockerfile.redis ./") run( f"docker build -t outputs_processor:{self.tag} -f dockerfiles/Dockerfile.outputs_processor ./" ) run(f"docker build -t flask:{self.tag} -f dockerfiles/Dockerfile.flask ./") run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") - run(f"docker push {self.cr}/{self.project}/distributed:latest") run( f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" ) - run(f"docker push {self.cr}/{self.project}/outputs_processor:{self.tag}") run(f"docker tag flask:{self.tag} {self.cr}/{self.project}/flask:{self.tag}") + + def push(self): + run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") + run(f"docker tag redis-python {self.cr}/{self.project}/redis-python:latest") + + run( + f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" + ) + + run(f"docker tag flask:{self.tag} {self.cr}/{self.project}/flask:{self.tag}") + + run(f"docker push {self.cr}/{self.project}/distributed:latest") + run(f"docker push {self.cr}/{self.project}/redis-python:latest") + run(f"docker push {self.cr}/{self.project}/outputs_processor:{self.tag}") run(f"docker push {self.cr}/{self.project}/flask:{self.tag}") def make_config(self): self.write_flask_deployment() self.write_outputs_processor_deployment() self.write_secret() + self.write_redis_deployment() + configs = [ + "flask-service.yaml", + "outputs-processor-deployment.yaml", + "outputs-processor-service.yaml", + "redis-master-service.yaml", + ] + for filename in configs: + with open(f"kubernetes/{filename}", "r") as f: + config = yaml.safe_load(f.read()) + self.write_config(filename, config) def write_flask_deployment(self): """ @@ -101,8 +150,7 @@ def write_flask_deployment(self): "image" ] = f"gcr.io/{self.project}/flask:{self.tag}" - with open(f"{self.k8s_target}/flask-deployment.yaml", "w") as f: - f.write(yaml.dump(deployment)) + self.write_config("flask-deployment.yaml", deployment) return deployment @@ -116,17 +164,72 @@ def write_outputs_processor_deployment(self): "image" ] = f"gcr.io/{self.project}/outputs_processor:{self.tag}" - with open(f"{self.k8s_target}/outputs-processor-deployment.yaml", "w") as f: - f.write(yaml.dump(deployment)) + self.write_config("outputs-processor-deployment.yaml", deployment) return deployment + def write_redis_deployment(self): + deployment = copy.deepcopy(self.redis_master_template) + container = deployment["spec"]["template"]["spec"]["containers"][0] + container["image"] = f"gcr.io/{self.project}/redis-python:latest" + redis_secrets = self.redis_secrets() + for name, sec in redis_secrets.items(): + if sec is not None: + container["env"].append( + { + "name": name, + "valueFrom": { + "secretKeyRef": {"key": name, "name": "worker-secret"} + }, + } + ) + self.write_config("redis-master-deployment.yaml", deployment) + def write_secret(self): + secrets = copy.deepcopy(self.secret_template) secrets["stringData"]["CS_API_TOKEN"] = self._get_secret("CS_API_TOKEN") - with open(f"{self.k8s_target}/secret.yaml", "w") as f: - f.write(yaml.dump(secrets)) + redis_secrets = self.redis_secrets() + for name, sec in redis_secrets.items(): + if sec is not None: + secrets["stringData"][name] = sec + + self.write_config("secret.yaml", secrets) + + def write_config(self, filename, config): + if self.kubernetes_target == "-": + sys.stdout.write(yaml.dump(config)) + 
sys.stdout.write("---") + sys.stdout.write("\n") + else: + with open(f"{self.kubernetes_target}/{filename}", "w") as f: + f.write(yaml.dump(config)) + + def redis_secrets(self): + """ + Return redis ACL user passwords. If they are not in the secret manager, + try to generate them using a local instance of redis. If this fails, + they are set to an empty string. + """ + if self._redis_secrets is not None: + return self._redis_secrets + from google.api_core import exceptions + + redis_secrets = dict( + REDIS_ADMIN_PW="", REDIS_EXECUTOR_PW="", REDIS_SCHEDULER_PW="" + ) + for sec in redis_secrets: + try: + value = self._get_secret(sec) + except exceptions.NotFound: + try: + value = redis_acl_genpass() + self._set_secret(sec, value) + except Exception: + value = "" + redis_secrets[sec] = value + return redis_secrets def _get_secret(self, secret_name): from google.cloud import secretmanager @@ -138,18 +241,38 @@ def _get_secret(self, secret_name): return response.payload.data.decode("utf-8") + def _set_secret(self, name, value): + from google.cloud import secretmanager + + client = secretmanager.SecretManagerServiceClient() + proj_parent = client.project_path(self.project) + client.create_secret(proj_parent, name, {"replication": {"automatic": {}}}) + + if not isinstance(value, bytes): + value = value.encode("utf-8") + + secret_parent = client.secret_path(self.project, name) + + return client.add_secret_version(secret_parent, {"data": value}) + if __name__ == "__main__": parser = argparse.ArgumentParser(description="Deploy C/S compute cluster.") parser.add_argument("--tag", required=False, default=TAG) parser.add_argument("--project", required=False, default=PROJECT) parser.add_argument("--build", action="store_true") + parser.add_argument("--push", action="store_true") parser.add_argument("--make-config", action="store_true") + parser.add_argument("--config-out", "-o") args = parser.parse_args() - cluster = Cluster(tag=args.tag, project=args.project) + cluster = Cluster( + tag=args.tag, project=args.project, kubernetes_target=args.config_out + ) if args.build: cluster.build() + if args.push: + cluster.push() if args.make_config: cluster.make_config() diff --git a/distributed/dockerfiles/Dockerfile.redis b/distributed/dockerfiles/Dockerfile.redis new file mode 100644 index 00000000..b75fdd1f --- /dev/null +++ b/distributed/dockerfiles/Dockerfile.redis @@ -0,0 +1,147 @@ +FROM redis + +################################################################################ + +# Commands below copied from: +# https://github.com/docker-library/python/blob/master/3.8/buster/slim/Dockerfile + + +################################################################################ + +# ensure local python is preferred over distribution python +ENV PATH /usr/local/bin:$PATH + +# http://bugs.python.org/issue19846 +# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. 
+ENV LANG C.UTF-8 + +# runtime dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + netbase \ + && rm -rf /var/lib/apt/lists/* + +ENV GPG_KEY E3FF2839C048B25C084DEBE9B26995E310250568 +ENV PYTHON_VERSION 3.8.2 + +RUN set -ex \ + \ + && savedAptMark="$(apt-mark showmanual)" \ + && apt-get update && apt-get install -y --no-install-recommends \ + dpkg-dev \ + gcc \ + libbluetooth-dev \ + libbz2-dev \ + libc6-dev \ + libexpat1-dev \ + libffi-dev \ + libgdbm-dev \ + liblzma-dev \ + libncursesw5-dev \ + libreadline-dev \ + libsqlite3-dev \ + libssl-dev \ + make \ + tk-dev \ + uuid-dev \ + wget \ + xz-utils \ + zlib1g-dev \ +# as of Stretch, "gpg" is no longer included by default + $(command -v gpg > /dev/null || echo 'gnupg dirmngr') \ + \ + && wget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && export GNUPGHOME="$(mktemp -d)" \ + && gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$GPG_KEY" \ + && gpg --batch --verify python.tar.xz.asc python.tar.xz \ + && { command -v gpgconf > /dev/null && gpgconf --kill all || :; } \ + && rm -rf "$GNUPGHOME" python.tar.xz.asc \ + && mkdir -p /usr/src/python \ + && tar -xJC /usr/src/python --strip-components=1 -f python.tar.xz \ + && rm python.tar.xz \ + \ + && cd /usr/src/python \ + && gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \ + && ./configure \ + --build="$gnuArch" \ + --enable-loadable-sqlite-extensions \ + --enable-optimizations \ + --enable-option-checking=fatal \ + --enable-shared \ + --with-system-expat \ + --with-system-ffi \ + --without-ensurepip \ + && make -j "$(nproc)" \ + && make install \ + && ldconfig \ + \ + && apt-mark auto '.*' > /dev/null \ + && apt-mark manual $savedAptMark \ + && find /usr/local -type f -executable -not \( -name '*tkinter*' \) -exec ldd '{}' ';' \ + | awk '/=>/ { print $(NF-1) }' \ + | sort -u \ + | xargs -r dpkg-query --search \ + | cut -d: -f1 \ + | sort -u \ + | xargs -r apt-mark manual \ + && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ + && rm -rf /var/lib/apt/lists/* \ + \ + && find /usr/local -depth \ + \( \ + \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ + -o \ + \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ + \) -exec rm -rf '{}' + \ + && rm -rf /usr/src/python \ + \ + && python3 --version + +# make some useful symlinks that are expected to exist +RUN cd /usr/local/bin \ + && ln -s idle3 idle \ + && ln -s pydoc3 pydoc \ + && ln -s python3 python \ + && ln -s python3-config python-config + +# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value ''" +ENV PYTHON_PIP_VERSION 20.1 +# https://github.com/pypa/get-pip +ENV PYTHON_GET_PIP_URL https://github.com/pypa/get-pip/raw/1fe530e9e3d800be94e04f6428460fc4fb94f5a9/get-pip.py +ENV PYTHON_GET_PIP_SHA256 ce486cddac44e99496a702aa5c06c5028414ef48fdfd5242cd2fe559b13d4348 + +RUN set -ex; \ + \ + savedAptMark="$(apt-mark showmanual)"; \ + apt-get update; \ + apt-get install -y --no-install-recommends wget; \ + \ + wget -O get-pip.py "$PYTHON_GET_PIP_URL"; \ + echo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum --check --strict -; \ + \ + apt-mark auto '.*' > /dev/null; \ + [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; \ + apt-get purge -y --auto-remove -o 
APT::AutoRemove::RecommendsImportant=false; \ + rm -rf /var/lib/apt/lists/*; \ + \ + python get-pip.py \ + --disable-pip-version-check \ + --no-cache-dir \ + "pip==$PYTHON_PIP_VERSION" \ + ; \ + pip --version; \ + \ + find /usr/local -depth \ + \( \ + \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ + -o \ + \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ + \) -exec rm -rf '{}' +; \ + rm -f get-pip.py + + +# End Python install commands + +RUN python3 -m pip install redis +COPY redis_init.py /home/ \ No newline at end of file diff --git a/distributed/kubernetes/flask-service.yaml b/distributed/kubernetes/flask-service.yaml index 0d3aced3..9ff1d4f5 100644 --- a/distributed/kubernetes/flask-service.yaml +++ b/distributed/kubernetes/flask-service.yaml @@ -3,9 +3,9 @@ kind: Service metadata: name: flask spec: - type: LoadBalancer ports: - - port: 5050 - targetPort: 5050 + - port: 5050 + targetPort: 5050 selector: app: flask + type: LoadBalancer diff --git a/distributed/kubernetes/redis-master-deployment.yaml b/distributed/kubernetes/redis-master-deployment.yaml index f63f07cb..46fbfa78 100644 --- a/distributed/kubernetes/redis-master-deployment.yaml +++ b/distributed/kubernetes/redis-master-deployment.yaml @@ -1,16 +1,16 @@ -apiVersion: apps/v1 # for versions before 1.9.0 use apps/v1beta2 +apiVersion: apps/v1 kind: Deployment metadata: - name: redis-master labels: app: redis + name: redis-master spec: + replicas: 1 selector: matchLabels: app: redis role: master tier: backend - replicas: 1 template: metadata: labels: @@ -19,11 +19,34 @@ spec: tier: backend spec: containers: - - name: master - image: k8s.gcr.io/redis:e2e # or just image: redis - resources: - requests: - cpu: 100m - memory: 100Mi - ports: - - containerPort: 6379 \ No newline at end of file + - env: + - name: REDIS_ADMIN_PW + valueFrom: + secretKeyRef: + key: REDIS_ADMIN_PW + name: worker-secret + - name: REDIS_EXECUTOR_PW + valueFrom: + secretKeyRef: + key: REDIS_EXECUTOR_PW + name: worker-secret + - name: REDIS_SCHEDULER_PW + valueFrom: + secretKeyRef: + key: REDIS_SCHEDULER_PW + name: worker-secret + image: gcr.io/cs-workers-dev/redis-python:latest + lifecycle: + postStart: + exec: + command: + - python3 + - /home/redis_init.py + name: master + imagePullPolicy: Always + ports: + - containerPort: 6379 + resources: + requests: + cpu: 100m + memory: 100Mi diff --git a/distributed/kubernetes/redis-master-service.yaml b/distributed/kubernetes/redis-master-service.yaml index bc079dce..04af2120 100644 --- a/distributed/kubernetes/redis-master-service.yaml +++ b/distributed/kubernetes/redis-master-service.yaml @@ -1,11 +1,11 @@ apiVersion: v1 kind: Service metadata: - name: redis-master labels: app: redis role: master tier: backend + name: redis-master spec: ports: - port: 6379 @@ -13,4 +13,3 @@ spec: selector: app: redis role: master - \ No newline at end of file diff --git a/distributed/redis_init.py b/distributed/redis_init.py new file mode 100644 index 00000000..0092159a --- /dev/null +++ b/distributed/redis_init.py @@ -0,0 +1,78 @@ +import redis + +import os + + +def main(): + admin_pw = os.environ.get("REDIS_ADMIN_PW") + if admin_pw in (None, ""): + print("No ADMIN PW found.") + return + + conn_kwargs = dict( + host=os.environ.get("REDIS_HOST", "127.0.0.1"), + port=os.environ.get("REDIS_PORT", 6379), + db=os.environ.get("REDIS_DB"), + ) + + try: + client = redis.Redis(username="admin", password=admin_pw, **conn_kwargs) + + users = client.acl_users() + if not ({"admin", "scheduler", "executor"} - 
set(users)): + print("ACL users have already been set up.") + return + except redis.exceptions.ResponseError: + # no admin found. + client = redis.Redis(**conn_kwargs) + + # initialize users. + print("No ACL users found. Initializing now.") + if client.acl_whoami() == "default": + nopass = admin_pw in (None, "") + client.acl_setuser( + "admin", + enabled=True, + nopass=nopass, + passwords=f"+{admin_pw}" if not nopass else None, + commands=["+@all"], + ) + + client.close() + del client + admin_client = redis.Redis(username="admin", password=admin_pw, **conn_kwargs) + else: + admin_client = client + + assert admin_client.acl_whoami() == "admin" + + admin_client.acl_setuser("default", enabled=False, commands=["-@all"]) + + sched_pw = os.environ.get("REDIS_SCHEDULER_PW") + nopass = sched_pw in (None, "") + admin_client.acl_setuser( + "scheduler", + enabled=True, + nopass=nopass, + passwords=f"+{sched_pw}" if not nopass else None, + commands=["-@all", "+set", "+get", "+acl|whoami"], + keys=["job:"], + ) + + exec_pw = os.environ.get("REDIS_EXECUTOR_PW") + nopass = exec_pw in (None, "") + admin_client.acl_setuser( + "executor", + enabled=True, + nopass=nopass, + passwords=f"+{exec_pw}" if not nopass else None, + commands=["-@all", "+get", "+acl|whoami"], + keys=["job:"], + ) + admin_client.close() + + print(f"Successfully created users: {admin_client.acl_users()}") + + +if __name__ == "__main__": + main() diff --git a/distributed/templates/redis-master-deployment.template.yaml b/distributed/templates/redis-master-deployment.template.yaml new file mode 100644 index 00000000..1e3f5f30 --- /dev/null +++ b/distributed/templates/redis-master-deployment.template.yaml @@ -0,0 +1,36 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: redis + name: redis-master +spec: + replicas: 1 + selector: + matchLabels: + app: redis + role: master + tier: backend + template: + metadata: + labels: + app: redis + role: master + tier: backend + spec: + containers: + - env: [] + image: # redis-python + lifecycle: + postStart: + exec: + command: + - python3 + - /home/redis_init.py + name: master + ports: + - containerPort: 6379 + resources: + requests: + cpu: 100m + memory: 100Mi diff --git a/distributed/templates/secret.template.yaml b/distributed/templates/secret.template.yaml index 2d5a27f0..522d99fd 100644 --- a/distributed/templates/secret.template.yaml +++ b/distributed/templates/secret.template.yaml @@ -9,4 +9,4 @@ stringData: CS_API_TOKEN: OUTPUTS_VERSION: "v1" - REDIS: redis://redis-master/0 + REDIS_HOST: redis-master From b87e197aae694c3195f2f2ad15503dd4ce054c59 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Wed, 13 May 2020 14:53:36 -0400 Subject: [PATCH 06/55] Initial merge from compute-tooling/compute-studio-publish --- distributed/.dockerignore | 25 -- distributed/api/__init__.py | 23 -- distributed/api/endpoints.py | 176 ----------- distributed/api/tests/test_flask.py | 80 ----- distributed/celery_sim.sh | 4 - distributed/cs-dask-sim/cs_dask_sim.py | 107 ------- distributed/cs-dask-sim/setup.py | 10 - distributed/requirements.txt | 12 - distributed/setup.py | 9 - workers/cs_workers/__init__.py | 3 + .../cs_workers/clients/__init__.py | 0 workers/cs_workers/clients/api_task.py | 21 ++ workers/cs_workers/clients/core.py | 91 ++++++ workers/cs_workers/clients/job.py | 136 +++++++++ workers/cs_workers/clients/publish.py | 281 ++++++++++++++++++ workers/cs_workers/clients/secrets.py | 115 +++++++ workers/cs_workers/executors/__init__.py | 0 workers/cs_workers/executors/api_task.py | 89 ++++++ 
.../executors/dask-worker-space/global.lock | 0 .../executors/dask-worker-space/purge.lock | 0 workers/cs_workers/executors/kubernetes.py | 40 +++ workers/cs_workers/executors/task_wrapper.py | 91 ++++++ workers/cs_workers/services/__init__.py | 0 .../cs_workers/services/init.py | 0 .../cs_workers/services}/outputs_processor.py | 0 workers/cs_workers/services/scheduler.py | 105 +++++++ workers/cs_workers/utils.py | 44 +++ .../dockerfiles/Dockerfile | 0 workers/dockerfiles/Dockerfile.model | 42 +++ .../dockerfiles/Dockerfile.outputs_processor | 0 .../dockerfiles/Dockerfile.redis | 2 +- .../dockerfiles/Dockerfile.scheduler | 6 +- .../kubernetes/flask-service.yaml | 0 .../kubernetes/outputs-processor-service.yaml | 10 + .../kubernetes/redis-master-deployment.yaml | 0 .../kubernetes/redis-master-service.yaml | 0 workers/requirements.txt | 8 + .../scripts}/redis_init.py | 0 workers/setup.py | 35 +++ .../templates/flask-deployment.template.yaml | 0 ...outputs-processor-deployment.template.yaml | 0 .../redis-master-deployment.template.yaml | 0 workers/templates/sc-deployment.template.yaml | 37 +++ .../templates/secret.template.yaml | 0 44 files changed, 1152 insertions(+), 450 deletions(-) delete mode 100644 distributed/.dockerignore delete mode 100644 distributed/api/__init__.py delete mode 100644 distributed/api/endpoints.py delete mode 100644 distributed/api/tests/test_flask.py delete mode 100755 distributed/celery_sim.sh delete mode 100644 distributed/cs-dask-sim/cs_dask_sim.py delete mode 100644 distributed/cs-dask-sim/setup.py delete mode 100755 distributed/requirements.txt delete mode 100644 distributed/setup.py create mode 100644 workers/cs_workers/__init__.py rename distributed/MANIFEST.in => workers/cs_workers/clients/__init__.py (100%) create mode 100644 workers/cs_workers/clients/api_task.py create mode 100644 workers/cs_workers/clients/core.py create mode 100644 workers/cs_workers/clients/job.py create mode 100644 workers/cs_workers/clients/publish.py create mode 100644 workers/cs_workers/clients/secrets.py create mode 100644 workers/cs_workers/executors/__init__.py create mode 100644 workers/cs_workers/executors/api_task.py create mode 100644 workers/cs_workers/executors/dask-worker-space/global.lock create mode 100644 workers/cs_workers/executors/dask-worker-space/purge.lock create mode 100644 workers/cs_workers/executors/kubernetes.py create mode 100644 workers/cs_workers/executors/task_wrapper.py create mode 100644 workers/cs_workers/services/__init__.py rename distributed/cs_cluster.py => workers/cs_workers/services/init.py (100%) rename {distributed => workers/cs_workers/services}/outputs_processor.py (100%) create mode 100644 workers/cs_workers/services/scheduler.py create mode 100644 workers/cs_workers/utils.py rename {distributed => workers}/dockerfiles/Dockerfile (100%) create mode 100644 workers/dockerfiles/Dockerfile.model rename {distributed => workers}/dockerfiles/Dockerfile.outputs_processor (100%) rename {distributed => workers}/dockerfiles/Dockerfile.redis (99%) rename distributed/dockerfiles/Dockerfile.flask => workers/dockerfiles/Dockerfile.scheduler (88%) rename {distributed => workers}/kubernetes/flask-service.yaml (100%) create mode 100644 workers/kubernetes/outputs-processor-service.yaml rename {distributed => workers}/kubernetes/redis-master-deployment.yaml (100%) rename {distributed => workers}/kubernetes/redis-master-service.yaml (100%) create mode 100755 workers/requirements.txt rename {distributed => workers/scripts}/redis_init.py (100%) create mode 100644 
workers/setup.py rename {distributed => workers}/templates/flask-deployment.template.yaml (100%) rename {distributed => workers}/templates/outputs-processor-deployment.template.yaml (100%) rename {distributed => workers}/templates/redis-master-deployment.template.yaml (100%) create mode 100755 workers/templates/sc-deployment.template.yaml rename {distributed => workers}/templates/secret.template.yaml (100%) diff --git a/distributed/.dockerignore b/distributed/.dockerignore deleted file mode 100644 index e1804ef7..00000000 --- a/distributed/.dockerignore +++ /dev/null @@ -1,25 +0,0 @@ -*.pyc -*.db -*.env -db.sqlite3 - -node_modules -bower_components - -webapp_test.sh - -logs/ - -dump.rdb - -.idea -.cache -.ipynb_checkpoints - -*.pem - -_build - -.DS_Store -*.egg-info -staticfiles diff --git a/distributed/api/__init__.py b/distributed/api/__init__.py deleted file mode 100644 index 172050c6..00000000 --- a/distributed/api/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask - - -def create_app(test_config=None): - app = Flask(__name__) - - @app.route('/hello') - def hello(): - return 'Hello, World!' - - if test_config is not None: - app.config.update(test_config) - - from api import endpoints - app.register_blueprint(endpoints.bp) - - return app - -try: - app = create_app() -except Exception as e: - print("got exception on import: ", e) - app = None \ No newline at end of file diff --git a/distributed/api/endpoints.py b/distributed/api/endpoints.py deleted file mode 100644 index 601624c5..00000000 --- a/distributed/api/endpoints.py +++ /dev/null @@ -1,176 +0,0 @@ -import functools -import json -import os -import re -import time -import traceback -import uuid -from collections import defaultdict - -from flask import Blueprint, request, make_response -from celery.result import AsyncResult -from celery import chord -import redis -import requests - -from cs_publish.executors.celery import get_app - - -celery_app = get_app() - - -CS_URL = os.environ.get("CS_URL") -CS_API_TOKEN = os.environ.get("CS_API_TOKEN") - -bp = Blueprint("endpoints", __name__) - -client = redis.Redis.from_url(os.environ.get("REDIS", "redis://redis-master/0")) - - -def clean(word): - return re.sub("[^0-9a-zA-Z]+", "", word).lower() - - -def get_cs_config(): - print(f"getting config from: {CS_URL}/publish/api/") - resp = requests.get(f"{CS_URL}/publish/api/") - if resp.status_code != 200: - raise Exception(f"Response status code: {resp.status_code}") - data = resp.json() - print("got config: ", data) - config = {} - - for model in data: - model_id = clean(model["owner"]), clean(model["title"]) - config[model_id] = { - "cluster_type": model["cluster_type"], - "time_out": model["exp_task_time"] * 1.25, - } - print("made config: ", config) - return config - - -CONFIG = get_cs_config() - - -def get_cluster_type(owner, app_name): - model_id = clean(owner), clean(app_name) - # allowed to return None - return CONFIG.get(model_id, {}).get("cluster_type") - - -def get_time_out(owner, app_name): - model_id = clean(owner), clean(app_name) - return CONFIG[model_id]["time_out"] - - -def async_endpoint(owner, app_name, compute_task): - print(f"async endpoint {compute_task}") - data = request.get_data() - inputs = json.loads(data) - print("inputs", inputs) - result = celery_app.signature(compute_task, kwargs=inputs).delay() - length = client.llen(f"{owner}_{app_name}_queue") + 1 - data = {"job_id": str(result), "qlength": length} - return json.dumps(data) - - -def sync_endpoint(owner, app_name, compute_task): - print(f"io 
endpoint {compute_task}") - data = request.get_data() - print("got data", data) - if not data: - inputs = {} - else: - inputs = json.loads(data) - print("inputs", inputs) - result = celery_app.signature(compute_task, kwargs=inputs).delay() - print("getting...") - result = result.get() - return json.dumps(result) - - -def route_to_task(owner, app_name, endpoint, action): - owner, app_name = clean(owner), clean(app_name) - print("getting...", owner, app_name, endpoint, action) - task_name = f"{owner}_{app_name}_tasks.{action}" - print("got task_name", task_name) - print("map", celery_app.amqp.routes) - if task_name in celery_app.amqp.routes[0].map: - return endpoint(owner, app_name, task_name) - else: - return json.dumps({"error": "invalid endpoint"}), 404 - - -@bp.route("///version", methods=["POST"]) -def endpoint_version(owner, app_name): - action = "inputs_version" - endpoint = sync_endpoint - return route_to_task(owner, app_name, endpoint, action) - - -@bp.route("///inputs", methods=["POST"]) -def endpoint_inputs(owner, app_name): - action = "inputs_get" - endpoint = sync_endpoint - return route_to_task(owner, app_name, endpoint, action) - - -@bp.route("///parse", methods=["POST"]) -def endpoint_parse(owner, app_name): - action = "inputs_parse" - endpoint = async_endpoint - return route_to_task(owner, app_name, endpoint, action) - - -@bp.route("///sim", methods=["POST"]) -def endpoint_sim(owner, app_name): - action = "sim" - print("owner, app_name", owner, app_name) - cluster_type = get_cluster_type(owner, app_name) - print(f"cluster type is {cluster_type}") - if cluster_type == "single-core": - return route_to_task(owner, app_name, async_endpoint, action) - else: - return json.dumps({"error": "model does not exist."}), 404 - - -@bp.route("///get//", methods=["GET"]) -def results(owner, app_name, job_id): - cluster_type = get_cluster_type(owner, app_name) - if cluster_type == "single-core": - async_result = AsyncResult(job_id) - if async_result.ready() and async_result.successful(): - return json.dumps(async_result.result) - elif async_result.failed(): - print("traceback", async_result.traceback) - return json.dumps( - {"status": "WORKER_FAILURE", "traceback": async_result.traceback} - ) - else: - return make_response("not ready", 202) - else: - return json.dumps({"error": "model does not exist."}), 404 - - -@bp.route("///query//", methods=["GET"]) -def query_results(owner, app_name, job_id): - - cluster_type = get_cluster_type(owner, app_name) - if cluster_type == "single-core": - async_result = AsyncResult(job_id) - print("celery result", async_result.state) - if async_result.ready() and async_result.successful(): - return "YES" - elif async_result.failed(): - return "FAIL" - else: - return "NO" - else: - return json.dumps({"error": "model does not exist."}), 404 - - -@bp.route("/reset-config/", methods=["GET"]) -def reset_config(): - CONFIG.update(get_cs_config()) - return json.dumps({"status": "SUCCESS"}), 200 diff --git a/distributed/api/tests/test_flask.py b/distributed/api/tests/test_flask.py deleted file mode 100644 index b3647b67..00000000 --- a/distributed/api/tests/test_flask.py +++ /dev/null @@ -1,80 +0,0 @@ -import pytest -import json -import time -import msgpack - -from api import create_app - - -@pytest.fixture -def app(): - app = create_app({"TESTING": True}) - - yield app - - -@pytest.fixture -def client(app): - return app.test_client() - - -def post_and_poll(client, url, data, exp_status="YES", tries=30): - packed = msgpack.dumps(data, use_bin_type=True) - resp = 
client.post( - url, data=packed, headers={"Content-Type": "application/octet-stream"} - ) - assert resp.status_code == 200 - data = json.loads(resp.data.decode("utf-8")) - job_id = data["job_id"] - status = "NO" - while status == "NO" and tries > 0: - resp = client.get("/query_job?job_id={job_id}".format(job_id=job_id)) - status = resp.data.decode("utf-8") - assert resp.status_code == 200 - time.sleep(1) - tries -= 1 - - assert status == exp_status - - resp = client.get("/get_job?job_id={job_id}".format(job_id=job_id)) - assert resp.status_code == 200 - return resp - - -def test_hello(client): - resp = client.get("/hello") - assert resp.status_code == 200 - - -def test_error_app_inputs(client): - data = {"meta_param": True} - packed = msgpack.dumps(data, use_bin_type=True) - resp = client.post( - "/error/app/inputs", - data=packed, - headers={"Content-Type": "application/octet-stream"}, - ) - assert resp.status_code == 200 - data = json.loads(resp.data.decode("utf-8")) - assert data["status"] == "FAIL" - assert data["traceback"] - assert sum(data["meta"]["task_times"]) > 0 - - -def test_error_app_parse(client): - data = { - "params": {"cat": {"param": 0}}, - "jsonparams": "", - "errors_warnings": {"cat": {"errors": {}, "warnings": {}}}, - } - packed = msgpack.dumps(data, use_bin_type=True) - resp = client.post( - "/error/app/parse", - data=packed, - headers={"Content-Type": "application/octet-stream"}, - ) - assert resp.status_code == 200 - data = json.loads(resp.data.decode("utf-8")) - assert data["status"] == "FAIL" - assert data["traceback"] - assert sum(data["meta"]["task_times"]) > 0 diff --git a/distributed/celery_sim.sh b/distributed/celery_sim.sh deleted file mode 100755 index dac81086..00000000 --- a/distributed/celery_sim.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -SAFEOWNER=$(python -c "import re, os; print(re.sub('[^0-9a-zA-Z]+', '', \"$1\").lower())") -SAFETITLE=$(python -c "import re, os; print(re.sub('[^0-9a-zA-Z]+', '', \"$2\").lower())") -celery -A celery_app.${SAFEOWNER}_${SAFETITLE}_tasks worker --loglevel=info --concurrency=1 -Q ${SAFEOWNER}_${SAFETITLE}_queue -n ${SAFEOWNER}_${SAFETITLE}_sim@%h \ No newline at end of file diff --git a/distributed/cs-dask-sim/cs_dask_sim.py b/distributed/cs-dask-sim/cs_dask_sim.py deleted file mode 100644 index 223a6ff6..00000000 --- a/distributed/cs-dask-sim/cs_dask_sim.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import time -import traceback -from functools import partial - -import cs_storage -import requests -from distributed import worker_client - -try: - from cs_config import functions -except ImportError: - functions = None - - -def done_callback(future, job_id, cs_url, cs_api_token, start_time): - """ - This should be called like: - - callback = functools.partial( - done_callback, - job_id=job_id, - cs_url=os.environ.get("CS_URL"), - cs_api_token=os.environ.get("CS_API_TOKEN"), - start_time=time.time() - ) - - This is because this function needs the job id, comp url, - api token, and start time arguments, but dask only passes the - future object. 
- """ - finish = time.time() - print(f"job_id: {job_id}") - print(f"from dask") - print(f"state: {future.status}") - res = {} - traceback_str = None - try: - outputs = future.result() - outputs = cs_storage.write(job_id, outputs) - res.update( - { - "model_version": functions.get_version(), - "outputs": outputs, - "version": "v1", - } - ) - except Exception: - traceback_str = traceback.format_exc() - print(f"exception in callback with job_id: {job_id}") - print(traceback_str) - - if "meta" not in res: - res["meta"] = {} - res["meta"]["task_times"] = [finish - start_time] - if traceback_str is None: - res["status"] = "SUCCESS" - else: - res["status"] = "FAIL" - res["traceback"] = traceback_str - - res["job_id"] = job_id - print("got result", res) - print(f"posting data to {cs_url}/outputs/api/") - resp = requests.put( - f"{cs_url}/outputs/api/", - json=res, - headers={"Authorization": f"Token {cs_api_token}"}, - ) - print("resp", resp.status_code) - if resp.status_code == 400: - print("errors", resp.json()) - - -def dask_sim(meta_param_dict, adjustment, job_id, cs_url, cs_api_token, timeout): - """ - Wraps the functions.run_model function with a dask future and adds a - callback for pushing the results back to the webapp. The callback is - necessary becuase it will be called no matter what kinds of exceptions - are thrown in this function. - - This wrapper function is called with fire_and_forget. Since dask - "forgets" about this function but keeps track of the run_model task, - we give the run_model task the job_id. This makes it possible for the - webapp to query the job status. - """ - start_time = time.time() - partialled_cb = partial( - done_callback, - job_id=job_id, - cs_url=cs_url, - cs_api_token=cs_api_token, - start_time=start_time, - ) - with worker_client() as c: - print("c", c) - fut = c.submit(functions.run_model, meta_param_dict, adjustment, key=job_id) - fut.add_done_callback(partialled_cb) - try: - print("waiting on future", fut) - _ = fut.result(timeout=timeout) - except Exception: - # Exceptions are picked up by the callback. We just - # log them here. 
- traceback_str = traceback.format_exc() - print(f"exception in task with job_id: {job_id}") - print(traceback_str) diff --git a/distributed/cs-dask-sim/setup.py b/distributed/cs-dask-sim/setup.py deleted file mode 100644 index acba0664..00000000 --- a/distributed/cs-dask-sim/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -import setuptools -import os - -setuptools.setup( - name="cs-dask-sim", - description="Local package for sending a dask function over the wire.", - url="https://github.com/compute-tooling/compute-studio", - packages=setuptools.find_packages(), - include_package_data=True, -) diff --git a/distributed/requirements.txt b/distributed/requirements.txt deleted file mode 100755 index adcf3c84..00000000 --- a/distributed/requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -pip -msgpack -celery -redis -pytest -flask -toolz -gunicorn -boto3 -pyyaml -cs-storage>=1.10.1 -git+https://github.com/compute-tooling/compute-studio-publish.git@855ec5bdcd66e2755408235b5d6ce4934317b9b5 \ No newline at end of file diff --git a/distributed/setup.py b/distributed/setup.py deleted file mode 100644 index ae8f60dd..00000000 --- a/distributed/setup.py +++ /dev/null @@ -1,9 +0,0 @@ -import setuptools - -setuptools.setup( - name="api", - version="0.1.0", - packages=setuptools.find_packages(), - license="", - long_description="API server for Compute Studio", -) diff --git a/workers/cs_workers/__init__.py b/workers/cs_workers/__init__.py new file mode 100644 index 00000000..a6afd17d --- /dev/null +++ b/workers/cs_workers/__init__.py @@ -0,0 +1,3 @@ +__version__ = "0.0.0.0406.0" + +__ALL__ = [] diff --git a/distributed/MANIFEST.in b/workers/cs_workers/clients/__init__.py similarity index 100% rename from distributed/MANIFEST.in rename to workers/cs_workers/clients/__init__.py diff --git a/workers/cs_workers/clients/api_task.py b/workers/cs_workers/clients/api_task.py new file mode 100644 index 00000000..e6bdccb7 --- /dev/null +++ b/workers/cs_workers/clients/api_task.py @@ -0,0 +1,21 @@ +import httpx + + +class APITask: + def __init__(self, owner, title, task_id, task_name, **task_kwargs): + self.owner = owner + self.title = title + self.task_id = task_id + self.task_name = task_name + self.task_kwargs = task_kwargs + + async def create(self, asynchronous=False): + method = "async" if asynchronous else "sync" + print(f"http://localhost:8888/{method}/{self.task_name}/") + async with httpx.AsyncClient() as client: + resp = await client.post( + # f"http://{self.owner}-{self.title}/{method}/{self.task_name}/", + f"http://localhost:8888/{method}/{self.task_name}/", + json={"task_id": self.task_id, "task_kwargs": self.task_kwargs}, + ) + return resp diff --git a/workers/cs_workers/clients/core.py b/workers/cs_workers/clients/core.py new file mode 100644 index 00000000..f5718fd9 --- /dev/null +++ b/workers/cs_workers/clients/core.py @@ -0,0 +1,91 @@ +import copy +import json +import os +import sys +import uuid +import yaml +from pathlib import Path + +import requests +from git import Repo, InvalidGitRepositoryError + + +from cs_publish.utils import clean, run, parse_owner_title, read_github_file +from cs_publish.client.secrets import Secrets + +CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) +BASE_PATH = CURR_PATH / ".." / ".." 
+ + +class Core: + cr = "gcr.io" + + def __init__(self, project, tag=None, base_branch="origin/master", quiet=False): + self.tag = tag + self.project = project + self.base_branch = base_branch + self.quiet = quiet + + def get_config(self, models): + config = {} + for owner_title in models: + owner, title = parse_owner_title(owner_title) + if (owner, title) in config: + continue + else: + config_file = ( + BASE_PATH / Path("config") / Path(owner) / Path(f"{title}.yaml") + ) + if config_file.exists(): + with open(config_file, "r") as f: + c = yaml.safe_load(f.read()) + else: + config_file = self.get_config_from_remote([(owner, title)]) + config[(c["owner"], c["title"])] = c + if not self.quiet and config: + print("# Updating:") + print("\n#".join(f" {o}/{t}" for o, t in config.keys())) + elif not self.quiet: + print("# No changes detected.") + return config + + def get_config_from_diff(self): + try: + r = Repo() + files_with_diff = r.index.diff(r.commit(self.base_branch), paths="config") + except InvalidGitRepositoryError: + files_with_diff = [] + config = {} + for config_file in files_with_diff: + with open(config_file.a_path, "r") as f: + c = yaml.safe_load(f.read()) + config[(c["owner"], c["title"])] = c + return config + + def get_config_from_remote(self, models): + config = {} + for owner_title in models: + owner, title = parse_owner_title(owner_title) + content = read_github_file( + "compute-tooling", + "compute-studio-publish", + "master", + f"{owner}/{title}.yaml", + ) + config[(owner, title)] = yaml.safe_load(content) + return config + + def _resources(self, app, action=None): + if action == "io": + resources = { + "requests": {"cpu": 0.7, "memory": "0.25G"}, + "limits": {"cpu": 1, "memory": "0.7G"}, + } + else: + resources = {"requests": {"memory": "1G", "cpu": 1}} + resources = dict(resources, **copy.deepcopy(app["resources"])) + return resources + + def _list_secrets(self, app): + secret = Secrets(app["owner"], app["title"], self.project) + return secret.list_secrets() diff --git a/workers/cs_workers/clients/job.py b/workers/cs_workers/clients/job.py new file mode 100644 index 00000000..2e281a72 --- /dev/null +++ b/workers/cs_workers/clients/job.py @@ -0,0 +1,136 @@ +import json +import os +import redis +import uuid +import yaml + +from kubernetes import client as kclient, config as kconfig + +from cs_publish.utils import clean +from cs_publish.client.core import Core + + +redis_conn = dict( + host=os.environ.get("REDIS_HOST"), + port=os.environ.get("REDIS_PORT"), + db=os.environ.get("REDIS_DB"), + username="scheduler", + password=os.environ.get("REDIS_SCHEDULER_PW"), +) + + +class Job(Core): + def __init__( + self, project, owner, title, tag, job_id=None, job_kwargs=None, quiet=True + ): + super().__init__(project, quiet=quiet) + self.config = {} + kconfig.load_kube_config() + self.api_client = kclient.BatchV1Api() + self.job = self.configure(owner, title, tag, job_id) + self.save_job_kwargs(self.job_id, job_kwargs) + + def env(self, owner, title, config): + safeowner = clean(owner) + safetitle = clean(title) + envs = [ + kclient.V1EnvVar("OWNER", config["owner"]), + kclient.V1EnvVar("TITLE", config["title"]), + kclient.V1EnvVar("SIM_TIME_LIMIT", str(config["sim_time_limit"])), + ] + for sec in ["CS_URL", "REDIS_HOST", "REDIS_PORT", "REDIS_EXECUTOR_PW"]: + envs.append( + kclient.V1EnvVar( + sec, + value_from=kclient.V1EnvVarSource( + secret_key_ref=( + kclient.V1SecretKeySelector(key=sec, name="worker-secret") + ) + ), + ) + ) + + for secret in self._list_secrets(config): + 
envs.append( + kclient.V1EnvVar( + name=secret, + value_from=kclient.V1EnvVarSource( + secret_key_ref=( + kclient.V1SecretKeySelector( + key=secret, name=f"{safeowner}-{safetitle}-secret" + ) + ) + ), + ) + ) + return envs + + def configure(self, owner, title, tag, job_id=None): + if job_id is None: + job_id = "job:" + str(uuid.uuid4()) + else: + job_id = str(job_id) + if not str(job_id).startswith("job:"): + job_id += "job:" + + if (owner, title) not in self.config: + self.config.update(self.get_config([(owner, title)])) + + config = self.config[(owner, title)] + + safeowner = clean(owner) + safetitle = clean(title) + name = f"{safeowner}-{safetitle}" + job_name = f"{name}-{job_id}" + container = kclient.V1Container( + name=job_name, + image=f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{tag}", + command=["cs-job", "--job-id", job_id], + env=self.env(owner, title, config), + ) + # Create and configurate a spec section + template = kclient.V1PodTemplateSpec( + metadata=kclient.V1ObjectMeta( + labels={"app": f"{name}-job", "job-id": job_id} + ), + spec=kclient.V1PodSpec( + restart_policy="Never", + containers=[container], + node_selector={"component": "model"}, + ), + ) + # Create the specification of deployment + spec = kclient.V1JobSpec(template=template, backoff_limit=1) + # Instantiate the job object + job = kclient.V1Job( + api_version="batch/v1", + kind="Job", + metadata=kclient.V1ObjectMeta(name=job_name), + spec=spec, + ) + + if not self.quiet: + print(yaml.dump(job.to_dict())) + + return job + + def save_job_kwargs(self, job_id, job_kwargs): + with redis.Redis(**redis_conn) as rclient: + rclient.set(job_id, json.dumps(job_kwargs)) + + def create(self): + return self.api_client.create_namespaced_job(body=self.job, namespace="default") + + def delete(self): + return self.api_client.delete_namespaced_job( + name=self.job.metadata.name, + namespace="default", + body=kclient.V1DeleteOptions(), + ) + + @property + def job_id(self): + if self.job: + return self.job.spec.template.metadata.labels["job-id"] + else: + None diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py new file mode 100644 index 00000000..5c7e884b --- /dev/null +++ b/workers/cs_workers/clients/publish.py @@ -0,0 +1,281 @@ +import argparse +import copy +import os +import sys +import yaml +from pathlib import Path + +from ..utils import run, clean + +from .core import Core + +TAG = os.environ.get("TAG", "") +PROJECT = os.environ.get("PROJECT", "cs-workers-dev") +CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) +BASE_PATH = CURR_PATH / ".." / ".." + + +class Publisher(Core): + """ + Build, test, and publish docker images for Compute Studio: + + args: + - config: configuration for the apps powering C/S. + - tag: image version, defined as [c/s version].[mm][dd].[n] + - project: GCP project that the compute cluster is under. + - models (optional): only build a subset of the models in + the config. 
+ + """ + + kubernetes_target = BASE_PATH / Path("kubernetes") + + def __init__( + self, + project, + tag, + models=None, + base_branch="origin/master", + quiet=False, + kubernetes_target=None, + ): + super().__init__(project, tag, base_branch, quiet) + + self.models = models if models and models[0] else None + self.kubernetes_target = kubernetes_target or self.kubernetes_target + + if self.kubernetes_target == "-": + self.quiet = True + elif not self.kubernetes_target.exists(): + os.mkdir(self.kubernetes_target) + + self.config = self.get_config_from_diff() + if self.models: + self.config.update(self.get_config(self.models)) + + with open( + BASE_PATH / Path("templates") / Path("sc-deployment.template.yaml"), "r" + ) as f: + self.app_template = yaml.safe_load(f.read()) + + with open( + BASE_PATH / Path("templates") / Path("secret.template.yaml"), "r" + ) as f: + self.secret_template = yaml.safe_load(f.read()) + + self.errored = set() + + def build(self): + self.apply_method_to_apps(method=self.build_app_image) + + def test(self): + self.apply_method_to_apps(method=self.test_app_image) + + def push(self): + self.apply_method_to_apps(method=self.push_app_image) + + def write_app_config(self): + self.apply_method_to_apps(method=self.write_secrets) + self.apply_method_to_apps(method=self._write_app_inputs_procesess) + + def apply_method_to_apps(self, method): + """ + Build, tag, and push images and write k8s config files + for all apps in config. Filters out those not in models + list, if applicable. + """ + for name, app in self.config.items(): + if self.models and f"{name[0]}/{name[1]}" not in self.models: + continue + try: + method(app) + except Exception: + print( + f"There was an error building: " + f"{app['owner']}/{app['title']}:{self.tag}" + ) + import traceback as tb + + tb.print_exc() + self.errored.add((app["owner"], app["title"])) + continue + + def build_app_image(self, app): + """ + Build, tag, and pus the image for a single app. 
+ """ + print(app) + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + img_name = f"{safeowner}_{safetitle}_tasks" + + reg_url = "https://github.com" + raw_url = "https://raw.githubusercontent.com" + + buildargs = dict( + OWNER=app["owner"], + TITLE=app["title"], + BRANCH=app["branch"], + SAFEOWNER=safeowner, + SAFETITLE=safetitle, + SIM_TIME_LIMIT=app["sim_time_limit"], + REPO_URL=app["repo_url"], + RAW_REPO_URL=app["repo_url"].replace(reg_url, raw_url), + **app["env"], + ) + + buildargs_str = " ".join( + [f"--build-arg {arg}={value}" for arg, value in buildargs.items()] + ) + cmd = f"docker build {buildargs_str} -t {img_name}:{self.tag} ./" + run(cmd) + + run( + f"docker tag {img_name}:{self.tag} {self.cr}/{self.project}/{img_name}:{self.tag}" + ) + + def test_app_image(self, app): + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + img_name = f"{safeowner}_{safetitle}_tasks" + run( + f"docker run {self.cr}/{self.project}/{img_name}:{self.tag} py.test /home/test_functions.py -v -s" + ) + + def push_app_image(self, app): + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + img_name = f"{safeowner}_{safetitle}_tasks" + run(f"docker push {self.cr}/{self.project}/{img_name}:{self.tag}") + + def write_secrets(self, app): + secret_config = copy.deepcopy(self.secret_template) + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + name = f"{safeowner}-{safetitle}-secret" + + secret_config["metadata"]["name"] = name + + for name, value in self._list_secrets(app).items(): + secret_config["stringData"][name] = value + + if self.kubernetes_target == "-": + sys.stdout.write(yaml.dump(secret_config)) + sys.stdout.write("---") + sys.stdout.write("\n") + else: + with open(self.kubernetes_target / Path(f"{name}.yaml"), "w") as f: + f.write(yaml.dump(secret_config)) + + return secret_config + + def _write_app_inputs_procesess(self, app): + app_deployment = copy.deepcopy(self.app_template) + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + action = "io" + name = f"{safeowner}-{safetitle}-{action}" + + resources = self._resources(app, action) + + app_deployment["metadata"]["name"] = name + app_deployment["spec"]["selector"]["matchLabels"]["app"] = name + app_deployment["spec"]["template"]["metadata"]["labels"]["app"] = name + + container_config = app_deployment["spec"]["template"]["spec"]["containers"][0] + + container_config.update( + { + "name": name, + "image": f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{self.tag}", + "command": [f"./celery_{action}.sh"], + "args": [ + app["owner"], + app["title"], + ], # TODO: pass safe names to docker file at build and run time + "resources": resources, + } + ) + + container_config["env"].append({"name": "TITLE", "value": app["title"]}) + container_config["env"].append({"name": "OWNER", "value": app["owner"]}) + container_config["env"].append( + {"name": "SIM_TIME_LIMIT", "value": str(app["sim_time_limit"])} + ) + container_config["env"].append( + {"name": "APP_NAME", "value": f"{safeowner}_{safetitle}_tasks"} + ) + container_config["env"].append( + { + "name": "REDIS", + "valueFrom": { + "secretKeyRef": {"name": "worker-secret", "key": "REDIS"} + }, + } + ) + + self._set_secrets(app, container_config) + + if self.kubernetes_target == "-": + sys.stdout.write(yaml.dump(app_deployment)) + sys.stdout.write("---") + sys.stdout.write("\n") + else: + with open( + self.kubernetes_target / Path(f"{name}-deployment.yaml"), "w" + ) as f: + f.write(yaml.dump(app_deployment)) + + 
return app_deployment + + def _resources(self, app, action=None): + if action == "io": + resources = { + "requests": {"cpu": 0.7, "memory": "0.25G"}, + "limits": {"cpu": 1, "memory": "0.7G"}, + } + else: + resources = super()._resources(app) + return resources + + def _set_secrets(self, app, config): + safeowner = clean(app["owner"]) + safetitle = clean(app["title"]) + name = f"{safeowner}-{safetitle}-secret" + for key in self._list_secrets(app): + config["env"].append( + {"name": key, "valueFrom": {"secretKeyRef": {"name": name, "key": key}}} + ) + + +def main(): + parser = argparse.ArgumentParser(description="Deploy C/S compute cluster.") + parser.add_argument("--tag", required=False, default=TAG) + parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--models", nargs="+", type=str, required=False, default=None) + parser.add_argument("--build", action="store_true") + parser.add_argument("--test", action="store_true") + parser.add_argument("--push", action="store_true") + parser.add_argument("--app-config", action="store_true") + parser.add_argument("--base-branch", default="origin/master") + parser.add_argument("--quiet", "-q", default=False) + parser.add_argument("--config-out", "-o", default=None) + + args = parser.parse_args() + + publisher = Publisher( + project=args.project, + tag=args.tag, + models=args.models, + base_branch=args.base_branch, + quiet=args.quiet, + kubernetes_target=args.config_out, + ) + if args.build: + publisher.build() + if args.test: + publisher.test() + if args.push: + publisher.push() + if args.app_config: + publisher.write_app_config() diff --git a/workers/cs_workers/clients/secrets.py b/workers/cs_workers/clients/secrets.py new file mode 100644 index 00000000..c410bfb1 --- /dev/null +++ b/workers/cs_workers/clients/secrets.py @@ -0,0 +1,115 @@ +import argparse +import json +import os + +from cs_publish.utils import clean + +PROJECT = os.environ.get("PROJECT", "cs-workers-dev") + + +class SecretNotFound(Exception): + pass + + +class Secrets: + def __init__(self, owner, title, project): + self.owner = owner + self.title = title + self.project = project + self.safe_owner = clean(owner) + self.safe_title = clean(title) + self.client = None + + def set_secret(self, name, value): + return self._set_secret(name, value) + + def get_secret(self, name): + return self._get_secret(name) + + def list_secrets(self): + return self._get_secret() + + def delete_secret(self, name): + return self._set_secret(name, None) + + def _set_secret(self, name, value): + secret_name = f"{self.safe_owner}_{self.safe_title}" + + client = self._client() + try: + secret_val = self._get_secret() + except SecretNotFound: + secret_val = {name: value} + proj_parent = client.project_path(self.project) + client.create_secret( + proj_parent, secret_name, {"replication": {"automatic": {}}} + ) + else: + if secret_val is not None: + secret_val[name] = value + else: + secret_val = {name: value} + if value is None: + secret_val.pop(name) + + secret_bytes = json.dumps(secret_val).encode("utf-8") + + secret_parent = client.secret_path(self.project, secret_name) + + return client.add_secret_version(secret_parent, {"data": secret_bytes}) + + def _get_secret(self, name=None): + from google.api_core import exceptions + + secret_name = f"{self.safe_owner}_{self.safe_title}" + + client = self._client() + + try: + response = client.access_secret_version( + f"projects/{self.project}/secrets/{secret_name}/versions/latest" + ) + + secret = 
json.loads(response.payload.data.decode("utf-8")) + except exceptions.NotFound: + raise SecretNotFound() + + if name and name in secret: + return secret[name] + elif name: + return None + else: + return secret + + def _client(self): + if self.client: + return self.client + + from google.cloud import secretmanager + + self.client = secretmanager.SecretManagerServiceClient() + return self.client + + +def main(): + parser = argparse.ArgumentParser(description="CLI for model secrets.") + parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--owner", required=True) + parser.add_argument("--title", required=True) + parser.add_argument("--secret-name", "-s") + parser.add_argument("--secret-value", "-v") + parser.add_argument("--list", "-l", action="store_true") + parser.add_argument("--delete", "-d") + + args = parser.parse_args() + + secrets = Secrets(args.owner, args.title, args.project) + if args.secret_name and args.secret_value: + secrets.set_secret(args.secret_name, args.secret_value) + elif args.secret_name: + print(secrets.get_secret(args.secret_name)) + elif args.delete: + secrets.delete_secret(args.delete) + + if args.list: + print(json.dumps(secrets.list_secrets(), indent=2)) diff --git a/workers/cs_workers/executors/__init__.py b/workers/cs_workers/executors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/workers/cs_workers/executors/api_task.py b/workers/cs_workers/executors/api_task.py new file mode 100644 index 00000000..572c26c6 --- /dev/null +++ b/workers/cs_workers/executors/api_task.py @@ -0,0 +1,89 @@ +import json +import os +import uuid + +from cs_publish.executors.task_wrapper import async_task_wrapper, sync_task_wrapper + +import tornado.ioloop +import tornado.web +from dask.distributed import Client, fire_and_forget + +try: + from cs_config import functions +except ImportError as ie: + if os.environ.get("IS_FLASK", "False") == "True": + functions = None + else: + raise ie + + +def version(task_id, **task_kwargs): + return {"version": functions.get_version()} + + +def defaults(task_id, meta_param_dict=None, **task_kwargs): + return functions.get_inputs(meta_param_dict) + + +def parse(task_id, meta_param_dict, adjustment, errors_warnings): + return functions.validate_inputs(meta_param_dict, adjustment, errors_warnings) + + +class Async(tornado.web.RequestHandler): + def initialize(self, routes): + self.routes = routes + + async def post(self, task_name): + print("POST -- /async/", task_name) + if task_name not in self.routes: + self.set_status(404) + return + + handler = self.routes[task_name] + payload = json.loads(self.request.body.decode("utf-8")) + task_id = payload.pop("task_id", None) + if task_name is None: + task_id = str(uuid.uuid4()) + async with Client(asynchronous=True, processes=True) as client: + fut = client.submit(async_task_wrapper, task_id, handler, **payload) + fire_and_forget(fut) + self.set_status(200) + self.write({"status": "PENDING", "task_id": task_name}) + + +class Sync(tornado.web.RequestHandler): + def initialize(self, routes): + self.routes = routes + + async def post(self, task_name): + print("POST -- /sync/", task_name) + if task_name not in self.routes: + self.set_status(404) + return + + handler = self.routes[task_name] + payload = json.loads(self.request.body.decode("utf-8")) + task_id = payload.pop("task_id", None) + if task_name is None: + task_id = str(uuid.uuid4()) + print("payload", payload) + result = sync_task_wrapper(task_id, handler, **payload) + self.write(result) + + +def 
executor(routes): + print("routes", routes) + return tornado.web.Application( + [ + (r"/async/([A-Za-z0-9-]+)/", Async, dict(routes=routes)), + (r"/sync/([A-Za-z0-9-]+)/", Sync, dict(routes=routes)), + ], + debug=True, + autoreload=True, + ) + + +if __name__ == "__main__": + app = executor(routes={"version": version, "defaults": defaults, "parse": parse}) + app.listen(8888) + tornado.ioloop.IOLoop.current().start() diff --git a/workers/cs_workers/executors/dask-worker-space/global.lock b/workers/cs_workers/executors/dask-worker-space/global.lock new file mode 100644 index 00000000..e69de29b diff --git a/workers/cs_workers/executors/dask-worker-space/purge.lock b/workers/cs_workers/executors/dask-worker-space/purge.lock new file mode 100644 index 00000000..e69de29b diff --git a/workers/cs_workers/executors/kubernetes.py b/workers/cs_workers/executors/kubernetes.py new file mode 100644 index 00000000..c8b8499e --- /dev/null +++ b/workers/cs_workers/executors/kubernetes.py @@ -0,0 +1,40 @@ +import argparse +import functools +import json +import os + +import redis +import requests + +import cs_storage +from cs_publish.executors.task_wrapper import async_task_wrapper +from cs_publish.executors.celery import get_app + + +def sim_handler(task_id, meta_param_dict, adjustment): + from cs_config import functions + + outputs = functions.run_model(meta_param_dict, adjustment) + print("got result") + outputs = cs_storage.serialize_to_json(outputs) + resp = requests.post( + "http://outputs-processor/write/", json={"task_id": task_id, "outputs": outputs} + ) + assert resp.status_code == 200, f"Got code: {resp.status_code}" + return resp.json() + + +routes = {"sim": sim_handler} + + +def executor(routes): + parser = argparse.ArgumentParser(description="CLI for C/S jobs.") + parser.add_argument("--job-id", "-t", required=True) + parser.add_argument("--route-name", "-r", required=True) + args = parser.parse_args() + + async_task_wrapper(args.job_id, routes[args.route_name]) + + +def main(): + executor({"sim": sim_handler}) diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py new file mode 100644 index 00000000..3c2ff2b3 --- /dev/null +++ b/workers/cs_workers/executors/task_wrapper.py @@ -0,0 +1,91 @@ +import functools +import json +import os +import re +import time +import traceback + +import redis +import requests +import cs_storage + + +redis_conn = dict( + host=os.environ.get("REDIS_HOST"), + port=os.environ.get("REDIS_PORT"), + db=os.environ.get("REDIS_DB"), + username="executor", + password=os.environ.get("REDIS_EXECUTOR_PW"), +) + + +try: + from cs_config import functions +except ImportError as ie: + # if os.environ.get("IS_FLASK", "False") == "True": + # functions = None + # else: + # raise ie + pass + + +def sync_task_wrapper(task_id, func, **task_kwargs): + start = time.time() + traceback_str = None + res = {} + try: + outputs = func(task_id, **task_kwargs) + res.update(outputs) + except Exception: + traceback_str = traceback.format_exc() + finish = time.time() + if "meta" not in res: + res["meta"] = {} + res["meta"]["task_times"] = [finish - start] + if traceback_str is None: + res["status"] = "SUCCESS" + else: + res["status"] = "FAIL" + res["traceback"] = traceback_str + return res + + +def async_task_wrapper(task_id, func, **task_kwargs): + print("sim task", task_id, func) + start = time.time() + traceback_str = None + res = {"job_id": task_id} + try: + print("calling func", func) + if not task_kwargs: + with redis.Redis(**redis_conn) as 
rclient: + task_kwargs = rclient.get(task_id) + if task_kwargs is None: + raise KeyError(f"No value found for job id: {task_id}") + task_kwargs = json.loads(task_kwargs.decode()) + outputs = func(task_id, **task_kwargs) + res.update( + { + "model_version": functions.get_version(), + "outputs": outputs, + "version": "v1", + } + ) + except Exception: + traceback_str = traceback.format_exc() + finish = time.time() + if "meta" not in res: + res["meta"] = {} + res["meta"]["task_times"] = [finish - start] + if traceback_str is None: + res["status"] = "SUCCESS" + else: + res["status"] = "FAIL" + res["traceback"] = traceback_str + + resp = requests.post( + "http://outputs-processor/push/", json={"task_type": "sim", "result": res} + ) + assert resp.status_code == 200, f"Got code: {resp.status_code}" + + return res diff --git a/workers/cs_workers/services/__init__.py b/workers/cs_workers/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/distributed/cs_cluster.py b/workers/cs_workers/services/init.py similarity index 100% rename from distributed/cs_cluster.py rename to workers/cs_workers/services/init.py diff --git a/distributed/outputs_processor.py b/workers/cs_workers/services/outputs_processor.py similarity index 100% rename from distributed/outputs_processor.py rename to workers/cs_workers/services/outputs_processor.py diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py new file mode 100644 index 00000000..9c30ec57 --- /dev/null +++ b/workers/cs_workers/services/scheduler.py @@ -0,0 +1,105 @@ +import json +import os +import uuid + +import httpx +import marshmallow as ma +import tornado.ioloop +import tornado.web + +from cs_publish.utils import clean +from cs_publish.client import job, api_task + + +CS_URL = os.environ.get("CS_URL") + + +class Payload(ma.Schema): + task_id = ma.fields.UUID(required=False) + task_name = ma.fields.Str(required=True) + task_kwargs = ma.fields.Dict( + keys=ma.fields.Str(), values=ma.fields.Field(), missing=dict + ) + + +def get_projects(): + resp = httpx.get(f"{CS_URL}/publish/api/") + assert resp.status_code == 200, f"Got code: {resp.status_code}" + projects = {} + for project in resp.json(): + projects[(project["owner"], project["title"])] = project + return projects + + +class Scheduler(tornado.web.RequestHandler): + def initialize(self, projects=None): + self.projects = projects + + async def post(self, owner, title): + print("POST -- /", owner, title) + if not self.request.body: + return + payload = Payload().loads(self.request.body.decode("utf-8")) + + if (owner, title) not in self.projects: + self.set_status(404) + + task_id = payload.get("task_id") + if task_id is None: + task_id = uuid.uuid4() + task_id = str(task_id) + task_name = payload["task_name"] + task_kwargs = payload["task_kwargs"] + + if task_name in ("version", "defaults"): + client = api_task.APITask( + owner, title, task_id=task_id, task_name=task_name, **task_kwargs + ) + resp = await client.create(asynchronous=False) + print(resp.text) + assert resp.status_code == 200, f"Got code: {resp.status_code}" + data = resp.json() + elif task_name in ("parse",): + client = api_task.APITask( + owner, title, task_id=task_id, task_name=task_name, **task_kwargs + ) + resp = await client.create(asynchronous=True) + assert resp.status_code == 200, f"Got code: {resp.status_code}" + + data = resp.json() + elif task_name == "sim": + client = job.Job( + "cs-workers-dev", + owner, + title, + tag="latest", + job_id=task_id, + 
job_kwargs=payload["task_kwargs"], + ) + client.create() + data = {"task_id": client.job_id} + else: + self.set_status(404) + return + + self.write(data) + + +def get_app(): + return tornado.web.Application( + [ + ( + r"/([A-Za-z0-9-]+)/([A-Za-z0-9-]+)/", + Scheduler, + dict(projects=get_projects()), + ) + ], + debug=True, + autoreload=True, + ) + + +if __name__ == "__main__": + app = get_app() + app.listen(8889) + tornado.ioloop.IOLoop.current().start() diff --git a/workers/cs_workers/utils.py b/workers/cs_workers/utils.py new file mode 100644 index 00000000..a959d602 --- /dev/null +++ b/workers/cs_workers/utils.py @@ -0,0 +1,44 @@ +import base64 +import re +import subprocess +import time + +import requests + + +def clean(word): + return re.sub("[^0-9a-zA-Z]+", "", word).lower() + + +def run(cmd): + print(f"Running: {cmd}\n") + s = time.time() + res = subprocess.run(cmd, shell=True, check=True) + f = time.time() + print(f"\n\tFinished in {f-s} seconds.\n") + return res + + +def parse_owner_title(owner_title): + if isinstance(owner_title, tuple) and len(owner_title) == 2: + owner, title = owner_title + else: + owner, title = owner_title.split("/") + return (owner, title) + + +def read_github_file(org, repo, branch, filename): + """ + Read data from github api. Ht to @andersonfrailey for decoding the response + """ + url = f"https://api.github.com/repos/{org}/{repo}/contents/{filename}?ref={branch}" + response = requests.get(url) + print(f"GET: {url} {response.status_code}") + if response.status_code == 403: + assert "hit rate limit" == 403 + assert response.status_code == 200 + sanatized_content = response.json()["content"].replace("\n", "") + encoded_content = sanatized_content.encode() + decoded_bytes = base64.decodebytes(encoded_content) + text = decoded_bytes.decode() + return text diff --git a/distributed/dockerfiles/Dockerfile b/workers/dockerfiles/Dockerfile similarity index 100% rename from distributed/dockerfiles/Dockerfile rename to workers/dockerfiles/Dockerfile diff --git a/workers/dockerfiles/Dockerfile.model b/workers/dockerfiles/Dockerfile.model new file mode 100644 index 00000000..f895a797 --- /dev/null +++ b/workers/dockerfiles/Dockerfile.model @@ -0,0 +1,42 @@ +ARG TAG +FROM continuumio/miniconda3 + +USER root +RUN apt-get update && apt install libgl1-mesa-glx --yes + +RUN conda update conda +RUN conda config --append channels conda-forge +RUN conda install "python>=3.7" pip tornado dask lz4 + +ADD requirements.txt /home + +WORKDIR /home + +ARG TITLE +ARG OWNER +ARG REPO_URL +ARG RAW_REPO_URL +ARG BRANCH=master + +# Install necessary packages, copying files, etc. +###################### +# Bump to trigger build +ARG BUILD_NUM=0 + +ADD ${RAW_REPO_URL}/${BRANCH}/cs-config/install.sh /home +RUN cat /home/install.sh +RUN bash /home/install.sh + +# Bump to trigger re-install of source, without re-installing dependencies. +ARG INSTALL_NUM=0 +RUN pip install "git+${REPO_URL}.git@${BRANCH}#egg=cs-config&subdirectory=cs-config" +ADD ${RAW_REPO_URL}/${BRANCH}/cs-config/cs_config/tests/test_functions.py /home +RUN pip install cs-kit +###################### + +RUN mkdir /home/cs_publish +COPY cs_publish /home/cs_publish +COPY setup.py /home +RUN cd /home/ && pip install -e . 
+ +WORKDIR /home diff --git a/distributed/dockerfiles/Dockerfile.outputs_processor b/workers/dockerfiles/Dockerfile.outputs_processor similarity index 100% rename from distributed/dockerfiles/Dockerfile.outputs_processor rename to workers/dockerfiles/Dockerfile.outputs_processor diff --git a/distributed/dockerfiles/Dockerfile.redis b/workers/dockerfiles/Dockerfile.redis similarity index 99% rename from distributed/dockerfiles/Dockerfile.redis rename to workers/dockerfiles/Dockerfile.redis index b75fdd1f..b549246d 100644 --- a/distributed/dockerfiles/Dockerfile.redis +++ b/workers/dockerfiles/Dockerfile.redis @@ -144,4 +144,4 @@ RUN set -ex; \ # End Python install commands RUN python3 -m pip install redis -COPY redis_init.py /home/ \ No newline at end of file +COPY ./scripts/redis_init.py /home/ \ No newline at end of file diff --git a/distributed/dockerfiles/Dockerfile.flask b/workers/dockerfiles/Dockerfile.scheduler similarity index 88% rename from distributed/dockerfiles/Dockerfile.flask rename to workers/dockerfiles/Dockerfile.scheduler index 6c2364b3..16e1543d 100755 --- a/distributed/dockerfiles/Dockerfile.flask +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -2,14 +2,14 @@ ARG TAG FROM distributed ENV HOST 0.0.0.0 -ENV PORT 5050 +ENV PORT 8888 ENV DEBUG true -ENV IS_FLASK True +ENV IS_SCHEDULER True # expose the app port EXPOSE 80 -EXPOSE 5050 +EXPOSE 8888 RUN pip install -r requirements.txt diff --git a/distributed/kubernetes/flask-service.yaml b/workers/kubernetes/flask-service.yaml similarity index 100% rename from distributed/kubernetes/flask-service.yaml rename to workers/kubernetes/flask-service.yaml diff --git a/workers/kubernetes/outputs-processor-service.yaml b/workers/kubernetes/outputs-processor-service.yaml new file mode 100644 index 00000000..2edac636 --- /dev/null +++ b/workers/kubernetes/outputs-processor-service.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: Service +metadata: + name: outputs-processor +spec: + ports: + - port: 80 + targetPort: 8888 + selector: + app: outputs-processor diff --git a/distributed/kubernetes/redis-master-deployment.yaml b/workers/kubernetes/redis-master-deployment.yaml similarity index 100% rename from distributed/kubernetes/redis-master-deployment.yaml rename to workers/kubernetes/redis-master-deployment.yaml diff --git a/distributed/kubernetes/redis-master-service.yaml b/workers/kubernetes/redis-master-service.yaml similarity index 100% rename from distributed/kubernetes/redis-master-service.yaml rename to workers/kubernetes/redis-master-service.yaml diff --git a/workers/requirements.txt b/workers/requirements.txt new file mode 100755 index 00000000..cb7cb74d --- /dev/null +++ b/workers/requirements.txt @@ -0,0 +1,8 @@ +httpx +redis +pytest +toolz +boto3 +kubernetes +cs-storage>=1.10.1 +pyyaml \ No newline at end of file diff --git a/distributed/redis_init.py b/workers/scripts/redis_init.py similarity index 100% rename from distributed/redis_init.py rename to workers/scripts/redis_init.py diff --git a/workers/setup.py b/workers/setup.py new file mode 100644 index 00000000..0aff603d --- /dev/null +++ b/workers/setup.py @@ -0,0 +1,35 @@ +import setuptools +import os + +if os.path.exists("README.md"): + with open("README.md", "r") as f: + long_description = f.read() +else: + long_description = "" + + +setuptools.setup( + name="cs-publish", + version=os.environ.get("TAG", "0.0.0"), + author="Hank Doupe", + author_email="hank@compute.studio", + description=("Build, publish, and run Compute Studio workers."), + 
long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/compute-tooling/compute-studio-workers", + packages=setuptools.find_packages(), + install_requires=["celery", "redis", "gitpython", "pyyaml"], + include_package_data=True, + entry_points={ + "console_scripts": [ + "cs-publish=cs_publish.client.publish:main", + "cs-secrets=cs_publish.client.secrets:main", + "cs-job=cs_publish.executors.kubernetes:main", + ] + }, + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Operating System :: OS Independent", + ], +) diff --git a/distributed/templates/flask-deployment.template.yaml b/workers/templates/flask-deployment.template.yaml similarity index 100% rename from distributed/templates/flask-deployment.template.yaml rename to workers/templates/flask-deployment.template.yaml diff --git a/distributed/templates/outputs-processor-deployment.template.yaml b/workers/templates/outputs-processor-deployment.template.yaml similarity index 100% rename from distributed/templates/outputs-processor-deployment.template.yaml rename to workers/templates/outputs-processor-deployment.template.yaml diff --git a/distributed/templates/redis-master-deployment.template.yaml b/workers/templates/redis-master-deployment.template.yaml similarity index 100% rename from distributed/templates/redis-master-deployment.template.yaml rename to workers/templates/redis-master-deployment.template.yaml diff --git a/workers/templates/sc-deployment.template.yaml b/workers/templates/sc-deployment.template.yaml new file mode 100755 index 00000000..3a6fd957 --- /dev/null +++ b/workers/templates/sc-deployment.template.yaml @@ -0,0 +1,37 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: # [owner]-[title]-[action] +spec: + replicas: 1 + selector: + matchLabels: + app: # [owner]-[title]-[action] + template: + metadata: + labels: + app: # [owner]-[title]-[action] + spec: + containers: + - name: # [owner]-[title]-[action] + image: # gcr.io/[project]/[owner]_[title]_tasks:[tag] + imagePullPolicy: Always + command: [] # ["./celery_[action].sh"] + args: [] # ["[owner]", "[title"] + resources: + requests: + memory: + cpu: + limits: + memory: + cpu: + env: + - name: OUTPUTS_VERSION + value: v1 + - name: CS_URL + valueFrom: + secretKeyRef: + name: worker-secret + key: CS_URL + nodeSelector: + component: model \ No newline at end of file diff --git a/distributed/templates/secret.template.yaml b/workers/templates/secret.template.yaml similarity index 100% rename from distributed/templates/secret.template.yaml rename to workers/templates/secret.template.yaml From 09092e2439052c453568a8d36879c0eaf1cf7e81 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Wed, 13 May 2020 18:04:58 -0400 Subject: [PATCH 07/55] Add cli; clean up; get build, test, push, config working again --- .gitignore | 6 +- workers/cs_workers/cli.py | 20 +++ workers/cs_workers/clients/api_task.py | 5 +- workers/cs_workers/clients/core.py | 12 +- workers/cs_workers/clients/job.py | 6 +- workers/cs_workers/clients/publish.py | 126 ++++++++++-------- workers/cs_workers/clients/secrets.py | 27 ++-- workers/cs_workers/executors/api_task.py | 27 ++-- .../executors/{kubernetes.py => job.py} | 17 +-- .../services/{init.py => manage.py} | 65 +++++---- .../cs_workers/services/outputs_processor.py | 22 ++- workers/cs_workers/services/scheduler.py | 22 ++- workers/cs_workers/utils.py | 3 +- workers/dockerfiles/Dockerfile.model | 5 +- 
.../dockerfiles/Dockerfile.outputs_processor | 7 +- workers/dockerfiles/Dockerfile.scheduler | 13 +- .../kubernetes/redis-master-deployment.yaml | 61 +++++---- ...sk-service.yaml => scheduler-service.yaml} | 8 +- workers/setup.py | 10 +- .../api-task-deployment.template.yaml} | 10 +- .../models/api-task-service.template.yaml | 11 ++ workers/templates/models/secret.template.yaml | 6 + workers/templates/secret.template.yaml | 2 - ...outputs-processor-deployment.template.yaml | 0 .../redis-master-deployment.template.yaml | 0 .../scheduler-deployment.template.yaml} | 28 ++-- 26 files changed, 303 insertions(+), 216 deletions(-) create mode 100644 workers/cs_workers/cli.py rename workers/cs_workers/executors/{kubernetes.py => job.py} (72%) rename workers/cs_workers/services/{init.py => manage.py} (85%) rename workers/kubernetes/{flask-service.yaml => scheduler-service.yaml} (55%) rename workers/templates/{sc-deployment.template.yaml => models/api-task-deployment.template.yaml} (84%) create mode 100644 workers/templates/models/api-task-service.template.yaml create mode 100644 workers/templates/models/secret.template.yaml rename workers/templates/{ => services}/outputs-processor-deployment.template.yaml (100%) rename workers/templates/{ => services}/redis-master-deployment.template.yaml (100%) rename workers/templates/{flask-deployment.template.yaml => services/scheduler-deployment.template.yaml} (54%) diff --git a/.gitignore b/.gitignore index f9d5ba31..282dfa58 100755 --- a/.gitignore +++ b/.gitignore @@ -36,8 +36,8 @@ secret-docker-compose.yml *worker_config.prod.yaml *docker-compose-apps* -distributed/kubernetes/apps/* +kubernetes/models/* secret.yaml -*flask-deployment.yaml -*outputs-processor-deployment.yaml \ No newline at end of file +*scheduler-deployment.yaml +*outputs-processor-deployment.yaml diff --git a/workers/cs_workers/cli.py b/workers/cs_workers/cli.py new file mode 100644 index 00000000..af90d96f --- /dev/null +++ b/workers/cs_workers/cli.py @@ -0,0 +1,20 @@ +import argparse + +from cs_workers.services import manage, scheduler, outputs_processor +from cs_workers.clients import publish +from cs_workers.executors import job, api_task + + +def cli(): + parser = argparse.ArgumentParser(description="C/S Workers CLI") + sub_parsers = parser.add_subparsers() + + manage.cli(sub_parsers) + scheduler.cli(sub_parsers) + outputs_processor.cli(sub_parsers) + publish.cli(sub_parsers) + job.cli(sub_parsers) + api_task.cli(sub_parsers) + + args = parser.parse_args() + args.func(args) diff --git a/workers/cs_workers/clients/api_task.py b/workers/cs_workers/clients/api_task.py index e6bdccb7..41bf78b0 100644 --- a/workers/cs_workers/clients/api_task.py +++ b/workers/cs_workers/clients/api_task.py @@ -11,11 +11,10 @@ def __init__(self, owner, title, task_id, task_name, **task_kwargs): async def create(self, asynchronous=False): method = "async" if asynchronous else "sync" - print(f"http://localhost:8888/{method}/{self.task_name}/") async with httpx.AsyncClient() as client: resp = await client.post( - # f"http://{self.owner}-{self.title}/{method}/{self.task_name}/", - f"http://localhost:8888/{method}/{self.task_name}/", + f"http://{self.owner}-{self.title}/{method}/{self.task_name}/", + # f"http://localhost:8888/{method}/{self.task_name}/", json={"task_id": self.task_id, "task_kwargs": self.task_kwargs}, ) return resp diff --git a/workers/cs_workers/clients/core.py b/workers/cs_workers/clients/core.py index f5718fd9..577c638b 100644 --- a/workers/cs_workers/clients/core.py +++ 
b/workers/cs_workers/clients/core.py @@ -10,8 +10,8 @@ from git import Repo, InvalidGitRepositoryError -from cs_publish.utils import clean, run, parse_owner_title, read_github_file -from cs_publish.client.secrets import Secrets +from cs_workers.utils import clean, run, parse_owner_title, read_github_file +from cs_workers.clients.secrets import Secrets CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) BASE_PATH = CURR_PATH / ".." / ".." @@ -38,10 +38,10 @@ def get_config(self, models): ) if config_file.exists(): with open(config_file, "r") as f: - c = yaml.safe_load(f.read()) + contents = yaml.safe_load(f.read()) + config[(contents["owner"], contents["title"])] = contents else: - config_file = self.get_config_from_remote([(owner, title)]) - config[(c["owner"], c["title"])] = c + config.update(self.get_config_from_remote([(owner, title)])) if not self.quiet and config: print("# Updating:") print("\n#".join(f" {o}/{t}" for o, t in config.keys())) @@ -70,7 +70,7 @@ def get_config_from_remote(self, models): "compute-tooling", "compute-studio-publish", "master", - f"{owner}/{title}.yaml", + f"config/{owner}/{title}.yaml", ) config[(owner, title)] = yaml.safe_load(content) return config diff --git a/workers/cs_workers/clients/job.py b/workers/cs_workers/clients/job.py index 2e281a72..4eba633b 100644 --- a/workers/cs_workers/clients/job.py +++ b/workers/cs_workers/clients/job.py @@ -6,8 +6,8 @@ from kubernetes import client as kclient, config as kconfig -from cs_publish.utils import clean -from cs_publish.client.core import Core +from cs_workers.utils import clean +from cs_workers.clients.core import Core redis_conn = dict( @@ -85,7 +85,7 @@ def configure(self, owner, title, tag, job_id=None): container = kclient.V1Container( name=job_name, image=f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{tag}", - command=["cs-job", "--job-id", job_id], + command=["csw", "job", "--job-id", job_id, "--route-name", "sim"], env=self.env(owner, title, config), ) # Create and configurate a spec section diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index 5c7e884b..ae0bf160 100644 --- a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -7,7 +7,7 @@ from ..utils import run, clean -from .core import Core +from cs_workers.clients.core import Core TAG = os.environ.get("TAG", "") PROJECT = os.environ.get("PROJECT", "cs-workers-dev") @@ -28,7 +28,7 @@ class Publisher(Core): """ - kubernetes_target = BASE_PATH / Path("kubernetes") + kubernetes_target = BASE_PATH / Path("kubernetes") / Path("models") def __init__( self, @@ -54,12 +54,25 @@ def __init__( self.config.update(self.get_config(self.models)) with open( - BASE_PATH / Path("templates") / Path("sc-deployment.template.yaml"), "r" + BASE_PATH + / Path("templates") + / "models" + / Path("api-task-deployment.template.yaml"), + "r", ) as f: - self.app_template = yaml.safe_load(f.read()) + self.api_task_template = yaml.safe_load(f.read()) with open( - BASE_PATH / Path("templates") / Path("secret.template.yaml"), "r" + BASE_PATH + / Path("templates") + / "models" + / Path("api-task-service.template.yaml"), + "r", + ) as f: + self.api_task_service_template = yaml.safe_load(f.read()) + + with open( + BASE_PATH / Path("templates") / "models" / Path("secret.template.yaml"), "r" ) as f: self.secret_template = yaml.safe_load(f.read()) @@ -76,7 +89,7 @@ def push(self): def write_app_config(self): self.apply_method_to_apps(method=self.write_secrets) - 
self.apply_method_to_apps(method=self._write_app_inputs_procesess) + self.apply_method_to_apps(method=self._write_api_task) def apply_method_to_apps(self, method): """ @@ -127,7 +140,7 @@ def build_app_image(self, app): buildargs_str = " ".join( [f"--build-arg {arg}={value}" for arg, value in buildargs.items()] ) - cmd = f"docker build {buildargs_str} -t {img_name}:{self.tag} ./" + cmd = f"docker build {buildargs_str} -t {img_name}:{self.tag} -f dockerfiles/Dockerfile.model ./" run(cmd) run( @@ -159,6 +172,9 @@ def write_secrets(self, app): for name, value in self._list_secrets(app).items(): secret_config["stringData"][name] = value + if not secret_config["stringData"]: + return + if self.kubernetes_target == "-": sys.stdout.write(yaml.dump(secret_config)) sys.stdout.write("---") @@ -169,74 +185,63 @@ def write_secrets(self, app): return secret_config - def _write_app_inputs_procesess(self, app): - app_deployment = copy.deepcopy(self.app_template) + def _write_api_task(self, app): + deployment = copy.deepcopy(self.api_task_template) safeowner = clean(app["owner"]) safetitle = clean(app["title"]) - action = "io" - name = f"{safeowner}-{safetitle}-{action}" - - resources = self._resources(app, action) + name = f"{safeowner}-{safetitle}-api-task" - app_deployment["metadata"]["name"] = name - app_deployment["spec"]["selector"]["matchLabels"]["app"] = name - app_deployment["spec"]["template"]["metadata"]["labels"]["app"] = name + deployment["metadata"]["name"] = name + deployment["spec"]["selector"]["matchLabels"]["app"] = name + deployment["spec"]["template"]["metadata"]["labels"]["app"] = name - container_config = app_deployment["spec"]["template"]["spec"]["containers"][0] + container_config = deployment["spec"]["template"]["spec"]["containers"][0] container_config.update( { "name": name, "image": f"{self.cr}/{self.project}/{safeowner}_{safetitle}_tasks:{self.tag}", - "command": [f"./celery_{action}.sh"], - "args": [ - app["owner"], - app["title"], - ], # TODO: pass safe names to docker file at build and run time - "resources": resources, + "command": ["csw", "api-task", "--start"], } ) - container_config["env"].append({"name": "TITLE", "value": app["title"]}) - container_config["env"].append({"name": "OWNER", "value": app["owner"]}) container_config["env"].append( {"name": "SIM_TIME_LIMIT", "value": str(app["sim_time_limit"])} ) - container_config["env"].append( - {"name": "APP_NAME", "value": f"{safeowner}_{safetitle}_tasks"} - ) container_config["env"].append( { - "name": "REDIS", + "name": "REDIS_HOST", "valueFrom": { - "secretKeyRef": {"name": "worker-secret", "key": "REDIS"} + "secretKeyRef": {"name": "worker-secret", "key": "REDIS_HOST"} }, } ) self._set_secrets(app, container_config) + service = copy.deepcopy(self.api_task_service_template) + service["metadata"]["name"] = name + service["spec"]["selector"]["app"] = name + if self.kubernetes_target == "-": - sys.stdout.write(yaml.dump(app_deployment)) + sys.stdout.write(yaml.dump(deployment)) sys.stdout.write("---") sys.stdout.write("\n") + sys.stdout.write(yaml.dump(service)) + sys.stdout.write("---") + sys.stdout.write("\n") + else: with open( - self.kubernetes_target / Path(f"{name}-deployment.yaml"), "w" + self.kubernetes_target / Path(f"{name}-api-tasks-deployment.yaml"), "w" ) as f: - f.write(yaml.dump(app_deployment)) - - return app_deployment + f.write(yaml.dump(deployment)) + with open( + self.kubernetes_target / Path(f"{name}-api-tasks-service.yaml"), "w" + ) as f: + f.write(yaml.dump(service)) - def _resources(self, app, 
action=None): - if action == "io": - resources = { - "requests": {"cpu": 0.7, "memory": "0.25G"}, - "limits": {"cpu": 1, "memory": "0.7G"}, - } - else: - resources = super()._resources(app) - return resources + return deployment, service def _set_secrets(self, app, config): safeowner = clean(app["owner"]) @@ -248,21 +253,7 @@ def _set_secrets(self, app, config): ) -def main(): - parser = argparse.ArgumentParser(description="Deploy C/S compute cluster.") - parser.add_argument("--tag", required=False, default=TAG) - parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--models", nargs="+", type=str, required=False, default=None) - parser.add_argument("--build", action="store_true") - parser.add_argument("--test", action="store_true") - parser.add_argument("--push", action="store_true") - parser.add_argument("--app-config", action="store_true") - parser.add_argument("--base-branch", default="origin/master") - parser.add_argument("--quiet", "-q", default=False) - parser.add_argument("--config-out", "-o", default=None) - - args = parser.parse_args() - +def handle(args: argparse.Namespace): publisher = Publisher( project=args.project, tag=args.tag, @@ -279,3 +270,20 @@ def main(): publisher.push() if args.app_config: publisher.write_app_config() + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser( + "publish", description="Deploy models on C/S compute cluster." + ) + parser.add_argument("--tag", required=False, default=TAG) + parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--models", nargs="+", type=str, required=False, default=None) + parser.add_argument("--build", action="store_true") + parser.add_argument("--test", action="store_true") + parser.add_argument("--push", action="store_true") + parser.add_argument("--app-config", action="store_true") + parser.add_argument("--base-branch", default="origin/master") + parser.add_argument("--quiet", "-q", default=False) + parser.add_argument("--config-out", "-o", default=None) + parser.set_defaults(func=handle) diff --git a/workers/cs_workers/clients/secrets.py b/workers/cs_workers/clients/secrets.py index c410bfb1..d8ef0b24 100644 --- a/workers/cs_workers/clients/secrets.py +++ b/workers/cs_workers/clients/secrets.py @@ -2,7 +2,7 @@ import json import os -from cs_publish.utils import clean +from cs_workers.utils import clean PROJECT = os.environ.get("PROJECT", "cs-workers-dev") @@ -91,18 +91,7 @@ def _client(self): return self.client -def main(): - parser = argparse.ArgumentParser(description="CLI for model secrets.") - parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--owner", required=True) - parser.add_argument("--title", required=True) - parser.add_argument("--secret-name", "-s") - parser.add_argument("--secret-value", "-v") - parser.add_argument("--list", "-l", action="store_true") - parser.add_argument("--delete", "-d") - - args = parser.parse_args() - +def handle(args: argparse.Namespace): secrets = Secrets(args.owner, args.title, args.project) if args.secret_name and args.secret_value: secrets.set_secret(args.secret_name, args.secret_value) @@ -113,3 +102,15 @@ def main(): if args.list: print(json.dumps(secrets.list_secrets(), indent=2)) + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser("secrets", description="CLI for model secrets.") + parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--owner", required=True) + 
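    # Annotation: all of a model's secrets live in one Google Secret Manager
    # secret named <safeowner>_<safetitle>, stored as a single JSON blob;
    # --secret-name/--secret-value set a key in that blob, --secret-name alone
    # prints one value, --list prints the whole blob, and --delete removes a key.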
parser.add_argument("--title", required=True) + parser.add_argument("--secret-name", "-s") + parser.add_argument("--secret-value", "-v") + parser.add_argument("--list", "-l", action="store_true") + parser.add_argument("--delete", "-d") + parser.set_defaults(func=handle) diff --git a/workers/cs_workers/executors/api_task.py b/workers/cs_workers/executors/api_task.py index 572c26c6..d98652c0 100644 --- a/workers/cs_workers/executors/api_task.py +++ b/workers/cs_workers/executors/api_task.py @@ -1,8 +1,9 @@ +import argparse import json import os import uuid -from cs_publish.executors.task_wrapper import async_task_wrapper, sync_task_wrapper +from cs_workers.executors.task_wrapper import async_task_wrapper, sync_task_wrapper import tornado.ioloop import tornado.web @@ -11,10 +12,7 @@ try: from cs_config import functions except ImportError as ie: - if os.environ.get("IS_FLASK", "False") == "True": - functions = None - else: - raise ie + None def version(task_id, **task_kwargs): @@ -83,7 +81,18 @@ def executor(routes): ) -if __name__ == "__main__": - app = executor(routes={"version": version, "defaults": defaults, "parse": parse}) - app.listen(8888) - tornado.ioloop.IOLoop.current().start() +def start(args: argparse.Namespace): + if args.start: + app = executor( + routes={"version": version, "defaults": defaults, "parse": parse} + ) + app.listen(8888) + tornado.ioloop.IOLoop.current().start() + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser( + "api-task", description="REST API for running light-weight tasks." + ) + parser.add_argument("--start", required=False, action="store_true") + parser.set_defaults(func=start) diff --git a/workers/cs_workers/executors/kubernetes.py b/workers/cs_workers/executors/job.py similarity index 72% rename from workers/cs_workers/executors/kubernetes.py rename to workers/cs_workers/executors/job.py index c8b8499e..9fe188b9 100644 --- a/workers/cs_workers/executors/kubernetes.py +++ b/workers/cs_workers/executors/job.py @@ -7,8 +7,7 @@ import requests import cs_storage -from cs_publish.executors.task_wrapper import async_task_wrapper -from cs_publish.executors.celery import get_app +from cs_workers.executors.task_wrapper import async_task_wrapper def sim_handler(task_id, meta_param_dict, adjustment): @@ -27,14 +26,12 @@ def sim_handler(task_id, meta_param_dict, adjustment): routes = {"sim": sim_handler} -def executor(routes): - parser = argparse.ArgumentParser(description="CLI for C/S jobs.") - parser.add_argument("--job-id", "-t", required=True) - parser.add_argument("--route-name", "-r", required=True) - args = parser.parse_args() - +def main(args: argparse.Namespace): async_task_wrapper(args.job_id, routes[args.route_name]) -def main(): - executor({"sim": sim_handler}) +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser("job", description="CLI for C/S jobs.") + parser.add_argument("--job-id", "-t", required=True) + parser.add_argument("--route-name", "-r", required=True) + parser.set_defaults(func=main) diff --git a/workers/cs_workers/services/init.py b/workers/cs_workers/services/manage.py similarity index 85% rename from workers/cs_workers/services/init.py rename to workers/cs_workers/services/manage.py index 24d08896..791991d2 100644 --- a/workers/cs_workers/services/init.py +++ b/workers/cs_workers/services/manage.py @@ -46,7 +46,7 @@ class Cluster: Deploy and manage Compute Studio compute cluster: - build, tag, and push the docker images for the flask app and compute.studio modeling apps. 
- - write k8s config files for the flask deployment and the + - write k8s config files for the scheduler deployment and the compute.studio modeling app deployments. - apply k8s config files to an existing compute cluster. @@ -74,13 +74,15 @@ def __init__(self, tag, project, kubernetes_target="kubernetes/"): else: self.kubernetes_target = kubernetes_target - with open("templates/flask-deployment.template.yaml", "r") as f: - self.flask_template = yaml.safe_load(f.read()) + with open("templates/services/scheduler-deployment.template.yaml", "r") as f: + self.scheduler_template = yaml.safe_load(f.read()) - with open("templates/outputs-processor-deployment.template.yaml", "r") as f: + with open( + "templates/services/outputs-processor-deployment.template.yaml", "r" + ) as f: self.outputs_processor_template = yaml.safe_load(f.read()) - with open("templates/redis-master-deployment.template.yaml", "r") as f: + with open("templates/services/redis-master-deployment.template.yaml", "r") as f: self.redis_master_template = yaml.safe_load(f.read()) with open("templates/secret.template.yaml", "r") as f: @@ -90,7 +92,7 @@ def __init__(self, tag, project, kubernetes_target="kubernetes/"): def build(self): """ - Build, tag, and push base images for the flask app and modeling apps. + Build, tag, and push base images for the scheduler app. Note: distributed and celerybase are tagged as "latest." All other apps pull from either distributed:latest or celerybase:latest. @@ -100,7 +102,9 @@ def build(self): run( f"docker build -t outputs_processor:{self.tag} -f dockerfiles/Dockerfile.outputs_processor ./" ) - run(f"docker build -t flask:{self.tag} -f dockerfiles/Dockerfile.flask ./") + run( + f"docker build -t scheduler:{self.tag} -f dockerfiles/Dockerfile.scheduler ./" + ) run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") @@ -108,7 +112,9 @@ def build(self): f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" ) - run(f"docker tag flask:{self.tag} {self.cr}/{self.project}/flask:{self.tag}") + run( + f"docker tag scheduler:{self.tag} {self.cr}/{self.project}/scheduler:{self.tag}" + ) def push(self): run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") @@ -118,20 +124,22 @@ def push(self): f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" ) - run(f"docker tag flask:{self.tag} {self.cr}/{self.project}/flask:{self.tag}") + run( + f"docker tag scheduler:{self.tag} {self.cr}/{self.project}/scheduler:{self.tag}" + ) run(f"docker push {self.cr}/{self.project}/distributed:latest") run(f"docker push {self.cr}/{self.project}/redis-python:latest") run(f"docker push {self.cr}/{self.project}/outputs_processor:{self.tag}") - run(f"docker push {self.cr}/{self.project}/flask:{self.tag}") + run(f"docker push {self.cr}/{self.project}/scheduler:{self.tag}") def make_config(self): - self.write_flask_deployment() + self.write_scheduler_deployment() self.write_outputs_processor_deployment() self.write_secret() self.write_redis_deployment() configs = [ - "flask-service.yaml", + "scheduler-service.yaml", "outputs-processor-deployment.yaml", "outputs-processor-service.yaml", "redis-master-service.yaml", @@ -141,16 +149,15 @@ def make_config(self): config = yaml.safe_load(f.read()) self.write_config(filename, config) - def write_flask_deployment(self): + def write_scheduler_deployment(self): """ - Write flask deployment file. Only step is filling in the image uri. + Write scheduler deployment file. 
Only step is filling in the image uri. """ - deployment = copy.deepcopy(self.flask_template) + deployment = copy.deepcopy(self.scheduler_template) deployment["spec"]["template"]["spec"]["containers"][0][ "image" - ] = f"gcr.io/{self.project}/flask:{self.tag}" - - self.write_config("flask-deployment.yaml", deployment) + ] = f"gcr.io/{self.project}/scheduler:{self.tag}" + self.write_config("scheduler-deployment.yaml", deployment) return deployment @@ -256,16 +263,7 @@ def _set_secret(self, name, value): return client.add_secret_version(secret_parent, {"data": value}) -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Deploy C/S compute cluster.") - parser.add_argument("--tag", required=False, default=TAG) - parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--build", action="store_true") - parser.add_argument("--push", action="store_true") - parser.add_argument("--make-config", action="store_true") - parser.add_argument("--config-out", "-o") - args = parser.parse_args() - +def handle(args: argparse.Namespace): cluster = Cluster( tag=args.tag, project=args.project, kubernetes_target=args.config_out ) @@ -276,3 +274,14 @@ def _set_secret(self, name, value): cluster.push() if args.make_config: cluster.make_config() + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser("svc") + parser.add_argument("--tag", required=False, default=TAG) + parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--build", action="store_true") + parser.add_argument("--push", action="store_true") + parser.add_argument("--make-config", action="store_true") + parser.add_argument("--config-out", "-o") + parser.set_defaults(func=handle) diff --git a/workers/cs_workers/services/outputs_processor.py b/workers/cs_workers/services/outputs_processor.py index 0940f5e5..74f14184 100644 --- a/workers/cs_workers/services/outputs_processor.py +++ b/workers/cs_workers/services/outputs_processor.py @@ -1,3 +1,4 @@ +import argparse import json import os @@ -50,13 +51,24 @@ async def post(self): self.set_status(200) -def make_app(): +def get_app(): return tornado.web.Application( [(r"/write/", Write), (r"/push/", Push)], debug=True, autoreload=True ) -if __name__ == "__main__": - app = make_app() - app.listen(8888) - tornado.ioloop.IOLoop.current().start() +def start(args: argparse.Namespace): + if args.start: + app = get_app() + app.listen(8889) + tornado.ioloop.IOLoop.current().start() + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser( + "outputs-processor", + aliases=["outputs"], + description="REST API for processing and storing outputs.", + ) + parser.add_argument("--start", required=False, action="store_true") + parser.set_defaults(func=start) diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py index 9c30ec57..9ec12411 100644 --- a/workers/cs_workers/services/scheduler.py +++ b/workers/cs_workers/services/scheduler.py @@ -1,3 +1,4 @@ +import argparse import json import os import uuid @@ -7,8 +8,8 @@ import tornado.ioloop import tornado.web -from cs_publish.utils import clean -from cs_publish.client import job, api_task +from cs_workers.utils import clean +from cs_workers.clients import job, api_task CS_URL = os.environ.get("CS_URL") @@ -99,7 +100,16 @@ def get_app(): ) -if __name__ == "__main__": - app = get_app() - app.listen(8889) - tornado.ioloop.IOLoop.current().start() +def start(args: argparse.Namespace): + if 
args.start: + app = get_app() + app.listen(8889) + tornado.ioloop.IOLoop.current().start() + + +def cli(subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser( + "scheduler", description="REST API for running jobs on C/S workers." + ) + parser.add_argument("--start", required=False, action="store_true") + parser.set_defaults(func=start) diff --git a/workers/cs_workers/utils.py b/workers/cs_workers/utils.py index a959d602..0194f554 100644 --- a/workers/cs_workers/utils.py +++ b/workers/cs_workers/utils.py @@ -33,10 +33,9 @@ def read_github_file(org, repo, branch, filename): """ url = f"https://api.github.com/repos/{org}/{repo}/contents/{filename}?ref={branch}" response = requests.get(url) - print(f"GET: {url} {response.status_code}") if response.status_code == 403: assert "hit rate limit" == 403 - assert response.status_code == 200 + assert response.status_code == 200, f"Got code: {response.status_code}" sanatized_content = response.json()["content"].replace("\n", "") encoded_content = sanatized_content.encode() decoded_bytes = base64.decodebytes(encoded_content) diff --git a/workers/dockerfiles/Dockerfile.model b/workers/dockerfiles/Dockerfile.model index f895a797..093adb79 100644 --- a/workers/dockerfiles/Dockerfile.model +++ b/workers/dockerfiles/Dockerfile.model @@ -9,6 +9,7 @@ RUN conda config --append channels conda-forge RUN conda install "python>=3.7" pip tornado dask lz4 ADD requirements.txt /home +RUN pip install -r /home/requirements.txt WORKDIR /home @@ -34,8 +35,8 @@ ADD ${RAW_REPO_URL}/${BRANCH}/cs-config/cs_config/tests/test_functions.py /home RUN pip install cs-kit ###################### -RUN mkdir /home/cs_publish -COPY cs_publish /home/cs_publish +RUN mkdir /home/cs_workers +COPY cs_workers /home/cs_workers COPY setup.py /home RUN cd /home/ && pip install -e . diff --git a/workers/dockerfiles/Dockerfile.outputs_processor b/workers/dockerfiles/Dockerfile.outputs_processor index 9564eb9e..b1f366f2 100755 --- a/workers/dockerfiles/Dockerfile.outputs_processor +++ b/workers/dockerfiles/Dockerfile.outputs_processor @@ -15,8 +15,11 @@ RUN pip install -r requirements.txt RUN pip install httpx pyppeteer2 RUN conda install -c conda-forge jinja2 bokeh tornado dask && pyppeteer-install -COPY outputs_processor.py /home +RUN mkdir /home/cs_workers +COPY cs_workers /home/cs_workers +COPY setup.py /home +RUN cd /home/ && pip install -e . WORKDIR /home -CMD ["python", "outputs_processor.py"] \ No newline at end of file +CMD ["csw", "outputs", "--start"] \ No newline at end of file diff --git a/workers/dockerfiles/Dockerfile.scheduler b/workers/dockerfiles/Dockerfile.scheduler index 16e1543d..41c28b95 100755 --- a/workers/dockerfiles/Dockerfile.scheduler +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -13,12 +13,11 @@ EXPOSE 8888 RUN pip install -r requirements.txt -COPY ./api /home/api -COPY ./api/endpoints.py /home/api -COPY ./setup.py /home -RUN cd /home && pip install -e . +RUN mkdir /home/cs_workers +COPY cs_workers /home/cs_workers +COPY setup.py /home +RUN cd /home/ && pip install -e . 
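# Annotation: the editable install above exposes the `csw`/`cs-workers` console
# scripts (see setup.py entry_points), whose subcommands start the services,
# e.g. `csw scheduler --start` or `csw outputs-processor --start`.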
-WORKDIR /home/api +WORKDIR /home -# run the app server -CMD ["gunicorn", "--bind", "0.0.0.0:5050", "api:app", "--access-logfile", "-"] +CMD ["csw", "outputs-processor", "--start"] \ No newline at end of file diff --git a/workers/kubernetes/redis-master-deployment.yaml b/workers/kubernetes/redis-master-deployment.yaml index 46fbfa78..a11e2437 100644 --- a/workers/kubernetes/redis-master-deployment.yaml +++ b/workers/kubernetes/redis-master-deployment.yaml @@ -19,34 +19,33 @@ spec: tier: backend spec: containers: - - env: - - name: REDIS_ADMIN_PW - valueFrom: - secretKeyRef: - key: REDIS_ADMIN_PW - name: worker-secret - - name: REDIS_EXECUTOR_PW - valueFrom: - secretKeyRef: - key: REDIS_EXECUTOR_PW - name: worker-secret - - name: REDIS_SCHEDULER_PW - valueFrom: - secretKeyRef: - key: REDIS_SCHEDULER_PW - name: worker-secret - image: gcr.io/cs-workers-dev/redis-python:latest - lifecycle: - postStart: - exec: - command: - - python3 - - /home/redis_init.py - name: master - imagePullPolicy: Always - ports: - - containerPort: 6379 - resources: - requests: - cpu: 100m - memory: 100Mi + - env: + - name: REDIS_ADMIN_PW + valueFrom: + secretKeyRef: + key: REDIS_ADMIN_PW + name: worker-secret + - name: REDIS_EXECUTOR_PW + valueFrom: + secretKeyRef: + key: REDIS_EXECUTOR_PW + name: worker-secret + - name: REDIS_SCHEDULER_PW + valueFrom: + secretKeyRef: + key: REDIS_SCHEDULER_PW + name: worker-secret + image: gcr.io/cs-workers-dev/redis-python:latest + lifecycle: + postStart: + exec: + command: + - python3 + - /home/redis_init.py + name: master + ports: + - containerPort: 6379 + resources: + requests: + cpu: 100m + memory: 100Mi diff --git a/workers/kubernetes/flask-service.yaml b/workers/kubernetes/scheduler-service.yaml similarity index 55% rename from workers/kubernetes/flask-service.yaml rename to workers/kubernetes/scheduler-service.yaml index 9ff1d4f5..674835ae 100644 --- a/workers/kubernetes/flask-service.yaml +++ b/workers/kubernetes/scheduler-service.yaml @@ -1,11 +1,11 @@ apiVersion: v1 kind: Service metadata: - name: flask + name: scheduler spec: ports: - - port: 5050 - targetPort: 5050 + - port: 80 + targetPort: 8888 selector: - app: flask + app: scheduler type: LoadBalancer diff --git a/workers/setup.py b/workers/setup.py index 0aff603d..feaf6304 100644 --- a/workers/setup.py +++ b/workers/setup.py @@ -9,7 +9,7 @@ setuptools.setup( - name="cs-publish", + name="cs-workers", version=os.environ.get("TAG", "0.0.0"), author="Hank Doupe", author_email="hank@compute.studio", @@ -18,14 +18,10 @@ long_description_content_type="text/markdown", url="https://github.com/compute-tooling/compute-studio-workers", packages=setuptools.find_packages(), - install_requires=["celery", "redis", "gitpython", "pyyaml"], + install_requires=["redis", "kubernetes", "gitpython", "pyyaml"], include_package_data=True, entry_points={ - "console_scripts": [ - "cs-publish=cs_publish.client.publish:main", - "cs-secrets=cs_publish.client.secrets:main", - "cs-job=cs_publish.executors.kubernetes:main", - ] + "console_scripts": ["cs-workers=cs_workers.cli:cli", "csw=cs_workers.cli:cli"] }, classifiers=[ "Programming Language :: Python :: 3", diff --git a/workers/templates/sc-deployment.template.yaml b/workers/templates/models/api-task-deployment.template.yaml similarity index 84% rename from workers/templates/sc-deployment.template.yaml rename to workers/templates/models/api-task-deployment.template.yaml index 3a6fd957..e9db3767 100755 --- a/workers/templates/sc-deployment.template.yaml +++ 
b/workers/templates/models/api-task-deployment.template.yaml @@ -16,15 +16,15 @@ spec: - name: # [owner]-[title]-[action] image: # gcr.io/[project]/[owner]_[title]_tasks:[tag] imagePullPolicy: Always - command: [] # ["./celery_[action].sh"] + command: ["csw", "api_task", "--start"] args: [] # ["[owner]", "[title"] resources: requests: - memory: - cpu: + memory: 0.25G + cpu: 0.7 limits: - memory: - cpu: + memory: 0.7G + cpu: 1 env: - name: OUTPUTS_VERSION value: v1 diff --git a/workers/templates/models/api-task-service.template.yaml b/workers/templates/models/api-task-service.template.yaml new file mode 100644 index 00000000..f4ee18f3 --- /dev/null +++ b/workers/templates/models/api-task-service.template.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Service +metadata: + name: # [owner]-[title]-api-task +spec: + ports: + - port: 80 + targetPort: 8888 + selector: + app: # [owner]-[title]-api-task + type: LoadBalancer diff --git a/workers/templates/models/secret.template.yaml b/workers/templates/models/secret.template.yaml new file mode 100644 index 00000000..f86c0a99 --- /dev/null +++ b/workers/templates/models/secret.template.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: Secret +metadata: + name: worker-secret +type: Opaque +stringData: {} diff --git a/workers/templates/secret.template.yaml b/workers/templates/secret.template.yaml index 522d99fd..2c5b3ca7 100644 --- a/workers/templates/secret.template.yaml +++ b/workers/templates/secret.template.yaml @@ -6,7 +6,5 @@ type: Opaque stringData: CS_URL: https://dev.compute.studio BUCKET: cs-outputs-dev - CS_API_TOKEN: - OUTPUTS_VERSION: "v1" REDIS_HOST: redis-master diff --git a/workers/templates/outputs-processor-deployment.template.yaml b/workers/templates/services/outputs-processor-deployment.template.yaml similarity index 100% rename from workers/templates/outputs-processor-deployment.template.yaml rename to workers/templates/services/outputs-processor-deployment.template.yaml diff --git a/workers/templates/redis-master-deployment.template.yaml b/workers/templates/services/redis-master-deployment.template.yaml similarity index 100% rename from workers/templates/redis-master-deployment.template.yaml rename to workers/templates/services/redis-master-deployment.template.yaml diff --git a/workers/templates/flask-deployment.template.yaml b/workers/templates/services/scheduler-deployment.template.yaml similarity index 54% rename from workers/templates/flask-deployment.template.yaml rename to workers/templates/services/scheduler-deployment.template.yaml index 9c037dee..8cb6a9a8 100755 --- a/workers/templates/flask-deployment.template.yaml +++ b/workers/templates/services/scheduler-deployment.template.yaml @@ -1,42 +1,52 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: flask + name: scheduler spec: replicas: 2 selector: matchLabels: - app: flask + app: scheduler template: metadata: labels: - app: flask + app: scheduler spec: containers: - - name: flask + - name: scheduler image: imagePullPolicy: Always env: - name: GET_HOSTS_FROM value: dns ports: - - containerPort: 5050 + - containerPort: 8888 env: - name: CS_URL valueFrom: secretKeyRef: name: worker-secret key: CS_URL - - name: CS_API_TOKEN + - name: REDIS_HOST valueFrom: secretKeyRef: name: worker-secret - key: CS_API_TOKEN - - name: REDIS + key: REDIS_HOST + - name: REDIS_PORT valueFrom: secretKeyRef: name: worker-secret - key: REDIS + key: REDIS_HOST + - name: REDIS_DB + valueFrom: + secretKeyRef: + name: worker-secret + key: REDIS_DB + - name: REDIS_SCHEDULER_PW + valueFrom: + 
secretKeyRef: + name: worker-secret + key: REDIS_SCHEDULER_PW nodeSelector: component: api \ No newline at end of file From b2f6c9fb35924a3aa65064222aec9d2a8c799ff1 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Fri, 15 May 2020 12:37:41 -0400 Subject: [PATCH 08/55] Add support for directly loading images to kind and move constant kubernetes files to templates directory --- .gitignore | 2 +- workers/cs_workers/clients/publish.py | 10 ++- workers/cs_workers/services/manage.py | 66 ++++++++++--------- workers/kind-config.yaml | 17 +++++ .../kubernetes/redis-master-deployment.yaml | 51 -------------- ...utputs-processor-Deployment.template.yaml} | 0 .../services/outputs-processor-Service.yaml} | 0 ... => redis-master-Deployment.template.yaml} | 0 .../services/redis-master-Service.yaml} | 0 ...aml => scheduler-Deployment.template.yaml} | 0 .../services/scheduler-Service.yaml} | 0 11 files changed, 61 insertions(+), 85 deletions(-) create mode 100644 workers/kind-config.yaml delete mode 100644 workers/kubernetes/redis-master-deployment.yaml rename workers/templates/services/{outputs-processor-deployment.template.yaml => outputs-processor-Deployment.template.yaml} (100%) rename workers/{kubernetes/outputs-processor-service.yaml => templates/services/outputs-processor-Service.yaml} (100%) rename workers/templates/services/{redis-master-deployment.template.yaml => redis-master-Deployment.template.yaml} (100%) rename workers/{kubernetes/redis-master-service.yaml => templates/services/redis-master-Service.yaml} (100%) rename workers/templates/services/{scheduler-deployment.template.yaml => scheduler-Deployment.template.yaml} (100%) rename workers/{kubernetes/scheduler-service.yaml => templates/services/scheduler-Service.yaml} (100%) diff --git a/.gitignore b/.gitignore index 282dfa58..65e73e74 100755 --- a/.gitignore +++ b/.gitignore @@ -36,7 +36,7 @@ secret-docker-compose.yml *worker_config.prod.yaml *docker-compose-apps* -kubernetes/models/* +kubernetes/* secret.yaml *scheduler-deployment.yaml diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index ae0bf160..d9c55373 100644 --- a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -38,11 +38,13 @@ def __init__( base_branch="origin/master", quiet=False, kubernetes_target=None, + use_kind=False, ): super().__init__(project, tag, base_branch, quiet) self.models = models if models and models[0] else None self.kubernetes_target = kubernetes_target or self.kubernetes_target + self.use_kind = use_kind if self.kubernetes_target == "-": self.quiet = True @@ -159,7 +161,11 @@ def push_app_image(self, app): safeowner = clean(app["owner"]) safetitle = clean(app["title"]) img_name = f"{safeowner}_{safetitle}_tasks" - run(f"docker push {self.cr}/{self.project}/{img_name}:{self.tag}") + if self.use_kind: + cmd_prefix = "kind load docker-image" + else: + cmd_prefix = "docker push" + run(f"{cmd_prefix} {self.cr}/{self.project}/{img_name}:{self.tag}") def write_secrets(self, app): secret_config = copy.deepcopy(self.secret_template) @@ -261,6 +267,7 @@ def handle(args: argparse.Namespace): base_branch=args.base_branch, quiet=args.quiet, kubernetes_target=args.config_out, + use_kind=args.use_kind, ) if args.build: publisher.build() @@ -286,4 +293,5 @@ def cli(subparsers: argparse._SubParsersAction): parser.add_argument("--base-branch", default="origin/master") parser.add_argument("--quiet", "-q", default=False) parser.add_argument("--config-out", "-o", default=None) + 
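    # --use-kind switches the image push step from `docker push` to
    # `kind load docker-image`, so freshly built model images are loaded into a
    # local kind cluster instead of being pushed to the container registry.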
parser.add_argument("--use-kind", action="store_true") parser.set_defaults(func=handle) diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index 791991d2..f551bfe8 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -65,24 +65,25 @@ class Cluster: kubernetes_target = "kubernetes/" cr = "gcr.io" - def __init__(self, tag, project, kubernetes_target="kubernetes/"): + def __init__(self, tag, project, kubernetes_target="kubernetes/", use_kind=False): self.tag = tag self.project = project + self.use_kind = use_kind if kubernetes_target is None: self.kubernetes_target = Cluster.kubernetes_target else: self.kubernetes_target = kubernetes_target - with open("templates/services/scheduler-deployment.template.yaml", "r") as f: + with open("templates/services/scheduler-Deployment.template.yaml", "r") as f: self.scheduler_template = yaml.safe_load(f.read()) with open( - "templates/services/outputs-processor-deployment.template.yaml", "r" + "templates/services/outputs-processor-Deployment.template.yaml", "r" ) as f: self.outputs_processor_template = yaml.safe_load(f.read()) - with open("templates/services/redis-master-deployment.template.yaml", "r") as f: + with open("templates/services/redis-master-Deployment.template.yaml", "r") as f: self.redis_master_template = yaml.safe_load(f.read()) with open("templates/secret.template.yaml", "r") as f: @@ -106,16 +107,6 @@ def build(self): f"docker build -t scheduler:{self.tag} -f dockerfiles/Dockerfile.scheduler ./" ) - run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") - - run( - f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" - ) - - run( - f"docker tag scheduler:{self.tag} {self.cr}/{self.project}/scheduler:{self.tag}" - ) - def push(self): run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") run(f"docker tag redis-python {self.cr}/{self.project}/redis-python:latest") @@ -128,26 +119,33 @@ def push(self): f"docker tag scheduler:{self.tag} {self.cr}/{self.project}/scheduler:{self.tag}" ) - run(f"docker push {self.cr}/{self.project}/distributed:latest") - run(f"docker push {self.cr}/{self.project}/redis-python:latest") - run(f"docker push {self.cr}/{self.project}/outputs_processor:{self.tag}") - run(f"docker push {self.cr}/{self.project}/scheduler:{self.tag}") + if self.use_kind: + cmd_prefix = "kind load docker-image" + else: + cmd_prefix = "docker push" + + run(f"{cmd_prefix} {self.cr}/{self.project}/distributed:latest") + run(f"{cmd_prefix} {self.cr}/{self.project}/redis-python:latest") + run(f"{cmd_prefix} {self.cr}/{self.project}/outputs_processor:{self.tag}") + run(f"{cmd_prefix} {self.cr}/{self.project}/scheduler:{self.tag}") def make_config(self): + config_filenames = [ + "scheduler-Service.yaml", + "outputs-processor-Service.yaml", + "redis-master-Service.yaml", + ] + for filename in config_filenames: + with open(f"templates/services/{filename}", "r") as f: + configs = yaml.safe_load_all(f.read()) + for config in configs: + name = config["metadata"]["name"] + kind = config["kind"] + self.write_config(f"{name}-{kind}.yaml", config) self.write_scheduler_deployment() self.write_outputs_processor_deployment() self.write_secret() self.write_redis_deployment() - configs = [ - "scheduler-service.yaml", - "outputs-processor-deployment.yaml", - "outputs-processor-service.yaml", - "redis-master-service.yaml", - ] - for filename in configs: - with open(f"kubernetes/{filename}", "r") as 
f: - config = yaml.safe_load(f.read()) - self.write_config(filename, config) def write_scheduler_deployment(self): """ @@ -157,7 +155,7 @@ def write_scheduler_deployment(self): deployment["spec"]["template"]["spec"]["containers"][0][ "image" ] = f"gcr.io/{self.project}/scheduler:{self.tag}" - self.write_config("scheduler-deployment.yaml", deployment) + self.write_config("scheduler-Deployment.yaml", deployment) return deployment @@ -171,7 +169,7 @@ def write_outputs_processor_deployment(self): "image" ] = f"gcr.io/{self.project}/outputs_processor:{self.tag}" - self.write_config("outputs-processor-deployment.yaml", deployment) + self.write_config("outputs-processor-Deployment.yaml", deployment) return deployment @@ -190,7 +188,7 @@ def write_redis_deployment(self): }, } ) - self.write_config("redis-master-deployment.yaml", deployment) + self.write_config("redis-master-Deployment.yaml", deployment) def write_secret(self): @@ -265,7 +263,10 @@ def _set_secret(self, name, value): def handle(args: argparse.Namespace): cluster = Cluster( - tag=args.tag, project=args.project, kubernetes_target=args.config_out + tag=args.tag, + project=args.project, + kubernetes_target=args.config_out, + use_kind=args.use_kind, ) if args.build: @@ -284,4 +285,5 @@ def cli(subparsers: argparse._SubParsersAction): parser.add_argument("--push", action="store_true") parser.add_argument("--make-config", action="store_true") parser.add_argument("--config-out", "-o") + parser.add_argument("--use-kind", action="store_true") parser.set_defaults(func=handle) diff --git a/workers/kind-config.yaml b/workers/kind-config.yaml new file mode 100644 index 00000000..6742c517 --- /dev/null +++ b/workers/kind-config.yaml @@ -0,0 +1,17 @@ +kind: Cluster +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: + - role: control-plane + kubeadmConfigPatches: + - | + kind: InitConfiguration + nodeRegistration: + kubeletExtraArgs: + node-labels: "component=model" + - role: control-plane + kubeadmConfigPatches: + - | + kind: InitConfiguration + nodeRegistration: + kubeletExtraArgs: + node-labels: "component=api" diff --git a/workers/kubernetes/redis-master-deployment.yaml b/workers/kubernetes/redis-master-deployment.yaml deleted file mode 100644 index a11e2437..00000000 --- a/workers/kubernetes/redis-master-deployment.yaml +++ /dev/null @@ -1,51 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: redis - name: redis-master -spec: - replicas: 1 - selector: - matchLabels: - app: redis - role: master - tier: backend - template: - metadata: - labels: - app: redis - role: master - tier: backend - spec: - containers: - - env: - - name: REDIS_ADMIN_PW - valueFrom: - secretKeyRef: - key: REDIS_ADMIN_PW - name: worker-secret - - name: REDIS_EXECUTOR_PW - valueFrom: - secretKeyRef: - key: REDIS_EXECUTOR_PW - name: worker-secret - - name: REDIS_SCHEDULER_PW - valueFrom: - secretKeyRef: - key: REDIS_SCHEDULER_PW - name: worker-secret - image: gcr.io/cs-workers-dev/redis-python:latest - lifecycle: - postStart: - exec: - command: - - python3 - - /home/redis_init.py - name: master - ports: - - containerPort: 6379 - resources: - requests: - cpu: 100m - memory: 100Mi diff --git a/workers/templates/services/outputs-processor-deployment.template.yaml b/workers/templates/services/outputs-processor-Deployment.template.yaml similarity index 100% rename from workers/templates/services/outputs-processor-deployment.template.yaml rename to workers/templates/services/outputs-processor-Deployment.template.yaml diff --git 
a/workers/kubernetes/outputs-processor-service.yaml b/workers/templates/services/outputs-processor-Service.yaml similarity index 100% rename from workers/kubernetes/outputs-processor-service.yaml rename to workers/templates/services/outputs-processor-Service.yaml diff --git a/workers/templates/services/redis-master-deployment.template.yaml b/workers/templates/services/redis-master-Deployment.template.yaml similarity index 100% rename from workers/templates/services/redis-master-deployment.template.yaml rename to workers/templates/services/redis-master-Deployment.template.yaml diff --git a/workers/kubernetes/redis-master-service.yaml b/workers/templates/services/redis-master-Service.yaml similarity index 100% rename from workers/kubernetes/redis-master-service.yaml rename to workers/templates/services/redis-master-Service.yaml diff --git a/workers/templates/services/scheduler-deployment.template.yaml b/workers/templates/services/scheduler-Deployment.template.yaml similarity index 100% rename from workers/templates/services/scheduler-deployment.template.yaml rename to workers/templates/services/scheduler-Deployment.template.yaml diff --git a/workers/kubernetes/scheduler-service.yaml b/workers/templates/services/scheduler-Service.yaml similarity index 100% rename from workers/kubernetes/scheduler-service.yaml rename to workers/templates/services/scheduler-Service.yaml From 0c792ea4cf6c9972443c0ca1d5dd4cf252af9044 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Fri, 15 May 2020 12:41:38 -0400 Subject: [PATCH 09/55] Add rbac so scheduler can create, monitor, and delete jobs --- workers/cs_workers/services/manage.py | 1 + .../scheduler-Deployment.template.yaml | 1 + .../templates/services/scheduler-RBAC.yaml | 28 +++++++++++++++++++ 3 files changed, 30 insertions(+) create mode 100644 workers/templates/services/scheduler-RBAC.yaml diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index f551bfe8..f1c3272e 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -132,6 +132,7 @@ def push(self): def make_config(self): config_filenames = [ "scheduler-Service.yaml", + "scheduler-RBAC.yaml", "outputs-processor-Service.yaml", "redis-master-Service.yaml", ] diff --git a/workers/templates/services/scheduler-Deployment.template.yaml b/workers/templates/services/scheduler-Deployment.template.yaml index 8cb6a9a8..184f3b69 100755 --- a/workers/templates/services/scheduler-Deployment.template.yaml +++ b/workers/templates/services/scheduler-Deployment.template.yaml @@ -12,6 +12,7 @@ spec: labels: app: scheduler spec: + serviceAccountName: scheduler containers: - name: scheduler image: diff --git a/workers/templates/services/scheduler-RBAC.yaml b/workers/templates/services/scheduler-RBAC.yaml new file mode 100644 index 00000000..17185d69 --- /dev/null +++ b/workers/templates/services/scheduler-RBAC.yaml @@ -0,0 +1,28 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: scheduler +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: job-admin + namespace: default +rules: + - apiGroups: ["batch", "extensions"] + resources: ["jobs"] + verbs: ["get", "list", "watch", "create", "update", "delete"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: job-admin + namespace: default +subjects: + - kind: ServiceAccount + name: scheduler + apiGroup: rbac.authorization.k8s.io +roleRef: + kind: Role + name: job-admin + apiGroup: rbac.authorization.k8s.io From 
771d1322d1a035cb7c059320c9d2d228afaac17f Mon Sep 17 00:00:00 2001 From: hdoupe Date: Fri, 15 May 2020 18:05:28 -0400 Subject: [PATCH 10/55] Fix bugs; improve svc cli; get cs-workers able to play nice with kind --- workers/cs_workers/clients/publish.py | 2 +- workers/cs_workers/services/manage.py | 65 ++++++++++++------- workers/kind-config.yaml | 17 +---- workers/requirements.txt | 4 +- .../models/api-task-deployment.template.yaml | 1 - workers/templates/secret.template.yaml | 1 + ...outputs-processor-Deployment.template.yaml | 1 - .../scheduler-Deployment.template.yaml | 4 +- .../templates/services/scheduler-RBAC.yaml | 2 +- 9 files changed, 52 insertions(+), 45 deletions(-) diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index d9c55373..03fb85d9 100644 --- a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -162,7 +162,7 @@ def push_app_image(self, app): safetitle = clean(app["title"]) img_name = f"{safeowner}_{safetitle}_tasks" if self.use_kind: - cmd_prefix = "kind load docker-image" + cmd_prefix = "kind load docker-image --name cs --nodes cs-worker2" else: cmd_prefix = "docker push" run(f"{cmd_prefix} {self.cr}/{self.project}/{img_name}:{self.tag}") diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index f1c3272e..00dd3348 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -99,7 +99,9 @@ def build(self): pull from either distributed:latest or celerybase:latest. """ run("docker build -t distributed:latest -f dockerfiles/Dockerfile ./") - run("docker build -t redis-python:latest -f dockerfiles/Dockerfile.redis ./") + run( + f"docker build -t redis-python:{self.tag} -f dockerfiles/Dockerfile.redis ./" + ) run( f"docker build -t outputs_processor:{self.tag} -f dockerfiles/Dockerfile.outputs_processor ./" ) @@ -109,7 +111,9 @@ def build(self): def push(self): run(f"docker tag distributed {self.cr}/{self.project}/distributed:latest") - run(f"docker tag redis-python {self.cr}/{self.project}/redis-python:latest") + run( + f"docker tag redis-python:{self.tag} {self.cr}/{self.project}/redis-python:{self.tag}" + ) run( f"docker tag outputs_processor:{self.tag} {self.cr}/{self.project}/outputs_processor:{self.tag}" @@ -120,16 +124,16 @@ def push(self): ) if self.use_kind: - cmd_prefix = "kind load docker-image" + cmd_prefix = "kind load docker-image --name cs --nodes cs-worker,cs-worker2" else: cmd_prefix = "docker push" run(f"{cmd_prefix} {self.cr}/{self.project}/distributed:latest") - run(f"{cmd_prefix} {self.cr}/{self.project}/redis-python:latest") + run(f"{cmd_prefix} {self.cr}/{self.project}/redis-python:{self.tag}") run(f"{cmd_prefix} {self.cr}/{self.project}/outputs_processor:{self.tag}") run(f"{cmd_prefix} {self.cr}/{self.project}/scheduler:{self.tag}") - def make_config(self): + def config(self): config_filenames = [ "scheduler-Service.yaml", "scheduler-RBAC.yaml", @@ -177,7 +181,7 @@ def write_outputs_processor_deployment(self): def write_redis_deployment(self): deployment = copy.deepcopy(self.redis_master_template) container = deployment["spec"]["template"]["spec"]["containers"][0] - container["image"] = f"gcr.io/{self.project}/redis-python:latest" + container["image"] = f"gcr.io/{self.project}/redis-python:{self.tag}" redis_secrets = self.redis_secrets() for name, sec in redis_secrets.items(): if sec is not None: @@ -262,29 +266,44 @@ def _set_secret(self, name, value): return client.add_secret_version(secret_parent, 
{"data": value}) -def handle(args: argparse.Namespace): - cluster = Cluster( +def cluster_from_args(args: argparse.Namespace): + return Cluster( tag=args.tag, project=args.project, - kubernetes_target=args.config_out, - use_kind=args.use_kind, + kubernetes_target=getattr(args, "out", None), + use_kind=getattr(args, "use_kind", None), ) - if args.build: - cluster.build() - if args.push: - cluster.push() - if args.make_config: - cluster.make_config() + +def build(args: argparse.Namespace): + cluster = cluster_from_args(args) + cluster.build() + + +def push(args: argparse.Namespace): + cluster = cluster_from_args(args) + cluster.push() + + +def config(args: argparse.Namespace): + cluster = cluster_from_args(args) + cluster.config() def cli(subparsers: argparse._SubParsersAction): - parser = subparsers.add_parser("svc") + parser = subparsers.add_parser("services", aliases=["svc"]) parser.add_argument("--tag", required=False, default=TAG) parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--build", action="store_true") - parser.add_argument("--push", action="store_true") - parser.add_argument("--make-config", action="store_true") - parser.add_argument("--config-out", "-o") - parser.add_argument("--use-kind", action="store_true") - parser.set_defaults(func=handle) + + svc_subparsers = parser.add_subparsers() + + build_parser = svc_subparsers.add_parser("build") + build_parser.set_defaults(func=build) + + push_parser = svc_subparsers.add_parser("push") + push_parser.add_argument("--use-kind", action="store_true") + push_parser.set_defaults(func=push) + + config_parser = svc_subparsers.add_parser("config") + config_parser.add_argument("--out", "-o") + config_parser.set_defaults(func=config) diff --git a/workers/kind-config.yaml b/workers/kind-config.yaml index 6742c517..583886c5 100644 --- a/workers/kind-config.yaml +++ b/workers/kind-config.yaml @@ -1,17 +1,6 @@ kind: Cluster apiVersion: kind.x-k8s.io/v1alpha4 nodes: - - role: control-plane - kubeadmConfigPatches: - - | - kind: InitConfiguration - nodeRegistration: - kubeletExtraArgs: - node-labels: "component=model" - - role: control-plane - kubeadmConfigPatches: - - | - kind: InitConfiguration - nodeRegistration: - kubeletExtraArgs: - node-labels: "component=api" +- role: control-plane +- role: worker +- role: worker \ No newline at end of file diff --git a/workers/requirements.txt b/workers/requirements.txt index cb7cb74d..5bcf8f00 100755 --- a/workers/requirements.txt +++ b/workers/requirements.txt @@ -5,4 +5,6 @@ toolz boto3 kubernetes cs-storage>=1.10.1 -pyyaml \ No newline at end of file +pyyaml +tornado +dask \ No newline at end of file diff --git a/workers/templates/models/api-task-deployment.template.yaml b/workers/templates/models/api-task-deployment.template.yaml index e9db3767..ba346435 100755 --- a/workers/templates/models/api-task-deployment.template.yaml +++ b/workers/templates/models/api-task-deployment.template.yaml @@ -15,7 +15,6 @@ spec: containers: - name: # [owner]-[title]-[action] image: # gcr.io/[project]/[owner]_[title]_tasks:[tag] - imagePullPolicy: Always command: ["csw", "api_task", "--start"] args: [] # ["[owner]", "[title"] resources: diff --git a/workers/templates/secret.template.yaml b/workers/templates/secret.template.yaml index 2c5b3ca7..4846d6b9 100644 --- a/workers/templates/secret.template.yaml +++ b/workers/templates/secret.template.yaml @@ -8,3 +8,4 @@ stringData: BUCKET: cs-outputs-dev OUTPUTS_VERSION: "v1" REDIS_HOST: redis-master + REDIS_DB: "" \ No newline at end of 
file diff --git a/workers/templates/services/outputs-processor-Deployment.template.yaml b/workers/templates/services/outputs-processor-Deployment.template.yaml index 1a63d0d3..7435bc51 100755 --- a/workers/templates/services/outputs-processor-Deployment.template.yaml +++ b/workers/templates/services/outputs-processor-Deployment.template.yaml @@ -15,7 +15,6 @@ spec: containers: - name: outputs-processor image: - imagePullPolicy: Always ports: - containerPort: 6379 env: diff --git a/workers/templates/services/scheduler-Deployment.template.yaml b/workers/templates/services/scheduler-Deployment.template.yaml index 184f3b69..c505adb7 100755 --- a/workers/templates/services/scheduler-Deployment.template.yaml +++ b/workers/templates/services/scheduler-Deployment.template.yaml @@ -3,7 +3,7 @@ kind: Deployment metadata: name: scheduler spec: - replicas: 2 + replicas: 1 selector: matchLabels: app: scheduler @@ -16,7 +16,6 @@ spec: containers: - name: scheduler image: - imagePullPolicy: Always env: - name: GET_HOSTS_FROM value: dns @@ -48,6 +47,5 @@ spec: secretKeyRef: name: worker-secret key: REDIS_SCHEDULER_PW - nodeSelector: component: api \ No newline at end of file diff --git a/workers/templates/services/scheduler-RBAC.yaml b/workers/templates/services/scheduler-RBAC.yaml index 17185d69..25ea334f 100644 --- a/workers/templates/services/scheduler-RBAC.yaml +++ b/workers/templates/services/scheduler-RBAC.yaml @@ -21,7 +21,7 @@ metadata: subjects: - kind: ServiceAccount name: scheduler - apiGroup: rbac.authorization.k8s.io + namespace: default roleRef: kind: Role name: job-admin From 106d1b78411963dca19d540ce51682cac94651da Mon Sep 17 00:00:00 2001 From: hdoupe Date: Sun, 17 May 2020 11:02:45 -0400 Subject: [PATCH 11/55] Fix port numbers and add dask, distributed, and tornado deps to scheduler --- workers/cs_workers/services/outputs_processor.py | 2 +- workers/cs_workers/services/scheduler.py | 2 +- workers/dockerfiles/Dockerfile.scheduler | 1 + workers/requirements.txt | 2 -- .../services/outputs-processor-Deployment.template.yaml | 2 +- workers/templates/services/scheduler-Deployment.template.yaml | 3 --- 6 files changed, 4 insertions(+), 8 deletions(-) diff --git a/workers/cs_workers/services/outputs_processor.py b/workers/cs_workers/services/outputs_processor.py index 74f14184..f83ff640 100644 --- a/workers/cs_workers/services/outputs_processor.py +++ b/workers/cs_workers/services/outputs_processor.py @@ -60,7 +60,7 @@ def get_app(): def start(args: argparse.Namespace): if args.start: app = get_app() - app.listen(8889) + app.listen(8888) tornado.ioloop.IOLoop.current().start() diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py index 9ec12411..9289ed07 100644 --- a/workers/cs_workers/services/scheduler.py +++ b/workers/cs_workers/services/scheduler.py @@ -103,7 +103,7 @@ def get_app(): def start(args: argparse.Namespace): if args.start: app = get_app() - app.listen(8889) + app.listen(8888) tornado.ioloop.IOLoop.current().start() diff --git a/workers/dockerfiles/Dockerfile.scheduler b/workers/dockerfiles/Dockerfile.scheduler index 41c28b95..1c3f0b18 100755 --- a/workers/dockerfiles/Dockerfile.scheduler +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -12,6 +12,7 @@ EXPOSE 80 EXPOSE 8888 RUN pip install -r requirements.txt +RUN conda install dask distributed tornado RUN mkdir /home/cs_workers COPY cs_workers /home/cs_workers diff --git a/workers/requirements.txt b/workers/requirements.txt index 5bcf8f00..ce465c3f 100755 --- a/workers/requirements.txt 
+++ b/workers/requirements.txt @@ -6,5 +6,3 @@ boto3 kubernetes cs-storage>=1.10.1 pyyaml -tornado -dask \ No newline at end of file diff --git a/workers/templates/services/outputs-processor-Deployment.template.yaml b/workers/templates/services/outputs-processor-Deployment.template.yaml index 7435bc51..917c27d8 100755 --- a/workers/templates/services/outputs-processor-Deployment.template.yaml +++ b/workers/templates/services/outputs-processor-Deployment.template.yaml @@ -16,7 +16,7 @@ spec: - name: outputs-processor image: ports: - - containerPort: 6379 + - containerPort: 8888 env: - name: BUCKET valueFrom: diff --git a/workers/templates/services/scheduler-Deployment.template.yaml b/workers/templates/services/scheduler-Deployment.template.yaml index c505adb7..b6309fb1 100755 --- a/workers/templates/services/scheduler-Deployment.template.yaml +++ b/workers/templates/services/scheduler-Deployment.template.yaml @@ -16,9 +16,6 @@ spec: containers: - name: scheduler image: - env: - - name: GET_HOSTS_FROM - value: dns ports: - containerPort: 8888 env: From e04375ccadeac9078cff52fc50cbeefe4e3d0a97 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Sun, 17 May 2020 13:47:23 -0400 Subject: [PATCH 12/55] CLI improvements and fixes to get api tasks working --- workers/cs_workers/cli.py | 6 ++ workers/cs_workers/clients/api_task.py | 12 ++- workers/cs_workers/clients/publish.py | 82 ++++++++++++------- workers/cs_workers/executors/api_task.py | 39 +++++---- workers/cs_workers/services/manage.py | 9 +- workers/cs_workers/services/scheduler.py | 2 + workers/dockerfiles/Dockerfile.scheduler | 2 +- .../models/api-task-service.template.yaml | 1 - .../redis-master-Deployment.template.yaml | 2 + 9 files changed, 94 insertions(+), 61 deletions(-) diff --git a/workers/cs_workers/cli.py b/workers/cs_workers/cli.py index af90d96f..6d282c24 100644 --- a/workers/cs_workers/cli.py +++ b/workers/cs_workers/cli.py @@ -1,12 +1,18 @@ import argparse +import os from cs_workers.services import manage, scheduler, outputs_processor from cs_workers.clients import publish from cs_workers.executors import job, api_task +TAG = os.environ.get("TAG", "") +PROJECT = os.environ.get("PROJECT", "cs-workers-dev") + def cli(): parser = argparse.ArgumentParser(description="C/S Workers CLI") + parser.add_argument("--tag", required=False, default=TAG) + parser.add_argument("--project", required=False, default=PROJECT) sub_parsers = parser.add_subparsers() manage.cli(sub_parsers) diff --git a/workers/cs_workers/clients/api_task.py b/workers/cs_workers/clients/api_task.py index 41bf78b0..0f1a529f 100644 --- a/workers/cs_workers/clients/api_task.py +++ b/workers/cs_workers/clients/api_task.py @@ -1,6 +1,9 @@ import httpx +from cs_workers.utils import clean + + class APITask: def __init__(self, owner, title, task_id, task_name, **task_kwargs): self.owner = owner @@ -13,8 +16,11 @@ async def create(self, asynchronous=False): method = "async" if asynchronous else "sync" async with httpx.AsyncClient() as client: resp = await client.post( - f"http://{self.owner}-{self.title}/{method}/{self.task_name}/", - # f"http://localhost:8888/{method}/{self.task_name}/", - json={"task_id": self.task_id, "task_kwargs": self.task_kwargs}, + f"http://{clean(self.owner)}-{clean(self.title)}-api-task/{method}/", + json={ + "task_id": self.task_id, + "task_name": self.task_name, + "task_kwargs": self.task_kwargs, + }, ) return resp diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index 03fb85d9..fe7438e6 100644 --- 
a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -9,8 +9,6 @@ from cs_workers.clients.core import Core -TAG = os.environ.get("TAG", "") -PROJECT = os.environ.get("PROJECT", "cs-workers-dev") CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) BASE_PATH = CURR_PATH / ".." / ".." @@ -42,7 +40,7 @@ def __init__( ): super().__init__(project, tag, base_branch, quiet) - self.models = models if models and models[0] else None + self.models = models.split(",") if models else None self.kubernetes_target = kubernetes_target or self.kubernetes_target self.use_kind = use_kind @@ -259,39 +257,67 @@ def _set_secrets(self, app, config): ) -def handle(args: argparse.Namespace): +def build(args: argparse.Namespace): publisher = Publisher( project=args.project, tag=args.tag, - models=args.models, + models=args.names, + base_branch=args.base_branch, + ) + publisher.build() + + +def test(args: argparse.Namespace): + publisher = Publisher( + project=args.project, + tag=args.tag, + models=args.names, + base_branch=args.base_branch, + ) + publisher.test() + + +def push(args: argparse.Namespace): + publisher = Publisher( + project=args.project, + tag=args.tag, + models=args.names, base_branch=args.base_branch, - quiet=args.quiet, - kubernetes_target=args.config_out, use_kind=args.use_kind, ) - if args.build: - publisher.build() - if args.test: - publisher.test() - if args.push: - publisher.push() - if args.app_config: - publisher.write_app_config() + publisher.push() + + +def config(args: argparse.Namespace): + publisher = Publisher( + project=args.project, + tag=args.tag, + models=args.names, + base_branch=args.base_branch, + kubernetes_target=args.out, + ) + publisher.write_app_config() def cli(subparsers: argparse._SubParsersAction): parser = subparsers.add_parser( - "publish", description="Deploy models on C/S compute cluster." + "models", description="Deploy models on C/S compute cluster." 
) - parser.add_argument("--tag", required=False, default=TAG) - parser.add_argument("--project", required=False, default=PROJECT) - parser.add_argument("--models", nargs="+", type=str, required=False, default=None) - parser.add_argument("--build", action="store_true") - parser.add_argument("--test", action="store_true") - parser.add_argument("--push", action="store_true") - parser.add_argument("--app-config", action="store_true") - parser.add_argument("--base-branch", default="origin/master") - parser.add_argument("--quiet", "-q", default=False) - parser.add_argument("--config-out", "-o", default=None) - parser.add_argument("--use-kind", action="store_true") - parser.set_defaults(func=handle) + parser.add_argument("--names", "-n", type=str, required=False, default=None) + parser.add_argument("--base-branch", default="origin/master", required=False) + model_subparsers = parser.add_subparsers() + + build_parser = model_subparsers.add_parser("build") + build_parser.set_defaults(func=build) + test_parser = model_subparsers.add_parser("test") + test_parser.set_defaults(func=test) + + push_parser = model_subparsers.add_parser("push") + push_parser.add_argument("--use-kind", action="store_true") + push_parser.set_defaults(func=push) + + config_parser = model_subparsers.add_parser("config") + config_parser.add_argument("--out", "-o", default=None) + config_parser.set_defaults(func=config) + + parser.set_defaults(func=lambda args: print(args)) diff --git a/workers/cs_workers/executors/api_task.py b/workers/cs_workers/executors/api_task.py index d98652c0..b94779ca 100644 --- a/workers/cs_workers/executors/api_task.py +++ b/workers/cs_workers/executors/api_task.py @@ -31,41 +31,40 @@ class Async(tornado.web.RequestHandler): def initialize(self, routes): self.routes = routes - async def post(self, task_name): - print("POST -- /async/", task_name) - if task_name not in self.routes: + async def post(self): + print("POST -- /async/", self.request.body) + payload = json.loads(self.request.body.decode("utf-8")) + handler = self.routes.get(payload.get("task_name")) + if handler is None: self.set_status(404) return - - handler = self.routes[task_name] - payload = json.loads(self.request.body.decode("utf-8")) task_id = payload.pop("task_id", None) - if task_name is None: + if task_id is None: task_id = str(uuid.uuid4()) + task_kwargs = payload.get("task_kwargs") or {} async with Client(asynchronous=True, processes=True) as client: - fut = client.submit(async_task_wrapper, task_id, handler, **payload) + fut = client.submit(async_task_wrapper, task_id, handler, **task_kwargs) fire_and_forget(fut) self.set_status(200) - self.write({"status": "PENDING", "task_id": task_name}) + self.write({"status": "PENDING", "task_id": task_id}) class Sync(tornado.web.RequestHandler): def initialize(self, routes): self.routes = routes - async def post(self, task_name): - print("POST -- /sync/", task_name) - if task_name not in self.routes: + async def post(self): + print("POST -- /sync/", self.request.body) + payload = json.loads(self.request.body.decode("utf-8")) + handler = self.routes.get(payload.get("task_name")) + if handler is None: self.set_status(404) return - - handler = self.routes[task_name] - payload = json.loads(self.request.body.decode("utf-8")) task_id = payload.pop("task_id", None) - if task_name is None: + if task_id is None: task_id = str(uuid.uuid4()) - print("payload", payload) - result = sync_task_wrapper(task_id, handler, **payload) + task_kwargs = payload.get("task_kwargs") or {} + result = 
sync_task_wrapper(task_id, handler, **task_kwargs) self.write(result) @@ -73,8 +72,8 @@ def executor(routes): print("routes", routes) return tornado.web.Application( [ - (r"/async/([A-Za-z0-9-]+)/", Async, dict(routes=routes)), - (r"/sync/([A-Za-z0-9-]+)/", Sync, dict(routes=routes)), + (r"/async/", Async, dict(routes=routes)), + (r"/sync/", Sync, dict(routes=routes)), ], debug=True, autoreload=True, diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index 00dd3348..357eb34b 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -10,10 +10,6 @@ from pathlib import Path -TAG = os.environ.get("TAG", "") -PROJECT = os.environ.get("PROJECT", "cs-workers-dev") - - def clean(word): return re.sub("[^0-9a-zA-Z]+", "", word).lower() @@ -124,7 +120,7 @@ def push(self): ) if self.use_kind: - cmd_prefix = "kind load docker-image --name cs --nodes cs-worker,cs-worker2" + cmd_prefix = "kind load docker-image --name cs --nodes cs-worker" else: cmd_prefix = "docker push" @@ -292,9 +288,6 @@ def config(args: argparse.Namespace): def cli(subparsers: argparse._SubParsersAction): parser = subparsers.add_parser("services", aliases=["svc"]) - parser.add_argument("--tag", required=False, default=TAG) - parser.add_argument("--project", required=False, default=PROJECT) - svc_subparsers = parser.add_subparsers() build_parser = svc_subparsers.add_parser("build") diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py index 9289ed07..b1ccf67b 100644 --- a/workers/cs_workers/services/scheduler.py +++ b/workers/cs_workers/services/scheduler.py @@ -29,6 +29,7 @@ def get_projects(): projects = {} for project in resp.json(): projects[(project["owner"], project["title"])] = project + print("projects", projects) return projects @@ -101,6 +102,7 @@ def get_app(): def start(args: argparse.Namespace): + print("starting, now") if args.start: app = get_app() app.listen(8888) diff --git a/workers/dockerfiles/Dockerfile.scheduler b/workers/dockerfiles/Dockerfile.scheduler index 1c3f0b18..9c006b30 100755 --- a/workers/dockerfiles/Dockerfile.scheduler +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -21,4 +21,4 @@ RUN cd /home/ && pip install -e . 
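# A rough sketch of the new api-task request shape (bracketed names are
# placeholders): the handlers now route on /sync/ and /async/ alone and read
# the task name from the JSON body, e.g.
#
#   POST http://[owner]-[title]-api-task/async/
#   {"task_id": "<uuid>", "task_name": "version", "task_kwargs": {}}
#
# The /async/ handler responds {"status": "PENDING", "task_id": "<uuid>"} and
# hands the work to the dask client via fire_and_forget, while /sync/ returns
# the task result directly.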
WORKDIR /home -CMD ["csw", "outputs-processor", "--start"] \ No newline at end of file +CMD ["csw", "scheduler", "--start"] \ No newline at end of file diff --git a/workers/templates/models/api-task-service.template.yaml b/workers/templates/models/api-task-service.template.yaml index f4ee18f3..1eb2fbf1 100644 --- a/workers/templates/models/api-task-service.template.yaml +++ b/workers/templates/models/api-task-service.template.yaml @@ -8,4 +8,3 @@ spec: targetPort: 8888 selector: app: # [owner]-[title]-api-task - type: LoadBalancer diff --git a/workers/templates/services/redis-master-Deployment.template.yaml b/workers/templates/services/redis-master-Deployment.template.yaml index 1e3f5f30..cda382a6 100644 --- a/workers/templates/services/redis-master-Deployment.template.yaml +++ b/workers/templates/services/redis-master-Deployment.template.yaml @@ -34,3 +34,5 @@ spec: requests: cpu: 100m memory: 100Mi + nodeSelector: + component: api From 0c0009ae38abafa5ff67c177331487d35829dff6 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Sun, 17 May 2020 20:06:49 -0400 Subject: [PATCH 13/55] Checkpoint: jobs work - Fix redis ACL user configuration - Move task name to url body - Add outputs response checks and logging - Able to post image tag for jobs --- .gitignore | 2 ++ workers/cs_workers/clients/job.py | 28 +++++++++++++------ workers/cs_workers/executors/job.py | 2 ++ workers/cs_workers/executors/task_wrapper.py | 9 +++--- .../cs_workers/services/outputs_processor.py | 8 ++++-- workers/cs_workers/services/scheduler.py | 6 ++-- workers/cs_workers/utils.py | 15 ++++++++++ workers/dockerfiles/Dockerfile.scheduler | 4 +++ workers/scripts/redis_init.py | 4 +-- workers/templates/secret.template.yaml | 3 +- .../scheduler-Deployment.template.yaml | 4 +-- 11 files changed, 62 insertions(+), 23 deletions(-) diff --git a/.gitignore b/.gitignore index 65e73e74..b8c321d6 100755 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,5 @@ secret.yaml *scheduler-deployment.yaml *outputs-processor-deployment.yaml + +*google-creds.json \ No newline at end of file diff --git a/workers/cs_workers/clients/job.py b/workers/cs_workers/clients/job.py index 4eba633b..08b7e389 100644 --- a/workers/cs_workers/clients/job.py +++ b/workers/cs_workers/clients/job.py @@ -6,26 +6,36 @@ from kubernetes import client as kclient, config as kconfig -from cs_workers.utils import clean +from cs_workers.utils import clean, redis_conn_from_env from cs_workers.clients.core import Core redis_conn = dict( - host=os.environ.get("REDIS_HOST"), - port=os.environ.get("REDIS_PORT"), - db=os.environ.get("REDIS_DB"), username="scheduler", password=os.environ.get("REDIS_SCHEDULER_PW"), + **redis_conn_from_env(), ) class Job(Core): def __init__( - self, project, owner, title, tag, job_id=None, job_kwargs=None, quiet=True + self, + project, + owner, + title, + tag, + job_id=None, + job_kwargs=None, + quiet=True, + incluster=True, ): super().__init__(project, quiet=quiet) self.config = {} - kconfig.load_kube_config() + self.incluster = incluster + if self.incluster: + kconfig.load_incluster_config() + else: + kconfig.load_kube_config() self.api_client = kclient.BatchV1Api() self.job = self.configure(owner, title, tag, job_id) self.save_job_kwargs(self.job_id, job_kwargs) @@ -67,11 +77,11 @@ def env(self, owner, title, config): def configure(self, owner, title, tag, job_id=None): if job_id is None: - job_id = "job:" + str(uuid.uuid4()) + job_id = "job-" + str(uuid.uuid4()) else: job_id = str(job_id) - if not str(job_id).startswith("job:"): - job_id += "job:" + if 
not str(job_id).startswith("job-"): + job_id = f"job-{job_id}" if (owner, title) not in self.config: self.config.update(self.get_config([(owner, title)])) diff --git a/workers/cs_workers/executors/job.py b/workers/cs_workers/executors/job.py index 9fe188b9..9e95b61a 100644 --- a/workers/cs_workers/executors/job.py +++ b/workers/cs_workers/executors/job.py @@ -16,9 +16,11 @@ def sim_handler(task_id, meta_param_dict, adjustment): outputs = functions.run_model(meta_param_dict, adjustment) print("got result") outputs = cs_storage.serialize_to_json(outputs) + print("storing results") resp = requests.post( "http://outputs-processor/write/", json={"task_id": task_id, "outputs": outputs} ) + print("got resp", resp.status_code, resp.url) assert resp.status_code == 200, f"Got code: {resp.status_code}" return resp.json() diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index 3c2ff2b3..714b3a94 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -9,13 +9,13 @@ import requests import cs_storage +from cs_workers.utils import redis_conn_from_env + redis_conn = dict( - host=os.environ.get("REDIS_HOST"), - port=os.environ.get("REDIS_PORT"), - db=os.environ.get("REDIS_DB"), username="executor", password=os.environ.get("REDIS_EXECUTOR_PW"), + **redis_conn_from_env(), ) @@ -82,10 +82,11 @@ def async_task_wrapper(task_id, func, **task_kwargs): else: res["status"] = "FAIL" res["traceback"] = traceback_str - + print("saving results...") resp = requests.post( "http://outputs-processor/push/", json={"task_type": "sim", "result": res} ) + print("resp", resp.status_code, resp.url) assert resp.status_code == 200, f"Got code: {resp.status_code}" return res diff --git a/workers/cs_workers/services/outputs_processor.py b/workers/cs_workers/services/outputs_processor.py index f83ff640..46167396 100644 --- a/workers/cs_workers/services/outputs_processor.py +++ b/workers/cs_workers/services/outputs_processor.py @@ -27,10 +27,10 @@ async def push(task_type, result): ) as client: if task_type == "sim": print(f"posting data to {CS_URL}/outputs/api/") - return client.put(f"{CS_URL}/outputs/api/", json=result) + return await client.put(f"{CS_URL}/outputs/api/", json=result) if task_type == "parse": print(f"posting data to {CS_URL}/inputs/api/") - return client.put(f"{CS_URL}/inputs/api/", json=result) + return await client.put(f"{CS_URL}/inputs/api/", json=result) else: raise ValueError(f"Unknown task type: {task_type}.") @@ -40,6 +40,7 @@ async def post(self): print("POST -- /write/") payload = json.loads(self.request.body.decode("utf-8")) result = await write(**payload) + print("success-write") self.write(result) @@ -47,7 +48,8 @@ class Push(tornado.web.RequestHandler): async def post(self): print("POST -- /push/") payload = json.loads(self.request.body.decode("utf-8")) - await push(**payload) + resp = await push(**payload) + print("got resp-push", resp.status_code, resp.url) self.set_status(200) diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py index b1ccf67b..67562090 100644 --- a/workers/cs_workers/services/scheduler.py +++ b/workers/cs_workers/services/scheduler.py @@ -21,6 +21,7 @@ class Payload(ma.Schema): task_kwargs = ma.fields.Dict( keys=ma.fields.Str(), values=ma.fields.Field(), missing=dict ) + tag = ma.fields.Str(required=False) def get_projects(): @@ -42,7 +43,7 @@ async def post(self, owner, title): if not self.request.body: return payload = 
Payload().loads(self.request.body.decode("utf-8")) - + print("payload", payload) if (owner, title) not in self.projects: self.set_status(404) @@ -70,11 +71,12 @@ async def post(self, owner, title): data = resp.json() elif task_name == "sim": + tag = payload["tag"] client = job.Job( "cs-workers-dev", owner, title, - tag="latest", + tag=tag, job_id=task_id, job_kwargs=payload["task_kwargs"], ) diff --git a/workers/cs_workers/utils.py b/workers/cs_workers/utils.py index 0194f554..bdc83ad7 100644 --- a/workers/cs_workers/utils.py +++ b/workers/cs_workers/utils.py @@ -1,4 +1,5 @@ import base64 +import os import re import subprocess import time @@ -41,3 +42,17 @@ def read_github_file(org, repo, branch, filename): decoded_bytes = base64.decodebytes(encoded_content) text = decoded_bytes.decode() return text + + +def redis_conn_from_env(): + kwargs = {} + for kwarg, env in [ + ("host", "REDIS_HOST"), + ("port", "REDIS_PORT"), + ("db", "REDIS_DB"), + ]: + val = os.environ.get(env) + if val: + kwargs[kwarg] = val + + return kwargs diff --git a/workers/dockerfiles/Dockerfile.scheduler b/workers/dockerfiles/Dockerfile.scheduler index 9c006b30..01b29990 100755 --- a/workers/dockerfiles/Dockerfile.scheduler +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -12,6 +12,7 @@ EXPOSE 80 EXPOSE 8888 RUN pip install -r requirements.txt +RUN pip install google-cloud-secret-manager RUN conda install dask distributed tornado RUN mkdir /home/cs_workers @@ -19,6 +20,9 @@ COPY cs_workers /home/cs_workers COPY setup.py /home RUN cd /home/ && pip install -e . +COPY ./google-creds.json /google-creds.json +ENV GOOGLE_APPLICATION_CREDENTIALS=/google-creds.json + WORKDIR /home CMD ["csw", "scheduler", "--start"] \ No newline at end of file diff --git a/workers/scripts/redis_init.py b/workers/scripts/redis_init.py index 0092159a..2e97e28c 100644 --- a/workers/scripts/redis_init.py +++ b/workers/scripts/redis_init.py @@ -56,7 +56,7 @@ def main(): nopass=nopass, passwords=f"+{sched_pw}" if not nopass else None, commands=["-@all", "+set", "+get", "+acl|whoami"], - keys=["job:"], + keys=["job-*"], ) exec_pw = os.environ.get("REDIS_EXECUTOR_PW") @@ -67,7 +67,7 @@ def main(): nopass=nopass, passwords=f"+{exec_pw}" if not nopass else None, commands=["-@all", "+get", "+acl|whoami"], - keys=["job:"], + keys=["job-*"], ) admin_client.close() diff --git a/workers/templates/secret.template.yaml b/workers/templates/secret.template.yaml index 4846d6b9..f4456ad6 100644 --- a/workers/templates/secret.template.yaml +++ b/workers/templates/secret.template.yaml @@ -8,4 +8,5 @@ stringData: BUCKET: cs-outputs-dev OUTPUTS_VERSION: "v1" REDIS_HOST: redis-master - REDIS_DB: "" \ No newline at end of file + REDIS_DB: "" + REDIS_PORT: "" diff --git a/workers/templates/services/scheduler-Deployment.template.yaml b/workers/templates/services/scheduler-Deployment.template.yaml index b6309fb1..e95123ce 100755 --- a/workers/templates/services/scheduler-Deployment.template.yaml +++ b/workers/templates/services/scheduler-Deployment.template.yaml @@ -33,7 +33,7 @@ spec: valueFrom: secretKeyRef: name: worker-secret - key: REDIS_HOST + key: REDIS_PORT - name: REDIS_DB valueFrom: secretKeyRef: @@ -45,4 +45,4 @@ spec: name: worker-secret key: REDIS_SCHEDULER_PW nodeSelector: - component: api \ No newline at end of file + component: api From ce58f0be7ade34095bd9a4c9bdafdf6c91ac3b28 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Mon, 18 May 2020 17:07:59 -0400 Subject: [PATCH 14/55] Update for new workers url schema --- webapp/apps/comp/actions.py | 2 +- 
webapp/apps/comp/asyncsubmit.py | 2 +- webapp/apps/comp/compute.py | 22 ++++++----- webapp/apps/comp/model_parameters.py | 5 ++- webapp/apps/comp/parser.py | 2 +- webapp/apps/comp/tests/compute.py | 1 + webapp/apps/comp/tests/test_asyncviews.py | 38 ++++++++----------- .../apps/comp/tests/test_model_parameters.py | 22 ++++++----- webapp/apps/users/models.py | 5 ++- 9 files changed, 52 insertions(+), 47 deletions(-) diff --git a/webapp/apps/comp/actions.py b/webapp/apps/comp/actions.py index 5123012e..4b67ce19 100755 --- a/webapp/apps/comp/actions.py +++ b/webapp/apps/comp/actions.py @@ -1,4 +1,4 @@ -INPUTS = "inputs" +INPUTS = "defaults" PARSE = "parse" SIM = "sim" VERSION = "version" diff --git a/webapp/apps/comp/asyncsubmit.py b/webapp/apps/comp/asyncsubmit.py index 0bda1c9e..98280ba6 100755 --- a/webapp/apps/comp/asyncsubmit.py +++ b/webapp/apps/comp/asyncsubmit.py @@ -123,7 +123,7 @@ def submit(self): } print("submit", data) self.submitted_id, self.max_q_length = self.compute.submit_job( - data, inputs.project.worker_ext(action=actions.SIM) + project=inputs.project, task_name=actions.SIM, task_kwargs=data ) print(f"job id: {self.submitted_id}") print(f"q lenghth: {self.max_q_length}") diff --git a/webapp/apps/comp/compute.py b/webapp/apps/comp/compute.py index d3bd34a8..522066ef 100755 --- a/webapp/apps/comp/compute.py +++ b/webapp/apps/comp/compute.py @@ -23,8 +23,10 @@ class WorkersUnreachableError(Exception): class Compute(object): - def remote_submit_job(self, url, data, timeout=TIMEOUT_IN_SECONDS, headers=None): - response = requests.post(url, data=data, timeout=timeout) + def remote_submit_job( + self, url: str, data: dict, timeout: int = TIMEOUT_IN_SECONDS, headers=None + ): + response = requests.post(url, json=data, timeout=timeout) return response def remote_query_job(self, theurl): @@ -35,20 +37,21 @@ def remote_get_job(self, theurl): job_response = requests.get(theurl) return job_response - def submit_job(self, tasks, endpoint): - print("submitting", tasks, endpoint) - url = f"http://{WORKER_HN}/{endpoint}" - return self.submit(tasks, url) + def submit_job(self, project, task_name, task_kwargs): + print("submitting", task_name) + url = f"http://{WORKER_HN}/{project.owner}/{project.title}/" + return self.submit( + tasks=dict(task_name=task_name, task_kwargs=task_kwargs), url=url + ) def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): queue_length = 0 submitted = False attempts = 0 while not submitted: - packed = json.dumps(tasks) try: response = self.remote_submit_job( - url, data=packed, timeout=TIMEOUT_IN_SECONDS + url, data=tasks, timeout=TIMEOUT_IN_SECONDS ) if response.status_code == 200: print("submitted: ", url) @@ -106,10 +109,9 @@ def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): submitted = False attempts = 0 while not submitted: - packed = json.dumps(tasks) try: response = self.remote_submit_job( - url, data=packed, timeout=TIMEOUT_IN_SECONDS + url, data=tasks, timeout=TIMEOUT_IN_SECONDS ) if response.status_code == 200: print("submitted: ", url) diff --git a/webapp/apps/comp/model_parameters.py b/webapp/apps/comp/model_parameters.py index efd0b502..e4509c6e 100755 --- a/webapp/apps/comp/model_parameters.py +++ b/webapp/apps/comp/model_parameters.py @@ -67,8 +67,9 @@ def get_inputs(self, meta_parameters_values=None): ) except ModelConfig.DoesNotExist: success, result = self.compute.submit_job( - {"meta_param_dict": meta_parameters_values or {}}, - self.project.worker_ext(action=actions.INPUTS), + project=self.project, 
+ task_name=actions.INPUTS, + task_kwargs={"meta_param_dict": meta_parameters_values or {}}, ) if not success: raise AppError(meta_parameters_values, result["traceback"]) diff --git a/webapp/apps/comp/parser.py b/webapp/apps/comp/parser.py index 1ef71472..f74737f8 100755 --- a/webapp/apps/comp/parser.py +++ b/webapp/apps/comp/parser.py @@ -55,7 +55,7 @@ def post(self, errors_warnings, params): "errors_warnings": errors_warnings, } job_id, queue_length = self.compute.submit_job( - data, self.project.worker_ext(action=actions.PARSE) + project=self.project, task_name=actions.PARSE, task_kwargs=data ) return job_id, queue_length diff --git a/webapp/apps/comp/tests/compute.py b/webapp/apps/comp/tests/compute.py index 45b6ccbe..415812b9 100755 --- a/webapp/apps/comp/tests/compute.py +++ b/webapp/apps/comp/tests/compute.py @@ -28,6 +28,7 @@ def remote_submit_job(self, url, data, timeout, headers=None): with requests_mock.Mocker() as mock: resp = {"job_id": str(uuid.uuid4()), "qlength": 2} resp = json.dumps(resp) + print("mocking", url) mock.register_uri("POST", url, text=resp) self.last_posted = data return Compute.remote_submit_job(self, url, data, timeout) diff --git a/webapp/apps/comp/tests/test_asyncviews.py b/webapp/apps/comp/tests/test_asyncviews.py index 5be71085..709f7f68 100755 --- a/webapp/apps/comp/tests/test_asyncviews.py +++ b/webapp/apps/comp/tests/test_asyncviews.py @@ -153,13 +153,11 @@ def post_adjustment( ) -> Response: mock.register_uri( "POST", - f"{self.worker_url}{self.owner}/{self.title}/inputs", - text=json.dumps(defaults_resp_data), - ) - mock.register_uri( - "POST", - f"{self.worker_url}{self.owner}/{self.title}/parse", - text=json.dumps(adj_resp_data), + f"{self.worker_url}{self.owner}/{self.title}/", + json=lambda request, context: { + "defaults": defaults_resp_data, + "parse": adj_resp_data, + }[request.json()["task_name"]], ) init_resp = self.api_client.post( @@ -367,7 +365,7 @@ def test_get_inputs(self, api_client, worker_url): with requests_mock.Mocker() as mock: mock.register_uri( "POST", - f"{worker_url}{self.owner}/{self.title}/inputs", + f"{worker_url}{self.owner}/{self.title}/", text=json.dumps(resp_data), ) resp = api_client.get(f"/{self.owner}/{self.title}/api/v1/inputs/") @@ -433,13 +431,11 @@ def test_new_sim(self, use_api, client, api_client, profile, worker_url): with requests_mock.Mocker() as mock: mock.register_uri( "POST", - f"{worker_url}{self.owner}/{self.title}/inputs", - text=json.dumps(inputs_resp_data), - ) - mock.register_uri( - "POST", - f"{worker_url}{self.owner}/{self.title}/parse", - text=json.dumps(adj_resp_data), + f"{worker_url}{self.owner}/{self.title}/", + json=lambda request, context: { + "defaults": inputs_resp_data, + "parse": adj_resp_data, + }[request.json()["task_name"]], ) api_client.force_login(sim.owner.user) resp = api_client.get(sim.inputs.get_absolute_api_url()) @@ -463,15 +459,13 @@ def test_post_inputs(self, api_client, worker_url): print("mocking", f"{worker_url}{self.owner}/{self.title}/inputs") mock.register_uri( "POST", - f"{worker_url}{self.owner}/{self.title}/version", - text=json.dumps({"status": "SUCCESS", "version": "1.0.0"}), + f"{worker_url}{self.owner}/{self.title}/", + json=lambda request, context: { + "defaults": resp_data, + "version": {"status": "SUCCESS", "version": "1.0.0"}, + }[request.json()["task_name"]], ) - mock.register_uri( - "POST", - f"{worker_url}{self.owner}/{self.title}/inputs", - text=json.dumps(resp_data), - ) resp = api_client.post( f"/{self.owner}/{self.title}/api/v1/inputs/", 
data=meta_params, diff --git a/webapp/apps/comp/tests/test_model_parameters.py b/webapp/apps/comp/tests/test_model_parameters.py index 4e5351ec..85e9ac09 100755 --- a/webapp/apps/comp/tests/test_model_parameters.py +++ b/webapp/apps/comp/tests/test_model_parameters.py @@ -46,7 +46,7 @@ class Params(pt.Parameters): def get_inputs_callback(request, context): metaparams = MetaParams(array_first=True) - metaparams.adjust(request.json()["meta_param_dict"]) + metaparams.adjust(request.json()["task_kwargs"]["meta_param_dict"]) params = Params() params.set_state(d0=metaparams.d0.tolist(), d1=metaparams.d1) return { @@ -56,6 +56,16 @@ def get_inputs_callback(request, context): } +def mock_callback(request, context): + data = request.json() + if data["task_name"] == "version": + return {"status": "SUCCESS", "version": "v1"} + elif data["task_name"] == "defaults": + return get_inputs_callback(request, context) + else: + raise KeyError(f"Unknown task_name: {task_name}") + + @pytest.fixture def mock_project(db, worker_url): profile = Profile.objects.get(user__username="modeler") @@ -74,14 +84,8 @@ def mock_project(db, worker_url): with requests_mock.Mocker() as mock: mock.register_uri( "POST", - f"{worker_url}{project.owner}/{project.title}/inputs", - json=get_inputs_callback, - status_code=200, - ) - mock.register_uri( - "POST", - f"{worker_url}{project.owner}/{project.title}/version", - json={"status": "SUCCESS", "version": "v1"}, + f"{worker_url}{project.owner}/{project.title}/", + json=mock_callback, status_code=200, ) diff --git a/webapp/apps/users/models.py b/webapp/apps/users/models.py index 9f9957ec..0ce8bff3 100755 --- a/webapp/apps/users/models.py +++ b/webapp/apps/users/models.py @@ -259,7 +259,7 @@ def version(self): return None try: success, result = SyncCompute().submit_job( - {}, self.worker_ext(actions.VERSION) + project=self, task_name=actions.VERSION, task_kwargs=dict() ) if success: return result["version"] @@ -268,6 +268,9 @@ def version(self): return None except Exception as e: print(f"error retrieving version for {self}", e) + import traceback + + traceback.print_exc() return None def has_write_access(self, user): From 5d20ff990ad29b4da095469ce8900349c96e6272 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Mon, 18 May 2020 17:08:30 -0400 Subject: [PATCH 15/55] Use task_name instead of task_type in outputs-processor --- workers/cs_workers/executors/task_wrapper.py | 2 +- workers/cs_workers/services/outputs_processor.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index 714b3a94..a0eeb9ea 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -84,7 +84,7 @@ def async_task_wrapper(task_id, func, **task_kwargs): res["traceback"] = traceback_str print("saving results...") resp = requests.post( - "http://outputs-processor/push/", json={"task_type": "sim", "result": res} + "http://outputs-processor/push/", json={"task_name": "sim", "result": res} ) print("resp", resp.status_code, resp.url) assert resp.status_code == 200, f"Got code: {resp.status_code}" diff --git a/workers/cs_workers/services/outputs_processor.py b/workers/cs_workers/services/outputs_processor.py index 46167396..42889709 100644 --- a/workers/cs_workers/services/outputs_processor.py +++ b/workers/cs_workers/services/outputs_processor.py @@ -21,18 +21,18 @@ async def write(task_id, outputs): return res -async def push(task_type, result): +async def 
push(task_name, result): async with httpx.AsyncClient( headers={"Authorization": f"Token {CS_API_TOKEN}"} ) as client: - if task_type == "sim": + if task_name == "sim": print(f"posting data to {CS_URL}/outputs/api/") return await client.put(f"{CS_URL}/outputs/api/", json=result) - if task_type == "parse": + if task_name == "parse": print(f"posting data to {CS_URL}/inputs/api/") return await client.put(f"{CS_URL}/inputs/api/", json=result) else: - raise ValueError(f"Unknown task type: {task_type}.") + raise ValueError(f"Unknown task type: {task_name}.") class Write(tornado.web.RequestHandler): From 7d9dec3ed6b68d345e535c9667727c6892947ce8 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Mon, 18 May 2020 17:08:49 -0400 Subject: [PATCH 16/55] Drop port number in docker-compose.yml --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 9a4da9e1..3ac249d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,7 +28,7 @@ services: - DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/postgres - USE_STRIPE=False - DEBUG=True - - WORKERS=${WORKERS}:5050 + - WORKERS=${WORKERS} - LOCAL=True - BUCKET=cs-outputs-dev - MAILGUN_API_KEY=${MAILGUN_API_KEY} From e76bf0af89ca95c2b5e3577db2d0f4136d08275c Mon Sep 17 00:00:00 2001 From: hdoupe Date: Mon, 18 May 2020 23:08:29 -0400 Subject: [PATCH 17/55] Add config between webapp and workers - Add cs-config.yaml option for meta params. - Update webapp with latest tag of model image. - Refactor secrets into Secrets and ModelSecrets classes. --- webapp/apps/comp/asyncsubmit.py | 1 + webapp/apps/comp/compute.py | 4 +- webapp/apps/publish/serializers.py | 4 ++ .../migrations/0013_project_latest_tag.py | 16 ++++++ webapp/apps/users/models.py | 2 + workers/cs-config.yaml | 4 ++ workers/cs_workers/cli.py | 40 ++++++++++++- workers/cs_workers/clients/core.py | 2 +- .../clients/{secrets.py => model_secrets.py} | 33 ++--------- workers/cs_workers/clients/publish.py | 28 ++++++++- workers/cs_workers/executors/api_task.py | 15 ++--- workers/cs_workers/executors/job.py | 2 +- workers/cs_workers/executors/task_wrapper.py | 6 +- workers/cs_workers/secrets.py | 57 +++++++++++++++++++ workers/cs_workers/services/manage.py | 54 +++++++++--------- workers/templates/secret.template.yaml | 3 +- 16 files changed, 197 insertions(+), 74 deletions(-) create mode 100755 webapp/apps/users/migrations/0013_project_latest_tag.py create mode 100644 workers/cs-config.yaml rename workers/cs_workers/clients/{secrets.py => model_secrets.py} (73%) create mode 100644 workers/cs_workers/secrets.py diff --git a/webapp/apps/comp/asyncsubmit.py b/webapp/apps/comp/asyncsubmit.py index 98280ba6..33347a3d 100755 --- a/webapp/apps/comp/asyncsubmit.py +++ b/webapp/apps/comp/asyncsubmit.py @@ -120,6 +120,7 @@ def submit(self): data = { "meta_param_dict": inputs.meta_parameters, "adjustment": inputs.deserialized_inputs, + "tag": self.sim.project.latest_tag, } print("submit", data) self.submitted_id, self.max_q_length = self.compute.submit_job( diff --git a/webapp/apps/comp/compute.py b/webapp/apps/comp/compute.py index 522066ef..33f310df 100755 --- a/webapp/apps/comp/compute.py +++ b/webapp/apps/comp/compute.py @@ -57,8 +57,8 @@ def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): print("submitted: ", url) submitted = True data = response.json() - job_id = data["job_id"] - queue_length = data["qlength"] + job_id = data["task_id"] + queue_length = 0 # data["qlength"] else: print("FAILED: ", 
WORKER_HN) attempts += 1 diff --git a/webapp/apps/publish/serializers.py b/webapp/apps/publish/serializers.py index 9e9d64d2..05043538 100755 --- a/webapp/apps/publish/serializers.py +++ b/webapp/apps/publish/serializers.py @@ -8,6 +8,7 @@ class PublishSerializer(serializers.ModelSerializer): cluster_type = serializers.CharField(required=False) sim_count = serializers.IntegerField(required=False) user_count = serializers.IntegerField(required=False) + latest_tag = serializers.CharField(required=False) # see to_representation # has_write_access = serializers.BooleanField(source="has_write_access") @@ -31,6 +32,7 @@ class Meta: "description", "repo_url", "repo_tag", + "latest_tag", "exp_task_time", "server_cost", "cpu", @@ -51,6 +53,7 @@ class ProjectWithVersionSerializer(serializers.ModelSerializer): sim_count = serializers.IntegerField(required=False) version = serializers.CharField(required=False) user_count = serializers.IntegerField(required=False) + latest_tag = serializers.CharField(required=False) # see to_representation # has_write_access = serializers.BooleanField(source="has_write_access") @@ -74,6 +77,7 @@ class Meta: "description", "repo_url", "repo_tag", + "latest_tag", "exp_task_time", "server_cost", "cpu", diff --git a/webapp/apps/users/migrations/0013_project_latest_tag.py b/webapp/apps/users/migrations/0013_project_latest_tag.py new file mode 100755 index 00000000..e10caa3a --- /dev/null +++ b/webapp/apps/users/migrations/0013_project_latest_tag.py @@ -0,0 +1,16 @@ +# Generated by Django 3.0.6 on 2020-05-18 22:27 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("users", "0012_project_repo_tag")] + + operations = [ + migrations.AddField( + model_name="project", + name="latest_tag", + field=models.CharField(max_length=64, null=True), + ) + ] diff --git a/webapp/apps/users/models.py b/webapp/apps/users/models.py index 0ce8bff3..efce6204 100755 --- a/webapp/apps/users/models.py +++ b/webapp/apps/users/models.py @@ -166,6 +166,8 @@ def callabledefault(): cluster_type = models.CharField(default="single-core", max_length=32) + latest_tag = models.CharField(null=True, max_length=64) + objects = ProjectManager() def __str__(self): diff --git a/workers/cs-config.yaml b/workers/cs-config.yaml new file mode 100644 index 00000000..79015a0d --- /dev/null +++ b/workers/cs-config.yaml @@ -0,0 +1,4 @@ +CS_URL: null +CS_API_TOKEN: null +PROJECT: null +TAG: null diff --git a/workers/cs_workers/cli.py b/workers/cs_workers/cli.py index 6d282c24..f7d3a34c 100644 --- a/workers/cs_workers/cli.py +++ b/workers/cs_workers/cli.py @@ -1,5 +1,10 @@ import argparse +import copy +import datetime import os +from pathlib import Path +import random +import yaml from cs_workers.services import manage, scheduler, outputs_processor from cs_workers.clients import publish @@ -7,12 +12,43 @@ TAG = os.environ.get("TAG", "") PROJECT = os.environ.get("PROJECT", "cs-workers-dev") +CS_URL = os.environ.get("CS_URL", None) + +defaults = dict( + TAG=datetime.datetime.now().strftime("%Y-%m-%d"), + PROJECT="cs-workers-dev", + CS_URL=None, + CS_API_TOKEN=None, +) + + +def load_env(): + config = copy.deepcopy(defaults) + + path = Path("cs-config.yaml") + if path.exists(): + with open(path, "r") as f: + user_config = yaml.safe_load(f.read()) + else: + user_config = {} + + for var in ["TAG", "PROJECT", "CS_URL", "CS_API_TOKEN"]: + if os.environ.get(var): + config[var] = os.environ.get(var) + elif user_config.get(var): + config[var] = user_config.get(var) + return 
config def cli(): + config = load_env() parser = argparse.ArgumentParser(description="C/S Workers CLI") - parser.add_argument("--tag", required=False, default=TAG) - parser.add_argument("--project", required=False, default=PROJECT) + parser.add_argument("--tag", required=False, default=config["TAG"]) + parser.add_argument("--project", required=False, default=config["PROJECT"]) + parser.add_argument("--cs-url", required=False, default=config["CS_URL"]) + parser.add_argument( + "--cs-api-token", required=False, default=config["CS_API_TOKEN"] + ) sub_parsers = parser.add_subparsers() manage.cli(sub_parsers) diff --git a/workers/cs_workers/clients/core.py b/workers/cs_workers/clients/core.py index 577c638b..de454d26 100644 --- a/workers/cs_workers/clients/core.py +++ b/workers/cs_workers/clients/core.py @@ -11,7 +11,7 @@ from cs_workers.utils import clean, run, parse_owner_title, read_github_file -from cs_workers.clients.secrets import Secrets +from cs_workers.clients.model_secrets import ModelSecrets CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) BASE_PATH = CURR_PATH / ".." / ".." diff --git a/workers/cs_workers/clients/secrets.py b/workers/cs_workers/clients/model_secrets.py similarity index 73% rename from workers/cs_workers/clients/secrets.py rename to workers/cs_workers/clients/model_secrets.py index d8ef0b24..30f1544a 100644 --- a/workers/cs_workers/clients/secrets.py +++ b/workers/cs_workers/clients/model_secrets.py @@ -3,15 +3,12 @@ import os from cs_workers.utils import clean +from cs_workers.secrets import Secrets, SecretNotFound PROJECT = os.environ.get("PROJECT", "cs-workers-dev") -class SecretNotFound(Exception): - pass - - -class Secrets: +class ModelSecrets(Secrets): def __init__(self, owner, title, project): self.owner = owner self.title = title @@ -40,10 +37,7 @@ def _set_secret(self, name, value): secret_val = self._get_secret() except SecretNotFound: secret_val = {name: value} - proj_parent = client.project_path(self.project) - client.create_secret( - proj_parent, secret_name, {"replication": {"automatic": {}}} - ) + return super().set_secret(secret_name, secret_val) else: if secret_val is not None: secret_val[name] = value @@ -52,11 +46,7 @@ def _set_secret(self, name, value): if value is None: secret_val.pop(name) - secret_bytes = json.dumps(secret_val).encode("utf-8") - - secret_parent = client.secret_path(self.project, secret_name) - - return client.add_secret_version(secret_parent, {"data": secret_bytes}) + return super().set_secret(secret_name, secret_val) def _get_secret(self, name=None): from google.api_core import exceptions @@ -66,11 +56,7 @@ def _get_secret(self, name=None): client = self._client() try: - response = client.access_secret_version( - f"projects/{self.project}/secrets/{secret_name}/versions/latest" - ) - - secret = json.loads(response.payload.data.decode("utf-8")) + secret = json.loads(super().get_secret(secret_name)) except exceptions.NotFound: raise SecretNotFound() @@ -81,15 +67,6 @@ def _get_secret(self, name=None): else: return secret - def _client(self): - if self.client: - return self.client - - from google.cloud import secretmanager - - self.client = secretmanager.SecretManagerServiceClient() - return self.client - def handle(args: argparse.Namespace): secrets = Secrets(args.owner, args.title, args.project) diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index fe7438e6..95b9cbbf 100644 --- a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -5,8 +5,10 
@@ import yaml from pathlib import Path -from ..utils import run, clean +import requests +from cs_workers.utils import run, clean +from cs_workers.secrets import Secrets from cs_workers.clients.core import Core CURR_PATH = Path(os.path.abspath(os.path.dirname(__file__))) @@ -37,12 +39,16 @@ def __init__( quiet=False, kubernetes_target=None, use_kind=False, + cs_url=None, + cs_api_token=None, ): super().__init__(project, tag, base_branch, quiet) self.models = models.split(",") if models else None self.kubernetes_target = kubernetes_target or self.kubernetes_target self.use_kind = use_kind + self.cs_url = cs_url + self._cs_api_token = cs_api_token if self.kubernetes_target == "-": self.quiet = True @@ -165,6 +171,15 @@ def push_app_image(self, app): cmd_prefix = "docker push" run(f"{cmd_prefix} {self.cr}/{self.project}/{img_name}:{self.tag}") + if self.cs_url is not None: + resp = requests.put( + f"{self.cs_url}/publish/api/{app['owner']}/{app['title']}/detail/", + json={"latest_tag": self.tag}, + ) + assert ( + resp.status_code == 200 + ), f"Got: {resp.url} {resp.status_code} {resp.text}" + def write_secrets(self, app): secret_config = copy.deepcopy(self.secret_template) safeowner = clean(app["owner"]) @@ -256,6 +271,13 @@ def _set_secrets(self, app, config): {"name": key, "valueFrom": {"secretKeyRef": {"name": name, "key": key}}} ) + @property + def cs_api_token(self): + if self._cs_api_token is None: + secrets = Secrets(self.project) + self._cs_api_token = secrets.get_secret("CS_API_TOKEN") + return self._cs_api_token + def build(args: argparse.Namespace): publisher = Publisher( @@ -284,6 +306,8 @@ def push(args: argparse.Namespace): models=args.names, base_branch=args.base_branch, use_kind=args.use_kind, + cs_url=getattr(args, "cs_url", None), + cs_api_token=getattr(args, "cs_api_token", None), ) publisher.push() @@ -295,6 +319,8 @@ def config(args: argparse.Namespace): models=args.names, base_branch=args.base_branch, kubernetes_target=args.out, + cs_url=getattr(args, "cs_url", None), + cs_api_token=getattr(args, "cs_api_token", None), ) publisher.write_app_config() diff --git a/workers/cs_workers/executors/api_task.py b/workers/cs_workers/executors/api_task.py index b94779ca..c0d251cc 100644 --- a/workers/cs_workers/executors/api_task.py +++ b/workers/cs_workers/executors/api_task.py @@ -1,4 +1,5 @@ import argparse +import asyncio import json import os import uuid @@ -7,7 +8,6 @@ import tornado.ioloop import tornado.web -from dask.distributed import Client, fire_and_forget try: from cs_config import functions @@ -34,7 +34,8 @@ def initialize(self, routes): async def post(self): print("POST -- /async/", self.request.body) payload = json.loads(self.request.body.decode("utf-8")) - handler = self.routes.get(payload.get("task_name")) + task_name = payload.get("task_name") + handler = self.routes.get(task_name) if handler is None: self.set_status(404) return @@ -42,9 +43,8 @@ async def post(self): if task_id is None: task_id = str(uuid.uuid4()) task_kwargs = payload.get("task_kwargs") or {} - async with Client(asynchronous=True, processes=True) as client: - fut = client.submit(async_task_wrapper, task_id, handler, **task_kwargs) - fire_and_forget(fut) + async_task = async_task_wrapper(task_id, task_name, handler, **task_kwargs) + asyncio.create_task(async_task) self.set_status(200) self.write({"status": "PENDING", "task_id": task_id}) @@ -56,7 +56,8 @@ def initialize(self, routes): async def post(self): print("POST -- /sync/", self.request.body) payload = 
json.loads(self.request.body.decode("utf-8")) - handler = self.routes.get(payload.get("task_name")) + task_name = payload.get("task_name") + handler = self.routes.get(task_name) if handler is None: self.set_status(404) return @@ -64,7 +65,7 @@ async def post(self): if task_id is None: task_id = str(uuid.uuid4()) task_kwargs = payload.get("task_kwargs") or {} - result = sync_task_wrapper(task_id, handler, **task_kwargs) + result = await sync_task_wrapper(task_id, task_name, handler, **task_kwargs) self.write(result) diff --git a/workers/cs_workers/executors/job.py b/workers/cs_workers/executors/job.py index 9e95b61a..ae07e41a 100644 --- a/workers/cs_workers/executors/job.py +++ b/workers/cs_workers/executors/job.py @@ -29,7 +29,7 @@ def sim_handler(task_id, meta_param_dict, adjustment): def main(args: argparse.Namespace): - async_task_wrapper(args.job_id, routes[args.route_name]) + async_task_wrapper(args.job_id, args.route_name, routes[args.route_name]) def cli(subparsers: argparse._SubParsersAction): diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index a0eeb9ea..bed63c91 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -29,7 +29,7 @@ pass -def sync_task_wrapper(task_id, func, **task_kwargs): +async def sync_task_wrapper(task_id, task_name, func, **task_kwargs): start = time.time() traceback_str = None res = {} @@ -50,7 +50,7 @@ def sync_task_wrapper(task_id, func, **task_kwargs): return res -def async_task_wrapper(task_id, func, **task_kwargs): +async def async_task_wrapper(task_id, task_name, func, **task_kwargs): print("sim task", task_id, func) start = time.time() traceback_str = None @@ -84,7 +84,7 @@ def async_task_wrapper(task_id, func, **task_kwargs): res["traceback"] = traceback_str print("saving results...") resp = requests.post( - "http://outputs-processor/push/", json={"task_name": "sim", "result": res} + "http://outputs-processor/push/", json={"task_name": task_name, "result": res} ) print("resp", resp.status_code, resp.url) assert resp.status_code == 200, f"Got code: {resp.status_code}" diff --git a/workers/cs_workers/secrets.py b/workers/cs_workers/secrets.py new file mode 100644 index 00000000..ddf9dce3 --- /dev/null +++ b/workers/cs_workers/secrets.py @@ -0,0 +1,57 @@ +class SecretNotFound(Exception): + pass + + +class Secrets: + def __init__(self, project): + self.project = project + self.client = None + + def set_secret(self, name, value): + return self._set_secret(name, value) + + def get_secret(self, name): + return self._get_secret(name) + + def list_secrets(self): + raise NotImplementedError() + + def delete_secret(self, name): + return self._set_secret(name, None) + + def _set_secret(self, name, value): + client = self._client() + try: + self._get_secret(name) + except SecretNotFound: + proj_parent = client.project_path(self.project) + client.create_secret(proj_parent, name, {"replication": {"automatic": {}}}) + + secret_bytes = value.encode("utf-8") + + secret_parent = client.secret_path(self.project, name) + + return client.add_secret_version(secret_parent, {"data": secret_bytes}) + + def _get_secret(self, name): + from google.api_core import exceptions + + client = self._client() + + try: + response = client.access_secret_version( + f"projects/{self.project}/secrets/{name}/versions/latest" + ) + + return response.payload.data.decode("utf-8") + except exceptions.NotFound: + raise SecretNotFound() + + def _client(self): + if self.client: + return 
self.client + + from google.cloud import secretmanager + + self.client = secretmanager.SecretManagerServiceClient() + return self.client diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index 357eb34b..a20da8f9 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -10,6 +10,9 @@ from pathlib import Path +from ..secrets import Secrets + + def clean(word): return re.sub("[^0-9a-zA-Z]+", "", word).lower() @@ -61,10 +64,18 @@ class Cluster: kubernetes_target = "kubernetes/" cr = "gcr.io" - def __init__(self, tag, project, kubernetes_target="kubernetes/", use_kind=False): + def __init__( + self, + tag, + project, + kubernetes_target="kubernetes/", + use_kind=False, + cs_api_token=None, + ): self.tag = tag self.project = project self.use_kind = use_kind + self._cs_api_token = cs_api_token if kubernetes_target is None: self.kubernetes_target = Cluster.kubernetes_target @@ -192,10 +203,8 @@ def write_redis_deployment(self): self.write_config("redis-master-Deployment.yaml", deployment) def write_secret(self): - secrets = copy.deepcopy(self.secret_template) - secrets["stringData"]["CS_API_TOKEN"] = self._get_secret("CS_API_TOKEN") - + secrets["stringData"]["CS_API_TOKEN"] = self.cs_api_token redis_secrets = self.redis_secrets() for name, sec in redis_secrets.items(): if sec is not None: @@ -227,39 +236,27 @@ def redis_secrets(self): ) for sec in redis_secrets: try: - value = self._get_secret(sec) + value = self.secrets.get_secret(sec) except exceptions.NotFound: try: value = redis_acl_genpass() - self._set_secret(sec, value) + self.secrets.set_secret(sec, value) except Exception: value = "" redis_secrets[sec] = value return redis_secrets - def _get_secret(self, secret_name): - from google.cloud import secretmanager - - client = secretmanager.SecretManagerServiceClient() - response = client.access_secret_version( - f"projects/{self.project}/secrets/{secret_name}/versions/latest" - ) - - return response.payload.data.decode("utf-8") - - def _set_secret(self, name, value): - from google.cloud import secretmanager - - client = secretmanager.SecretManagerServiceClient() - proj_parent = client.project_path(self.project) - client.create_secret(proj_parent, name, {"replication": {"automatic": {}}}) - - if not isinstance(value, bytes): - value = value.encode("utf-8") - - secret_parent = client.secret_path(self.project, name) + @property + def cs_api_token(self): + if self._cs_api_token is None: + self._cs_api_token = self.secrets._get_secret("CS_API_TOKEN") + return self._cs_api_token - return client.add_secret_version(secret_parent, {"data": value}) + @property + def secrets(self): + if self._secrets is None: + self._secrets = Secrets() + return self._secrets def cluster_from_args(args: argparse.Namespace): @@ -268,6 +265,7 @@ def cluster_from_args(args: argparse.Namespace): project=args.project, kubernetes_target=getattr(args, "out", None), use_kind=getattr(args, "use_kind", None), + cs_api_token=getattr(args, "cs_api_token", None), ) diff --git a/workers/templates/secret.template.yaml b/workers/templates/secret.template.yaml index f4456ad6..ce97cb0b 100644 --- a/workers/templates/secret.template.yaml +++ b/workers/templates/secret.template.yaml @@ -4,7 +4,8 @@ metadata: name: worker-secret type: Opaque stringData: - CS_URL: https://dev.compute.studio + # CS_URL: https://dev.compute.studio + CS_URL: http://hdoupe.ngrok.io BUCKET: cs-outputs-dev OUTPUTS_VERSION: "v1" REDIS_HOST: redis-master From 
855fffb53ee9e7f62516496a2fd509df7eeb8ea2 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Tue, 19 May 2020 15:03:24 -0400 Subject: [PATCH 18/55] Success: webapp can submit jobs --- .pre-commit-config.yaml | 10 ++-- webapp/apps/comp/asyncsubmit.py | 6 +- webapp/apps/comp/compute.py | 60 ++++++++++---------- webapp/apps/comp/serializers.py | 6 ++ webapp/apps/comp/views/api.py | 28 +-------- webapp/apps/publish/serializers.py | 6 ++ webapp/apps/publish/urls.py | 13 ++++- webapp/apps/publish/views.py | 37 +++++++++++- webapp/apps/users/models.py | 1 + workers/cs_workers/clients/core.py | 2 +- workers/cs_workers/clients/job.py | 10 ++-- workers/cs_workers/clients/model_secrets.py | 36 ++++-------- workers/cs_workers/clients/publish.py | 5 +- workers/cs_workers/executors/api_task.py | 8 +-- workers/cs_workers/executors/job.py | 5 +- workers/cs_workers/executors/task_wrapper.py | 25 ++++---- workers/cs_workers/services/manage.py | 10 +++- workers/cs_workers/services/scheduler.py | 2 +- 18 files changed, 155 insertions(+), 115 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e69dac2d..1d33e314 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: -- repo: https://github.com/ambv/black - rev: 18.9b0 - hooks: - - id: black - language_version: python3 \ No newline at end of file + - repo: https://github.com/ambv/black + rev: 19.10b0 + hooks: + - id: black + language_version: python3 diff --git a/webapp/apps/comp/asyncsubmit.py b/webapp/apps/comp/asyncsubmit.py index 33347a3d..8485efc7 100755 --- a/webapp/apps/comp/asyncsubmit.py +++ b/webapp/apps/comp/asyncsubmit.py @@ -120,11 +120,13 @@ def submit(self): data = { "meta_param_dict": inputs.meta_parameters, "adjustment": inputs.deserialized_inputs, - "tag": self.sim.project.latest_tag, } print("submit", data) self.submitted_id, self.max_q_length = self.compute.submit_job( - project=inputs.project, task_name=actions.SIM, task_kwargs=data + project=inputs.project, + task_name=actions.SIM, + task_kwargs=data, + tag=self.sim.project.latest_tag, ) print(f"job id: {self.submitted_id}") print(f"q lenghth: {self.max_q_length}") diff --git a/webapp/apps/comp/compute.py b/webapp/apps/comp/compute.py index 33f310df..4335d417 100755 --- a/webapp/apps/comp/compute.py +++ b/webapp/apps/comp/compute.py @@ -37,11 +37,11 @@ def remote_get_job(self, theurl): job_response = requests.get(theurl) return job_response - def submit_job(self, project, task_name, task_kwargs): + def submit_job(self, project, task_name, task_kwargs, tag=None): print("submitting", task_name) url = f"http://{WORKER_HN}/{project.owner}/{project.title}/" return self.submit( - tasks=dict(task_name=task_name, task_kwargs=task_kwargs), url=url + tasks=dict(task_name=task_name, tag=tag, task_kwargs=task_kwargs), url=url ) def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): @@ -74,34 +74,34 @@ def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): return job_id, queue_length - def results_ready(self, sim): - result_url = ( - f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" - f"/query/{sim.job_id}/" - ) - job_response = self.remote_query_job(result_url) - msg = "{0} failed on host: {1}".format(sim.job_id, WORKER_HN) - if job_response.status_code == 200: # Valid response - return job_response.text - else: - print("did not expect response with status_code", job_response.status_code) - raise JobFailError(msg) - - def get_results(self, sim): - result_url = ( - 
f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" - f"/get_job/{sim.job_id}/" - ) - job_response = self.remote_get_job(result_url) - if job_response.status_code == 200: # Valid response - try: - return job_response.json() - except ValueError: - # Got back a bad response. Get the text and re-raise - msg = "PROBLEM WITH RESPONSE. TEXT RECEIVED: {}" - raise ValueError(msg) - else: - raise WorkersUnreachableError() + # def results_ready(self, sim): + # result_url = ( + # f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" + # f"/query/{sim.job_id}/" + # ) + # job_response = self.remote_query_job(result_url) + # msg = "{0} failed on host: {1}".format(sim.job_id, WORKER_HN) + # if job_response.status_code == 200: # Valid response + # return job_response.text + # else: + # print("did not expect response with status_code", job_response.status_code) + # raise JobFailError(msg) + + # def get_results(self, sim): + # result_url = ( + # f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" + # f"/get_job/{sim.job_id}/" + # ) + # job_response = self.remote_get_job(result_url) + # if job_response.status_code == 200: # Valid response + # try: + # return job_response.json() + # except ValueError: + # # Got back a bad response. Get the text and re-raise + # msg = "PROBLEM WITH RESPONSE. TEXT RECEIVED: {}" + # raise ValueError(msg) + # else: + # raise WorkersUnreachableError() class SyncCompute(Compute): diff --git a/webapp/apps/comp/serializers.py b/webapp/apps/comp/serializers.py index 86bcbbbc..0571b62c 100755 --- a/webapp/apps/comp/serializers.py +++ b/webapp/apps/comp/serializers.py @@ -172,6 +172,12 @@ def to_representation(self, obj): rep["sim"]["authors"] = sorted(rep["sim"]["authors"]) return rep + def to_internal_value(self, data): + print("to_internal_value", data) + if "outputs" in data: + data.update(**data.pop("outputs")) + return super().to_internal_value(data) + class Meta: model = Inputs fields = ( diff --git a/webapp/apps/comp/views/api.py b/webapp/apps/comp/views/api.py index aeedf2ea..0da624d2 100755 --- a/webapp/apps/comp/views/api.py +++ b/webapp/apps/comp/views/api.py @@ -250,31 +250,8 @@ def get_sim_data(self, user, as_remote, username, title, model_pk): return Response(data, status=status.HTTP_200_OK) elif self.object.status == "STARTED": return Response(data, status=status.HTTP_200_OK) - - compute = Compute() - try: - job_ready = compute.results_ready(self.object) - except JobFailError: - self.object.traceback = "" - self.object.save() - return Response( - {"error": "model error"}, status=status.HTTP_400_BAD_REQUEST - ) - # something happened and the exception was not caught - if job_ready == "FAIL": - result = compute.get_results(self.object) - if result["traceback"]: - traceback_ = result["traceback"] - else: - traceback_ = "Error: The traceback for this error is unavailable." 
- self.object.traceback = traceback_ - self.object.status = "WORKER_FAILURE" - self.object.save() - return Response( - {"error": "model error"}, status=status.HTTP_400_BAD_REQUEST - ) - data.update(sim.data) - return Response(data, status=status.HTTP_202_ACCEPTED) + else: + return Response(data, status=status.HTTP_202_ACCEPTED) def get(self, request, *args, **kwargs): return self.get_sim_data(request.user, as_remote=False, **kwargs) @@ -401,6 +378,7 @@ def put(self, request, *args, **kwargs): if request.user.username == "comp-api-user": data = request.data + print("got data", data) ser = InputsSerializer(data=request.data) if ser.is_valid(): data = ser.validated_data diff --git a/webapp/apps/publish/serializers.py b/webapp/apps/publish/serializers.py index 05043538..7b4fc59a 100755 --- a/webapp/apps/publish/serializers.py +++ b/webapp/apps/publish/serializers.py @@ -91,3 +91,9 @@ class Meta: "version", ) read_only = ("sim_count", "status", "user_count", "version") + + +class DeploymentSerializer(serializers.ModelSerializer): + class Meta: + model = Project + fields = ("latest_tag",) diff --git a/webapp/apps/publish/urls.py b/webapp/apps/publish/urls.py index 10ead5a5..732c3a6d 100644 --- a/webapp/apps/publish/urls.py +++ b/webapp/apps/publish/urls.py @@ -1,7 +1,13 @@ from django.conf.urls import url from django.urls import path -from .views import ProjectView, ProjectDetailView, ProjectDetailAPIView, ProjectAPIView +from .views import ( + ProjectView, + ProjectDetailView, + ProjectDetailAPIView, + ProjectAPIView, + DeploymentAPIView, +) urlpatterns = [ @@ -11,5 +17,10 @@ ProjectDetailAPIView.as_view(), name="project_detail_api", ), + path( + "api///deployments/", + DeploymentAPIView.as_view(), + name="project_deployments_api", + ), path("api/", ProjectAPIView.as_view(), name="project_create_api"), ] diff --git a/webapp/apps/publish/views.py b/webapp/apps/publish/views.py index 52dea196..1657557c 100644 --- a/webapp/apps/publish/views.py +++ b/webapp/apps/publish/views.py @@ -24,7 +24,11 @@ from webapp.apps.users.models import Project, is_profile_active from webapp.apps.users.permissions import StrictRequiresActive, RequiresActive -from .serializers import PublishSerializer, ProjectWithVersionSerializer +from .serializers import ( + PublishSerializer, + ProjectWithVersionSerializer, + DeploymentSerializer, +) from .utils import title_fixup @@ -51,6 +55,12 @@ def get(self, request, *args, **kwargs): class ProjectDetailAPIView(GetProjectMixin, APIView): + authentication_classes = ( + SessionAuthentication, + BasicAuthentication, + TokenAuthentication, + ) + def get(self, request, *args, **kwargs): project = self.get_object(**kwargs) serializer = PublishSerializer(project, context={"request": request}) @@ -143,6 +153,31 @@ def post(self, request, *args, **kwargs): return Response(status=status.HTTP_401_UNAUTHORIZED) +class DeploymentAPIView(GetProjectMixin, APIView): + authentication_classes = ( + SessionAuthentication, + BasicAuthentication, + TokenAuthentication, + ) + + def get(self, request, *args, **kwargs): + ser = DeploymentSerializer( + self.get_object(**kwargs), context={"request": request} + ) + return Response(ser.data, status=status.HTTP_200_OK) + + def post(self, request, *args, **kwargs): + project = self.get_object(**kwargs) + if request.user.is_authenticated and project.has_write_access(request.user): + serializer = DeploymentSerializer(project, data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + 
else: + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + return Response(status=status.HTTP_401_UNAUTHORIZED) + + class RecentModelsAPIView(generics.ListAPIView): permission_classes = (StrictRequiresActive,) authentication_classes = ( diff --git a/webapp/apps/users/models.py b/webapp/apps/users/models.py index efce6204..409c9754 100755 --- a/webapp/apps/users/models.py +++ b/webapp/apps/users/models.py @@ -276,6 +276,7 @@ def version(self): return None def has_write_access(self, user): + print("Got user", user) return bool( user and user.is_authenticated diff --git a/workers/cs_workers/clients/core.py b/workers/cs_workers/clients/core.py index de454d26..99005c2f 100644 --- a/workers/cs_workers/clients/core.py +++ b/workers/cs_workers/clients/core.py @@ -87,5 +87,5 @@ def _resources(self, app, action=None): return resources def _list_secrets(self, app): - secret = Secrets(app["owner"], app["title"], self.project) + secret = ModelSecrets(app["owner"], app["title"], self.project) return secret.list_secrets() diff --git a/workers/cs_workers/clients/job.py b/workers/cs_workers/clients/job.py index 08b7e389..5205950a 100644 --- a/workers/cs_workers/clients/job.py +++ b/workers/cs_workers/clients/job.py @@ -77,11 +77,9 @@ def env(self, owner, title, config): def configure(self, owner, title, tag, job_id=None): if job_id is None: - job_id = "job-" + str(uuid.uuid4()) + job_id = str(uuid.uuid4()) else: job_id = str(job_id) - if not str(job_id).startswith("job-"): - job_id = f"job-{job_id}" if (owner, title) not in self.config: self.config.update(self.get_config([(owner, title)])) @@ -110,7 +108,9 @@ def configure(self, owner, title, tag, job_id=None): ), ) # Create the specification of deployment - spec = kclient.V1JobSpec(template=template, backoff_limit=1) + spec = kclient.V1JobSpec( + template=template, backoff_limit=1, ttl_seconds_after_finished=7200 + ) # Instantiate the job object job = kclient.V1Job( api_version="batch/v1", @@ -125,6 +125,8 @@ def configure(self, owner, title, tag, job_id=None): return job def save_job_kwargs(self, job_id, job_kwargs): + if not job_id.startswith("job-"): + job_id = f"job-{job_id}" with redis.Redis(**redis_conn) as rclient: rclient.set(job_id, json.dumps(job_kwargs)) diff --git a/workers/cs_workers/clients/model_secrets.py b/workers/cs_workers/clients/model_secrets.py index 30f1544a..a7dda9a2 100644 --- a/workers/cs_workers/clients/model_secrets.py +++ b/workers/cs_workers/clients/model_secrets.py @@ -15,26 +15,12 @@ def __init__(self, owner, title, project): self.project = project self.safe_owner = clean(owner) self.safe_title = clean(title) - self.client = None + super().__init__(project) def set_secret(self, name, value): - return self._set_secret(name, value) - - def get_secret(self, name): - return self._get_secret(name) - - def list_secrets(self): - return self._get_secret() - - def delete_secret(self, name): - return self._set_secret(name, None) - - def _set_secret(self, name, value): secret_name = f"{self.safe_owner}_{self.safe_title}" - - client = self._client() try: - secret_val = self._get_secret() + secret_val = self.get_secret() except SecretNotFound: secret_val = {name: value} return super().set_secret(secret_name, secret_val) @@ -48,17 +34,9 @@ def _set_secret(self, name, value): return super().set_secret(secret_name, secret_val) - def _get_secret(self, name=None): - from google.api_core import exceptions - + def get_secret(self, name=None): secret_name = f"{self.safe_owner}_{self.safe_title}" - - client = self._client() 
- - try: - secret = json.loads(super().get_secret(secret_name)) - except exceptions.NotFound: - raise SecretNotFound() + secret = json.loads(super().get_secret(secret_name)) if name and name in secret: return secret[name] @@ -67,6 +45,12 @@ def _get_secret(self, name=None): else: return secret + def list_secrets(self): + return self.get_secret() + + def delete_secret(self, name): + return self.set_secret(name, None) + def handle(args: argparse.Namespace): secrets = Secrets(args.owner, args.title, args.project) diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index 95b9cbbf..7ae8b80c 100644 --- a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -172,9 +172,10 @@ def push_app_image(self, app): run(f"{cmd_prefix} {self.cr}/{self.project}/{img_name}:{self.tag}") if self.cs_url is not None: - resp = requests.put( - f"{self.cs_url}/publish/api/{app['owner']}/{app['title']}/detail/", + resp = requests.post( + f"{self.cs_url}/publish/api/{app['owner']}/{app['title']}/deployments/", json={"latest_tag": self.tag}, + headers={"Authorization": f"Token {self.cs_api_token}"}, ) assert ( resp.status_code == 200 diff --git a/workers/cs_workers/executors/api_task.py b/workers/cs_workers/executors/api_task.py index c0d251cc..e73a54de 100644 --- a/workers/cs_workers/executors/api_task.py +++ b/workers/cs_workers/executors/api_task.py @@ -42,8 +42,8 @@ async def post(self): task_id = payload.pop("task_id", None) if task_id is None: task_id = str(uuid.uuid4()) - task_kwargs = payload.get("task_kwargs") or {} - async_task = async_task_wrapper(task_id, task_name, handler, **task_kwargs) + task_kwargs = payload.get("task_kwargs") + async_task = async_task_wrapper(task_id, task_name, handler, task_kwargs) asyncio.create_task(async_task) self.set_status(200) self.write({"status": "PENDING", "task_id": task_id}) @@ -64,8 +64,8 @@ async def post(self): task_id = payload.pop("task_id", None) if task_id is None: task_id = str(uuid.uuid4()) - task_kwargs = payload.get("task_kwargs") or {} - result = await sync_task_wrapper(task_id, task_name, handler, **task_kwargs) + task_kwargs = payload.get("task_kwargs") + result = await sync_task_wrapper(task_id, task_name, handler, task_kwargs) self.write(result) diff --git a/workers/cs_workers/executors/job.py b/workers/cs_workers/executors/job.py index ae07e41a..24c01ecc 100644 --- a/workers/cs_workers/executors/job.py +++ b/workers/cs_workers/executors/job.py @@ -1,4 +1,5 @@ import argparse +import asyncio import functools import json import os @@ -29,7 +30,9 @@ def sim_handler(task_id, meta_param_dict, adjustment): def main(args: argparse.Namespace): - async_task_wrapper(args.job_id, args.route_name, routes[args.route_name]) + asyncio.run( + async_task_wrapper(args.job_id, args.route_name, routes[args.route_name]) + ) def cli(subparsers: argparse._SubParsersAction): diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index bed63c91..83c52f8b 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -29,12 +29,13 @@ pass -async def sync_task_wrapper(task_id, task_name, func, **task_kwargs): +async def sync_task_wrapper(task_id, task_name, func, task_kwargs=None): + print("sync task", task_id, func, task_kwargs) start = time.time() traceback_str = None res = {} try: - outputs = func(task_id, **task_kwargs) + outputs = func(task_id, **(task_kwargs or {})) res.update(outputs) except Exception: 
traceback_str = traceback.format_exc() @@ -50,20 +51,22 @@ async def sync_task_wrapper(task_id, task_name, func, **task_kwargs): return res -async def async_task_wrapper(task_id, task_name, func, **task_kwargs): - print("sim task", task_id, func) +async def async_task_wrapper(task_id, task_name, func, task_kwargs=None): + print("async task", task_id, func, task_kwargs) start = time.time() traceback_str = None res = {"job_id": task_id} try: - print("calling func", func) - if not task_kwargs: + if task_kwargs is None: + if not task_id.startswith("job-"): + _task_id = f"job-{task_id}" + else: + _task_id = task_id with redis.Redis(**redis_conn) as rclient: - task_kwargs = rclient.get(task_id) - if task_kwargs is None: - raise KeyError(f"No value found for job id: {task_id}") - task_kwargs = json.loads(task_kwargs.decode()) - outputs = func(task_id, **task_kwargs) + task_kwargs = rclient.get(_task_id) + if task_kwargs is not None: + task_kwargs = json.loads(task_kwargs.decode()) + outputs = func(task_id, **(task_kwargs or {})) res.update( { "model_version": functions.get_version(), diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index a20da8f9..7b5f43cf 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -97,6 +97,7 @@ def __init__( self.secret_template = yaml.safe_load(f.read()) self._redis_secrets = None + self._secrets = None def build(self): """ @@ -255,7 +256,7 @@ def cs_api_token(self): @property def secrets(self): if self._secrets is None: - self._secrets = Secrets() + self._secrets = Secrets(self.project) return self._secrets @@ -284,6 +285,10 @@ def config(args: argparse.Namespace): cluster.config() +def serve(args: argparse.Namespace): + run("kubectl port-forward svc/scheduler 8888:80") + + def cli(subparsers: argparse._SubParsersAction): parser = subparsers.add_parser("services", aliases=["svc"]) svc_subparsers = parser.add_subparsers() @@ -298,3 +303,6 @@ def cli(subparsers: argparse._SubParsersAction): config_parser = svc_subparsers.add_parser("config") config_parser.add_argument("--out", "-o") config_parser.set_defaults(func=config) + + serve_parser = svc_subparsers.add_parser("serve") + serve_parser.set_defaults(func=serve) diff --git a/workers/cs_workers/services/scheduler.py b/workers/cs_workers/services/scheduler.py index 67562090..43cde6fb 100644 --- a/workers/cs_workers/services/scheduler.py +++ b/workers/cs_workers/services/scheduler.py @@ -21,7 +21,7 @@ class Payload(ma.Schema): task_kwargs = ma.fields.Dict( keys=ma.fields.Str(), values=ma.fields.Field(), missing=dict ) - tag = ma.fields.Str(required=False) + tag = ma.fields.Str(required=False, allow_none=True) def get_projects(): From 9937b4edcae2aa4f1f6ff929a05014976a5c17d4 Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 21 May 2020 10:26:50 -0400 Subject: [PATCH 19/55] Reconcile worker/webapp interfaces - Begin moving from job_id to task_id - Drop use of queue length - Drop cluster status checks (for now) --- webapp/apps/comp/asyncsubmit.py | 6 +-- webapp/apps/comp/compute.py | 33 +------------- webapp/apps/comp/parser.py | 7 ++- webapp/apps/comp/serializers.py | 8 +++- webapp/apps/comp/tests/compute.py | 45 +------------------- webapp/apps/comp/tests/test_asyncviews.py | 24 ++++++++--- webapp/apps/comp/views/api.py | 1 - webapp/apps/conftest.py | 2 + webapp/apps/publish/serializers.py | 4 +- webapp/apps/publish/tests/test_views.py | 23 ++++++++++ webapp/apps/publish/views.py | 6 +++ workers/cs_workers/executors/task_wrapper.py | 2 
+- workers/cs_workers/services/manage.py | 4 ++ 13 files changed, 71 insertions(+), 94 deletions(-) diff --git a/webapp/apps/comp/asyncsubmit.py b/webapp/apps/comp/asyncsubmit.py index 8485efc7..eff07a15 100755 --- a/webapp/apps/comp/asyncsubmit.py +++ b/webapp/apps/comp/asyncsubmit.py @@ -122,14 +122,13 @@ def submit(self): "adjustment": inputs.deserialized_inputs, } print("submit", data) - self.submitted_id, self.max_q_length = self.compute.submit_job( + self.submitted_id = self.compute.submit_job( project=inputs.project, task_name=actions.SIM, task_kwargs=data, tag=self.sim.project.latest_tag, ) print(f"job id: {self.submitted_id}") - print(f"q lenghth: {self.max_q_length}") self.sim = self.save() return self.sim @@ -142,8 +141,7 @@ def save(self): cur_dt = timezone.now() - future_offset_seconds = (self.max_q_length) * sim.project.exp_task_time - future_offset = datetime.timedelta(seconds=future_offset_seconds) + future_offset = datetime.timedelta(seconds=sim.project.exp_task_time) expected_completion = cur_dt + future_offset sim.exp_comp_datetime = expected_completion diff --git a/webapp/apps/comp/compute.py b/webapp/apps/comp/compute.py index 4335d417..577898f7 100755 --- a/webapp/apps/comp/compute.py +++ b/webapp/apps/comp/compute.py @@ -45,7 +45,6 @@ def submit_job(self, project, task_name, task_kwargs, tag=None): ) def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): - queue_length = 0 submitted = False attempts = 0 while not submitted: @@ -58,7 +57,6 @@ def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): submitted = True data = response.json() job_id = data["task_id"] - queue_length = 0 # data["qlength"] else: print("FAILED: ", WORKER_HN) attempts += 1 @@ -72,36 +70,7 @@ def submit(self, tasks, url, increment_counter=True, use_wnc_offset=True): print("Exceeded max attempts. Bailing out.") raise WorkersUnreachableError() - return job_id, queue_length - - # def results_ready(self, sim): - # result_url = ( - # f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" - # f"/query/{sim.job_id}/" - # ) - # job_response = self.remote_query_job(result_url) - # msg = "{0} failed on host: {1}".format(sim.job_id, WORKER_HN) - # if job_response.status_code == 200: # Valid response - # return job_response.text - # else: - # print("did not expect response with status_code", job_response.status_code) - # raise JobFailError(msg) - - # def get_results(self, sim): - # result_url = ( - # f"http://{WORKER_HN}/{sim.project.owner.user.username}/{sim.project.title}" - # f"/get_job/{sim.job_id}/" - # ) - # job_response = self.remote_get_job(result_url) - # if job_response.status_code == 200: # Valid response - # try: - # return job_response.json() - # except ValueError: - # # Got back a bad response. Get the text and re-raise - # msg = "PROBLEM WITH RESPONSE. 
TEXT RECEIVED: {}" - # raise ValueError(msg) - # else: - # raise WorkersUnreachableError() + return job_id class SyncCompute(Compute): diff --git a/webapp/apps/comp/parser.py b/webapp/apps/comp/parser.py index f74737f8..0f3d76f9 100755 --- a/webapp/apps/comp/parser.py +++ b/webapp/apps/comp/parser.py @@ -54,10 +54,10 @@ def post(self, errors_warnings, params): "adjustment": params, "errors_warnings": errors_warnings, } - job_id, queue_length = self.compute.submit_job( + job_id = self.compute.submit_job( project=self.project, task_name=actions.PARSE, task_kwargs=data ) - return job_id, queue_length + return job_id class Parser: @@ -77,10 +77,9 @@ def parse_parameters(self): adjustment[sect].update(self.clean_inputs.get(sect, {})) # kick off async parsing - job_id, queue_length = self.post(errors_warnings, adjustment) + job_id = self.post(errors_warnings, adjustment) return { "job_id": job_id, - "queue_length": queue_length, "adjustment": adjustment, "errors_warnings": errors_warnings, "custom_adjustment": None, diff --git a/webapp/apps/comp/serializers.py b/webapp/apps/comp/serializers.py index 0571b62c..4dc2a74a 100755 --- a/webapp/apps/comp/serializers.py +++ b/webapp/apps/comp/serializers.py @@ -22,6 +22,11 @@ class OutputsSerializer(serializers.Serializer): aggr_outputs = serializers.JSONField(required=False) version = serializers.CharField(required=False) + def to_internal_value(self, data): + if "task_id" in data: + data["job_id"] = data.pop("task_id") + return super().to_internal_value(data) + class PendingPermissionSerializer(serializers.ModelSerializer): profile = serializers.StringRelatedField() @@ -173,9 +178,10 @@ def to_representation(self, obj): return rep def to_internal_value(self, data): - print("to_internal_value", data) if "outputs" in data: data.update(**data.pop("outputs")) + if "task_id" in data: + data["job_id"] = data.pop("task_id") return super().to_internal_value(data) class Meta: diff --git a/webapp/apps/comp/tests/compute.py b/webapp/apps/comp/tests/compute.py index 415812b9..f67d9e33 100755 --- a/webapp/apps/comp/tests/compute.py +++ b/webapp/apps/comp/tests/compute.py @@ -26,58 +26,15 @@ def __init__(self, num_times_to_wait=0): def remote_submit_job(self, url, data, timeout, headers=None): print("mocking:", url) with requests_mock.Mocker() as mock: - resp = {"job_id": str(uuid.uuid4()), "qlength": 2} + resp = {"task_id": str(uuid.uuid4())} resp = json.dumps(resp) print("mocking", url) mock.register_uri("POST", url, text=resp) self.last_posted = data return Compute.remote_submit_job(self, url, data, timeout) - def remote_query_job(self, url): - # Need to login as the comp-api-user - self.client.login(username=self.user, password=self.password) - if isinstance(self.client, APIClient): - format_kwarg = {"format": "json"} - else: - format_kwarg = {"content_type": "application/json"} - resp = self.client.put( - "/outputs/api/", - data=dict(json.loads(self.outputs), **{"job_id": self.sim.job_id}), - **format_kwarg, - ) - if resp.status_code != 200: - raise CallbackException( - f"Status code: {resp.status_code}\n {json.dumps(resp.data, indent=4)}" - ) - self.client = None - self.sim = None - with requests_mock.Mocker() as mock: - text = "NO" - mock.register_uri("GET", url, text=text) - return Compute.remote_query_job(self, url) - - def remote_get_job(self, url): - self.count += 1 - with requests_mock.Mocker() as mock: - mock.register_uri("GET", url, text=self.outputs) - return Compute.remote_get_job(self, url) - def reset_count(self): """ reset worker node count """ 
self.count = 0 - - -class MockComputeWorkerFailure(MockCompute): - next_response = None - outputs = json.dumps({"status": "WORKER_FAILURE", "traceback": "Error: whoops"}) - - def remote_query_job(self, url): - self.client = None - self.sim = None - with requests_mock.Mocker() as mock: - print("mocking: ", url) - text = "FAIL" - mock.register_uri("GET", url, text=text) - return Compute.remote_query_job(self, url) diff --git a/webapp/apps/comp/tests/test_asyncviews.py b/webapp/apps/comp/tests/test_asyncviews.py index 709f7f68..478d562d 100755 --- a/webapp/apps/comp/tests/test_asyncviews.py +++ b/webapp/apps/comp/tests/test_asyncviews.py @@ -19,7 +19,7 @@ from webapp.apps.comp.models import Inputs, Simulation, PendingPermission, ANON_BEFORE from webapp.apps.comp.ioutils import get_ioutils from webapp.apps.comp.exceptions import ResourceLimitException -from .compute import MockCompute, MockComputeWorkerFailure +from .compute import MockCompute from .utils import ( read_outputs, _submit_inputs, @@ -112,10 +112,10 @@ def run(self): defaults_resp_data = {"status": "SUCCESS", **self.defaults} adj = self.inputs adj_job_id = str(uuid.uuid4()) - adj_resp_data = {"job_id": adj_job_id, "qlength": 1} + adj_resp_data = {"task_id": adj_job_id} adj_callback_data = { "status": "SUCCESS", - "job_id": adj_job_id, + "task_id": adj_job_id, **{"errors_warnings": self.errors_warnings}, } with requests_mock.Mocker(real_http=True) as mock: @@ -157,6 +157,7 @@ def post_adjustment( json=lambda request, context: { "defaults": defaults_resp_data, "parse": adj_resp_data, + "version": {"status": "success", "version": "v1"}, }[request.json()["task_name"]], ) @@ -215,6 +216,19 @@ def poll_simulation(self, inputs: Inputs): assert_status(202, get_resp_pend, "poll_simulation") def check_simulation_finished(self, model_pk: int): + self.sim = Simulation.objects.get(project=self.project, model_pk=model_pk) + set_auth_token(self.api_client, self.comp_api_user.user) + resp = self.api_client.put( + "/outputs/api/", + data=dict( + json.loads(self.mockcompute.outputs), **{"task_id": self.sim.job_id} + ), + format="json", + ) + if resp.status_code != 200: + raise Exception( + f"Status code: {resp.status_code}\n {json.dumps(resp.data, indent=4)}" + ) self.client.force_login(self.sim_owner.user) self.api_client.force_login(self.sim_owner.user) @@ -223,7 +237,7 @@ def check_simulation_finished(self, model_pk: int): ) assert_status(200, get_resp_succ, "check_simulation_finished") model_pk = get_resp_succ.data["model_pk"] - self.sim = Simulation.objects.get(project=self.project, model_pk=model_pk) + self.sim.refresh_from_db() assert self.sim.status == "SUCCESS" assert self.sim.outputs assert self.sim.traceback is None @@ -427,7 +441,7 @@ def test_new_sim(self, use_api, client, api_client, profile, worker_url): defaults = self.defaults() inputs_resp_data = {"status": "SUCCESS", **defaults} - adj_resp_data = {"job_id": str(uuid.uuid4()), "qlength": 1} + adj_resp_data = {"task_id": str(uuid.uuid4())} with requests_mock.Mocker() as mock: mock.register_uri( "POST", diff --git a/webapp/apps/comp/views/api.py b/webapp/apps/comp/views/api.py index 0da624d2..840574d9 100755 --- a/webapp/apps/comp/views/api.py +++ b/webapp/apps/comp/views/api.py @@ -378,7 +378,6 @@ def put(self, request, *args, **kwargs): if request.user.username == "comp-api-user": data = request.data - print("got data", data) ser = InputsSerializer(data=request.data) if ser.is_valid(): data = ser.validated_data diff --git a/webapp/apps/conftest.py b/webapp/apps/conftest.py index 
e27bf9dc..6f7558de 100755 --- a/webapp/apps/conftest.py +++ b/webapp/apps/conftest.py @@ -14,6 +14,7 @@ from django.utils.timezone import make_aware from rest_framework.authtoken.models import Token from rest_framework.test import APIClient +from guardian.shortcuts import assign_perm from webapp.settings import USE_STRIPE from webapp.apps.billing.models import ( @@ -81,6 +82,7 @@ def django_db_setup(django_db_setup, django_db_blocker): for project_config in projects: project = Project.objects.create(**dict(common, **project_config)) + assign_perm("write_project", comp_api_user, project) if USE_STRIPE: create_pro_billing_objects() diff --git a/webapp/apps/publish/serializers.py b/webapp/apps/publish/serializers.py index 7b4fc59a..540264d1 100755 --- a/webapp/apps/publish/serializers.py +++ b/webapp/apps/publish/serializers.py @@ -8,7 +8,7 @@ class PublishSerializer(serializers.ModelSerializer): cluster_type = serializers.CharField(required=False) sim_count = serializers.IntegerField(required=False) user_count = serializers.IntegerField(required=False) - latest_tag = serializers.CharField(required=False) + latest_tag = serializers.CharField(allow_null=True, required=False) # see to_representation # has_write_access = serializers.BooleanField(source="has_write_access") @@ -53,7 +53,7 @@ class ProjectWithVersionSerializer(serializers.ModelSerializer): sim_count = serializers.IntegerField(required=False) version = serializers.CharField(required=False) user_count = serializers.IntegerField(required=False) - latest_tag = serializers.CharField(required=False) + latest_tag = serializers.CharField(allow_null=True, required=False) # see to_representation # has_write_access = serializers.BooleanField(source="has_write_access") diff --git a/webapp/apps/publish/tests/test_views.py b/webapp/apps/publish/tests/test_views.py index 814fb25c..3dac62c1 100755 --- a/webapp/apps/publish/tests/test_views.py +++ b/webapp/apps/publish/tests/test_views.py @@ -28,6 +28,7 @@ def test_post(self, client): "cpu": 3, "memory": 9, "listed": True, + "latest_tag": "v1", } resp = client.post("/publish/api/", post_data) assert resp.status_code == 401 @@ -42,6 +43,9 @@ def test_post(self, client): assert project assert project.server_cost + api_user = Profile.objects.get(user__username="comp-api-user") + assert project.has_write_access(api_user.user) + def test_get_detail_api(self, api_client, client, test_models): exp = { "title": "Detail-Test", @@ -55,6 +59,7 @@ def test_get_detail_api(self, api_client, client, test_models): "server_cost": Decimal("0.1"), "listed": True, "status": "live", + "latest_tag": "v1", } owner = Profile.objects.get(user__username="modeler") project = Project.objects.create(**dict(exp, **{"owner": owner})) @@ -84,6 +89,7 @@ def test_put_detail_api(self, client, test_models, profile, password): "repo_tag": "dev", "cpu": 2, "memory": 6, + "lastet_tag": "v2", } # not logged in --> not authorized resp = client.put( @@ -238,3 +244,20 @@ def test_recent_models_api(self, api_client, test_models, profile): assert "sim_count" in project and "user_count" in project else: assert "sim_count" not in project and "user_count" not in project + + def test_deployments_api(self, api_client, test_models): + prof = Profile.objects.get(user__username="comp-api-user") + api_client.force_login(prof.user) + + project = test_models[0].project + assert project.has_write_access(prof.user) + + resp = api_client.post( + f"/publish/api/{project.owner}/{project.title}/deployments/", + data={"latest_tag": "v5"}, + format="json", + ) 
+ + assert resp.status_code == 200 + project.refresh_from_db() + assert project.latest_tag == "v5" diff --git a/webapp/apps/publish/views.py b/webapp/apps/publish/views.py index 1657557c..a41a739e 100644 --- a/webapp/apps/publish/views.py +++ b/webapp/apps/publish/views.py @@ -19,6 +19,8 @@ TokenAuthentication, ) +from guardian.shortcuts import assign_perm + # from webapp.settings import DEBUG from webapp.apps.users.models import Project, is_profile_active @@ -31,6 +33,8 @@ ) from .utils import title_fixup +User = get_user_model() + class GetProjectMixin: def get_object(self, username, title): @@ -130,6 +134,8 @@ def post(self, request, *args, **kwargs): server_cost=0.1, ) status_url = request.build_absolute_uri(model.app_url) + api_user = User.objects.get(username="comp-api-user") + assign_perm("write_project", api_user, model) try: send_mail( f"{request.user.username} is publishing a model on Compute Studio!", diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index 83c52f8b..78d117d1 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -55,7 +55,7 @@ async def async_task_wrapper(task_id, task_name, func, task_kwargs=None): print("async task", task_id, func, task_kwargs) start = time.time() traceback_str = None - res = {"job_id": task_id} + res = {"task_id": task_id} try: if task_kwargs is None: if not task_id.startswith("job-"): diff --git a/workers/cs_workers/services/manage.py b/workers/cs_workers/services/manage.py index 7b5f43cf..9e1e2ea7 100644 --- a/workers/cs_workers/services/manage.py +++ b/workers/cs_workers/services/manage.py @@ -70,11 +70,13 @@ def __init__( project, kubernetes_target="kubernetes/", use_kind=False, + cs_url=None, cs_api_token=None, ): self.tag = tag self.project = project self.use_kind = use_kind + self.cs_url = cs_url self._cs_api_token = cs_api_token if kubernetes_target is None: @@ -205,6 +207,7 @@ def write_redis_deployment(self): def write_secret(self): secrets = copy.deepcopy(self.secret_template) + secrets["stringData"]["CS_URL"] = self.cs_url secrets["stringData"]["CS_API_TOKEN"] = self.cs_api_token redis_secrets = self.redis_secrets() for name, sec in redis_secrets.items(): @@ -266,6 +269,7 @@ def cluster_from_args(args: argparse.Namespace): project=args.project, kubernetes_target=getattr(args, "out", None), use_kind=getattr(args, "use_kind", None), + cs_url=getattr(args, "cs_url", None), cs_api_token=getattr(args, "cs_api_token", None), ) From b83694f3b9dd65f59e8ae1284989724156e30c7e Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 21 May 2020 11:01:35 -0400 Subject: [PATCH 20/55] Swap from requests to httpx and clean up other deps --- workers/cs_workers/clients/core.py | 1 - workers/cs_workers/clients/publish.py | 4 ++-- workers/cs_workers/executors/job.py | 4 ++-- workers/cs_workers/executors/task_wrapper.py | 10 ++++++---- workers/cs_workers/utils.py | 4 ++-- workers/dockerfiles/Dockerfile.scheduler | 4 ++-- workers/setup.py | 15 +++++++++++++-- 7 files changed, 27 insertions(+), 15 deletions(-) diff --git a/workers/cs_workers/clients/core.py b/workers/cs_workers/clients/core.py index 99005c2f..d86cb993 100644 --- a/workers/cs_workers/clients/core.py +++ b/workers/cs_workers/clients/core.py @@ -6,7 +6,6 @@ import yaml from pathlib import Path -import requests from git import Repo, InvalidGitRepositoryError diff --git a/workers/cs_workers/clients/publish.py b/workers/cs_workers/clients/publish.py index 7ae8b80c..4e0c7532 100644 --- 
a/workers/cs_workers/clients/publish.py +++ b/workers/cs_workers/clients/publish.py @@ -5,7 +5,7 @@ import yaml from pathlib import Path -import requests +import httpx from cs_workers.utils import run, clean from cs_workers.secrets import Secrets @@ -172,7 +172,7 @@ def push_app_image(self, app): run(f"{cmd_prefix} {self.cr}/{self.project}/{img_name}:{self.tag}") if self.cs_url is not None: - resp = requests.post( + resp = httpx.post( f"{self.cs_url}/publish/api/{app['owner']}/{app['title']}/deployments/", json={"latest_tag": self.tag}, headers={"Authorization": f"Token {self.cs_api_token}"}, diff --git a/workers/cs_workers/executors/job.py b/workers/cs_workers/executors/job.py index 24c01ecc..fef5cf20 100644 --- a/workers/cs_workers/executors/job.py +++ b/workers/cs_workers/executors/job.py @@ -5,7 +5,7 @@ import os import redis -import requests +import httpx import cs_storage from cs_workers.executors.task_wrapper import async_task_wrapper @@ -18,7 +18,7 @@ def sim_handler(task_id, meta_param_dict, adjustment): print("got result") outputs = cs_storage.serialize_to_json(outputs) print("storing results") - resp = requests.post( + resp = httpx.post( "http://outputs-processor/write/", json={"task_id": task_id, "outputs": outputs} ) print("got resp", resp.status_code, resp.url) diff --git a/workers/cs_workers/executors/task_wrapper.py b/workers/cs_workers/executors/task_wrapper.py index 78d117d1..0808de39 100644 --- a/workers/cs_workers/executors/task_wrapper.py +++ b/workers/cs_workers/executors/task_wrapper.py @@ -6,7 +6,7 @@ import traceback import redis -import requests +import httpx import cs_storage from cs_workers.utils import redis_conn_from_env @@ -86,9 +86,11 @@ async def async_task_wrapper(task_id, task_name, func, task_kwargs=None): res["status"] = "FAIL" res["traceback"] = traceback_str print("saving results...") - resp = requests.post( - "http://outputs-processor/push/", json={"task_name": task_name, "result": res} - ) + async with httpx.AsyncClient() as client: + resp = client.post( + "http://outputs-processor/push/", + json={"task_name": task_name, "result": res}, + ) print("resp", resp.status_code, resp.url) assert resp.status_code == 200, f"Got code: {resp.status_code}" diff --git a/workers/cs_workers/utils.py b/workers/cs_workers/utils.py index bdc83ad7..d0d102d7 100644 --- a/workers/cs_workers/utils.py +++ b/workers/cs_workers/utils.py @@ -4,7 +4,7 @@ import subprocess import time -import requests +import httpx def clean(word): @@ -33,7 +33,7 @@ def read_github_file(org, repo, branch, filename): Read data from github api. Ht to @andersonfrailey for decoding the response """ url = f"https://api.github.com/repos/{org}/{repo}/contents/{filename}?ref={branch}" - response = requests.get(url) + response = httpx.get(url) if response.status_code == 403: assert "hit rate limit" == 403 assert response.status_code == 200, f"Got code: {response.status_code}" diff --git a/workers/dockerfiles/Dockerfile.scheduler b/workers/dockerfiles/Dockerfile.scheduler index 01b29990..d6d42c29 100755 --- a/workers/dockerfiles/Dockerfile.scheduler +++ b/workers/dockerfiles/Dockerfile.scheduler @@ -20,8 +20,8 @@ COPY cs_workers /home/cs_workers COPY setup.py /home RUN cd /home/ && pip install -e . 
-COPY ./google-creds.json /google-creds.json -ENV GOOGLE_APPLICATION_CREDENTIALS=/google-creds.json +# COPY ./google-creds.json /google-creds.json +# ENV GOOGLE_APPLICATION_CREDENTIALS=/google-creds.json WORKDIR /home diff --git a/workers/setup.py b/workers/setup.py index feaf6304..95569d12 100644 --- a/workers/setup.py +++ b/workers/setup.py @@ -16,9 +16,20 @@ description=("Build, publish, and run Compute Studio workers."), long_description=long_description, long_description_content_type="text/markdown", - url="https://github.com/compute-tooling/compute-studio-workers", + url="https://github.com/compute-tooling/compute-studio", packages=setuptools.find_packages(), - install_requires=["redis", "kubernetes", "gitpython", "pyyaml"], + install_requires=[ + "redis", + "kubernetes", + "gitpython", + "pyyaml", + "googe-cloud-secret-manager", + "httpx", + "dask", + "distributed", + "tornado", + "cs-storage", + ], include_package_data=True, entry_points={ "console_scripts": ["cs-workers=cs_workers.cli:cli", "csw=cs_workers.cli:cli"] From c307491cec34c866da0b6228169bfc6c1e7c729c Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 21 May 2020 11:05:56 -0400 Subject: [PATCH 21/55] Fix typo: --- workers/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workers/setup.py b/workers/setup.py index 95569d12..b22aff71 100644 --- a/workers/setup.py +++ b/workers/setup.py @@ -23,7 +23,7 @@ "kubernetes", "gitpython", "pyyaml", - "googe-cloud-secret-manager", + "google-cloud-secret-manager", "httpx", "dask", "distributed", From ce346e948da7a42cf26229f49606f810a366623d Mon Sep 17 00:00:00 2001 From: hdoupe Date: Thu, 21 May 2020 11:06:06 -0400 Subject: [PATCH 22/55] Fix typo --- static/js/publish.js | 14 +++++++------- static/js/publish.js.map | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/static/js/publish.js b/static/js/publish.js index 94eba02f..6c5d7359 100644 --- a/static/js/publish.js +++ b/static/js/publish.js @@ -28,7 +28,7 @@ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ -var n="undefined"!=typeof window&&"undefined"!=typeof document&&"undefined"!=typeof navigator,r=function(){for(var e=["Edge","Trident","Firefox"],t=0;t=0)return 1;return 0}();var o=n&&window.Promise?function(e){var t=!1;return function(){t||(t=!0,window.Promise.resolve().then((function(){t=!1,e()})))}}:function(e){var t=!1;return function(){t||(t=!0,setTimeout((function(){t=!1,e()}),r))}};function a(e){return e&&"[object Function]"==={}.toString.call(e)}function i(e,t){if(1!==e.nodeType)return[];var n=e.ownerDocument.defaultView.getComputedStyle(e,null);return t?n[t]:n}function s(e){return"HTML"===e.nodeName?e:e.parentNode||e.host}function l(e){if(!e)return document.body;switch(e.nodeName){case"HTML":case"BODY":return e.ownerDocument.body;case"#document":return e.body}var t=i(e),n=t.overflow,r=t.overflowX,o=t.overflowY;return/(auto|scroll|overlay)/.test(n+o+r)?e:l(s(e))}function u(e){return e&&e.referenceNode?e.referenceNode:e}var c=n&&!(!window.MSInputMethodContext||!document.documentMode),f=n&&/MSIE 10/.test(navigator.userAgent);function p(e){return 11===e?c:10===e?f:c||f}function d(e){if(!e)return document.documentElement;for(var t=p(10)?document.body:null,n=e.offsetParent||null;n===t&&e.nextElementSibling;)n=(e=e.nextElementSibling).offsetParent;var r=n&&n.nodeName;return r&&"BODY"!==r&&"HTML"!==r?-1!==["TH","TD","TABLE"].indexOf(n.nodeName)&&"static"===i(n,"position")?d(n):n:e?e.ownerDocument.documentElement:document.documentElement}function h(e){return null!==e.parentNode?h(e.parentNode):e}function m(e,t){if(!(e&&e.nodeType&&t&&t.nodeType))return document.documentElement;var n=e.compareDocumentPosition(t)&Node.DOCUMENT_POSITION_FOLLOWING,r=n?e:t,o=n?t:e,a=document.createRange();a.setStart(r,0),a.setEnd(o,0);var i,s,l=a.commonAncestorContainer;if(e!==l&&t!==l||r.contains(o))return"BODY"===(s=(i=l).nodeName)||"HTML"!==s&&d(i.firstElementChild)!==i?d(l):l;var u=h(e);return u.host?m(u.host,t):m(e,h(t).host)}function v(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"top",n="top"===t?"scrollTop":"scrollLeft",r=e.nodeName;if("BODY"===r||"HTML"===r){var o=e.ownerDocument.documentElement,a=e.ownerDocument.scrollingElement||o;return a[n]}return e[n]}function g(e,t){var n=arguments.length>2&&void 0!==arguments[2]&&arguments[2],r=v(t,"top"),o=v(t,"left"),a=n?-1:1;return e.top+=r*a,e.bottom+=r*a,e.left+=o*a,e.right+=o*a,e}function b(e,t){var n="x"===t?"Left":"Top",r="Left"===n?"Right":"Bottom";return parseFloat(e["border"+n+"Width"])+parseFloat(e["border"+r+"Width"])}function y(e,t,n,r){return Math.max(t["offset"+e],t["scroll"+e],n["client"+e],n["offset"+e],n["scroll"+e],p(10)?parseInt(n["offset"+e])+parseInt(r["margin"+("Height"===e?"Top":"Left")])+parseInt(r["margin"+("Height"===e?"Bottom":"Right")]):0)}function w(e){var t=e.body,n=e.documentElement,r=p(10)&&getComputedStyle(n);return{height:y("Height",t,n,r),width:y("Width",t,n,r)}}var x=function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")},E=function(){function e(e,t){for(var n=0;n2&&void 0!==arguments[2]&&arguments[2],r=p(10),o="HTML"===t.nodeName,a=S(e),s=S(t),u=l(e),c=i(t),f=parseFloat(c.borderTopWidth),d=parseFloat(c.borderLeftWidth);n&&o&&(s.top=Math.max(s.top,0),s.left=Math.max(s.left,0));var h=O({top:a.top-s.top-f,left:a.left-s.left-d,width:a.width,height:a.height});if(h.marginTop=0,h.marginLeft=0,!r&&o){var 
m=parseFloat(c.marginTop),v=parseFloat(c.marginLeft);h.top-=f-m,h.bottom-=f-m,h.left-=d-v,h.right-=d-v,h.marginTop=m,h.marginLeft=v}return(r&&!n?t.contains(u):t===u&&"BODY"!==u.nodeName)&&(h=g(h,t)),h}function T(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=e.ownerDocument.documentElement,r=C(e,n),o=Math.max(n.clientWidth,window.innerWidth||0),a=Math.max(n.clientHeight,window.innerHeight||0),i=t?0:v(n),s=t?0:v(n,"left"),l={top:i-r.top+r.marginTop,left:s-r.left+r.marginLeft,width:o,height:a};return O(l)}function j(e){var t=e.nodeName;if("BODY"===t||"HTML"===t)return!1;if("fixed"===i(e,"position"))return!0;var n=s(e);return!!n&&j(n)}function P(e){if(!e||!e.parentElement||p())return document.documentElement;for(var t=e.parentElement;t&&"none"===i(t,"transform");)t=t.parentElement;return t||document.documentElement}function N(e,t,n,r){var o=arguments.length>4&&void 0!==arguments[4]&&arguments[4],a={top:0,left:0},i=o?P(e):m(e,u(t));if("viewport"===r)a=T(i,o);else{var c=void 0;"scrollParent"===r?"BODY"===(c=l(s(t))).nodeName&&(c=e.ownerDocument.documentElement):c="window"===r?e.ownerDocument.documentElement:r;var f=C(c,i,o);if("HTML"!==c.nodeName||j(i))a=f;else{var p=w(e.ownerDocument),d=p.height,h=p.width;a.top+=f.top-f.marginTop,a.bottom=d+f.top,a.left+=f.left-f.marginLeft,a.right=h+f.left}}var v="number"==typeof(n=n||0);return a.left+=v?n:n.left||0,a.top+=v?n:n.top||0,a.right-=v?n:n.right||0,a.bottom-=v?n:n.bottom||0,a}function R(e){return e.width*e.height}function A(e,t,n,r,o){var a=arguments.length>5&&void 0!==arguments[5]?arguments[5]:0;if(-1===e.indexOf("auto"))return e;var i=N(n,r,a,o),s={top:{width:i.width,height:t.top-i.top},right:{width:i.right-t.right,height:i.height},bottom:{width:i.width,height:i.bottom-t.bottom},left:{width:t.left-i.left,height:i.height}},l=Object.keys(s).map((function(e){return _({key:e},s[e],{area:R(s[e])})})).sort((function(e,t){return t.area-e.area})),u=l.filter((function(e){var t=e.width,r=e.height;return t>=n.clientWidth&&r>=n.clientHeight})),c=u.length>0?u[0].key:l[0].key,f=e.split("-")[1];return c+(f?"-"+f:"")}function M(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,o=r?P(t):m(t,u(n));return C(n,o,r)}function F(e){var t=e.ownerDocument.defaultView.getComputedStyle(e),n=parseFloat(t.marginTop||0)+parseFloat(t.marginBottom||0),r=parseFloat(t.marginLeft||0)+parseFloat(t.marginRight||0);return{width:e.offsetWidth+r,height:e.offsetHeight+n}}function D(e){var t={left:"right",right:"left",bottom:"top",top:"bottom"};return e.replace(/left|right|bottom|top/g,(function(e){return t[e]}))}function I(e,t,n){n=n.split("-")[0];var r=F(e),o={width:r.width,height:r.height},a=-1!==["right","left"].indexOf(n),i=a?"top":"left",s=a?"left":"top",l=a?"height":"width",u=a?"width":"height";return o[i]=t[i]+t[l]/2-r[l]/2,o[s]=n===s?t[s]-r[u]:t[D(s)],o}function L(e,t){return Array.prototype.find?e.find(t):e.filter(t)[0]}function U(e,t,n){return(void 0===n?e:e.slice(0,function(e,t,n){if(Array.prototype.findIndex)return e.findIndex((function(e){return e[t]===n}));var r=L(e,(function(e){return e[t]===n}));return e.indexOf(r)}(e,"name",n))).forEach((function(e){e.function&&console.warn("`modifier.function` is deprecated, use `modifier.fn`!");var n=e.function||e.fn;e.enabled&&a(n)&&(t.offsets.popper=O(t.offsets.popper),t.offsets.reference=O(t.offsets.reference),t=n(t,e))})),t}function z(){if(!this.state.isDestroyed){var 
e={instance:this,styles:{},arrowStyles:{},attributes:{},flipped:!1,offsets:{}};e.offsets.reference=M(this.state,this.popper,this.reference,this.options.positionFixed),e.placement=A(this.options.placement,e.offsets.reference,this.popper,this.reference,this.options.modifiers.flip.boundariesElement,this.options.modifiers.flip.padding),e.originalPlacement=e.placement,e.positionFixed=this.options.positionFixed,e.offsets.popper=I(this.popper,e.offsets.reference,e.placement),e.offsets.popper.position=this.options.positionFixed?"fixed":"absolute",e=U(this.modifiers,e),this.state.isCreated?this.options.onUpdate(e):(this.state.isCreated=!0,this.options.onCreate(e))}}function q(e,t){return e.some((function(e){var n=e.name;return e.enabled&&n===t}))}function B(e){for(var t=[!1,"ms","Webkit","Moz","O"],n=e.charAt(0).toUpperCase()+e.slice(1),r=0;r1&&void 0!==arguments[1]&&arguments[1],n=J.indexOf(e),r=J.slice(n+1).concat(J.slice(0,n));return t?r.reverse():r}var te="flip",ne="clockwise",re="counterclockwise";function oe(e,t,n,r){var o=[0,0],a=-1!==["right","left"].indexOf(r),i=e.split(/(\+|\-)/).map((function(e){return e.trim()})),s=i.indexOf(L(i,(function(e){return-1!==e.search(/,|\s/)})));i[s]&&-1===i[s].indexOf(",")&&console.warn("Offsets separated by white space(s) are deprecated, use a comma (,) instead.");var l=/\s*,\s*|\s+/,u=-1!==s?[i.slice(0,s).concat([i[s].split(l)[0]]),[i[s].split(l)[1]].concat(i.slice(s+1))]:[i];return(u=u.map((function(e,r){var o=(1===r?!a:a)?"height":"width",i=!1;return e.reduce((function(e,t){return""===e[e.length-1]&&-1!==["+","-"].indexOf(t)?(e[e.length-1]=t,i=!0,e):i?(e[e.length-1]+=t,i=!1,e):e.concat(t)}),[]).map((function(e){return function(e,t,n,r){var o=e.match(/((?:\-|\+)?\d*\.?\d*)(.*)/),a=+o[1],i=o[2];if(!a)return e;if(0===i.indexOf("%")){var s=void 0;switch(i){case"%p":s=n;break;case"%":case"%r":default:s=r}return O(s)[t]/100*a}if("vh"===i||"vw"===i){return("vh"===i?Math.max(document.documentElement.clientHeight,window.innerHeight||0):Math.max(document.documentElement.clientWidth,window.innerWidth||0))/100*a}return a}(e,o,t,n)}))}))).forEach((function(e,t){e.forEach((function(n,r){G(n)&&(o[t]+=n*("-"===e[r-1]?-1:1))}))})),o}var ae={placement:"bottom",positionFixed:!1,eventsEnabled:!0,removeOnDestroy:!1,onCreate:function(){},onUpdate:function(){},modifiers:{shift:{order:100,enabled:!0,fn:function(e){var t=e.placement,n=t.split("-")[0],r=t.split("-")[1];if(r){var o=e.offsets,a=o.reference,i=o.popper,s=-1!==["bottom","top"].indexOf(n),l=s?"left":"top",u=s?"width":"height",c={start:k({},l,a[l]),end:k({},l,a[l]+a[u]-i[u])};e.offsets.popper=_({},i,c[r])}return e}},offset:{order:200,enabled:!0,fn:function(e,t){var n=t.offset,r=e.placement,o=e.offsets,a=o.popper,i=o.reference,s=r.split("-")[0],l=void 0;return l=G(+n)?[+n,0]:oe(n,a,i,s),"left"===s?(a.top+=l[0],a.left-=l[1]):"right"===s?(a.top+=l[0],a.left+=l[1]):"top"===s?(a.left+=l[0],a.top-=l[1]):"bottom"===s&&(a.left+=l[0],a.top+=l[1]),e.popper=a,e},offset:0},preventOverflow:{order:300,enabled:!0,fn:function(e,t){var n=t.boundariesElement||d(e.instance.popper);e.instance.reference===n&&(n=d(n));var r=B("transform"),o=e.instance.popper.style,a=o.top,i=o.left,s=o[r];o.top="",o.left="",o[r]="";var l=N(e.instance.popper,e.instance.reference,t.padding,n,e.positionFixed);o.top=a,o.left=i,o[r]=s,t.boundaries=l;var u=t.priority,c=e.offsets.popper,f={primary:function(e){var n=c[e];return c[e]l[e]&&!t.escapeWithReference&&(r=Math.min(c[n],l[e]-("right"===e?c.width:c.height))),k({},n,r)}};return u.forEach((function(e){var 
t=-1!==["left","top"].indexOf(e)?"primary":"secondary";c=_({},c,f[t](e))})),e.offsets.popper=c,e},priority:["left","right","top","bottom"],padding:5,boundariesElement:"scrollParent"},keepTogether:{order:400,enabled:!0,fn:function(e){var t=e.offsets,n=t.popper,r=t.reference,o=e.placement.split("-")[0],a=Math.floor,i=-1!==["top","bottom"].indexOf(o),s=i?"right":"bottom",l=i?"left":"top",u=i?"width":"height";return n[s]a(r[s])&&(e.offsets.popper[l]=a(r[s])),e}},arrow:{order:500,enabled:!0,fn:function(e,t){var n;if(!Z(e.instance.modifiers,"arrow","keepTogether"))return e;var r=t.element;if("string"==typeof r){if(!(r=e.instance.popper.querySelector(r)))return e}else if(!e.instance.popper.contains(r))return console.warn("WARNING: `arrow.element` must be child of its popper element!"),e;var o=e.placement.split("-")[0],a=e.offsets,s=a.popper,l=a.reference,u=-1!==["left","right"].indexOf(o),c=u?"height":"width",f=u?"Top":"Left",p=f.toLowerCase(),d=u?"left":"top",h=u?"bottom":"right",m=F(r)[c];l[h]-ms[h]&&(e.offsets.popper[p]+=l[p]+m-s[h]),e.offsets.popper=O(e.offsets.popper);var v=l[p]+l[c]/2-m/2,g=i(e.instance.popper),b=parseFloat(g["margin"+f]),y=parseFloat(g["border"+f+"Width"]),w=v-e.offsets.popper[p]-b-y;return w=Math.max(Math.min(s[c]-m,w),0),e.arrowElement=r,e.offsets.arrow=(k(n={},p,Math.round(w)),k(n,d,""),n),e},element:"[x-arrow]"},flip:{order:600,enabled:!0,fn:function(e,t){if(q(e.instance.modifiers,"inner"))return e;if(e.flipped&&e.placement===e.originalPlacement)return e;var n=N(e.instance.popper,e.instance.reference,t.padding,t.boundariesElement,e.positionFixed),r=e.placement.split("-")[0],o=D(r),a=e.placement.split("-")[1]||"",i=[];switch(t.behavior){case te:i=[r,o];break;case ne:i=ee(r);break;case re:i=ee(r,!0);break;default:i=t.behavior}return i.forEach((function(s,l){if(r!==s||i.length===l+1)return e;r=e.placement.split("-")[0],o=D(r);var u=e.offsets.popper,c=e.offsets.reference,f=Math.floor,p="left"===r&&f(u.right)>f(c.left)||"right"===r&&f(u.left)f(c.top)||"bottom"===r&&f(u.top)f(n.right),m=f(u.top)f(n.bottom),g="left"===r&&d||"right"===r&&h||"top"===r&&m||"bottom"===r&&v,b=-1!==["top","bottom"].indexOf(r),y=!!t.flipVariations&&(b&&"start"===a&&d||b&&"end"===a&&h||!b&&"start"===a&&m||!b&&"end"===a&&v),w=!!t.flipVariationsByContent&&(b&&"start"===a&&h||b&&"end"===a&&d||!b&&"start"===a&&v||!b&&"end"===a&&m),x=y||w;(p||g||x)&&(e.flipped=!0,(p||g)&&(r=i[l+1]),x&&(a=function(e){return"end"===e?"start":"start"===e?"end":e}(a)),e.placement=r+(a?"-"+a:""),e.offsets.popper=_({},e.offsets.popper,I(e.instance.popper,e.offsets.reference,e.placement)),e=U(e.instance.modifiers,e,"flip"))})),e},behavior:"flip",padding:5,boundariesElement:"viewport",flipVariations:!1,flipVariationsByContent:!1},inner:{order:700,enabled:!1,fn:function(e){var t=e.placement,n=t.split("-")[0],r=e.offsets,o=r.popper,a=r.reference,i=-1!==["left","right"].indexOf(n),s=-1===["top","left"].indexOf(n);return o[i?"left":"top"]=a[n]-(s?o[i?"width":"height"]:0),e.placement=D(t),e.offsets.popper=O(o),e}},hide:{order:800,enabled:!0,fn:function(e){if(!Z(e.instance.modifiers,"hide","preventOverflow"))return e;var t=e.offsets.reference,n=L(e.instance.modifiers,(function(e){return"preventOverflow"===e.name})).boundaries;if(t.bottomn.right||t.top>n.bottom||t.right2&&void 0!==arguments[2]?arguments[2]:{};x(this,e),this.scheduleUpdate=function(){return 
requestAnimationFrame(r.update)},this.update=o(this.update.bind(this)),this.options=_({},e.Defaults,i),this.state={isDestroyed:!1,isCreated:!1,scrollParents:[]},this.reference=t&&t.jquery?t[0]:t,this.popper=n&&n.jquery?n[0]:n,this.options.modifiers={},Object.keys(_({},e.Defaults.modifiers,i.modifiers)).forEach((function(t){r.options.modifiers[t]=_({},e.Defaults.modifiers[t]||{},i.modifiers?i.modifiers[t]:{})})),this.modifiers=Object.keys(this.options.modifiers).map((function(e){return _({name:e},r.options.modifiers[e])})).sort((function(e,t){return e.order-t.order})),this.modifiers.forEach((function(e){e.enabled&&a(e.onLoad)&&e.onLoad(r.reference,r.popper,r.options,e,r.state)})),this.update();var s=this.options.eventsEnabled;s&&this.enableEventListeners(),this.state.eventsEnabled=s}return E(e,[{key:"update",value:function(){return z.call(this)}},{key:"destroy",value:function(){return H.call(this)}},{key:"enableEventListeners",value:function(){return W.call(this)}},{key:"disableEventListeners",value:function(){return K.call(this)}}]),e}();ie.Utils=("undefined"!=typeof window?window:e).PopperUtils,ie.placements=X,ie.Defaults=ae,t.a=ie}).call(this,n(48))},function(e,t){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(e){"object"==typeof window&&(n=window)}e.exports=n},function(e,t){e.exports=function(e){if(!e.webpackPolyfill){var t=Object.create(e);t.children||(t.children=[]),Object.defineProperty(t,"loaded",{enumerable:!0,get:function(){return t.l}}),Object.defineProperty(t,"id",{enumerable:!0,get:function(){return t.i}}),Object.defineProperty(t,"exports",{enumerable:!0}),t.webpackPolyfill=1}return t}},function(e,t,n){var r=n(11),o=n(51),a=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,i=/^\w*$/;e.exports=function(e,t){if(r(e))return!1;var n=typeof e;return!("number"!=n&&"symbol"!=n&&"boolean"!=n&&null!=e&&!o(e))||(i.test(e)||!a.test(e)||null!=t&&e in Object(t))}},function(e,t,n){var r=n(22),o=n(16);e.exports=function(e){return"symbol"==typeof e||o(e)&&"[object Symbol]"==r(e)}},function(e,t,n){var r=n(168),o=n(184),a=n(186),i=n(187),s=n(188);function l(e){var t=-1,n=null==e?0:e.length;for(this.clear();++t-1&&e%1==0&&e<=9007199254740991}},function(e,t,n){var r=n(33),o=n(193),a=n(194),i=n(195),s=n(196),l=n(197);function u(e){var t=this.__data__=new r(e);this.size=t.size}u.prototype.clear=o,u.prototype.delete=a,u.prototype.get=i,u.prototype.has=s,u.prototype.set=l,e.exports=u},function(e,t,n){(function(e){var r=n(12),o=n(202),a=t&&!t.nodeType&&t,i=a&&"object"==typeof e&&e&&!e.nodeType&&e,s=i&&i.exports===a?r.Buffer:void 0,l=(s?s.isBuffer:void 0)||o;e.exports=l}).call(this,n(58)(e))},function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children||(e.children=[]),Object.defineProperty(e,"loaded",{enumerable:!0,get:function(){return e.l}}),Object.defineProperty(e,"id",{enumerable:!0,get:function(){return e.i}}),e.webpackPolyfill=1),e}},function(e,t){e.exports=function(e){return function(t){return e(t)}}},function(e,t,n){(function(e){var r=n(83),o=t&&!t.nodeType&&t,a=o&&"object"==typeof e&&e&&!e.nodeType&&e,i=a&&a.exports===o&&r.process,s=function(){try{var e=a&&a.require&&a.require("util").types;return e||i&&i.binding&&i.binding("util")}catch(e){}}();e.exports=s}).call(this,n(58)(e))},function(e,t){var n=Object.prototype;e.exports=function(e){var t=e&&e.constructor;return e===("function"==typeof t&&t.prototype||n)}},function(e,t,n){var r=n(84),o=n(55);e.exports=function(e){return 
null!=e&&o(e.length)&&!r(e)}},function(e,t,n){var r=n(211),o=n(95),a=Object.prototype.propertyIsEnumerable,i=Object.getOwnPropertySymbols,s=i?function(e){return null==e?[]:(e=Object(e),r(i(e),(function(t){return a.call(e,t)})))}:o;e.exports=s},function(e,t,n){var r=n(101);e.exports=function(e){var t=new e.constructor(e.byteLength);return new r(t).set(new r(e)),t}},function(e,t,n){"use strict";var r=n(7);t.__esModule=!0,t.propagateErrors=function(e,t){return e?null:function(e){return t.push(e),e.value}},t.settled=l,t.collectErrors=u,t.default=function(e){var t=e.endEarly,n=(0,o.default)(e,["endEarly"]);return t?function(e,t,n){return s(n).all(e).catch((function(e){throw"ValidationError"===e.name&&(e.value=t),e})).then((function(){return t}))}(n.validations,n.value,n.sync):u(n)};var o=r(n(66)),a=n(106),i=r(n(67)),s=function(e){return e?a.SynchronousPromise:Promise};function l(e,t){var n=s(t);return n.all(e.map((function(e){return n.resolve(e).then((function(e){return{fulfilled:!0,value:e}}),(function(e){return{fulfilled:!1,value:e}}))})))}function u(e){var t=e.validations,n=e.value,r=e.path,o=e.sync,a=e.errors,s=e.sort;return a=function(e){return void 0===e&&(e=[]),e.inner&&e.inner.length?e.inner:[].concat(e)}(a),l(t,o).then((function(e){var t=e.filter((function(e){return!e.fulfilled})).reduce((function(e,t){var n=t.value;if(!i.default.isError(n))throw n;return e.concat(n)}),[]);if(s&&t.sort(s),(a=t.concat(a)).length)throw new i.default(a,n,r);return n}))}},function(e,t){e.exports=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}},function(e,t,n){"use strict";var r=n(7);t.__esModule=!0,t.default=s;var o=r(n(39)),a=/\$\{\s*(\w+)\s*\}/g,i=function(e){return function(t){return e.replace(a,(function(e,n){return(0,o.default)(t[n])}))}};function s(e,t,n,r){var o=this;this.name="ValidationError",this.value=t,this.path=n,this.type=r,this.errors=[],this.inner=[],e&&[].concat(e).forEach((function(e){o.errors=o.errors.concat(e.errors||e),e.inner&&(o.inner=o.inner.concat(e.inner.length?e.inner:e))})),this.message=this.errors.length>1?this.errors.length+" errors occurred":this.errors[0],Error.captureStackTrace&&Error.captureStackTrace(this,s)}s.prototype=Object.create(Error.prototype),s.prototype.constructor=s,s.isError=function(e){return e&&"ValidationError"===e.name},s.formatError=function(e,t){"string"==typeof e&&(e=i(e));var n=function(t){return t.path=t.label||t.path||"this","function"==typeof e?e(t):e};return 1===arguments.length?n:n(t)},e.exports=t.default},function(e,t,n){"use strict";function r(e,t,n,r,o){this.src=e,this.env=r,this.options=n,this.parser=t,this.tokens=o,this.pos=0,this.posMax=this.src.length,this.level=0,this.pending="",this.pendingLevel=0,this.cache=[],this.isInLabel=!1,this.linkLevel=0,this.linkContent="",this.labelUnmatchedScopes=0}r.prototype.pushPending=function(){this.tokens.push({type:"text",content:this.pending,level:this.pendingLevel}),this.pending=""},r.prototype.push=function(e){this.pending&&this.pushPending(),this.tokens.push(e),this.pendingLevel=this.level},r.prototype.cacheSet=function(e,t){for(var n=this.cache.length;n<=e;n++)this.cache.push(0);this.cache[e]=t},r.prototype.cacheGet=function(e){return e=0)return 1;return 0}();var o=n&&window.Promise?function(e){var t=!1;return function(){t||(t=!0,window.Promise.resolve().then((function(){t=!1,e()})))}}:function(e){var t=!1;return function(){t||(t=!0,setTimeout((function(){t=!1,e()}),r))}};function a(e){return e&&"[object Function]"==={}.toString.call(e)}function 
i(e,t){if(1!==e.nodeType)return[];var n=e.ownerDocument.defaultView.getComputedStyle(e,null);return t?n[t]:n}function s(e){return"HTML"===e.nodeName?e:e.parentNode||e.host}function l(e){if(!e)return document.body;switch(e.nodeName){case"HTML":case"BODY":return e.ownerDocument.body;case"#document":return e.body}var t=i(e),n=t.overflow,r=t.overflowX,o=t.overflowY;return/(auto|scroll|overlay)/.test(n+o+r)?e:l(s(e))}function u(e){return e&&e.referenceNode?e.referenceNode:e}var c=n&&!(!window.MSInputMethodContext||!document.documentMode),f=n&&/MSIE 10/.test(navigator.userAgent);function p(e){return 11===e?c:10===e?f:c||f}function d(e){if(!e)return document.documentElement;for(var t=p(10)?document.body:null,n=e.offsetParent||null;n===t&&e.nextElementSibling;)n=(e=e.nextElementSibling).offsetParent;var r=n&&n.nodeName;return r&&"BODY"!==r&&"HTML"!==r?-1!==["TH","TD","TABLE"].indexOf(n.nodeName)&&"static"===i(n,"position")?d(n):n:e?e.ownerDocument.documentElement:document.documentElement}function h(e){return null!==e.parentNode?h(e.parentNode):e}function m(e,t){if(!(e&&e.nodeType&&t&&t.nodeType))return document.documentElement;var n=e.compareDocumentPosition(t)&Node.DOCUMENT_POSITION_FOLLOWING,r=n?e:t,o=n?t:e,a=document.createRange();a.setStart(r,0),a.setEnd(o,0);var i,s,l=a.commonAncestorContainer;if(e!==l&&t!==l||r.contains(o))return"BODY"===(s=(i=l).nodeName)||"HTML"!==s&&d(i.firstElementChild)!==i?d(l):l;var u=h(e);return u.host?m(u.host,t):m(e,h(t).host)}function v(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"top",n="top"===t?"scrollTop":"scrollLeft",r=e.nodeName;if("BODY"===r||"HTML"===r){var o=e.ownerDocument.documentElement,a=e.ownerDocument.scrollingElement||o;return a[n]}return e[n]}function g(e,t){var n=arguments.length>2&&void 0!==arguments[2]&&arguments[2],r=v(t,"top"),o=v(t,"left"),a=n?-1:1;return e.top+=r*a,e.bottom+=r*a,e.left+=o*a,e.right+=o*a,e}function b(e,t){var n="x"===t?"Left":"Top",r="Left"===n?"Right":"Bottom";return parseFloat(e["border"+n+"Width"])+parseFloat(e["border"+r+"Width"])}function y(e,t,n,r){return Math.max(t["offset"+e],t["scroll"+e],n["client"+e],n["offset"+e],n["scroll"+e],p(10)?parseInt(n["offset"+e])+parseInt(r["margin"+("Height"===e?"Top":"Left")])+parseInt(r["margin"+("Height"===e?"Bottom":"Right")]):0)}function w(e){var t=e.body,n=e.documentElement,r=p(10)&&getComputedStyle(n);return{height:y("Height",t,n,r),width:y("Width",t,n,r)}}var x=function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")},E=function(){function e(e,t){for(var n=0;n2&&void 0!==arguments[2]&&arguments[2],r=p(10),o="HTML"===t.nodeName,a=S(e),s=S(t),u=l(e),c=i(t),f=parseFloat(c.borderTopWidth),d=parseFloat(c.borderLeftWidth);n&&o&&(s.top=Math.max(s.top,0),s.left=Math.max(s.left,0));var h=O({top:a.top-s.top-f,left:a.left-s.left-d,width:a.width,height:a.height});if(h.marginTop=0,h.marginLeft=0,!r&&o){var m=parseFloat(c.marginTop),v=parseFloat(c.marginLeft);h.top-=f-m,h.bottom-=f-m,h.left-=d-v,h.right-=d-v,h.marginTop=m,h.marginLeft=v}return(r&&!n?t.contains(u):t===u&&"BODY"!==u.nodeName)&&(h=g(h,t)),h}function T(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=e.ownerDocument.documentElement,r=C(e,n),o=Math.max(n.clientWidth,window.innerWidth||0),a=Math.max(n.clientHeight,window.innerHeight||0),i=t?0:v(n),s=t?0:v(n,"left"),l={top:i-r.top+r.marginTop,left:s-r.left+r.marginLeft,width:o,height:a};return O(l)}function j(e){var t=e.nodeName;if("BODY"===t||"HTML"===t)return!1;if("fixed"===i(e,"position"))return!0;var 
n=s(e);return!!n&&j(n)}function P(e){if(!e||!e.parentElement||p())return document.documentElement;for(var t=e.parentElement;t&&"none"===i(t,"transform");)t=t.parentElement;return t||document.documentElement}function N(e,t,n,r){var o=arguments.length>4&&void 0!==arguments[4]&&arguments[4],a={top:0,left:0},i=o?P(e):m(e,u(t));if("viewport"===r)a=T(i,o);else{var c=void 0;"scrollParent"===r?"BODY"===(c=l(s(t))).nodeName&&(c=e.ownerDocument.documentElement):c="window"===r?e.ownerDocument.documentElement:r;var f=C(c,i,o);if("HTML"!==c.nodeName||j(i))a=f;else{var p=w(e.ownerDocument),d=p.height,h=p.width;a.top+=f.top-f.marginTop,a.bottom=d+f.top,a.left+=f.left-f.marginLeft,a.right=h+f.left}}var v="number"==typeof(n=n||0);return a.left+=v?n:n.left||0,a.top+=v?n:n.top||0,a.right-=v?n:n.right||0,a.bottom-=v?n:n.bottom||0,a}function R(e){return e.width*e.height}function A(e,t,n,r,o){var a=arguments.length>5&&void 0!==arguments[5]?arguments[5]:0;if(-1===e.indexOf("auto"))return e;var i=N(n,r,a,o),s={top:{width:i.width,height:t.top-i.top},right:{width:i.right-t.right,height:i.height},bottom:{width:i.width,height:i.bottom-t.bottom},left:{width:t.left-i.left,height:i.height}},l=Object.keys(s).map((function(e){return _({key:e},s[e],{area:R(s[e])})})).sort((function(e,t){return t.area-e.area})),u=l.filter((function(e){var t=e.width,r=e.height;return t>=n.clientWidth&&r>=n.clientHeight})),c=u.length>0?u[0].key:l[0].key,f=e.split("-")[1];return c+(f?"-"+f:"")}function M(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,o=r?P(t):m(t,u(n));return C(n,o,r)}function F(e){var t=e.ownerDocument.defaultView.getComputedStyle(e),n=parseFloat(t.marginTop||0)+parseFloat(t.marginBottom||0),r=parseFloat(t.marginLeft||0)+parseFloat(t.marginRight||0);return{width:e.offsetWidth+r,height:e.offsetHeight+n}}function D(e){var t={left:"right",right:"left",bottom:"top",top:"bottom"};return e.replace(/left|right|bottom|top/g,(function(e){return t[e]}))}function I(e,t,n){n=n.split("-")[0];var r=F(e),o={width:r.width,height:r.height},a=-1!==["right","left"].indexOf(n),i=a?"top":"left",s=a?"left":"top",l=a?"height":"width",u=a?"width":"height";return o[i]=t[i]+t[l]/2-r[l]/2,o[s]=n===s?t[s]-r[u]:t[D(s)],o}function L(e,t){return Array.prototype.find?e.find(t):e.filter(t)[0]}function U(e,t,n){return(void 0===n?e:e.slice(0,function(e,t,n){if(Array.prototype.findIndex)return e.findIndex((function(e){return e[t]===n}));var r=L(e,(function(e){return e[t]===n}));return e.indexOf(r)}(e,"name",n))).forEach((function(e){e.function&&console.warn("`modifier.function` is deprecated, use `modifier.fn`!");var n=e.function||e.fn;e.enabled&&a(n)&&(t.offsets.popper=O(t.offsets.popper),t.offsets.reference=O(t.offsets.reference),t=n(t,e))})),t}function q(){if(!this.state.isDestroyed){var e={instance:this,styles:{},arrowStyles:{},attributes:{},flipped:!1,offsets:{}};e.offsets.reference=M(this.state,this.popper,this.reference,this.options.positionFixed),e.placement=A(this.options.placement,e.offsets.reference,this.popper,this.reference,this.options.modifiers.flip.boundariesElement,this.options.modifiers.flip.padding),e.originalPlacement=e.placement,e.positionFixed=this.options.positionFixed,e.offsets.popper=I(this.popper,e.offsets.reference,e.placement),e.offsets.popper.position=this.options.positionFixed?"fixed":"absolute",e=U(this.modifiers,e),this.state.isCreated?this.options.onUpdate(e):(this.state.isCreated=!0,this.options.onCreate(e))}}function z(e,t){return e.some((function(e){var n=e.name;return e.enabled&&n===t}))}function 
B(e){for(var t=[!1,"ms","Webkit","Moz","O"],n=e.charAt(0).toUpperCase()+e.slice(1),r=0;r1&&void 0!==arguments[1]&&arguments[1],n=J.indexOf(e),r=J.slice(n+1).concat(J.slice(0,n));return t?r.reverse():r}var te="flip",ne="clockwise",re="counterclockwise";function oe(e,t,n,r){var o=[0,0],a=-1!==["right","left"].indexOf(r),i=e.split(/(\+|\-)/).map((function(e){return e.trim()})),s=i.indexOf(L(i,(function(e){return-1!==e.search(/,|\s/)})));i[s]&&-1===i[s].indexOf(",")&&console.warn("Offsets separated by white space(s) are deprecated, use a comma (,) instead.");var l=/\s*,\s*|\s+/,u=-1!==s?[i.slice(0,s).concat([i[s].split(l)[0]]),[i[s].split(l)[1]].concat(i.slice(s+1))]:[i];return(u=u.map((function(e,r){var o=(1===r?!a:a)?"height":"width",i=!1;return e.reduce((function(e,t){return""===e[e.length-1]&&-1!==["+","-"].indexOf(t)?(e[e.length-1]=t,i=!0,e):i?(e[e.length-1]+=t,i=!1,e):e.concat(t)}),[]).map((function(e){return function(e,t,n,r){var o=e.match(/((?:\-|\+)?\d*\.?\d*)(.*)/),a=+o[1],i=o[2];if(!a)return e;if(0===i.indexOf("%")){var s=void 0;switch(i){case"%p":s=n;break;case"%":case"%r":default:s=r}return O(s)[t]/100*a}if("vh"===i||"vw"===i){return("vh"===i?Math.max(document.documentElement.clientHeight,window.innerHeight||0):Math.max(document.documentElement.clientWidth,window.innerWidth||0))/100*a}return a}(e,o,t,n)}))}))).forEach((function(e,t){e.forEach((function(n,r){G(n)&&(o[t]+=n*("-"===e[r-1]?-1:1))}))})),o}var ae={placement:"bottom",positionFixed:!1,eventsEnabled:!0,removeOnDestroy:!1,onCreate:function(){},onUpdate:function(){},modifiers:{shift:{order:100,enabled:!0,fn:function(e){var t=e.placement,n=t.split("-")[0],r=t.split("-")[1];if(r){var o=e.offsets,a=o.reference,i=o.popper,s=-1!==["bottom","top"].indexOf(n),l=s?"left":"top",u=s?"width":"height",c={start:k({},l,a[l]),end:k({},l,a[l]+a[u]-i[u])};e.offsets.popper=_({},i,c[r])}return e}},offset:{order:200,enabled:!0,fn:function(e,t){var n=t.offset,r=e.placement,o=e.offsets,a=o.popper,i=o.reference,s=r.split("-")[0],l=void 0;return l=G(+n)?[+n,0]:oe(n,a,i,s),"left"===s?(a.top+=l[0],a.left-=l[1]):"right"===s?(a.top+=l[0],a.left+=l[1]):"top"===s?(a.left+=l[0],a.top-=l[1]):"bottom"===s&&(a.left+=l[0],a.top+=l[1]),e.popper=a,e},offset:0},preventOverflow:{order:300,enabled:!0,fn:function(e,t){var n=t.boundariesElement||d(e.instance.popper);e.instance.reference===n&&(n=d(n));var r=B("transform"),o=e.instance.popper.style,a=o.top,i=o.left,s=o[r];o.top="",o.left="",o[r]="";var l=N(e.instance.popper,e.instance.reference,t.padding,n,e.positionFixed);o.top=a,o.left=i,o[r]=s,t.boundaries=l;var u=t.priority,c=e.offsets.popper,f={primary:function(e){var n=c[e];return c[e]l[e]&&!t.escapeWithReference&&(r=Math.min(c[n],l[e]-("right"===e?c.width:c.height))),k({},n,r)}};return u.forEach((function(e){var t=-1!==["left","top"].indexOf(e)?"primary":"secondary";c=_({},c,f[t](e))})),e.offsets.popper=c,e},priority:["left","right","top","bottom"],padding:5,boundariesElement:"scrollParent"},keepTogether:{order:400,enabled:!0,fn:function(e){var t=e.offsets,n=t.popper,r=t.reference,o=e.placement.split("-")[0],a=Math.floor,i=-1!==["top","bottom"].indexOf(o),s=i?"right":"bottom",l=i?"left":"top",u=i?"width":"height";return n[s]a(r[s])&&(e.offsets.popper[l]=a(r[s])),e}},arrow:{order:500,enabled:!0,fn:function(e,t){var n;if(!Z(e.instance.modifiers,"arrow","keepTogether"))return e;var r=t.element;if("string"==typeof r){if(!(r=e.instance.popper.querySelector(r)))return e}else if(!e.instance.popper.contains(r))return console.warn("WARNING: `arrow.element` must be child 
of its popper element!"),e;var o=e.placement.split("-")[0],a=e.offsets,s=a.popper,l=a.reference,u=-1!==["left","right"].indexOf(o),c=u?"height":"width",f=u?"Top":"Left",p=f.toLowerCase(),d=u?"left":"top",h=u?"bottom":"right",m=F(r)[c];l[h]-ms[h]&&(e.offsets.popper[p]+=l[p]+m-s[h]),e.offsets.popper=O(e.offsets.popper);var v=l[p]+l[c]/2-m/2,g=i(e.instance.popper),b=parseFloat(g["margin"+f]),y=parseFloat(g["border"+f+"Width"]),w=v-e.offsets.popper[p]-b-y;return w=Math.max(Math.min(s[c]-m,w),0),e.arrowElement=r,e.offsets.arrow=(k(n={},p,Math.round(w)),k(n,d,""),n),e},element:"[x-arrow]"},flip:{order:600,enabled:!0,fn:function(e,t){if(z(e.instance.modifiers,"inner"))return e;if(e.flipped&&e.placement===e.originalPlacement)return e;var n=N(e.instance.popper,e.instance.reference,t.padding,t.boundariesElement,e.positionFixed),r=e.placement.split("-")[0],o=D(r),a=e.placement.split("-")[1]||"",i=[];switch(t.behavior){case te:i=[r,o];break;case ne:i=ee(r);break;case re:i=ee(r,!0);break;default:i=t.behavior}return i.forEach((function(s,l){if(r!==s||i.length===l+1)return e;r=e.placement.split("-")[0],o=D(r);var u=e.offsets.popper,c=e.offsets.reference,f=Math.floor,p="left"===r&&f(u.right)>f(c.left)||"right"===r&&f(u.left)f(c.top)||"bottom"===r&&f(u.top)f(n.right),m=f(u.top)f(n.bottom),g="left"===r&&d||"right"===r&&h||"top"===r&&m||"bottom"===r&&v,b=-1!==["top","bottom"].indexOf(r),y=!!t.flipVariations&&(b&&"start"===a&&d||b&&"end"===a&&h||!b&&"start"===a&&m||!b&&"end"===a&&v),w=!!t.flipVariationsByContent&&(b&&"start"===a&&h||b&&"end"===a&&d||!b&&"start"===a&&v||!b&&"end"===a&&m),x=y||w;(p||g||x)&&(e.flipped=!0,(p||g)&&(r=i[l+1]),x&&(a=function(e){return"end"===e?"start":"start"===e?"end":e}(a)),e.placement=r+(a?"-"+a:""),e.offsets.popper=_({},e.offsets.popper,I(e.instance.popper,e.offsets.reference,e.placement)),e=U(e.instance.modifiers,e,"flip"))})),e},behavior:"flip",padding:5,boundariesElement:"viewport",flipVariations:!1,flipVariationsByContent:!1},inner:{order:700,enabled:!1,fn:function(e){var t=e.placement,n=t.split("-")[0],r=e.offsets,o=r.popper,a=r.reference,i=-1!==["left","right"].indexOf(n),s=-1===["top","left"].indexOf(n);return o[i?"left":"top"]=a[n]-(s?o[i?"width":"height"]:0),e.placement=D(t),e.offsets.popper=O(o),e}},hide:{order:800,enabled:!0,fn:function(e){if(!Z(e.instance.modifiers,"hide","preventOverflow"))return e;var t=e.offsets.reference,n=L(e.instance.modifiers,(function(e){return"preventOverflow"===e.name})).boundaries;if(t.bottomn.right||t.top>n.bottom||t.right2&&void 0!==arguments[2]?arguments[2]:{};x(this,e),this.scheduleUpdate=function(){return requestAnimationFrame(r.update)},this.update=o(this.update.bind(this)),this.options=_({},e.Defaults,i),this.state={isDestroyed:!1,isCreated:!1,scrollParents:[]},this.reference=t&&t.jquery?t[0]:t,this.popper=n&&n.jquery?n[0]:n,this.options.modifiers={},Object.keys(_({},e.Defaults.modifiers,i.modifiers)).forEach((function(t){r.options.modifiers[t]=_({},e.Defaults.modifiers[t]||{},i.modifiers?i.modifiers[t]:{})})),this.modifiers=Object.keys(this.options.modifiers).map((function(e){return _({name:e},r.options.modifiers[e])})).sort((function(e,t){return e.order-t.order})),this.modifiers.forEach((function(e){e.enabled&&a(e.onLoad)&&e.onLoad(r.reference,r.popper,r.options,e,r.state)})),this.update();var s=this.options.eventsEnabled;s&&this.enableEventListeners(),this.state.eventsEnabled=s}return E(e,[{key:"update",value:function(){return q.call(this)}},{key:"destroy",value:function(){return 
H.call(this)}},{key:"enableEventListeners",value:function(){return W.call(this)}},{key:"disableEventListeners",value:function(){return K.call(this)}}]),e}();ie.Utils=("undefined"!=typeof window?window:e).PopperUtils,ie.placements=X,ie.Defaults=ae,t.a=ie}).call(this,n(48))},function(e,t){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(e){"object"==typeof window&&(n=window)}e.exports=n},function(e,t){e.exports=function(e){if(!e.webpackPolyfill){var t=Object.create(e);t.children||(t.children=[]),Object.defineProperty(t,"loaded",{enumerable:!0,get:function(){return t.l}}),Object.defineProperty(t,"id",{enumerable:!0,get:function(){return t.i}}),Object.defineProperty(t,"exports",{enumerable:!0}),t.webpackPolyfill=1}return t}},function(e,t,n){var r=n(11),o=n(51),a=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,i=/^\w*$/;e.exports=function(e,t){if(r(e))return!1;var n=typeof e;return!("number"!=n&&"symbol"!=n&&"boolean"!=n&&null!=e&&!o(e))||(i.test(e)||!a.test(e)||null!=t&&e in Object(t))}},function(e,t,n){var r=n(22),o=n(16);e.exports=function(e){return"symbol"==typeof e||o(e)&&"[object Symbol]"==r(e)}},function(e,t,n){var r=n(168),o=n(184),a=n(186),i=n(187),s=n(188);function l(e){var t=-1,n=null==e?0:e.length;for(this.clear();++t-1&&e%1==0&&e<=9007199254740991}},function(e,t,n){var r=n(33),o=n(193),a=n(194),i=n(195),s=n(196),l=n(197);function u(e){var t=this.__data__=new r(e);this.size=t.size}u.prototype.clear=o,u.prototype.delete=a,u.prototype.get=i,u.prototype.has=s,u.prototype.set=l,e.exports=u},function(e,t,n){(function(e){var r=n(12),o=n(202),a=t&&!t.nodeType&&t,i=a&&"object"==typeof e&&e&&!e.nodeType&&e,s=i&&i.exports===a?r.Buffer:void 0,l=(s?s.isBuffer:void 0)||o;e.exports=l}).call(this,n(58)(e))},function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children||(e.children=[]),Object.defineProperty(e,"loaded",{enumerable:!0,get:function(){return e.l}}),Object.defineProperty(e,"id",{enumerable:!0,get:function(){return e.i}}),e.webpackPolyfill=1),e}},function(e,t){e.exports=function(e){return function(t){return e(t)}}},function(e,t,n){(function(e){var r=n(83),o=t&&!t.nodeType&&t,a=o&&"object"==typeof e&&e&&!e.nodeType&&e,i=a&&a.exports===o&&r.process,s=function(){try{var e=a&&a.require&&a.require("util").types;return e||i&&i.binding&&i.binding("util")}catch(e){}}();e.exports=s}).call(this,n(58)(e))},function(e,t){var n=Object.prototype;e.exports=function(e){var t=e&&e.constructor;return e===("function"==typeof t&&t.prototype||n)}},function(e,t,n){var r=n(84),o=n(55);e.exports=function(e){return null!=e&&o(e.length)&&!r(e)}},function(e,t,n){var r=n(211),o=n(95),a=Object.prototype.propertyIsEnumerable,i=Object.getOwnPropertySymbols,s=i?function(e){return null==e?[]:(e=Object(e),r(i(e),(function(t){return a.call(e,t)})))}:o;e.exports=s},function(e,t,n){var r=n(101);e.exports=function(e){var t=new e.constructor(e.byteLength);return new r(t).set(new r(e)),t}},function(e,t,n){"use strict";var r=n(7);t.__esModule=!0,t.propagateErrors=function(e,t){return e?null:function(e){return t.push(e),e.value}},t.settled=l,t.collectErrors=u,t.default=function(e){var t=e.endEarly,n=(0,o.default)(e,["endEarly"]);return t?function(e,t,n){return s(n).all(e).catch((function(e){throw"ValidationError"===e.name&&(e.value=t),e})).then((function(){return t}))}(n.validations,n.value,n.sync):u(n)};var o=r(n(66)),a=n(106),i=r(n(67)),s=function(e){return e?a.SynchronousPromise:Promise};function l(e,t){var n=s(t);return 
n.all(e.map((function(e){return n.resolve(e).then((function(e){return{fulfilled:!0,value:e}}),(function(e){return{fulfilled:!1,value:e}}))})))}function u(e){var t=e.validations,n=e.value,r=e.path,o=e.sync,a=e.errors,s=e.sort;return a=function(e){return void 0===e&&(e=[]),e.inner&&e.inner.length?e.inner:[].concat(e)}(a),l(t,o).then((function(e){var t=e.filter((function(e){return!e.fulfilled})).reduce((function(e,t){var n=t.value;if(!i.default.isError(n))throw n;return e.concat(n)}),[]);if(s&&t.sort(s),(a=t.concat(a)).length)throw new i.default(a,n,r);return n}))}},function(e,t){e.exports=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}},function(e,t,n){"use strict";var r=n(7);t.__esModule=!0,t.default=s;var o=r(n(39)),a=/\$\{\s*(\w+)\s*\}/g,i=function(e){return function(t){return e.replace(a,(function(e,n){return(0,o.default)(t[n])}))}};function s(e,t,n,r){var o=this;this.name="ValidationError",this.value=t,this.path=n,this.type=r,this.errors=[],this.inner=[],e&&[].concat(e).forEach((function(e){o.errors=o.errors.concat(e.errors||e),e.inner&&(o.inner=o.inner.concat(e.inner.length?e.inner:e))})),this.message=this.errors.length>1?this.errors.length+" errors occurred":this.errors[0],Error.captureStackTrace&&Error.captureStackTrace(this,s)}s.prototype=Object.create(Error.prototype),s.prototype.constructor=s,s.isError=function(e){return e&&"ValidationError"===e.name},s.formatError=function(e,t){"string"==typeof e&&(e=i(e));var n=function(t){return t.path=t.label||t.path||"this","function"==typeof e?e(t):e};return 1===arguments.length?n:n(t)},e.exports=t.default},function(e,t,n){"use strict";function r(e,t,n,r,o){this.src=e,this.env=r,this.options=n,this.parser=t,this.tokens=o,this.pos=0,this.posMax=this.src.length,this.level=0,this.pending="",this.pendingLevel=0,this.cache=[],this.isInLabel=!1,this.linkLevel=0,this.linkContent="",this.labelUnmatchedScopes=0}r.prototype.pushPending=function(){this.tokens.push({type:"text",content:this.pending,level:this.pendingLevel}),this.pending=""},r.prototype.push=function(e){this.pending&&this.pushPending(),this.tokens.push(e),this.pendingLevel=this.level},r.prototype.cacheSet=function(e,t){for(var n=this.cache.length;n<=e;n++)this.cache.push(0);this.cache[e]=t},r.prototype.cacheGet=function(e){return e=200&&e<300}};l.headers={common:{Accept:"application/json, text/plain, */*"}},r.forEach(["delete","get","head"],(function(e){l.headers[e]={}})),r.forEach(["post","put","patch"],(function(e){l.headers[e]=r.merge(a)})),e.exports=l}).call(this,n(146))},function(e,t,n){"use strict";var r=n(10),o=n(148),a=n(74),i=n(150),s=n(153),l=n(154),u=n(78);e.exports=function(e){return new Promise((function(t,c){var f=e.data,p=e.headers;r.isFormData(f)&&delete p["Content-Type"];var d=new XMLHttpRequest;if(e.auth){var h=e.auth.username||"",m=e.auth.password||"";p.Authorization="Basic "+btoa(h+":"+m)}var v=i(e.baseURL,e.url);if(d.open(e.method.toUpperCase(),a(v,e.params,e.paramsSerializer),!0),d.timeout=e.timeout,d.onreadystatechange=function(){if(d&&4===d.readyState&&(0!==d.status||d.responseURL&&0===d.responseURL.indexOf("file:"))){var n="getAllResponseHeaders"in d?s(d.getAllResponseHeaders()):null,r={data:e.responseType&&"text"!==e.responseType?d.response:d.responseText,status:d.status,statusText:d.statusText,headers:n,config:e,request:d};o(t,c,r),d=null}},d.onabort=function(){d&&(c(u("Request aborted",e,"ECONNABORTED",d)),d=null)},d.onerror=function(){c(u("Network Error",e,null,d)),d=null},d.ontimeout=function(){var 
t="timeout of "+e.timeout+"ms exceeded";e.timeoutErrorMessage&&(t=e.timeoutErrorMessage),c(u(t,e,"ECONNABORTED",d)),d=null},r.isStandardBrowserEnv()){var g=n(155),b=(e.withCredentials||l(v))&&e.xsrfCookieName?g.read(e.xsrfCookieName):void 0;b&&(p[e.xsrfHeaderName]=b)}if("setRequestHeader"in d&&r.forEach(p,(function(e,t){void 0===f&&"content-type"===t.toLowerCase()?delete p[t]:d.setRequestHeader(t,e)})),r.isUndefined(e.withCredentials)||(d.withCredentials=!!e.withCredentials),e.responseType)try{d.responseType=e.responseType}catch(t){if("json"!==e.responseType)throw t}"function"==typeof e.onDownloadProgress&&d.addEventListener("progress",e.onDownloadProgress),"function"==typeof e.onUploadProgress&&d.upload&&d.upload.addEventListener("progress",e.onUploadProgress),e.cancelToken&&e.cancelToken.promise.then((function(e){d&&(d.abort(),c(e),d=null)})),void 0===f&&(f=null),d.send(f)}))}},function(e,t,n){"use strict";var r=n(149);e.exports=function(e,t,n,o,a){var i=new Error(e);return r(i,t,n,o,a)}},function(e,t,n){"use strict";var r=n(10);e.exports=function(e,t){t=t||{};var n={},o=["url","method","params","data"],a=["headers","auth","proxy"],i=["baseURL","url","transformRequest","transformResponse","paramsSerializer","timeout","withCredentials","adapter","responseType","xsrfCookieName","xsrfHeaderName","onUploadProgress","onDownloadProgress","maxContentLength","validateStatus","maxRedirects","httpAgent","httpsAgent","cancelToken","socketPath"];r.forEach(o,(function(e){void 0!==t[e]&&(n[e]=t[e])})),r.forEach(a,(function(o){r.isObject(t[o])?n[o]=r.deepMerge(e[o],t[o]):void 0!==t[o]?n[o]=t[o]:r.isObject(e[o])?n[o]=r.deepMerge(e[o]):void 0!==e[o]&&(n[o]=e[o])})),r.forEach(i,(function(r){void 0!==t[r]?n[r]=t[r]:void 0!==e[r]&&(n[r]=e[r])}));var s=o.concat(a).concat(i),l=Object.keys(t).filter((function(e){return-1===s.indexOf(e)}));return r.forEach(l,(function(r){void 0!==t[r]?n[r]=t[r]:void 0!==e[r]&&(n[r]=e[r])})),n}},function(e,t,n){"use strict";function r(e){this.message=e}r.prototype.toString=function(){return"Cancel"+(this.message?": "+this.message:"")},r.prototype.__CANCEL__=!0,e.exports=r},function(e,t,n){var r=n(82),o=n(87),a=n(11),i=n(88),s=n(55),l=n(36);e.exports=function(e,t,n){for(var u=-1,c=(t=r(t,e)).length,f=!1;++u-1&&e%1==0&&ec))return!1;var p=l.get(e);if(p&&l.get(t))return p==t;var d=-1,h=!0,m=2&n?new r:void 0;for(l.set(e,t),l.set(t,e);++d=n.length)throw new Error("Yup.reach cannot resolve an array item at index: "+o+", in the path: "+t+". because there is no value at that index. ");n=n[p]}}if(!c){if(e=e.resolve({context:r,parent:i,value:n}),!(0,a.default)(e,"fields")||!(0,a.default)(e.fields,f))throw new Error("The schema does not contain the path: "+t+". 
(failed at: "+l+' which is a type: "'+e._type+'") ');e=e.fields[f],i=n,n=n&&n[f],s=f,l=u?"["+o+"]":"."+o}})),e&&(e=e.resolve({context:r,parent:i,value:n})),{schema:e,parent:i,parentPath:s}):{parent:i,parentPath:t,schema:e.resolve({context:r,parent:i,value:n})}}var s=function(e,t,n,r){return i(e,t,n,r).schema};t.default=s},function(e,t){e.exports=function(e){if(e&&e.__esModule)return e;var t={};if(null!=e)for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}return t.default=e,t}},function(e,t){e.exports=function(e,t){return t||(t=e.slice(0)),e.raw=t,e}},function(e,t,n){var r=n(268),o=n(269),a=n(272),i=RegExp("['’]","g");e.exports=function(e){return function(t){return r(a(o(t).replace(i,"")),e,"")}}},function(e,t,n){"use strict";t.__esModule=!0,t.default=function(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),r=1;r",Gt:"≫",gt:">",gtcc:"⪧",gtcir:"⩺",gtdot:"⋗",gtlPar:"⦕",gtquest:"⩼",gtrapprox:"⪆",gtrarr:"⥸",gtrdot:"⋗",gtreqless:"⋛",gtreqqless:"⪌",gtrless:"≷",gtrsim:"≳",gvertneqq:"≩︀",gvnE:"≩︀",Hacek:"ˇ",hairsp:" ",half:"½",hamilt:"ℋ",HARDcy:"Ъ",hardcy:"ъ",hArr:"⇔",harr:"↔",harrcir:"⥈",harrw:"↭",Hat:"^",hbar:"ℏ",Hcirc:"Ĥ",hcirc:"ĥ",hearts:"♥",heartsuit:"♥",hellip:"…",hercon:"⊹",Hfr:"ℌ",hfr:"𝔥",HilbertSpace:"ℋ",hksearow:"⤥",hkswarow:"⤦",hoarr:"⇿",homtht:"∻",hookleftarrow:"↩",hookrightarrow:"↪",Hopf:"ℍ",hopf:"𝕙",horbar:"―",HorizontalLine:"─",Hscr:"ℋ",hscr:"𝒽",hslash:"ℏ",Hstrok:"Ħ",hstrok:"ħ",HumpDownHump:"≎",HumpEqual:"≏",hybull:"⁃",hyphen:"‐",Iacute:"Í",iacute:"í",ic:"⁣",Icirc:"Î",icirc:"î",Icy:"И",icy:"и",Idot:"İ",IEcy:"Е",iecy:"е",iexcl:"¡",iff:"⇔",Ifr:"ℑ",ifr:"𝔦",Igrave:"Ì",igrave:"ì",ii:"ⅈ",iiiint:"⨌",iiint:"∭",iinfin:"⧜",iiota:"℩",IJlig:"IJ",ijlig:"ij",Im:"ℑ",Imacr:"Ī",imacr:"ī",image:"ℑ",ImaginaryI:"ⅈ",imagline:"ℐ",imagpart:"ℑ",imath:"ı",imof:"⊷",imped:"Ƶ",Implies:"⇒",in:"∈",incare:"℅",infin:"∞",infintie:"⧝",inodot:"ı",Int:"∬",int:"∫",intcal:"⊺",integers:"ℤ",Integral:"∫",intercal:"⊺",Intersection:"⋂",intlarhk:"⨗",intprod:"⨼",InvisibleComma:"⁣",InvisibleTimes:"⁢",IOcy:"Ё",iocy:"ё",Iogon:"Į",iogon:"į",Iopf:"𝕀",iopf:"𝕚",Iota:"Ι",iota:"ι",iprod:"⨼",iquest:"¿",Iscr:"ℐ",iscr:"𝒾",isin:"∈",isindot:"⋵",isinE:"⋹",isins:"⋴",isinsv:"⋳",isinv:"∈",it:"⁢",Itilde:"Ĩ",itilde:"ĩ",Iukcy:"І",iukcy:"і",Iuml:"Ï",iuml:"ï",Jcirc:"Ĵ",jcirc:"ĵ",Jcy:"Й",jcy:"й",Jfr:"𝔍",jfr:"𝔧",jmath:"ȷ",Jopf:"𝕁",jopf:"𝕛",Jscr:"𝒥",jscr:"𝒿",Jsercy:"Ј",jsercy:"ј",Jukcy:"Є",jukcy:"є",Kappa:"Κ",kappa:"κ",kappav:"ϰ",Kcedil:"Ķ",kcedil:"ķ",Kcy:"К",kcy:"к",Kfr:"𝔎",kfr:"𝔨",kgreen:"ĸ",KHcy:"Х",khcy:"х",KJcy:"Ќ",kjcy:"ќ",Kopf:"𝕂",kopf:"𝕜",Kscr:"𝒦",kscr:"𝓀",lAarr:"⇚",Lacute:"Ĺ",lacute:"ĺ",laemptyv:"⦴",lagran:"ℒ",Lambda:"Λ",lambda:"λ",Lang:"⟪",lang:"⟨",langd:"⦑",langle:"⟨",lap:"⪅",Laplacetrf:"ℒ",laquo:"«",Larr:"↞",lArr:"⇐",larr:"←",larrb:"⇤",larrbfs:"⤟",larrfs:"⤝",larrhk:"↩",larrlp:"↫",larrpl:"⤹",larrsim:"⥳",larrtl:"↢",lat:"⪫",lAtail:"⤛",latail:"⤙",late:"⪭",lates:"⪭︀",lBarr:"⤎",lbarr:"⤌",lbbrk:"❲",lbrace:"{",lbrack:"[",lbrke:"⦋",lbrksld:"⦏",lbrkslu:"⦍",Lcaron:"Ľ",lcaron:"ľ",Lcedil:"Ļ",lcedil:"ļ",lceil:"⌈",lcub:"{",Lcy:"Л",lcy:"л",ldca:"⤶",ldquo:"“",ldquor:"„",ldrdhar:"⥧",ldrushar:"⥋",ldsh:"↲",lE:"≦",le:"≤",LeftAngleBracket:"⟨",LeftArrow:"←",Leftarrow:"⇐",leftarrow:"←",LeftArrowBar:"⇤",LeftArrowRightArrow:"⇆",leftarrowtail:"↢",LeftCeiling:"⌈",LeftDoubleBracket:"⟦",LeftDownTeeVector:"⥡",LeftDownVector:"⇃",LeftDownVectorBar:"⥙",LeftFloor:"⌊",leftharpoondown:"↽",leftharpoonup:"↼",leftlef
tarrows:"⇇",LeftRightArrow:"↔",Leftrightarrow:"⇔",leftrightarrow:"↔",leftrightarrows:"⇆",leftrightharpoons:"⇋",leftrightsquigarrow:"↭",LeftRightVector:"⥎",LeftTee:"⊣",LeftTeeArrow:"↤",LeftTeeVector:"⥚",leftthreetimes:"⋋",LeftTriangle:"⊲",LeftTriangleBar:"⧏",LeftTriangleEqual:"⊴",LeftUpDownVector:"⥑",LeftUpTeeVector:"⥠",LeftUpVector:"↿",LeftUpVectorBar:"⥘",LeftVector:"↼",LeftVectorBar:"⥒",lEg:"⪋",leg:"⋚",leq:"≤",leqq:"≦",leqslant:"⩽",les:"⩽",lescc:"⪨",lesdot:"⩿",lesdoto:"⪁",lesdotor:"⪃",lesg:"⋚︀",lesges:"⪓",lessapprox:"⪅",lessdot:"⋖",lesseqgtr:"⋚",lesseqqgtr:"⪋",LessEqualGreater:"⋚",LessFullEqual:"≦",LessGreater:"≶",lessgtr:"≶",LessLess:"⪡",lesssim:"≲",LessSlantEqual:"⩽",LessTilde:"≲",lfisht:"⥼",lfloor:"⌊",Lfr:"𝔏",lfr:"𝔩",lg:"≶",lgE:"⪑",lHar:"⥢",lhard:"↽",lharu:"↼",lharul:"⥪",lhblk:"▄",LJcy:"Љ",ljcy:"љ",Ll:"⋘",ll:"≪",llarr:"⇇",llcorner:"⌞",Lleftarrow:"⇚",llhard:"⥫",lltri:"◺",Lmidot:"Ŀ",lmidot:"ŀ",lmoust:"⎰",lmoustache:"⎰",lnap:"⪉",lnapprox:"⪉",lnE:"≨",lne:"⪇",lneq:"⪇",lneqq:"≨",lnsim:"⋦",loang:"⟬",loarr:"⇽",lobrk:"⟦",LongLeftArrow:"⟵",Longleftarrow:"⟸",longleftarrow:"⟵",LongLeftRightArrow:"⟷",Longleftrightarrow:"⟺",longleftrightarrow:"⟷",longmapsto:"⟼",LongRightArrow:"⟶",Longrightarrow:"⟹",longrightarrow:"⟶",looparrowleft:"↫",looparrowright:"↬",lopar:"⦅",Lopf:"𝕃",lopf:"𝕝",loplus:"⨭",lotimes:"⨴",lowast:"∗",lowbar:"_",LowerLeftArrow:"↙",LowerRightArrow:"↘",loz:"◊",lozenge:"◊",lozf:"⧫",lpar:"(",lparlt:"⦓",lrarr:"⇆",lrcorner:"⌟",lrhar:"⇋",lrhard:"⥭",lrm:"‎",lrtri:"⊿",lsaquo:"‹",Lscr:"ℒ",lscr:"𝓁",Lsh:"↰",lsh:"↰",lsim:"≲",lsime:"⪍",lsimg:"⪏",lsqb:"[",lsquo:"‘",lsquor:"‚",Lstrok:"Ł",lstrok:"ł",LT:"<",Lt:"≪",lt:"<",ltcc:"⪦",ltcir:"⩹",ltdot:"⋖",lthree:"⋋",ltimes:"⋉",ltlarr:"⥶",ltquest:"⩻",ltri:"◃",ltrie:"⊴",ltrif:"◂",ltrPar:"⦖",lurdshar:"⥊",luruhar:"⥦",lvertneqq:"≨︀",lvnE:"≨︀",macr:"¯",male:"♂",malt:"✠",maltese:"✠",Map:"⤅",map:"↦",mapsto:"↦",mapstodown:"↧",mapstoleft:"↤",mapstoup:"↥",marker:"▮",mcomma:"⨩",Mcy:"М",mcy:"м",mdash:"—",mDDot:"∺",measuredangle:"∡",MediumSpace:" ",Mellintrf:"ℳ",Mfr:"𝔐",mfr:"𝔪",mho:"℧",micro:"µ",mid:"∣",midast:"*",midcir:"⫰",middot:"·",minus:"−",minusb:"⊟",minusd:"∸",minusdu:"⨪",MinusPlus:"∓",mlcp:"⫛",mldr:"…",mnplus:"∓",models:"⊧",Mopf:"𝕄",mopf:"𝕞",mp:"∓",Mscr:"ℳ",mscr:"𝓂",mstpos:"∾",Mu:"Μ",mu:"μ",multimap:"⊸",mumap:"⊸",nabla:"∇",Nacute:"Ń",nacute:"ń",nang:"∠⃒",nap:"≉",napE:"⩰̸",napid:"≋̸",napos:"ʼn",napprox:"≉",natur:"♮",natural:"♮",naturals:"ℕ",nbsp:" ",nbump:"≎̸",nbumpe:"≏̸",ncap:"⩃",Ncaron:"Ň",ncaron:"ň",Ncedil:"Ņ",ncedil:"ņ",ncong:"≇",ncongdot:"⩭̸",ncup:"⩂",Ncy:"Н",ncy:"н",ndash:"–",ne:"≠",nearhk:"⤤",neArr:"⇗",nearr:"↗",nearrow:"↗",nedot:"≐̸",NegativeMediumSpace:"​",NegativeThickSpace:"​",NegativeThinSpace:"​",NegativeVeryThinSpace:"​",nequiv:"≢",nesear:"⤨",nesim:"≂̸",NestedGreaterGreater:"≫",NestedLessLess:"≪",NewLine:"\n",nexist:"∄",nexists:"∄",Nfr:"𝔑",nfr:"𝔫",ngE:"≧̸",nge:"≱",ngeq:"≱",ngeqq:"≧̸",ngeqslant:"⩾̸",nges:"⩾̸",nGg:"⋙̸",ngsim:"≵",nGt:"≫⃒",ngt:"≯",ngtr:"≯",nGtv:"≫̸",nhArr:"⇎",nharr:"↮",nhpar:"⫲",ni:"∋",nis:"⋼",nisd:"⋺",niv:"∋",NJcy:"Њ",njcy:"њ",nlArr:"⇍",nlarr:"↚",nldr:"‥",nlE:"≦̸",nle:"≰",nLeftarrow:"⇍",nleftarrow:"↚",nLeftrightarrow:"⇎",nleftrightarrow:"↮",nleq:"≰",nleqq:"≦̸",nleqslant:"⩽̸",nles:"⩽̸",nless:"≮",nLl:"⋘̸",nlsim:"≴",nLt:"≪⃒",nlt:"≮",nltri:"⋪",nltrie:"⋬",nLtv:"≪̸",nmid:"∤",NoBreak:"⁠",NonBreakingSpace:" 
",Nopf:"ℕ",nopf:"𝕟",Not:"⫬",not:"¬",NotCongruent:"≢",NotCupCap:"≭",NotDoubleVerticalBar:"∦",NotElement:"∉",NotEqual:"≠",NotEqualTilde:"≂̸",NotExists:"∄",NotGreater:"≯",NotGreaterEqual:"≱",NotGreaterFullEqual:"≧̸",NotGreaterGreater:"≫̸",NotGreaterLess:"≹",NotGreaterSlantEqual:"⩾̸",NotGreaterTilde:"≵",NotHumpDownHump:"≎̸",NotHumpEqual:"≏̸",notin:"∉",notindot:"⋵̸",notinE:"⋹̸",notinva:"∉",notinvb:"⋷",notinvc:"⋶",NotLeftTriangle:"⋪",NotLeftTriangleBar:"⧏̸",NotLeftTriangleEqual:"⋬",NotLess:"≮",NotLessEqual:"≰",NotLessGreater:"≸",NotLessLess:"≪̸",NotLessSlantEqual:"⩽̸",NotLessTilde:"≴",NotNestedGreaterGreater:"⪢̸",NotNestedLessLess:"⪡̸",notni:"∌",notniva:"∌",notnivb:"⋾",notnivc:"⋽",NotPrecedes:"⊀",NotPrecedesEqual:"⪯̸",NotPrecedesSlantEqual:"⋠",NotReverseElement:"∌",NotRightTriangle:"⋫",NotRightTriangleBar:"⧐̸",NotRightTriangleEqual:"⋭",NotSquareSubset:"⊏̸",NotSquareSubsetEqual:"⋢",NotSquareSuperset:"⊐̸",NotSquareSupersetEqual:"⋣",NotSubset:"⊂⃒",NotSubsetEqual:"⊈",NotSucceeds:"⊁",NotSucceedsEqual:"⪰̸",NotSucceedsSlantEqual:"⋡",NotSucceedsTilde:"≿̸",NotSuperset:"⊃⃒",NotSupersetEqual:"⊉",NotTilde:"≁",NotTildeEqual:"≄",NotTildeFullEqual:"≇",NotTildeTilde:"≉",NotVerticalBar:"∤",npar:"∦",nparallel:"∦",nparsl:"⫽⃥",npart:"∂̸",npolint:"⨔",npr:"⊀",nprcue:"⋠",npre:"⪯̸",nprec:"⊀",npreceq:"⪯̸",nrArr:"⇏",nrarr:"↛",nrarrc:"⤳̸",nrarrw:"↝̸",nRightarrow:"⇏",nrightarrow:"↛",nrtri:"⋫",nrtrie:"⋭",nsc:"⊁",nsccue:"⋡",nsce:"⪰̸",Nscr:"𝒩",nscr:"𝓃",nshortmid:"∤",nshortparallel:"∦",nsim:"≁",nsime:"≄",nsimeq:"≄",nsmid:"∤",nspar:"∦",nsqsube:"⋢",nsqsupe:"⋣",nsub:"⊄",nsubE:"⫅̸",nsube:"⊈",nsubset:"⊂⃒",nsubseteq:"⊈",nsubseteqq:"⫅̸",nsucc:"⊁",nsucceq:"⪰̸",nsup:"⊅",nsupE:"⫆̸",nsupe:"⊉",nsupset:"⊃⃒",nsupseteq:"⊉",nsupseteqq:"⫆̸",ntgl:"≹",Ntilde:"Ñ",ntilde:"ñ",ntlg:"≸",ntriangleleft:"⋪",ntrianglelefteq:"⋬",ntriangleright:"⋫",ntrianglerighteq:"⋭",Nu:"Ν",nu:"ν",num:"#",numero:"№",numsp:" 
",nvap:"≍⃒",nVDash:"⊯",nVdash:"⊮",nvDash:"⊭",nvdash:"⊬",nvge:"≥⃒",nvgt:">⃒",nvHarr:"⤄",nvinfin:"⧞",nvlArr:"⤂",nvle:"≤⃒",nvlt:"<⃒",nvltrie:"⊴⃒",nvrArr:"⤃",nvrtrie:"⊵⃒",nvsim:"∼⃒",nwarhk:"⤣",nwArr:"⇖",nwarr:"↖",nwarrow:"↖",nwnear:"⤧",Oacute:"Ó",oacute:"ó",oast:"⊛",ocir:"⊚",Ocirc:"Ô",ocirc:"ô",Ocy:"О",ocy:"о",odash:"⊝",Odblac:"Ő",odblac:"ő",odiv:"⨸",odot:"⊙",odsold:"⦼",OElig:"Œ",oelig:"œ",ofcir:"⦿",Ofr:"𝔒",ofr:"𝔬",ogon:"˛",Ograve:"Ò",ograve:"ò",ogt:"⧁",ohbar:"⦵",ohm:"Ω",oint:"∮",olarr:"↺",olcir:"⦾",olcross:"⦻",oline:"‾",olt:"⧀",Omacr:"Ō",omacr:"ō",Omega:"Ω",omega:"ω",Omicron:"Ο",omicron:"ο",omid:"⦶",ominus:"⊖",Oopf:"𝕆",oopf:"𝕠",opar:"⦷",OpenCurlyDoubleQuote:"“",OpenCurlyQuote:"‘",operp:"⦹",oplus:"⊕",Or:"⩔",or:"∨",orarr:"↻",ord:"⩝",order:"ℴ",orderof:"ℴ",ordf:"ª",ordm:"º",origof:"⊶",oror:"⩖",orslope:"⩗",orv:"⩛",oS:"Ⓢ",Oscr:"𝒪",oscr:"ℴ",Oslash:"Ø",oslash:"ø",osol:"⊘",Otilde:"Õ",otilde:"õ",Otimes:"⨷",otimes:"⊗",otimesas:"⨶",Ouml:"Ö",ouml:"ö",ovbar:"⌽",OverBar:"‾",OverBrace:"⏞",OverBracket:"⎴",OverParenthesis:"⏜",par:"∥",para:"¶",parallel:"∥",parsim:"⫳",parsl:"⫽",part:"∂",PartialD:"∂",Pcy:"П",pcy:"п",percnt:"%",period:".",permil:"‰",perp:"⊥",pertenk:"‱",Pfr:"𝔓",pfr:"𝔭",Phi:"Φ",phi:"φ",phiv:"ϕ",phmmat:"ℳ",phone:"☎",Pi:"Π",pi:"π",pitchfork:"⋔",piv:"ϖ",planck:"ℏ",planckh:"ℎ",plankv:"ℏ",plus:"+",plusacir:"⨣",plusb:"⊞",pluscir:"⨢",plusdo:"∔",plusdu:"⨥",pluse:"⩲",PlusMinus:"±",plusmn:"±",plussim:"⨦",plustwo:"⨧",pm:"±",Poincareplane:"ℌ",pointint:"⨕",Popf:"ℙ",popf:"𝕡",pound:"£",Pr:"⪻",pr:"≺",prap:"⪷",prcue:"≼",prE:"⪳",pre:"⪯",prec:"≺",precapprox:"⪷",preccurlyeq:"≼",Precedes:"≺",PrecedesEqual:"⪯",PrecedesSlantEqual:"≼",PrecedesTilde:"≾",preceq:"⪯",precnapprox:"⪹",precneqq:"⪵",precnsim:"⋨",precsim:"≾",Prime:"″",prime:"′",primes:"ℙ",prnap:"⪹",prnE:"⪵",prnsim:"⋨",prod:"∏",Product:"∏",profalar:"⌮",profline:"⌒",profsurf:"⌓",prop:"∝",Proportion:"∷",Proportional:"∝",propto:"∝",prsim:"≾",prurel:"⊰",Pscr:"𝒫",pscr:"𝓅",Psi:"Ψ",psi:"ψ",puncsp:" 
",Qfr:"𝔔",qfr:"𝔮",qint:"⨌",Qopf:"ℚ",qopf:"𝕢",qprime:"⁗",Qscr:"𝒬",qscr:"𝓆",quaternions:"ℍ",quatint:"⨖",quest:"?",questeq:"≟",QUOT:'"',quot:'"',rAarr:"⇛",race:"∽̱",Racute:"Ŕ",racute:"ŕ",radic:"√",raemptyv:"⦳",Rang:"⟫",rang:"⟩",rangd:"⦒",range:"⦥",rangle:"⟩",raquo:"»",Rarr:"↠",rArr:"⇒",rarr:"→",rarrap:"⥵",rarrb:"⇥",rarrbfs:"⤠",rarrc:"⤳",rarrfs:"⤞",rarrhk:"↪",rarrlp:"↬",rarrpl:"⥅",rarrsim:"⥴",Rarrtl:"⤖",rarrtl:"↣",rarrw:"↝",rAtail:"⤜",ratail:"⤚",ratio:"∶",rationals:"ℚ",RBarr:"⤐",rBarr:"⤏",rbarr:"⤍",rbbrk:"❳",rbrace:"}",rbrack:"]",rbrke:"⦌",rbrksld:"⦎",rbrkslu:"⦐",Rcaron:"Ř",rcaron:"ř",Rcedil:"Ŗ",rcedil:"ŗ",rceil:"⌉",rcub:"}",Rcy:"Р",rcy:"р",rdca:"⤷",rdldhar:"⥩",rdquo:"”",rdquor:"”",rdsh:"↳",Re:"ℜ",real:"ℜ",realine:"ℛ",realpart:"ℜ",reals:"ℝ",rect:"▭",REG:"®",reg:"®",ReverseElement:"∋",ReverseEquilibrium:"⇋",ReverseUpEquilibrium:"⥯",rfisht:"⥽",rfloor:"⌋",Rfr:"ℜ",rfr:"𝔯",rHar:"⥤",rhard:"⇁",rharu:"⇀",rharul:"⥬",Rho:"Ρ",rho:"ρ",rhov:"ϱ",RightAngleBracket:"⟩",RightArrow:"→",Rightarrow:"⇒",rightarrow:"→",RightArrowBar:"⇥",RightArrowLeftArrow:"⇄",rightarrowtail:"↣",RightCeiling:"⌉",RightDoubleBracket:"⟧",RightDownTeeVector:"⥝",RightDownVector:"⇂",RightDownVectorBar:"⥕",RightFloor:"⌋",rightharpoondown:"⇁",rightharpoonup:"⇀",rightleftarrows:"⇄",rightleftharpoons:"⇌",rightrightarrows:"⇉",rightsquigarrow:"↝",RightTee:"⊢",RightTeeArrow:"↦",RightTeeVector:"⥛",rightthreetimes:"⋌",RightTriangle:"⊳",RightTriangleBar:"⧐",RightTriangleEqual:"⊵",RightUpDownVector:"⥏",RightUpTeeVector:"⥜",RightUpVector:"↾",RightUpVectorBar:"⥔",RightVector:"⇀",RightVectorBar:"⥓",ring:"˚",risingdotseq:"≓",rlarr:"⇄",rlhar:"⇌",rlm:"‏",rmoust:"⎱",rmoustache:"⎱",rnmid:"⫮",roang:"⟭",roarr:"⇾",robrk:"⟧",ropar:"⦆",Ropf:"ℝ",ropf:"𝕣",roplus:"⨮",rotimes:"⨵",RoundImplies:"⥰",rpar:")",rpargt:"⦔",rppolint:"⨒",rrarr:"⇉",Rrightarrow:"⇛",rsaquo:"›",Rscr:"ℛ",rscr:"𝓇",Rsh:"↱",rsh:"↱",rsqb:"]",rsquo:"’",rsquor:"’",rthree:"⋌",rtimes:"⋊",rtri:"▹",rtrie:"⊵",rtrif:"▸",rtriltri:"⧎",RuleDelayed:"⧴",ruluhar:"⥨",rx:"℞",Sacute:"Ś",sacute:"ś",sbquo:"‚",Sc:"⪼",sc:"≻",scap:"⪸",Scaron:"Š",scaron:"š",sccue:"≽",scE:"⪴",sce:"⪰",Scedil:"Ş",scedil:"ş",Scirc:"Ŝ",scirc:"ŝ",scnap:"⪺",scnE:"⪶",scnsim:"⋩",scpolint:"⨓",scsim:"≿",Scy:"С",scy:"с",sdot:"⋅",sdotb:"⊡",sdote:"⩦",searhk:"⤥",seArr:"⇘",searr:"↘",searrow:"↘",sect:"§",semi:";",seswar:"⤩",setminus:"∖",setmn:"∖",sext:"✶",Sfr:"𝔖",sfr:"𝔰",sfrown:"⌢",sharp:"♯",SHCHcy:"Щ",shchcy:"щ",SHcy:"Ш",shcy:"ш",ShortDownArrow:"↓",ShortLeftArrow:"←",shortmid:"∣",shortparallel:"∥",ShortRightArrow:"→",ShortUpArrow:"↑",shy:"­",Sigma:"Σ",sigma:"σ",sigmaf:"ς",sigmav:"ς",sim:"∼",simdot:"⩪",sime:"≃",simeq:"≃",simg:"⪞",simgE:"⪠",siml:"⪝",simlE:"⪟",simne:"≆",simplus:"⨤",simrarr:"⥲",slarr:"←",SmallCircle:"∘",smallsetminus:"∖",smashp:"⨳",smeparsl:"⧤",smid:"∣",smile:"⌣",smt:"⪪",smte:"⪬",smtes:"⪬︀",SOFTcy:"Ь",softcy:"ь",sol:"/",solb:"⧄",solbar:"⌿",Sopf:"𝕊",sopf:"𝕤",spades:"♠",spadesuit:"♠",spar:"∥",sqcap:"⊓",sqcaps:"⊓︀",sqcup:"⊔",sqcups:"⊔︀",Sqrt:"√",sqsub:"⊏",sqsube:"⊑",sqsubset:"⊏",sqsubseteq:"⊑",sqsup:"⊐",sqsupe:"⊒",sqsupset:"⊐",sqsupseteq:"⊒",squ:"□",Square:"□",square:"□",SquareIntersection:"⊓",SquareSubset:"⊏",SquareSubsetEqual:"⊑",SquareSuperset:"⊐",SquareSupersetEqual:"⊒",SquareUnion:"⊔",squarf:"▪",squf:"▪",srarr:"→",Sscr:"𝒮",sscr:"𝓈",ssetmn:"∖",ssmile:"⌣",sstarf:"⋆",Star:"⋆",star:"☆",starf:"★",straightepsilon:"ϵ",straightphi:"ϕ",strns:"¯",Sub:"⋐",sub:"⊂",subdot:"⪽",subE:"⫅",sube:"⊆",subedot:"⫃",submult:"⫁",subnE:"⫋",subne:"⊊",subplus:"⪿",subrarr:"⥹",Subset:"⋐",subset:"⊂",subseteq:"⊆",subseteqq:"⫅",SubsetEqual:"⊆",subsetneq:"⊊",subsetneqq:"⫋",
subsim:"⫇",subsub:"⫕",subsup:"⫓",succ:"≻",succapprox:"⪸",succcurlyeq:"≽",Succeeds:"≻",SucceedsEqual:"⪰",SucceedsSlantEqual:"≽",SucceedsTilde:"≿",succeq:"⪰",succnapprox:"⪺",succneqq:"⪶",succnsim:"⋩",succsim:"≿",SuchThat:"∋",Sum:"∑",sum:"∑",sung:"♪",Sup:"⋑",sup:"⊃",sup1:"¹",sup2:"²",sup3:"³",supdot:"⪾",supdsub:"⫘",supE:"⫆",supe:"⊇",supedot:"⫄",Superset:"⊃",SupersetEqual:"⊇",suphsol:"⟉",suphsub:"⫗",suplarr:"⥻",supmult:"⫂",supnE:"⫌",supne:"⊋",supplus:"⫀",Supset:"⋑",supset:"⊃",supseteq:"⊇",supseteqq:"⫆",supsetneq:"⊋",supsetneqq:"⫌",supsim:"⫈",supsub:"⫔",supsup:"⫖",swarhk:"⤦",swArr:"⇙",swarr:"↙",swarrow:"↙",swnwar:"⤪",szlig:"ß",Tab:"\t",target:"⌖",Tau:"Τ",tau:"τ",tbrk:"⎴",Tcaron:"Ť",tcaron:"ť",Tcedil:"Ţ",tcedil:"ţ",Tcy:"Т",tcy:"т",tdot:"⃛",telrec:"⌕",Tfr:"𝔗",tfr:"𝔱",there4:"∴",Therefore:"∴",therefore:"∴",Theta:"Θ",theta:"θ",thetasym:"ϑ",thetav:"ϑ",thickapprox:"≈",thicksim:"∼",ThickSpace:"  ",thinsp:" ",ThinSpace:" ",thkap:"≈",thksim:"∼",THORN:"Þ",thorn:"þ",Tilde:"∼",tilde:"˜",TildeEqual:"≃",TildeFullEqual:"≅",TildeTilde:"≈",times:"×",timesb:"⊠",timesbar:"⨱",timesd:"⨰",tint:"∭",toea:"⤨",top:"⊤",topbot:"⌶",topcir:"⫱",Topf:"𝕋",topf:"𝕥",topfork:"⫚",tosa:"⤩",tprime:"‴",TRADE:"™",trade:"™",triangle:"▵",triangledown:"▿",triangleleft:"◃",trianglelefteq:"⊴",triangleq:"≜",triangleright:"▹",trianglerighteq:"⊵",tridot:"◬",trie:"≜",triminus:"⨺",TripleDot:"⃛",triplus:"⨹",trisb:"⧍",tritime:"⨻",trpezium:"⏢",Tscr:"𝒯",tscr:"𝓉",TScy:"Ц",tscy:"ц",TSHcy:"Ћ",tshcy:"ћ",Tstrok:"Ŧ",tstrok:"ŧ",twixt:"≬",twoheadleftarrow:"↞",twoheadrightarrow:"↠",Uacute:"Ú",uacute:"ú",Uarr:"↟",uArr:"⇑",uarr:"↑",Uarrocir:"⥉",Ubrcy:"Ў",ubrcy:"ў",Ubreve:"Ŭ",ubreve:"ŭ",Ucirc:"Û",ucirc:"û",Ucy:"У",ucy:"у",udarr:"⇅",Udblac:"Ű",udblac:"ű",udhar:"⥮",ufisht:"⥾",Ufr:"𝔘",ufr:"𝔲",Ugrave:"Ù",ugrave:"ù",uHar:"⥣",uharl:"↿",uharr:"↾",uhblk:"▀",ulcorn:"⌜",ulcorner:"⌜",ulcrop:"⌏",ultri:"◸",Umacr:"Ū",umacr:"ū",uml:"¨",UnderBar:"_",UnderBrace:"⏟",UnderBracket:"⎵",UnderParenthesis:"⏝",Union:"⋃",UnionPlus:"⊎",Uogon:"Ų",uogon:"ų",Uopf:"𝕌",uopf:"𝕦",UpArrow:"↑",Uparrow:"⇑",uparrow:"↑",UpArrowBar:"⤒",UpArrowDownArrow:"⇅",UpDownArrow:"↕",Updownarrow:"⇕",updownarrow:"↕",UpEquilibrium:"⥮",upharpoonleft:"↿",upharpoonright:"↾",uplus:"⊎",UpperLeftArrow:"↖",UpperRightArrow:"↗",Upsi:"ϒ",upsi:"υ",upsih:"ϒ",Upsilon:"Υ",upsilon:"υ",UpTee:"⊥",UpTeeArrow:"↥",upuparrows:"⇈",urcorn:"⌝",urcorner:"⌝",urcrop:"⌎",Uring:"Ů",uring:"ů",urtri:"◹",Uscr:"𝒰",uscr:"𝓊",utdot:"⋰",Utilde:"Ũ",utilde:"ũ",utri:"▵",utrif:"▴",uuarr:"⇈",Uuml:"Ü",uuml:"ü",uwangle:"⦧",vangrt:"⦜",varepsilon:"ϵ",varkappa:"ϰ",varnothing:"∅",varphi:"ϕ",varpi:"ϖ",varpropto:"∝",vArr:"⇕",varr:"↕",varrho:"ϱ",varsigma:"ς",varsubsetneq:"⊊︀",varsubsetneqq:"⫋︀",varsupsetneq:"⊋︀",varsupsetneqq:"⫌︀",vartheta:"ϑ",vartriangleleft:"⊲",vartriangleright:"⊳",Vbar:"⫫",vBar:"⫨",vBarv:"⫩",Vcy:"В",vcy:"в",VDash:"⊫",Vdash:"⊩",vDash:"⊨",vdash:"⊢",Vdashl:"⫦",Vee:"⋁",vee:"∨",veebar:"⊻",veeeq:"≚",vellip:"⋮",Verbar:"‖",verbar:"|",Vert:"‖",vert:"|",VerticalBar:"∣",VerticalLine:"|",VerticalSeparator:"❘",VerticalTilde:"≀",VeryThinSpace:" 
",Vfr:"𝔙",vfr:"𝔳",vltri:"⊲",vnsub:"⊂⃒",vnsup:"⊃⃒",Vopf:"𝕍",vopf:"𝕧",vprop:"∝",vrtri:"⊳",Vscr:"𝒱",vscr:"𝓋",vsubnE:"⫋︀",vsubne:"⊊︀",vsupnE:"⫌︀",vsupne:"⊋︀",Vvdash:"⊪",vzigzag:"⦚",Wcirc:"Ŵ",wcirc:"ŵ",wedbar:"⩟",Wedge:"⋀",wedge:"∧",wedgeq:"≙",weierp:"℘",Wfr:"𝔚",wfr:"𝔴",Wopf:"𝕎",wopf:"𝕨",wp:"℘",wr:"≀",wreath:"≀",Wscr:"𝒲",wscr:"𝓌",xcap:"⋂",xcirc:"◯",xcup:"⋃",xdtri:"▽",Xfr:"𝔛",xfr:"𝔵",xhArr:"⟺",xharr:"⟷",Xi:"Ξ",xi:"ξ",xlArr:"⟸",xlarr:"⟵",xmap:"⟼",xnis:"⋻",xodot:"⨀",Xopf:"𝕏",xopf:"𝕩",xoplus:"⨁",xotime:"⨂",xrArr:"⟹",xrarr:"⟶",Xscr:"𝒳",xscr:"𝓍",xsqcup:"⨆",xuplus:"⨄",xutri:"△",xvee:"⋁",xwedge:"⋀",Yacute:"Ý",yacute:"ý",YAcy:"Я",yacy:"я",Ycirc:"Ŷ",ycirc:"ŷ",Ycy:"Ы",ycy:"ы",yen:"¥",Yfr:"𝔜",yfr:"𝔶",YIcy:"Ї",yicy:"ї",Yopf:"𝕐",yopf:"𝕪",Yscr:"𝒴",yscr:"𝓎",YUcy:"Ю",yucy:"ю",Yuml:"Ÿ",yuml:"ÿ",Zacute:"Ź",zacute:"ź",Zcaron:"Ž",zcaron:"ž",Zcy:"З",zcy:"з",Zdot:"Ż",zdot:"ż",zeetrf:"ℨ",ZeroWidthSpace:"​",Zeta:"Ζ",zeta:"ζ",Zfr:"ℨ",zfr:"𝔷",ZHcy:"Ж",zhcy:"ж",zigrarr:"⇝",Zopf:"ℤ",zopf:"𝕫",Zscr:"𝒵",zscr:"𝓏",zwj:"‍",zwnj:"‌"}},function(e,t,n){"use strict";var r=n(123),o=n(9).unescapeMd;e.exports=function(e,t){var n,a,i,s=t,l=e.posMax;if(60===e.src.charCodeAt(t)){for(t++;t1)break;if(41===n&&--a<0)break;t++}return s!==t&&(i=o(e.src.slice(s,t)),!!e.parser.validateLink(i)&&(e.linkContent=i,e.pos=t,!0))}},function(e,t,n){"use strict";var r=n(9).replaceEntities;e.exports=function(e){var t=r(e);try{t=decodeURI(t)}catch(e){}return encodeURI(t)}},function(e,t,n){"use strict";var r=n(9).unescapeMd;e.exports=function(e,t){var n,o=t,a=e.posMax,i=e.src.charCodeAt(t);if(34!==i&&39!==i&&40!==i)return!1;for(t++,40===i&&(i=41);t=0||(o[n]=e[n]);return o}var a=n(3),i=n.n(a),s=n(0),l=n.n(s),u=n(5),c=n.n(u),f=function(){};function p(e,t){return void 0!==e[t]}function d(e){return"default"+e.charAt(0).toUpperCase()+e.substr(1)}function h(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var n=e[Symbol.toPrimitive];if(void 0!==n){var r=n.call(e,t||"default");if("object"!=typeof r)return r;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function m(e,t){return Object.keys(t).reduce((function(n,a){var i,l=n,u=l[d(a)],c=l[a],f=o(l,[d(a),a].map(h)),p=t[a],m=function(e,t,n){var r=Object(s.useRef)(void 0!==e),o=Object(s.useState)(t),a=o[0],i=o[1],l=void 0!==e,u=r.current;return r.current=l,!l&&u&&a!==t&&i(t),[l?e:a,Object(s.useCallback)((function(e){for(var t=arguments.length,r=new Array(t>1?t-1:0),o=1;oft(t.props.children)-1){if(!r)return;o=0}t.select(o,e,"next")}},t.handlePrev=function(e){if(!t._isSliding){var n=t.props,r=n.wrap,o=n.activeIndex-1;if(o<0){if(!r)return;o=ft(t.props.children)-1}t.select(o,e,"prev")}},t}v(t,e);var n=t.prototype;return n.componentDidMount=function(){this.cycle()},t.getDerivedStateFromProps=function(e,t){var n=t.activeIndex;if(e.activeIndex!==n){var r=ft(e.children)-1,o=Math.max(0,Math.min(e.activeIndex,r));return{direction:0===o&&n>=r||n<=o?"next":"prev",previousActiveIndex:n,activeIndex:o}}return null},n.componentDidUpdate=function(e,t){var n=this,r=this.props,o=r.bsPrefix,a=r.slide,s=r.onSlideEnd;if(a&&this.state.activeIndex!==t.activeIndex&&!this._isSliding){var l,u,c=this.state,f=c.activeIndex,p=c.direction;"next"===p?(l=o+"-item-next",u=o+"-item-left"):"prev"===p&&(l=o+"-item-prev",u=o+"-item-right"),this._isSliding=!0,this.pause(),this.safeSetState({prevClasses:"active",currentClasses:l},(function(){var 
e=n.carousel.current.children[f];se(e),n.safeSetState({prevClasses:i()("active",u),currentClasses:i()(l,u)},(function(){return Y(e,(function(){n.safeSetState({prevClasses:"",currentClasses:"active"},n.handleSlideEnd),s&&s()}))}))}))}},n.componentWillUnmount=function(){clearTimeout(this.timeout),this.isUnmounted=!0},n.safeSetState=function(e,t){var n=this;this.isUnmounted||this.setState(e,(function(){return!n.isUnmounted&&t()}))},n.pause=function(){this._isPaused=!0,clearInterval(this._interval),this._interval=null},n.cycle=function(){this._isPaused=!1,clearInterval(this._interval),this._interval=null,this.props.interval&&!this._isPaused&&(this._interval=setInterval(document.visibilityState?this.handleNextWhenVisible:this.handleNext,this.props.interval))},n.to=function(e,t){var n=this.props.children;e<0||e>ft(n)-1||(this._isSliding?this._pendingIndex=e:this.select(e,t))},n.select=function(e,t,n){var r=this;clearTimeout(this.selectThrottle),t&&t.persist&&t.persist(),this.selectThrottle=setTimeout((function(){clearTimeout(r.timeout);var o=r.props,a=o.activeIndex,i=o.onSelect;e===a||r._isSliding||r.isUnmounted||i(e,n||(e1?i-1:0),l=1;l *"},Vt.Menu=Lt,Vt.Toggle=Bt;var $t=Vt,Wt=l.a.createContext(null),Kt={as:Re,disabled:!1},Gt=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.eventKey,f=e.disabled,p=e.href,d=e.onClick,h=e.onSelect,m=e.active,v=e.as,g=o(e,["bsPrefix","className","children","eventKey","disabled","href","onClick","onSelect","active","as"]),b=_(n,"dropdown-item"),y=Object(s.useContext)(j),w=(Object(s.useContext)(Wt)||{}).activeKey,x=T(c,p),E=null==m&&null!=x?T(w)===x:m,k=be((function(e){f||(d&&d(e),y&&y(x,e),h&&h(x,e))}));return l.a.createElement(v,Object(r.a)({},g,{ref:t,href:p,disabled:f,className:i()(a,b,E&&"active",f&&"disabled"),onClick:k}),u)}));Gt.displayName="DropdownItem",Gt.defaultProps=Kt;var Yt=Gt,Qt=function(e){return e&&"function"!=typeof e?function(t){e.current=t}:e};var Zt=function(e,t){return Object(s.useMemo)((function(){return function(e,t){var n=Qt(e),r=Qt(t);return function(e){n&&n(e),r&&r(e)}}(e,t)}),[e,t])},Xt=l.a.createContext(null);function Jt(e,t){return e}var en=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.alignRight,c=e.rootCloseEvent,f=e.flip,p=e.popperConfig,d=e.show,h=e.as,m=void 0===h?"div":h,v=o(e,["bsPrefix","className","alignRight","rootCloseEvent","flip","popperConfig","show","as"]),g=Object(s.useContext)(Xt),b=_(n,"dropdown-menu"),y=Ft({flip:f,popperConfig:p,rootCloseEvent:c,show:d,alignEnd:u,usePopper:!g}),w=y.hasShown,x=y.placement,E=y.show,k=y.alignEnd,O=y.close,S=y.props;if(S.ref=Zt(S.ref,Jt(t)),!w)return null;"string"!=typeof m&&(S.show=E,S.close=O,S.alignRight=k);var C=v.style;return x&&(C=Object(r.a)({},C,{},S.style),v["x-placement"]=x),l.a.createElement(m,Object(r.a)({},v,S,{style:C,className:i()(a,b,E&&"show",k&&b+"-right")}))}));en.displayName="DropdownMenu",en.defaultProps={alignRight:!1,flip:!0};var tn=en,nn=(n(41),l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.split,s=e.className,u=e.children,c=e.childBsPrefix,f=e.as,p=void 0===f?$e:f,d=o(e,["bsPrefix","split","className","children","childBsPrefix","as"]),h=_(n,"dropdown-toggle");void 0!==c&&(d.bsPrefix=c);var m=Ut(),v=m[0],g=m[1].toggle;return v.ref=Zt(v.ref,Jt(t)),l.a.createElement(p,Object(r.a)({onClick:g,className:i()(s,h,a&&h+"-split")},v,d),u)})));nn.displayName="DropdownToggle";var rn=nn,on=l.a.forwardRef((function(e,t){var 
n=m(e,{show:"onToggle"}),a=n.bsPrefix,u=n.drop,c=n.show,f=n.className,p=n.alignRight,d=n.onSelect,h=n.onToggle,v=n.focusFirstItemOnShow,g=n.as,b=void 0===g?"div":g,y=(n.navbar,o(n,["bsPrefix","drop","show","className","alignRight","onSelect","onToggle","focusFirstItemOnShow","as","navbar"])),w=Object(s.useContext)(j),x=_(a,"dropdown"),E=be((function(e,t,n){void 0===n&&(n=t.type),t.currentTarget===document&&(n="rootClose"),h(e,t,{source:n})})),k=be((function(e,t){w&&w(e,t),d&&d(e,t),E(!1,t,"select")}));return l.a.createElement(j.Provider,{value:k},l.a.createElement($t,{drop:u,show:c,alignEnd:p,onToggle:E,focusFirstItemOnShow:v,itemSelector:"."+x+"-item:not(.disabled):not(:disabled)"},(function(e){var n=e.props;return l.a.createElement(b,Object(r.a)({},y,n,{ref:t,className:i()(f,c&&"show",(!u||"down"===u)&&x,"up"===u&&"dropup","right"===u&&"dropright","left"===u&&"dropleft")}))})))}));on.displayName="Dropdown",on.defaultProps={navbar:!1},on.Toggle=rn,on.Menu=tn,on.Item=Yt,on.Header=xe("dropdown-header",{defaultProps:{role:"heading"}}),on.Divider=xe("dropdown-divider",{defaultProps:{role:"separator"}});var an=on,sn={id:Z.a.any,href:Z.a.string,onClick:Z.a.func,title:Z.a.node.isRequired,disabled:Z.a.bool,menuRole:Z.a.string,rootCloseEvent:Z.a.string,bsPrefix:Z.a.string,variant:Z.a.string,size:Z.a.string},ln=l.a.forwardRef((function(e,t){var n=e.title,a=e.children,i=e.bsPrefix,s=e.rootCloseEvent,u=e.variant,c=e.size,f=e.menuRole,p=e.disabled,d=e.href,h=e.id,m=o(e,["title","children","bsPrefix","rootCloseEvent","variant","size","menuRole","disabled","href","id"]);return l.a.createElement(an,Object(r.a)({ref:t},m),l.a.createElement(an.Toggle,{id:h,href:d,size:c,variant:u,disabled:p,childBsPrefix:i},n),l.a.createElement(an.Menu,{role:f,rootCloseEvent:s},a))}));ln.displayName="DropdownButton",ln.propTypes=sn;var un=ln,cn=(n(120),{type:Z.a.string.isRequired,as:Z.a.elementType}),fn=l.a.forwardRef((function(e,t){var n=e.as,a=void 0===n?"div":n,s=e.className,u=e.type,c=o(e,["as","className","type"]);return l.a.createElement(a,Object(r.a)({},c,{ref:t,className:i()(s,u&&u+"-feedback")}))}));fn.displayName="Feedback",fn.propTypes=cn,fn.defaultProps={type:"valid"};var pn=fn,dn=l.a.createContext({controlId:void 0}),hn=l.a.forwardRef((function(e,t){var n=e.id,a=e.bsPrefix,u=e.bsCustomPrefix,c=e.className,f=e.isValid,p=e.isInvalid,d=e.isStatic,h=e.as,m=void 0===h?"input":h,v=o(e,["id","bsPrefix","bsCustomPrefix","className","isValid","isInvalid","isStatic","as"]),g=Object(s.useContext)(dn),b=g.controlId;return a=g.custom?_(u,"custom-control-input"):_(a,"form-check-input"),l.a.createElement(m,Object(r.a)({},v,{ref:t,id:n||b,className:i()(c,a,f&&"is-valid",p&&"is-invalid",d&&"position-static")}))}));hn.displayName="FormCheckInput",hn.defaultProps={type:"checkbox"};var mn=hn,vn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.bsCustomPrefix,u=e.className,c=e.htmlFor,f=o(e,["bsPrefix","bsCustomPrefix","className","htmlFor"]),p=Object(s.useContext)(dn),d=p.controlId;return n=p.custom?_(a,"custom-control-label"):_(n,"form-check-label"),l.a.createElement("label",Object(r.a)({},f,{ref:t,htmlFor:c||d,className:i()(u,n)}))}));vn.displayName="FormCheckLabel";var gn=vn,bn=l.a.forwardRef((function(e,t){var n=e.id,a=e.bsPrefix,u=e.bsCustomPrefix,c=e.inline,f=e.disabled,p=e.isValid,d=e.isInvalid,h=e.feedback,m=e.className,v=e.style,g=e.title,b=e.type,y=e.label,w=e.children,x=e.custom,E=e.as,k=void 
0===E?"input":E,O=o(e,["id","bsPrefix","bsCustomPrefix","inline","disabled","isValid","isInvalid","feedback","className","style","title","type","label","children","custom","as"]),S="switch"===b||x;a=S?_(u,"custom-control"):_(a,"form-check");var C=Object(s.useContext)(dn).controlId,T=Object(s.useMemo)((function(){return{controlId:n||C,custom:S}}),[C,S,n]),j=null!=y&&!1!==y&&!w,P=l.a.createElement(mn,Object(r.a)({},O,{type:"switch"===b?"checkbox":b,ref:t,isValid:p,isInvalid:d,isStatic:!j,disabled:f,as:k}));return l.a.createElement(dn.Provider,{value:T},l.a.createElement("div",{style:v,className:i()(m,a,S&&"custom-"+b,c&&a+"-inline")},w||l.a.createElement(l.a.Fragment,null,P,j&&l.a.createElement(gn,{title:g},y),(p||d)&&l.a.createElement(pn,{type:p?"valid":"invalid"},h))))}));bn.displayName="FormCheck",bn.defaultProps={type:"checkbox",inline:!1,disabled:!1,isValid:!1,isInvalid:!1,title:""},bn.Input=mn,bn.Label=gn;var yn=bn,wn=l.a.forwardRef((function(e,t){var n,a,u=e.bsPrefix,c=e.type,f=e.size,p=e.id,d=e.className,h=e.isValid,m=e.isInvalid,v=e.plaintext,g=e.readOnly,b=e.as,y=void 0===b?"input":b,w=o(e,["bsPrefix","type","size","id","className","isValid","isInvalid","plaintext","readOnly","as"]),x=Object(s.useContext)(dn).controlId;if(u=_(u,"form-control"),v)(a={})[u+"-plaintext"]=!0,n=a;else if("file"===c){var E;(E={})[u+"-file"]=!0,n=E}else{var k;(k={})[u]=!0,k[u+"-"+f]=f,n=k}return l.a.createElement(y,Object(r.a)({},w,{type:c,ref:t,readOnly:g,id:p||x,className:i()(d,n,h&&"is-valid",m&&"is-invalid")}))}));wn.displayName="FormControl",wn.Feedback=pn;var xn=wn,En=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.controlId,f=e.as,p=void 0===f?"div":f,d=o(e,["bsPrefix","className","children","controlId","as"]);n=_(n,"form-group");var h=Object(s.useMemo)((function(){return{controlId:c}}),[c]);return l.a.createElement(dn.Provider,{value:h},l.a.createElement(p,Object(r.a)({},d,{ref:t,className:i()(a,n)}),u))}));En.displayName="FormGroup";var kn=En,_n=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.column,u=e.srOnly,c=e.className,f=e.htmlFor,p=o(e,["bsPrefix","column","srOnly","className","htmlFor"]),d=Object(s.useContext)(dn).controlId;n=_(n,"form-label");var h="col-form-label";"string"==typeof a&&(h=h+"-"+a);var m=i()(c,n,u&&"sr-only",a&&h);return f=f||d,a?l.a.createElement(yt,Object(r.a)({as:"label",className:m,htmlFor:f},p)):l.a.createElement("label",Object(r.a)({ref:t,className:m,htmlFor:f},p))}));_n.displayName="FormLabel",_n.defaultProps={column:!1,srOnly:!1};var On=_n,Sn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=void 0===s?"small":s,c=e.muted,f=o(e,["bsPrefix","className","as","muted"]);return n=_(n,"form-text"),l.a.createElement(u,Object(r.a)({},f,{ref:t,className:i()(a,n,c&&"text-muted")}))}));Sn.displayName="FormText";var Cn=Sn,Tn=l.a.forwardRef((function(e,t){return l.a.createElement(yn,Object(r.a)({},e,{ref:t,type:"switch"}))}));Tn.displayName="Switch",Tn.Input=yn.Input,Tn.Label=yn.Label;var jn=Tn,Pn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.inline,s=e.className,u=e.validated,c=e.as,f=void 0===c?"form":c,p=o(e,["bsPrefix","inline","className","validated","as"]);return n=_(n,"form"),l.a.createElement(f,Object(r.a)({},p,{ref:t,className:i()(s,u&&"was-validated",a&&n+"-inline")}))}));Pn.displayName="Form",Pn.defaultProps={inline:!1},Pn.Row=xe("form-row"),Pn.Group=kn,Pn.Control=xn,Pn.Check=yn,Pn.Switch=jn,Pn.Label=On,Pn.Text=Cn;var Nn=Pn,Rn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.fluid,s=e.as,u=void 
0===s?"div":s,c=e.className,f=o(e,["bsPrefix","fluid","as","className"]),p=_(n,"container"),d="string"==typeof a?"-"+a:"-fluid";return l.a.createElement(u,Object(r.a)({ref:t},f,{className:i()(c,a?""+p+d:p)}))}));Rn.displayName="Container",Rn.defaultProps={fluid:!1};var An=Rn,Mn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.fluid,u=e.rounded,c=e.roundedCircle,f=e.thumbnail,p=o(e,["bsPrefix","className","fluid","rounded","roundedCircle","thumbnail"]);n=_(n,"img");var d=i()(s&&n+"-fluid",u&&"rounded",c&&"rounded-circle",f&&n+"-thumbnail");return l.a.createElement("img",Object(r.a)({ref:t},p,{className:i()(a,d)}))}));Mn.displayName="Image",Mn.defaultProps={fluid:!1,rounded:!1,roundedCircle:!1,thumbnail:!1};var Fn=Mn,Dn={bsPrefix:Z.a.string,fluid:Z.a.bool,rounded:Z.a.bool,roundedCircle:Z.a.bool,thumbnail:Z.a.bool},In=l.a.forwardRef((function(e,t){var n=e.className,a=o(e,["className"]);return l.a.createElement(Fn,Object(r.a)({ref:t},a,{className:i()(n,"figure-img")}))}));In.displayName="FigureImage",In.propTypes=Dn,In.defaultProps={fluid:!0};var Ln=In,Un=xe("figure-caption",{Component:"figcaption"}),zn=xe("figure",{Component:"figure"});zn.Image=Ln,zn.Caption=Un;var qn=zn,Bn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.size,s=e.className,u=e.as,c=void 0===u?"div":u,f=o(e,["bsPrefix","size","className","as"]);return n=_(n,"input-group"),l.a.createElement(c,Object(r.a)({ref:t},f,{className:i()(s,n,a&&n+"-"+a)}))})),Hn=xe("input-group-append"),Vn=xe("input-group-prepend"),$n=xe("input-group-text",{Component:"span"});Bn.displayName="InputGroup",Bn.Text=$n,Bn.Radio=function(e){return l.a.createElement($n,null,l.a.createElement("input",Object(r.a)({type:"radio"},e)))},Bn.Checkbox=function(e){return l.a.createElement($n,null,l.a.createElement("input",Object(r.a)({type:"checkbox"},e)))},Bn.Append=Hn,Bn.Prepend=Vn;var Wn=Bn,Kn=l.a.forwardRef((function(e,t){var n,a=e.as,s=void 0===a?"div":a,u=e.className,c=e.fluid,f=e.bsPrefix,p=o(e,["as","className","fluid","bsPrefix"]),d=((n={})[f=_(f,"jumbotron")]=!0,n[f+"-fluid"]=c,n);return l.a.createElement(s,Object(r.a)({ref:t},p,{className:i()(u,d)}))}));Kn.defaultProps={fluid:!1},Kn.displayName="Jumbotron";var Gn=Kn,Yn=l.a.createContext(null),Qn=function(){},Zn=l.a.forwardRef((function(e,t){var n,a,i=e.as,u=void 0===i?"ul":i,c=e.onSelect,f=e.activeKey,p=e.role,d=e.onKeyDown,h=o(e,["as","onSelect","activeKey","role","onKeyDown"]),m=kt(),v=Object(s.useRef)(!1),g=Object(s.useContext)(j),b=Object(s.useContext)(Yn);b&&(p=p||"tablist",f=b.activeKey,n=b.getControlledId,a=b.getControllerId);var y=Object(s.useRef)(null),w=function(e){if(!y.current)return null;var t=xt(y.current,"[data-rb-event-key]:not(.disabled)"),n=y.current.querySelector(".active"),r=t.indexOf(n);if(-1===r)return null;var o=r+e;return o>=t.length&&(o=0),o<0&&(o=t.length-1),t[o]},x=function(e,t){null!=e&&(c&&c(e,t),g&&g(e,t))};Object(s.useEffect)((function(){if(y.current&&v.current){var e=y.current.querySelector("[data-rb-event-key].active");e&&e.focus()}v.current=!1}));var E=Zt(t,y);return l.a.createElement(j.Provider,{value:x},l.a.createElement(Wt.Provider,{value:{role:p,activeKey:T(f),getControlledId:n||Qn,getControllerId:a||Qn}},l.a.createElement(u,Object(r.a)({},h,{onKeyDown:function(e){var t;switch(d&&d(e),e.key){case"ArrowLeft":case"ArrowUp":t=w(-1);break;case"ArrowRight":case"ArrowDown":t=w(1);break;default:return}t&&(e.preventDefault(),x(t.dataset.rbEventKey,e),v.current=!0,m())},ref:E,role:p}))))})),Xn=l.a.forwardRef((function(e,t){var 
n=e.active,a=e.className,u=e.tabIndex,c=e.eventKey,f=e.onSelect,p=e.onClick,d=e.as,h=o(e,["active","className","tabIndex","eventKey","onSelect","onClick","as"]),m=T(c,h.href),v=Object(s.useContext)(j),g=Object(s.useContext)(Wt),b=n;g&&(h.role||"tablist"!==g.role||(h.role="tab"),h["data-rb-event-key"]=m,h.id=g.getControllerId(m),h["aria-controls"]=g.getControlledId(m),b=null==n&&null!=m?g.activeKey===m:n),"tab"===h.role&&(h.tabIndex=b?u:-1,h["aria-selected"]=b);var y=be((function(e){p&&p(e),null!=m&&(f&&f(m,e),v&&v(m,e))}));return l.a.createElement(d,Object(r.a)({},h,{ref:t,onClick:y,className:i()(a,b&&"active")}))}));Xn.defaultProps={disabled:!1};var Jn=Xn,er=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.active,u=e.disabled,c=e.className,f=e.variant,p=e.action,d=e.as,h=e.eventKey,m=e.onClick,v=o(e,["bsPrefix","active","disabled","className","variant","action","as","eventKey","onClick"]);n=_(n,"list-group-item");var g=Object(s.useCallback)((function(e){if(u)return e.preventDefault(),void e.stopPropagation();m&&m(e)}),[u,m]);return l.a.createElement(Jn,Object(r.a)({ref:t},v,{eventKey:T(h,v.href),as:d||(p?v.href?"a":"button":"div"),onClick:g,className:i()(c,n,a&&"active",u&&"disabled",f&&n+"-"+f,p&&n+"-action")}))}));er.defaultProps={variant:null,active:!1,disabled:!1},er.displayName="ListGroupItem";var tr=er,nr=l.a.forwardRef((function(e,t){var n,a=m(e,{activeKey:"onSelect"}),s=a.className,u=a.bsPrefix,c=a.variant,f=a.horizontal,p=a.as,d=void 0===p?"div":p,h=o(a,["className","bsPrefix","variant","horizontal","as"]);return u=_(u,"list-group"),n=f?!0===f?"horizontal":"horizontal-"+f:null,l.a.createElement(Zn,Object(r.a)({ref:t},h,{as:d,className:i()(s,u,c&&u+"-"+c,n&&u+"-"+n)}))}));nr.defaultProps={variant:null,horizontal:null},nr.displayName="ListGroup",nr.Item=tr;var rr=nr,or=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=void 0===s?"div":s,c=o(e,["bsPrefix","className","as"]),f=_(n,"media");return l.a.createElement(u,Object(r.a)({},c,{ref:t,className:i()(a,f)}))}));or.displayName="Media",or.Body=xe("media-body");var ar,ir=or;function sr(e){if((!ar&&0!==ar||e)&&z){var t=document.createElement("div");t.style.position="absolute",t.style.top="-9999px",t.style.width="50px",t.style.height="50px",t.style.overflow="scroll",document.body.appendChild(t),ar=t.offsetWidth-t.clientWidth,document.body.removeChild(t)}return ar}function lr(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function ur(e){void 0===e&&(e=A());try{var t=e.activeElement;return t&&t.nodeName?t:null}catch(t){return e.body}}function cr(e,t){e.classList?e.classList.add(t):function(e,t){return e.classList?!!t&&e.classList.contains(t):-1!==(" "+(e.className.baseVal||e.className)+" ").indexOf(" "+t+" ")}(e,t)||("string"==typeof e.className?e.className=e.className+" "+t:e.setAttribute("class",(e.className&&e.className.baseVal||"")+" "+t))}function fr(e,t){return e.replace(new RegExp("(^|\\s)"+t+"(?:\\s|$)","g"),"$1").replace(/\s+/g," ").replace(/^\s*|\s*$/g,"")}function pr(e,t){e.classList?e.classList.remove(t):"string"==typeof e.className?e.className=fr(e.className,t):e.setAttribute("class",fr(e.className&&e.className.baseVal||"",t))}function dr(e){return"window"in e&&e.window===e?e:"nodeType"in(t=e)&&t.nodeType===document.DOCUMENT_NODE&&e.defaultView||!1;var t}function hr(e){var t;return dr(e)||(t=e)&&"body"===t.tagName.toLowerCase()?function(e){var t=A(e),n=dr(t);return t.body.clientWidthe.clientHeight}var 
mr=["template","script","style"],vr=function(e,t,n){t=[].concat(t),[].forEach.call(e.children,(function(e){var r,o,a;-1===t.indexOf(e)&&(o=(r=e).nodeType,a=r.tagName,1===o&&-1===mr.indexOf(a.toLowerCase()))&&n(e)}))};function gr(e,t){t&&(e?t.setAttribute("aria-hidden","true"):t.removeAttribute("aria-hidden"))}var br,yr=function(){function e(e){var t=void 0===e?{}:e,n=t.hideSiblingNodes,r=void 0===n||n,o=t.handleContainerOverflow,a=void 0===o||o;this.hideSiblingNodes=r,this.handleContainerOverflow=a,this.modals=[],this.containers=[],this.data=[],this.scrollbarSize=sr()}var t=e.prototype;return t.isContainerOverflowing=function(e){var t=this.data[this.containerIndexFromModal(e)];return t&&t.overflowing},t.containerIndexFromModal=function(e){return t=this.data,n=function(t){return-1!==t.modals.indexOf(e)},r=-1,t.some((function(e,t){return!!n(e,t)&&(r=t,!0)})),r;var t,n,r},t.setContainerStyle=function(e,t){var n={overflow:"hidden"};e.style={overflow:t.style.overflow,paddingRight:t.style.paddingRight},e.overflowing&&(n.paddingRight=parseInt(U(t,"paddingRight")||0,10)+this.scrollbarSize+"px"),U(t,n)},t.removeContainerStyle=function(e,t){var n=e.style;Object.keys(n).forEach((function(e){t.style[e]=n[e]}))},t.add=function(e,t,n){var r=this.modals.indexOf(e),o=this.containers.indexOf(t);if(-1!==r)return r;if(r=this.modals.length,this.modals.push(e),this.hideSiblingNodes&&function(e,t){var n=t.dialog,r=t.backdrop;vr(e,[n,r],(function(e){return gr(!0,e)}))}(t,e),-1!==o)return this.data[o].modals.push(e),r;var a={modals:[e],classes:n?n.split(/\s+/):[],overflowing:hr(t)};return this.handleContainerOverflow&&this.setContainerStyle(a,t),a.classes.forEach(cr.bind(null,t)),this.containers.push(t),this.data.push(a),r},t.remove=function(e){var t=this.modals.indexOf(e);if(-1!==t){var n=this.containerIndexFromModal(e),r=this.data[n],o=this.containers[n];if(r.modals.splice(r.modals.indexOf(e),1),this.modals.splice(t,1),0===r.modals.length)r.classes.forEach(pr.bind(null,o)),this.handleContainerOverflow&&this.removeContainerStyle(r,o),this.hideSiblingNodes&&function(e,t){var n=t.dialog,r=t.backdrop;vr(e,[n,r],(function(e){return gr(!1,e)}))}(o,e),this.containers.splice(n,1),this.data.splice(n,1);else if(this.hideSiblingNodes){var a=r.modals[r.modals.length-1],i=a.backdrop;gr(!1,a.dialog),gr(!1,i)}}},t.isTopModal=function(e){return!!this.modals.length&&this.modals[this.modals.length-1]===e},e}(),wr=function(e){if("undefined"!=typeof document)return null==e?A().body:("function"==typeof e&&(e=e()),e&&e.current&&(e=e.current),e&&e.nodeType?e:null)};function xr(e,t){var n=Object(s.useState)((function(){return wr(e)})),r=n[0],o=n[1];if(!r){var a=wr(e);a&&o(a)}return Object(s.useEffect)((function(){t&&r&&t(r)}),[t,r]),Object(s.useEffect)((function(){var t=wr(e);t!==r&&o(t)}),[e,r]),r}var Er=function(e){function t(){for(var t,n=arguments.length,r=new Array(n),o=0;o1?r-1:0),a=1;a1?r-1:0),a=1;a1?r-1:0),a=1;aA(e).documentElement.clientHeight;this.setState({style:{paddingRight:t&&!n?sr():void 0,paddingLeft:!t&&n?sr():void 0}})}},n.render=function(){var 
e=this.props,t=e.bsPrefix,n=e.className,a=e.style,s=e.dialogClassName,u=e.children,c=e.dialogAs,f=e.show,p=e.animation,d=e.backdrop,h=e.keyboard,m=e.onEscapeKeyDown,v=e.onShow,g=e.onHide,b=e.container,y=e.autoFocus,w=e.enforceFocus,x=e.restoreFocus,E=e.restoreFocusOptions,k=e.onEntered,_=e.onExit,O=e.onExiting,S=(e.onExited,e.onEntering,e.onEnter,e.onEntering,e.backdropClassName,o(e,["bsPrefix","className","style","dialogClassName","children","dialogAs","show","animation","backdrop","keyboard","onEscapeKeyDown","onShow","onHide","container","autoFocus","enforceFocus","restoreFocus","restoreFocusOptions","onEntered","onExit","onExiting","onExited","onEntering","onEnter","onEntering","backdropClassName"])),C=!0===d?this.handleClick:null,T=Object(r.a)({},a,{},this.state.style);return p||(T.display="block"),l.a.createElement(Rr.Provider,{value:this.modalContext},l.a.createElement(Sr,{show:f,backdrop:d,container:b,keyboard:h,autoFocus:y,enforceFocus:w,restoreFocus:x,restoreFocusOptions:E,onEscapeKeyDown:m,onShow:v,onHide:g,onEntered:k,onExit:_,onExiting:O,manager:this.getModalManager(),ref:this.setModalRef,style:T,className:i()(n,t),containerClassName:t+"-open",transition:p?qr:void 0,backdropTransition:p?Br:void 0,renderBackdrop:this.renderBackdrop,onClick:C,onMouseUp:this.handleMouseUp,onEnter:this.handleEnter,onEntering:this.handleEntering,onExited:this.handleExited},l.a.createElement(c,Object(r.a)({},S,{onMouseDown:this.handleDialogMouseDown,className:s}),u)))},t}(l.a.Component);Hr.defaultProps=zr;var Vr=O(Hr,"modal");Vr.Body=Nr,Vr.Header=Lr,Vr.Title=Ur,Vr.Footer=Fr,Vr.Dialog=Mr,Vr.TRANSITION_DURATION=300,Vr.BACKDROP_TRANSITION_DURATION=150;var $r=Vr,Wr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.children,u=e.as,c=void 0===u?"div":u,f=o(e,["bsPrefix","className","children","as"]);return n=_(n,"nav-item"),l.a.createElement(c,Object(r.a)({},f,{ref:t,className:i()(a,n)}),s)}));Wr.displayName="NavItem";var Kr=Wr,Gr={disabled:!1,as:Re},Yr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.disabled,s=e.className,u=e.href,c=e.eventKey,f=e.onSelect,p=e.as,d=o(e,["bsPrefix","disabled","className","href","eventKey","onSelect","as"]);return n=_(n,"nav-link"),l.a.createElement(Jn,Object(r.a)({},d,{href:u,ref:t,eventKey:c,as:p,disabled:a,onSelect:f,className:i()(s,n,a&&"disabled")}))}));Yr.displayName="NavLink",Yr.defaultProps=Gr;var Qr=Yr,Zr=l.a.forwardRef((function(e,t){var n,a,u,c=m(e,{activeKey:"onSelect"}),f=c.as,p=void 0===f?"div":f,d=c.bsPrefix,h=c.variant,v=c.fill,g=c.justify,b=c.navbar,y=c.className,w=c.children,x=c.activeKey,E=o(c,["as","bsPrefix","variant","fill","justify","navbar","className","children","activeKey"]);d=_(d,"nav");var k=Object(s.useContext)(Xt),O=Object(s.useContext)(Qe);return k?(a=k.bsPrefix,b=null==b||b):O&&(u=O.cardHeaderBsPrefix),l.a.createElement(Zn,Object(r.a)({as:p,ref:t,activeKey:x,className:i()(y,(n={},n[d]=!b,n[a+"-nav"]=b,n[u+"-"+h]=!!u,n[d+"-"+h]=!!h,n[d+"-fill"]=v,n[d+"-justified"]=g,n))},E),w)}));Zr.displayName="Nav",Zr.defaultProps={justify:!1,fill:!1},Zr.Item=Kr,Zr.Link=Qr;var Xr=Zr,Jr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=o(e,["bsPrefix","className","as"]);n=_(n,"navbar-brand");var c=s||(u.href?"a":"span");return l.a.createElement(c,Object(r.a)({},u,{ref:t,className:i()(a,n)}))}));Jr.displayName="NavbarBrand";var eo=Jr,to=l.a.forwardRef((function(e,t){var n=e.children,a=e.bsPrefix,i=o(e,["children","bsPrefix"]);return a=_(a,"navbar-collapse"),l.a.createElement(Xt.Consumer,null,(function(e){return 
l.a.createElement(pe,Object(r.a)({in:!(!e||!e.expanded)},i),l.a.createElement("div",{ref:t,className:a},n))}))}));to.displayName="NavbarCollapse";var no=to,ro=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.label,f=e.as,p=void 0===f?"button":f,d=e.onClick,h=o(e,["bsPrefix","className","children","label","as","onClick"]);n=_(n,"navbar-toggler");var m=Object(s.useContext)(Xt)||{},v=m.onToggle,g=m.expanded,b=be((function(e){d&&d(e),v&&v()}));return"button"===p&&(h.type="button"),l.a.createElement(p,Object(r.a)({},h,{ref:t,onClick:b,"aria-label":c,className:i()(a,n,!g&&"collapsed")}),u||l.a.createElement("span",{className:n+"-icon"}))}));ro.displayName="NavbarToggle",ro.defaultProps={label:"Toggle navigation"};var oo=ro,ao=l.a.forwardRef((function(e,t){var n=m(e,{expanded:"onToggle"}),a=n.bsPrefix,u=n.expand,c=n.variant,f=n.bg,p=n.fixed,d=n.sticky,h=n.className,v=n.children,g=n.as,b=void 0===g?"nav":g,y=n.expanded,w=n.onToggle,x=n.onSelect,E=n.collapseOnSelect,k=o(n,["bsPrefix","expand","variant","bg","fixed","sticky","className","children","as","expanded","onToggle","onSelect","collapseOnSelect"]);a=_(a,"navbar");var O=Object(s.useCallback)((function(){x&&x.apply(void 0,arguments),E&&y&&w(!1)}),[x,E,y,w]);void 0===k.role&&"nav"!==b&&(k.role="navigation");var S=a+"-expand";"string"==typeof u&&(S=S+"-"+u);var C=Object(s.useMemo)((function(){return{onToggle:function(){return w(!y)},bsPrefix:a,expanded:y}}),[a,y,w]);return l.a.createElement(Xt.Provider,{value:C},l.a.createElement(j.Provider,{value:O},l.a.createElement(b,Object(r.a)({ref:t},k,{className:i()(h,a,u&&S,c&&a+"-"+c,f&&"bg-"+f,d&&"sticky-"+d,p&&"fixed-"+p)}),v)))}));ao.defaultProps={expand:!0,variant:"light",collapseOnSelect:!1},ao.displayName="Navbar",ao.Brand=eo,ao.Toggle=oo,ao.Collapse=no,ao.Text=xe("navbar-text",{Component:"span"});var io=ao,so={id:Z.a.any,onClick:Z.a.func,title:Z.a.node.isRequired,disabled:Z.a.bool,active:Z.a.bool,menuRole:Z.a.string,rootCloseEvent:Z.a.string,bsPrefix:Z.a.string},lo=l.a.forwardRef((function(e,t){var n=e.id,a=e.title,i=e.children,s=e.bsPrefix,u=e.rootCloseEvent,c=e.menuRole,f=e.disabled,p=e.active,d=o(e,["id","title","children","bsPrefix","rootCloseEvent","menuRole","disabled","active"]);return l.a.createElement(an,Object(r.a)({ref:t},d,{as:Kr}),l.a.createElement(an.Toggle,{id:n,eventKey:null,active:p,disabled:f,childBsPrefix:s,as:Qr},a),l.a.createElement(an.Menu,{role:c,rootCloseEvent:u},i))}));lo.displayName="NavDropdown",lo.propTypes=so,lo.Item=an.Item,lo.Divider=an.Divider,lo.Header=an.Header;var uo=lo,co=l.a.forwardRef((function(e,t){var n=e.flip,a=e.placement,i=e.containerPadding,u=e.popperConfig,c=void 0===u?{}:u,f=e.transition,p=Et(),d=p[0],h=p[1],m=Et(),v=m[0],g=m[1],b=Zt(h,t),y=xr(e.container),w=xr(e.target),x=Object(s.useState)(!e.show),E=x[0],k=x[1],_=c.modifiers,O=void 0===_?{}:_,S=Tt(w,d,Object(r.a)({},c,{placement:a||"bottom",enableEvents:e.show,modifiers:Object(r.a)({},O,{preventOverflow:Object(r.a)({padding:i||5},O.preventOverflow),arrow:Object(r.a)({},O.arrow,{enabled:!!v,element:v}),flip:Object(r.a)({enabled:!!n},O.preventOverflow)})})),C=S.styles,T=S.arrowStyles,j=o(S,["styles","arrowStyles"]);e.show?E&&k(!1):e.transition||E||k(!0);var P=e.show||f&&!E;if(Mt(d,e.onHide,{disabled:!e.rootClose||e.rootCloseDisabled,clickTrigger:e.rootCloseEvent}),!P)return null;var N=e.children(Object(r.a)({},j,{show:e.show,props:{style:C,ref:b},arrowProps:{style:T,ref:g}}));if(f){var 
R=e.onExit,A=e.onExiting,M=e.onEnter,F=e.onEntering,D=e.onEntered;N=l.a.createElement(f,{in:e.show,appear:!0,onExit:R,onExiting:A,onExited:function(){k(!0),e.onExited&&e.onExited.apply(e,arguments)},onEnter:M,onEntering:F,onEntered:D},N)}return y?J.a.createPortal(N,y):null}));co.displayName="Overlay",co.propTypes={show:Z.a.bool,placement:Z.a.oneOf(Ot.a.placements),target:Z.a.any,container:Z.a.any,flip:Z.a.bool,children:Z.a.func.isRequired,containerPadding:Z.a.number,popperConfig:Z.a.object,rootClose:Z.a.bool,rootCloseEvent:Z.a.oneOf(["click","mousedown"]),rootCloseDisabled:Z.a.bool,onHide:function(e){var t=Z.a.func;e.rootClose&&(t=t.isRequired);for(var n=arguments.length,r=new Array(n>1?n-1:0),o=1;o-1};var U=function(e,t){var n=this.__data__,r=M(n,e);return r<0?(++this.size,n.push([e,t])):n[r][1]=t,this};function z(e){var t=-1,n=null==e?0:e.length;for(this.clear();++t-1&&e%1==0&&e-1&&e%1==0&&e<=9007199254740991},Ye={};Ye["[object Float32Array]"]=Ye["[object Float64Array]"]=Ye["[object Int8Array]"]=Ye["[object Int16Array]"]=Ye["[object Int32Array]"]=Ye["[object Uint8Array]"]=Ye["[object Uint8ClampedArray]"]=Ye["[object Uint16Array]"]=Ye["[object Uint32Array]"]=!0,Ye["[object Arguments]"]=Ye["[object Array]"]=Ye["[object ArrayBuffer]"]=Ye["[object Boolean]"]=Ye["[object DataView]"]=Ye["[object Date]"]=Ye["[object Error]"]=Ye["[object Function]"]=Ye["[object Map]"]=Ye["[object Number]"]=Ye["[object Object]"]=Ye["[object RegExp]"]=Ye["[object Set]"]=Ye["[object String]"]=Ye["[object WeakMap]"]=!1;var Qe=function(e){return O(e)&&Ge(e.length)&&!!Ye[E(e)]};var Ze=function(e){return function(t){return e(t)}},Xe=n(14),Je=Xe.a&&Xe.a.isTypedArray,et=Je?Ze(Je):Qe,tt=Object.prototype.hasOwnProperty;var nt=function(e,t){var n=Ve(e),r=!n&&He(e),o=!n&&!r&&Object($e.a)(e),a=!n&&!r&&!o&&et(e),i=n||r||o||a,s=i?Le(e.length,String):[],l=s.length;for(var u in e)!t&&!tt.call(e,u)||i&&("length"==u||o&&("offset"==u||"parent"==u)||a&&("buffer"==u||"byteLength"==u||"byteOffset"==u)||Ke(u,l))||s.push(u);return s},rt=Object.prototype;var ot=function(e){var t=e&&e.constructor;return e===("function"==typeof t&&t.prototype||rt)},at=k(Object.keys,Object),it=Object.prototype.hasOwnProperty;var st=function(e){if(!ot(e))return at(e);var t=[];for(var n in Object(e))it.call(e,n)&&"constructor"!=n&&t.push(n);return t};var lt=function(e){return null!=e&&Ge(e.length)&&!G(e)};var ut=function(e){return lt(e)?nt(e):st(e)};var ct=function(e,t){return e&&Ie(t,ut(t),e)};var ft=function(e){var t=[];if(null!=e)for(var n in Object(e))t.push(n);return t},pt=Object.prototype.hasOwnProperty;var dt=function(e){if(!W(e))return ft(e);var t=ot(e),n=[];for(var r in e)("constructor"!=r||!t&&pt.call(e,r))&&n.push(r);return n};var ht=function(e){return lt(e)?nt(e,!0):dt(e)};var mt=function(e,t){return e&&Ie(t,ht(t),e)},vt=n(130);var gt=function(e,t){var n=-1,r=e.length;for(t||(t=Array(r));++n=0||(o[n]=e[n]);return o}function An(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}var Mn=function(e){return Array.isArray(e)&&0===e.length},Fn=function(e){return"function"==typeof e},Dn=function(e){return null!==e&&"object"==typeof e},In=function(e){return String(Math.floor(Number(e)))===e},Ln=function(e){return"[object String]"===Object.prototype.toString.call(e)},Un=function(e){return e!=e},zn=function(e){return 0===r.Children.count(e)},qn=function(e){return Dn(e)&&Fn(e.then)},Bn=function(e){return e&&Dn(e)&&Dn(e.target)};function Hn(e){if(void 0===(e=e||("undefined"!=typeof 
document?document:void 0)))return null;try{return e.activeElement||e.body}catch(t){return e.body}}function Vn(e,t,n,r){void 0===r&&(r=0);for(var o=_n(t);e&&r=0?[]:{}}}return(0===a?e:o)[i[a]]===n?e:(void 0===n?delete o[i[a]]:o[i[a]]=n,0===a&&void 0===n&&delete r[i[a]],r)}function Wn(e,t,n,r){void 0===n&&(n=new WeakMap),void 0===r&&(r={});for(var o=0,a=Object.keys(e);o0?t.map((function(t){return C(t,Vn(e,t))})):[Promise.resolve("DO_NOT_DELETE_YOU_WILL_BE_FIRED")];return Promise.all(n).then((function(e){return e.reduce((function(e,n,r){return"DO_NOT_DELETE_YOU_WILL_BE_FIRED"===n||n&&(e=$n(e,t[r],n)),e}),{})}))}),[C]),j=Object(r.useCallback)((function(e){return Promise.all([T(e),m.validationSchema?S(e):{},m.validate?O(e):{}]).then((function(e){var t=e[0],n=e[1],r=e[2];return f.all([t,n,r],{arrayMerge:ar})}))}),[m.validate,m.validationSchema,T,O,S]),P=sr((function(e){return void 0===e&&(e=k.values),Object(Sn.unstable_runWithPriority)(Sn.LowPriority,(function(){return j(e).then((function(e){return w.current&&_({type:"SET_ERRORS",payload:e}),e})).catch((function(e){0}))}))})),N=sr((function(e){return void 0===e&&(e=k.values),_({type:"SET_ISVALIDATING",payload:!0}),j(e).then((function(e){return w.current&&(_({type:"SET_ISVALIDATING",payload:!1}),a()(k.errors,e)||_({type:"SET_ERRORS",payload:e})),e}))}));Object(r.useEffect)((function(){l&&!0===w.current&&P(v.current)}),[l,P]);var R=Object(r.useCallback)((function(e){var t=e&&e.values?e.values:v.current,n=e&&e.errors?e.errors:g.current?g.current:m.initialErrors||{},r=e&&e.touched?e.touched:b.current?b.current:m.initialTouched||{},o=e&&e.status?e.status:y.current?y.current:m.initialStatus;v.current=t,g.current=n,b.current=r,y.current=o;var a=function(){_({type:"RESET_FORM",payload:{isSubmitting:!!e&&!!e.isSubmitting,errors:n,touched:r,status:o,values:t,isValidating:!!e&&!!e.isValidating,submitCount:e&&e.submitCount&&"number"==typeof e.submitCount?e.submitCount:0}})};if(m.onReset){var i=m.onReset(k.values,Z);qn(i)?i.then(a):a()}else a()}),[m.initialErrors,m.initialStatus,m.initialTouched]);Object(r.useEffect)((function(){p||(v.current=m.initialValues)}),[p,m.initialValues]),Object(r.useEffect)((function(){p&&!0===w.current&&!a()(v.current,m.initialValues)&&(v.current=m.initialValues,R())}),[p,m.initialValues,R]),Object(r.useEffect)((function(){p&&!0===w.current&&!a()(g.current,m.initialErrors)&&(g.current=m.initialErrors||Xn,_({type:"SET_ERRORS",payload:m.initialErrors||Xn}))}),[p,m.initialErrors]),Object(r.useEffect)((function(){p&&!0===w.current&&!a()(b.current,m.initialTouched)&&(b.current=m.initialTouched||Jn,_({type:"SET_TOUCHED",payload:m.initialTouched||Jn}))}),[p,m.initialTouched]),Object(r.useEffect)((function(){p&&!0===w.current&&!a()(y.current,m.initialStatus)&&(y.current=m.initialStatus,_({type:"SET_STATUS",payload:m.initialStatus}))}),[p,m.initialStatus,m.initialTouched]);var A=sr((function(e){if(Fn(x.current[e].validate)){var t=Vn(k.values,e),n=x.current[e].validate(t);return qn(n)?(_({type:"SET_ISVALIDATING",payload:!0}),n.then((function(e){return e})).then((function(t){_({type:"SET_FIELD_ERROR",payload:{field:e,value:t}}),_({type:"SET_ISVALIDATING",payload:!1})}))):(_({type:"SET_FIELD_ERROR",payload:{field:e,value:n}}),Promise.resolve(n))}return m.validationSchema?(_({type:"SET_ISVALIDATING",payload:!0}),S(k.values,e).then((function(e){return e})).then((function(t){_({type:"SET_FIELD_ERROR",payload:{field:e,value:t[e]}}),_({type:"SET_ISVALIDATING",payload:!1})}))):Promise.resolve()})),M=Object(r.useCallback)((function(e,t){var 
n=t.validate;x.current[e]={validate:n}}),[]),F=Object(r.useCallback)((function(e){delete x.current[e]}),[]),D=sr((function(e,t){return _({type:"SET_TOUCHED",payload:e}),(void 0===t?i:t)?P(k.values):Promise.resolve()})),I=Object(r.useCallback)((function(e){_({type:"SET_ERRORS",payload:e})}),[]),L=sr((function(e,t){return _({type:"SET_VALUES",payload:e}),(void 0===t?n:t)?P(e):Promise.resolve()})),U=Object(r.useCallback)((function(e,t){_({type:"SET_FIELD_ERROR",payload:{field:e,value:t}})}),[]),z=sr((function(e,t,r){return _({type:"SET_FIELD_VALUE",payload:{field:e,value:t}}),(void 0===r?n:r)?P($n(k.values,e,t)):Promise.resolve()})),q=Object(r.useCallback)((function(e,t){var n,r=t,o=e;if(!Ln(e)){e.persist&&e.persist();var a=e.target?e.target:e.currentTarget,i=a.type,s=a.name,l=a.id,u=a.value,c=a.checked,f=(a.outerHTML,a.options),p=a.multiple;r=t||(s||l),o=/number|range/.test(i)?(n=parseFloat(u),isNaN(n)?"":n):/checkbox/.test(i)?function(e,t,n){if("boolean"==typeof e)return Boolean(t);var r=[],o=!1,a=-1;if(Array.isArray(e))r=e,a=e.indexOf(n),o=a>=0;else if(!n||"true"==n||"false"==n)return Boolean(t);if(t&&n&&!o)return r.concat(n);if(!o)return r;return r.slice(0,a).concat(r.slice(a+1))}(Vn(k.values,r),c,u):p?function(e){return Array.from(e).filter((function(e){return e.selected})).map((function(e){return e.value}))}(f):u}r&&z(r,o)}),[z,k.values]),B=sr((function(e){if(Ln(e))return function(t){return q(t,e)};q(e)})),H=sr((function(e,t,n){return void 0===t&&(t=!0),_({type:"SET_FIELD_TOUCHED",payload:{field:e,value:t}}),(void 0===n?i:n)?P(k.values):Promise.resolve()})),V=Object(r.useCallback)((function(e,t){e.persist&&e.persist();var n=e.target,r=n.name,o=n.id,a=(n.outerHTML,t||(r||o));H(a,!0)}),[H]),$=sr((function(e){if(Ln(e))return function(t){return V(t,e)};V(e)})),W=Object(r.useCallback)((function(e){Fn(e)?_({type:"SET_FORMIK_STATE",payload:e}):_({type:"SET_FORMIK_STATE",payload:function(){return e}})}),[]),K=Object(r.useCallback)((function(e){_({type:"SET_STATUS",payload:e})}),[]),G=Object(r.useCallback)((function(e){_({type:"SET_ISSUBMITTING",payload:e})}),[]),Y=sr((function(){return _({type:"SUBMIT_ATTEMPT"}),N().then((function(e){var t=e instanceof Error;if(!t&&0===Object.keys(e).length){var n;try{if(void 0===(n=X()))return}catch(e){throw e}return Promise.resolve(n).then((function(){w.current&&_({type:"SUBMIT_SUCCESS"})})).catch((function(e){if(w.current)throw _({type:"SUBMIT_FAILURE"}),e}))}if(w.current&&(_({type:"SUBMIT_FAILURE"}),t))throw e}))})),Q=sr((function(e){e&&e.preventDefault&&Fn(e.preventDefault)&&e.preventDefault(),e&&e.stopPropagation&&Fn(e.stopPropagation)&&e.stopPropagation(),Y().catch((function(e){console.warn("Warning: An unhandled error was caught from submitForm()",e)}))})),Z={resetForm:R,validateForm:N,validateField:A,setErrors:I,setFieldError:U,setFieldTouched:H,setFieldValue:z,setStatus:K,setSubmitting:G,setTouched:D,setValues:L,setFormikState:W,submitForm:Y},X=sr((function(){return d(k.values,Z)})),J=sr((function(e){e&&e.preventDefault&&Fn(e.preventDefault)&&e.preventDefault(),e&&e.stopPropagation&&Fn(e.stopPropagation)&&e.stopPropagation(),R()})),ee=Object(r.useCallback)((function(e){return{value:Vn(k.values,e),error:Vn(k.errors,e),touched:!!Vn(k.touched,e),initialValue:Vn(v.current,e),initialTouched:!!Vn(b.current,e),initialError:Vn(g.current,e)}}),[k.errors,k.touched,k.values]),te=Object(r.useCallback)((function(e){return{setValue:function(t){return z(e,t)},setTouched:function(t){return H(e,t)},setError:function(t){return 
U(e,t)}}}),[z,H,U]),ne=Object(r.useCallback)((function(e){var t=Dn(e),n=t?e.name:e,r=Vn(k.values,n),o={name:n,value:r,onChange:B,onBlur:$};if(t){var a=e.type,i=e.value,s=e.as,l=e.multiple;"checkbox"===a?void 0===i?o.checked=!!r:(o.checked=!(!Array.isArray(r)||!~r.indexOf(i)),o.value=i):"radio"===a?(o.checked=r===i,o.value=i):"select"===s&&l&&(o.value=o.value||[],o.multiple=!0)}return o}),[$,B,k.values]),re=Object(r.useMemo)((function(){return!a()(v.current,k.values)}),[v.current,k.values]),oe=Object(r.useMemo)((function(){return void 0!==u?re?k.errors&&0===Object.keys(k.errors).length:!1!==u&&Fn(u)?u(m):u:k.errors&&0===Object.keys(k.errors).length}),[u,re,k.errors,m]);return Pn({},k,{initialValues:v.current,initialErrors:g.current,initialTouched:b.current,initialStatus:y.current,handleBlur:$,handleChange:B,handleReset:J,handleSubmit:Q,resetForm:R,setErrors:I,setFormikState:W,setFieldTouched:H,setFieldValue:z,setFieldError:U,setStatus:K,setSubmitting:G,setTouched:D,setValues:L,submitForm:Y,validateForm:N,validateField:A,isValid:oe,dirty:re,unregisterField:F,registerField:M,getFieldProps:ne,getFieldMeta:ee,getFieldHelpers:te,validateOnBlur:i,validateOnChange:n,validateOnMount:l})}function tr(e){var t=er(e),n=e.component,o=e.children,a=e.render,i=e.innerRef;return Object(r.useImperativeHandle)(i,(function(){return t})),Object(r.useEffect)((function(){0}),[]),Object(r.createElement)(Gn,{value:t},n?Object(r.createElement)(n,t):a?a(t):o?Fn(o)?o(t):zn(o)?null:r.Children.only(o):null)}function nr(e){var t={};if(e.inner){if(0===e.inner.length)return $n(t,e.path,e.message);var n=e.inner,r=Array.isArray(n),o=0;for(n=r?n:n[Symbol.iterator]();;){var a;if(r){if(o>=n.length)break;a=n[o++]}else{if((o=n.next()).done)break;a=o.value}var i=a;Vn(t,i.path)||(t=$n(t,i.path,i.message))}}return t}function rr(e,t,n,r){void 0===n&&(n=!1),void 0===r&&(r={});var o=or(e);return t[n?"validateSync":"validate"](o,{abortEarly:!1,context:r})}function or(e){var t={};for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=String(n);!0===Array.isArray(e[r])?t[r]=e[r].map((function(e){return!0===Array.isArray(e)||N(e)?or(e):""!==e?e:void 0})):N(e[r])?t[r]=or(e[r]):t[r]=""!==e[r]?e[r]:void 0}return t}function ar(e,t,n){var r=e.slice();return t.forEach((function(t,o){if(void 0===r[o]){var a=!1!==n.clone&&n.isMergeableObject(t);r[o]=a?f(Array.isArray(t)?[]:{},t,n):t}else n.isMergeableObject(t)?r[o]=f(e[o],t,n):-1===e.indexOf(t)&&r.push(t)})),r}var ir="undefined"!=typeof window&&void 0!==window.document&&void 0!==window.document.createElement?r.useLayoutEffect:r.useEffect;function sr(e){var t=Object(r.useRef)(e);return ir((function(){t.current=e})),Object(r.useCallback)((function(){for(var e=arguments.length,n=new Array(e),r=0;re?t:e}),0);return Array.from(Pn({},e,{length:t+1}))}return[]},br=function(e){function t(t){var n;return(n=e.call(this,t)||this).updateArrayField=function(e,t,r){var o=n.props,a=o.name;(0,o.formik.setFormikState)((function(n){var o="function"==typeof r?r:e,i="function"==typeof t?t:e,s=$n(n.values,a,e(Vn(n.values,a))),l=r?o(Vn(n.errors,a)):void 0,u=t?i(Vn(n.touched,a)):void 0;return Mn(l)&&(l=void 0),Mn(u)&&(u=void 0),Pn({},n,{values:s,errors:r?$n(n.errors,a,l):n.errors,touched:t?$n(n.touched,a,u):n.touched})}))},n.push=function(e){return n.updateArrayField((function(t){return[].concat(gr(t),[jn(e)])}),!1,!1)},n.handlePush=function(e){return function(){return n.push(e)}},n.swap=function(e,t){return n.updateArrayField((function(n){return hr(n,e,t)}),!0,!0)},n.handleSwap=function(e,t){return 
function(){return n.swap(e,t)}},n.move=function(e,t){return n.updateArrayField((function(n){return dr(n,e,t)}),!0,!0)},n.handleMove=function(e,t){return function(){return n.move(e,t)}},n.insert=function(e,t){return n.updateArrayField((function(n){return mr(n,e,t)}),(function(t){return mr(t,e,null)}),(function(t){return mr(t,e,null)}))},n.handleInsert=function(e,t){return function(){return n.insert(e,t)}},n.replace=function(e,t){return n.updateArrayField((function(n){return vr(n,e,t)}),!1,!1)},n.handleReplace=function(e,t){return function(){return n.replace(e,t)}},n.unshift=function(e){var t=-1;return n.updateArrayField((function(n){var r=n?[e].concat(n):[e];return t<0&&(t=r.length),r}),(function(e){var n=e?[null].concat(e):[null];return t<0&&(t=n.length),n}),(function(e){var n=e?[null].concat(e):[null];return t<0&&(t=n.length),n})),t},n.handleUnshift=function(e){return function(){return n.unshift(e)}},n.handleRemove=function(e){return function(){return n.remove(e)}},n.handlePop=function(){return function(){return n.pop()}},n.remove=n.remove.bind(An(n)),n.pop=n.pop.bind(An(n)),n}Nn(t,e);var n=t.prototype;return n.componentDidUpdate=function(e){!a()(Vn(e.formik.values,e.name),Vn(this.props.formik.values,this.props.name))&&this.props.formik.validateOnChange&&this.props.formik.validateForm(this.props.formik.values)},n.remove=function(e){var t;return this.updateArrayField((function(n){var r=n?gr(n):[];return t||(t=r[e]),Fn(r.splice)&&r.splice(e,1),r}),!0,!0),t},n.pop=function(){var e;return this.updateArrayField((function(t){var n=t;return e||(e=n&&n.pop&&n.pop()),n}),!0,!0),e},n.render=function(){var e={push:this.push,pop:this.pop,swap:this.swap,move:this.move,insert:this.insert,replace:this.replace,unshift:this.unshift,remove:this.remove,handlePush:this.handlePush,handlePop:this.handlePop,handleSwap:this.handleSwap,handleMove:this.handleMove,handleInsert:this.handleInsert,handleReplace:this.handleReplace,handleUnshift:this.handleUnshift,handleRemove:this.handleRemove},t=this.props,n=t.component,o=t.render,a=t.children,i=t.name,s=Pn({},e,{form:Rn(t.formik,["validate","validationSchema"]),name:i});return n?Object(r.createElement)(n,s):o?o(s):a?"function"==typeof a?a(s):zn(a)?null:r.Children.only(a):null},t}(r.Component);br.defaultProps={validateOnChange:!0};var yr=pr(br),wr=pr(function(e){function t(){return e.apply(this,arguments)||this}Nn(t,e);var n=t.prototype;return n.shouldComponentUpdate=function(e){return Vn(this.props.formik.errors,this.props.name)!==Vn(e.formik.errors,this.props.name)||Vn(this.props.formik.touched,this.props.name)!==Vn(e.formik.touched,this.props.name)||Object.keys(this.props).length!==Object.keys(e).length},n.render=function(){var e=this.props,t=e.component,n=e.formik,o=e.render,a=e.children,i=e.name,s=Rn(e,["component","formik","render","children","name"]),l=Vn(n.touched,i),u=Vn(n.errors,i);return l&&u?o?Fn(o)?o(u):null:a?Fn(a)?a(u):null:t?Object(r.createElement)(t,s,u):u:null},t}(r.Component)),xr=pr(function(e){function t(t){var n;n=e.call(this,t)||this;var r=t.render,o=t.children,a=t.component,i=t.as;t.name;return r&&Object(On.a)(!1),a&&r&&Object(On.a)(!1),i&&o&&Fn(o)&&Object(On.a)(!1),a&&o&&Fn(o)&&Object(On.a)(!1),r&&o&&!zn(o)&&Object(On.a)(!1),n}Nn(t,e);var n=t.prototype;return n.shouldComponentUpdate=function(e){return 
this.props.shouldUpdate?this.props.shouldUpdate(e,this.props):e.name!==this.props.name||Vn(e.formik.values,this.props.name)!==Vn(this.props.formik.values,this.props.name)||Vn(e.formik.errors,this.props.name)!==Vn(this.props.formik.errors,this.props.name)||Vn(e.formik.touched,this.props.name)!==Vn(this.props.formik.touched,this.props.name)||Object.keys(this.props).length!==Object.keys(e).length||e.formik.isSubmitting!==this.props.formik.isSubmitting},n.componentDidMount=function(){this.props.formik.registerField(this.props.name,{validate:this.props.validate})},n.componentDidUpdate=function(e){this.props.name!==e.name&&(this.props.formik.unregisterField(e.name),this.props.formik.registerField(this.props.name,{validate:this.props.validate})),this.props.validate!==e.validate&&this.props.formik.registerField(this.props.name,{validate:this.props.validate})},n.componentWillUnmount=function(){this.props.formik.unregisterField(this.props.name)},n.render=function(){var e=this.props,t=e.name,n=e.render,o=e.as,a=e.children,i=e.component,s=e.formik,l=Rn(e,["validate","name","render","as","children","component","shouldUpdate","formik"]),u=Rn(s,["validate","validationSchema"]),c={value:"radio"===l.type||"checkbox"===l.type?l.value:Vn(s.values,t),name:t,onChange:s.handleChange,onBlur:s.handleBlur},f={field:c,meta:{value:Vn(s.values,t),error:Vn(s.errors,t),touched:!!Vn(s.touched,t),initialValue:Vn(s.initialValues,t),initialTouched:!!Vn(s.initialTouched,t),initialError:Vn(s.initialErrors,t)},form:u};if(n)return n(f);if(Fn(a))return a(f);if(i){if("string"==typeof i){var p=l.innerRef,d=Rn(l,["innerRef"]);return Object(r.createElement)(i,Pn({ref:p},c,{},d),a)}return Object(r.createElement)(i,Pn({field:c,form:s},l),a)}var h=o||"input";if("string"==typeof h){var m=l.innerRef,v=Rn(l,["innerRef"]);return Object(r.createElement)(h,Pn({ref:m},c,{},v),a)}return Object(r.createElement)(h,Pn({},c,{},l),a)},t}(r.Component))},function(e,t,n){"use strict";var r={childContextTypes:!0,contextTypes:!0,defaultProps:!0,displayName:!0,getDefaultProps:!0,getDerivedStateFromProps:!0,mixins:!0,propTypes:!0,type:!0},o={name:!0,length:!0,prototype:!0,caller:!0,callee:!0,arguments:!0,arity:!0},a=Object.defineProperty,i=Object.getOwnPropertyNames,s=Object.getOwnPropertySymbols,l=Object.getOwnPropertyDescriptor,u=Object.getPrototypeOf,c=u&&u(Object);e.exports=function e(t,n,f){if("string"!=typeof n){if(c){var p=u(n);p&&p!==c&&e(t,p,f)}var d=i(n);s&&(d=d.concat(s(n)));for(var h=0;h=200&&e<300}};l.headers={common:{Accept:"application/json, text/plain, */*"}},r.forEach(["delete","get","head"],(function(e){l.headers[e]={}})),r.forEach(["post","put","patch"],(function(e){l.headers[e]=r.merge(a)})),e.exports=l}).call(this,n(146))},function(e,t,n){"use strict";var r=n(10),o=n(148),a=n(74),i=n(150),s=n(153),l=n(154),u=n(78);e.exports=function(e){return new Promise((function(t,c){var f=e.data,p=e.headers;r.isFormData(f)&&delete p["Content-Type"];var d=new XMLHttpRequest;if(e.auth){var h=e.auth.username||"",m=e.auth.password||"";p.Authorization="Basic "+btoa(h+":"+m)}var v=i(e.baseURL,e.url);if(d.open(e.method.toUpperCase(),a(v,e.params,e.paramsSerializer),!0),d.timeout=e.timeout,d.onreadystatechange=function(){if(d&&4===d.readyState&&(0!==d.status||d.responseURL&&0===d.responseURL.indexOf("file:"))){var n="getAllResponseHeaders"in 
d?s(d.getAllResponseHeaders()):null,r={data:e.responseType&&"text"!==e.responseType?d.response:d.responseText,status:d.status,statusText:d.statusText,headers:n,config:e,request:d};o(t,c,r),d=null}},d.onabort=function(){d&&(c(u("Request aborted",e,"ECONNABORTED",d)),d=null)},d.onerror=function(){c(u("Network Error",e,null,d)),d=null},d.ontimeout=function(){var t="timeout of "+e.timeout+"ms exceeded";e.timeoutErrorMessage&&(t=e.timeoutErrorMessage),c(u(t,e,"ECONNABORTED",d)),d=null},r.isStandardBrowserEnv()){var g=n(155),b=(e.withCredentials||l(v))&&e.xsrfCookieName?g.read(e.xsrfCookieName):void 0;b&&(p[e.xsrfHeaderName]=b)}if("setRequestHeader"in d&&r.forEach(p,(function(e,t){void 0===f&&"content-type"===t.toLowerCase()?delete p[t]:d.setRequestHeader(t,e)})),r.isUndefined(e.withCredentials)||(d.withCredentials=!!e.withCredentials),e.responseType)try{d.responseType=e.responseType}catch(t){if("json"!==e.responseType)throw t}"function"==typeof e.onDownloadProgress&&d.addEventListener("progress",e.onDownloadProgress),"function"==typeof e.onUploadProgress&&d.upload&&d.upload.addEventListener("progress",e.onUploadProgress),e.cancelToken&&e.cancelToken.promise.then((function(e){d&&(d.abort(),c(e),d=null)})),void 0===f&&(f=null),d.send(f)}))}},function(e,t,n){"use strict";var r=n(149);e.exports=function(e,t,n,o,a){var i=new Error(e);return r(i,t,n,o,a)}},function(e,t,n){"use strict";var r=n(10);e.exports=function(e,t){t=t||{};var n={},o=["url","method","params","data"],a=["headers","auth","proxy"],i=["baseURL","url","transformRequest","transformResponse","paramsSerializer","timeout","withCredentials","adapter","responseType","xsrfCookieName","xsrfHeaderName","onUploadProgress","onDownloadProgress","maxContentLength","validateStatus","maxRedirects","httpAgent","httpsAgent","cancelToken","socketPath"];r.forEach(o,(function(e){void 0!==t[e]&&(n[e]=t[e])})),r.forEach(a,(function(o){r.isObject(t[o])?n[o]=r.deepMerge(e[o],t[o]):void 0!==t[o]?n[o]=t[o]:r.isObject(e[o])?n[o]=r.deepMerge(e[o]):void 0!==e[o]&&(n[o]=e[o])})),r.forEach(i,(function(r){void 0!==t[r]?n[r]=t[r]:void 0!==e[r]&&(n[r]=e[r])}));var s=o.concat(a).concat(i),l=Object.keys(t).filter((function(e){return-1===s.indexOf(e)}));return r.forEach(l,(function(r){void 0!==t[r]?n[r]=t[r]:void 0!==e[r]&&(n[r]=e[r])})),n}},function(e,t,n){"use strict";function r(e){this.message=e}r.prototype.toString=function(){return"Cancel"+(this.message?": "+this.message:"")},r.prototype.__CANCEL__=!0,e.exports=r},function(e,t,n){var r=n(82),o=n(87),a=n(11),i=n(88),s=n(55),l=n(36);e.exports=function(e,t,n){for(var u=-1,c=(t=r(t,e)).length,f=!1;++u-1&&e%1==0&&ec))return!1;var p=l.get(e);if(p&&l.get(t))return p==t;var d=-1,h=!0,m=2&n?new r:void 0;for(l.set(e,t),l.set(t,e);++d=n.length)throw new Error("Yup.reach cannot resolve an array item at index: "+o+", in the path: "+t+". because there is no value at that index. ");n=n[p]}}if(!c){if(e=e.resolve({context:r,parent:i,value:n}),!(0,a.default)(e,"fields")||!(0,a.default)(e.fields,f))throw new Error("The schema does not contain the path: "+t+". 
(failed at: "+l+' which is a type: "'+e._type+'") ');e=e.fields[f],i=n,n=n&&n[f],s=f,l=u?"["+o+"]":"."+o}})),e&&(e=e.resolve({context:r,parent:i,value:n})),{schema:e,parent:i,parentPath:s}):{parent:i,parentPath:t,schema:e.resolve({context:r,parent:i,value:n})}}var s=function(e,t,n,r){return i(e,t,n,r).schema};t.default=s},function(e,t){e.exports=function(e){if(e&&e.__esModule)return e;var t={};if(null!=e)for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}return t.default=e,t}},function(e,t){e.exports=function(e,t){return t||(t=e.slice(0)),e.raw=t,e}},function(e,t,n){var r=n(268),o=n(269),a=n(272),i=RegExp("['’]","g");e.exports=function(e){return function(t){return r(a(o(t).replace(i,"")),e,"")}}},function(e,t,n){"use strict";t.__esModule=!0,t.default=function(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),r=1;r",Gt:"≫",gt:">",gtcc:"⪧",gtcir:"⩺",gtdot:"⋗",gtlPar:"⦕",gtquest:"⩼",gtrapprox:"⪆",gtrarr:"⥸",gtrdot:"⋗",gtreqless:"⋛",gtreqqless:"⪌",gtrless:"≷",gtrsim:"≳",gvertneqq:"≩︀",gvnE:"≩︀",Hacek:"ˇ",hairsp:" ",half:"½",hamilt:"ℋ",HARDcy:"Ъ",hardcy:"ъ",hArr:"⇔",harr:"↔",harrcir:"⥈",harrw:"↭",Hat:"^",hbar:"ℏ",Hcirc:"Ĥ",hcirc:"ĥ",hearts:"♥",heartsuit:"♥",hellip:"…",hercon:"⊹",Hfr:"ℌ",hfr:"𝔥",HilbertSpace:"ℋ",hksearow:"⤥",hkswarow:"⤦",hoarr:"⇿",homtht:"∻",hookleftarrow:"↩",hookrightarrow:"↪",Hopf:"ℍ",hopf:"𝕙",horbar:"―",HorizontalLine:"─",Hscr:"ℋ",hscr:"𝒽",hslash:"ℏ",Hstrok:"Ħ",hstrok:"ħ",HumpDownHump:"≎",HumpEqual:"≏",hybull:"⁃",hyphen:"‐",Iacute:"Í",iacute:"í",ic:"⁣",Icirc:"Î",icirc:"î",Icy:"И",icy:"и",Idot:"İ",IEcy:"Е",iecy:"е",iexcl:"¡",iff:"⇔",Ifr:"ℑ",ifr:"𝔦",Igrave:"Ì",igrave:"ì",ii:"ⅈ",iiiint:"⨌",iiint:"∭",iinfin:"⧜",iiota:"℩",IJlig:"IJ",ijlig:"ij",Im:"ℑ",Imacr:"Ī",imacr:"ī",image:"ℑ",ImaginaryI:"ⅈ",imagline:"ℐ",imagpart:"ℑ",imath:"ı",imof:"⊷",imped:"Ƶ",Implies:"⇒",in:"∈",incare:"℅",infin:"∞",infintie:"⧝",inodot:"ı",Int:"∬",int:"∫",intcal:"⊺",integers:"ℤ",Integral:"∫",intercal:"⊺",Intersection:"⋂",intlarhk:"⨗",intprod:"⨼",InvisibleComma:"⁣",InvisibleTimes:"⁢",IOcy:"Ё",iocy:"ё",Iogon:"Į",iogon:"į",Iopf:"𝕀",iopf:"𝕚",Iota:"Ι",iota:"ι",iprod:"⨼",iquest:"¿",Iscr:"ℐ",iscr:"𝒾",isin:"∈",isindot:"⋵",isinE:"⋹",isins:"⋴",isinsv:"⋳",isinv:"∈",it:"⁢",Itilde:"Ĩ",itilde:"ĩ",Iukcy:"І",iukcy:"і",Iuml:"Ï",iuml:"ï",Jcirc:"Ĵ",jcirc:"ĵ",Jcy:"Й",jcy:"й",Jfr:"𝔍",jfr:"𝔧",jmath:"ȷ",Jopf:"𝕁",jopf:"𝕛",Jscr:"𝒥",jscr:"𝒿",Jsercy:"Ј",jsercy:"ј",Jukcy:"Є",jukcy:"є",Kappa:"Κ",kappa:"κ",kappav:"ϰ",Kcedil:"Ķ",kcedil:"ķ",Kcy:"К",kcy:"к",Kfr:"𝔎",kfr:"𝔨",kgreen:"ĸ",KHcy:"Х",khcy:"х",KJcy:"Ќ",kjcy:"ќ",Kopf:"𝕂",kopf:"𝕜",Kscr:"𝒦",kscr:"𝓀",lAarr:"⇚",Lacute:"Ĺ",lacute:"ĺ",laemptyv:"⦴",lagran:"ℒ",Lambda:"Λ",lambda:"λ",Lang:"⟪",lang:"⟨",langd:"⦑",langle:"⟨",lap:"⪅",Laplacetrf:"ℒ",laquo:"«",Larr:"↞",lArr:"⇐",larr:"←",larrb:"⇤",larrbfs:"⤟",larrfs:"⤝",larrhk:"↩",larrlp:"↫",larrpl:"⤹",larrsim:"⥳",larrtl:"↢",lat:"⪫",lAtail:"⤛",latail:"⤙",late:"⪭",lates:"⪭︀",lBarr:"⤎",lbarr:"⤌",lbbrk:"❲",lbrace:"{",lbrack:"[",lbrke:"⦋",lbrksld:"⦏",lbrkslu:"⦍",Lcaron:"Ľ",lcaron:"ľ",Lcedil:"Ļ",lcedil:"ļ",lceil:"⌈",lcub:"{",Lcy:"Л",lcy:"л",ldca:"⤶",ldquo:"“",ldquor:"„",ldrdhar:"⥧",ldrushar:"⥋",ldsh:"↲",lE:"≦",le:"≤",LeftAngleBracket:"⟨",LeftArrow:"←",Leftarrow:"⇐",leftarrow:"←",LeftArrowBar:"⇤",LeftArrowRightArrow:"⇆",leftarrowtail:"↢",LeftCeiling:"⌈",LeftDoubleBracket:"⟦",LeftDownTeeVector:"⥡",LeftDownVector:"⇃",LeftDownVectorBar:"⥙",LeftFloor:"⌊",leftharpoondown:"↽",leftharpoonup:"↼",leftlef
tarrows:"⇇",LeftRightArrow:"↔",Leftrightarrow:"⇔",leftrightarrow:"↔",leftrightarrows:"⇆",leftrightharpoons:"⇋",leftrightsquigarrow:"↭",LeftRightVector:"⥎",LeftTee:"⊣",LeftTeeArrow:"↤",LeftTeeVector:"⥚",leftthreetimes:"⋋",LeftTriangle:"⊲",LeftTriangleBar:"⧏",LeftTriangleEqual:"⊴",LeftUpDownVector:"⥑",LeftUpTeeVector:"⥠",LeftUpVector:"↿",LeftUpVectorBar:"⥘",LeftVector:"↼",LeftVectorBar:"⥒",lEg:"⪋",leg:"⋚",leq:"≤",leqq:"≦",leqslant:"⩽",les:"⩽",lescc:"⪨",lesdot:"⩿",lesdoto:"⪁",lesdotor:"⪃",lesg:"⋚︀",lesges:"⪓",lessapprox:"⪅",lessdot:"⋖",lesseqgtr:"⋚",lesseqqgtr:"⪋",LessEqualGreater:"⋚",LessFullEqual:"≦",LessGreater:"≶",lessgtr:"≶",LessLess:"⪡",lesssim:"≲",LessSlantEqual:"⩽",LessTilde:"≲",lfisht:"⥼",lfloor:"⌊",Lfr:"𝔏",lfr:"𝔩",lg:"≶",lgE:"⪑",lHar:"⥢",lhard:"↽",lharu:"↼",lharul:"⥪",lhblk:"▄",LJcy:"Љ",ljcy:"љ",Ll:"⋘",ll:"≪",llarr:"⇇",llcorner:"⌞",Lleftarrow:"⇚",llhard:"⥫",lltri:"◺",Lmidot:"Ŀ",lmidot:"ŀ",lmoust:"⎰",lmoustache:"⎰",lnap:"⪉",lnapprox:"⪉",lnE:"≨",lne:"⪇",lneq:"⪇",lneqq:"≨",lnsim:"⋦",loang:"⟬",loarr:"⇽",lobrk:"⟦",LongLeftArrow:"⟵",Longleftarrow:"⟸",longleftarrow:"⟵",LongLeftRightArrow:"⟷",Longleftrightarrow:"⟺",longleftrightarrow:"⟷",longmapsto:"⟼",LongRightArrow:"⟶",Longrightarrow:"⟹",longrightarrow:"⟶",looparrowleft:"↫",looparrowright:"↬",lopar:"⦅",Lopf:"𝕃",lopf:"𝕝",loplus:"⨭",lotimes:"⨴",lowast:"∗",lowbar:"_",LowerLeftArrow:"↙",LowerRightArrow:"↘",loz:"◊",lozenge:"◊",lozf:"⧫",lpar:"(",lparlt:"⦓",lrarr:"⇆",lrcorner:"⌟",lrhar:"⇋",lrhard:"⥭",lrm:"‎",lrtri:"⊿",lsaquo:"‹",Lscr:"ℒ",lscr:"𝓁",Lsh:"↰",lsh:"↰",lsim:"≲",lsime:"⪍",lsimg:"⪏",lsqb:"[",lsquo:"‘",lsquor:"‚",Lstrok:"Ł",lstrok:"ł",LT:"<",Lt:"≪",lt:"<",ltcc:"⪦",ltcir:"⩹",ltdot:"⋖",lthree:"⋋",ltimes:"⋉",ltlarr:"⥶",ltquest:"⩻",ltri:"◃",ltrie:"⊴",ltrif:"◂",ltrPar:"⦖",lurdshar:"⥊",luruhar:"⥦",lvertneqq:"≨︀",lvnE:"≨︀",macr:"¯",male:"♂",malt:"✠",maltese:"✠",Map:"⤅",map:"↦",mapsto:"↦",mapstodown:"↧",mapstoleft:"↤",mapstoup:"↥",marker:"▮",mcomma:"⨩",Mcy:"М",mcy:"м",mdash:"—",mDDot:"∺",measuredangle:"∡",MediumSpace:" ",Mellintrf:"ℳ",Mfr:"𝔐",mfr:"𝔪",mho:"℧",micro:"µ",mid:"∣",midast:"*",midcir:"⫰",middot:"·",minus:"−",minusb:"⊟",minusd:"∸",minusdu:"⨪",MinusPlus:"∓",mlcp:"⫛",mldr:"…",mnplus:"∓",models:"⊧",Mopf:"𝕄",mopf:"𝕞",mp:"∓",Mscr:"ℳ",mscr:"𝓂",mstpos:"∾",Mu:"Μ",mu:"μ",multimap:"⊸",mumap:"⊸",nabla:"∇",Nacute:"Ń",nacute:"ń",nang:"∠⃒",nap:"≉",napE:"⩰̸",napid:"≋̸",napos:"ʼn",napprox:"≉",natur:"♮",natural:"♮",naturals:"ℕ",nbsp:" ",nbump:"≎̸",nbumpe:"≏̸",ncap:"⩃",Ncaron:"Ň",ncaron:"ň",Ncedil:"Ņ",ncedil:"ņ",ncong:"≇",ncongdot:"⩭̸",ncup:"⩂",Ncy:"Н",ncy:"н",ndash:"–",ne:"≠",nearhk:"⤤",neArr:"⇗",nearr:"↗",nearrow:"↗",nedot:"≐̸",NegativeMediumSpace:"​",NegativeThickSpace:"​",NegativeThinSpace:"​",NegativeVeryThinSpace:"​",nequiv:"≢",nesear:"⤨",nesim:"≂̸",NestedGreaterGreater:"≫",NestedLessLess:"≪",NewLine:"\n",nexist:"∄",nexists:"∄",Nfr:"𝔑",nfr:"𝔫",ngE:"≧̸",nge:"≱",ngeq:"≱",ngeqq:"≧̸",ngeqslant:"⩾̸",nges:"⩾̸",nGg:"⋙̸",ngsim:"≵",nGt:"≫⃒",ngt:"≯",ngtr:"≯",nGtv:"≫̸",nhArr:"⇎",nharr:"↮",nhpar:"⫲",ni:"∋",nis:"⋼",nisd:"⋺",niv:"∋",NJcy:"Њ",njcy:"њ",nlArr:"⇍",nlarr:"↚",nldr:"‥",nlE:"≦̸",nle:"≰",nLeftarrow:"⇍",nleftarrow:"↚",nLeftrightarrow:"⇎",nleftrightarrow:"↮",nleq:"≰",nleqq:"≦̸",nleqslant:"⩽̸",nles:"⩽̸",nless:"≮",nLl:"⋘̸",nlsim:"≴",nLt:"≪⃒",nlt:"≮",nltri:"⋪",nltrie:"⋬",nLtv:"≪̸",nmid:"∤",NoBreak:"⁠",NonBreakingSpace:" 
",Nopf:"ℕ",nopf:"𝕟",Not:"⫬",not:"¬",NotCongruent:"≢",NotCupCap:"≭",NotDoubleVerticalBar:"∦",NotElement:"∉",NotEqual:"≠",NotEqualTilde:"≂̸",NotExists:"∄",NotGreater:"≯",NotGreaterEqual:"≱",NotGreaterFullEqual:"≧̸",NotGreaterGreater:"≫̸",NotGreaterLess:"≹",NotGreaterSlantEqual:"⩾̸",NotGreaterTilde:"≵",NotHumpDownHump:"≎̸",NotHumpEqual:"≏̸",notin:"∉",notindot:"⋵̸",notinE:"⋹̸",notinva:"∉",notinvb:"⋷",notinvc:"⋶",NotLeftTriangle:"⋪",NotLeftTriangleBar:"⧏̸",NotLeftTriangleEqual:"⋬",NotLess:"≮",NotLessEqual:"≰",NotLessGreater:"≸",NotLessLess:"≪̸",NotLessSlantEqual:"⩽̸",NotLessTilde:"≴",NotNestedGreaterGreater:"⪢̸",NotNestedLessLess:"⪡̸",notni:"∌",notniva:"∌",notnivb:"⋾",notnivc:"⋽",NotPrecedes:"⊀",NotPrecedesEqual:"⪯̸",NotPrecedesSlantEqual:"⋠",NotReverseElement:"∌",NotRightTriangle:"⋫",NotRightTriangleBar:"⧐̸",NotRightTriangleEqual:"⋭",NotSquareSubset:"⊏̸",NotSquareSubsetEqual:"⋢",NotSquareSuperset:"⊐̸",NotSquareSupersetEqual:"⋣",NotSubset:"⊂⃒",NotSubsetEqual:"⊈",NotSucceeds:"⊁",NotSucceedsEqual:"⪰̸",NotSucceedsSlantEqual:"⋡",NotSucceedsTilde:"≿̸",NotSuperset:"⊃⃒",NotSupersetEqual:"⊉",NotTilde:"≁",NotTildeEqual:"≄",NotTildeFullEqual:"≇",NotTildeTilde:"≉",NotVerticalBar:"∤",npar:"∦",nparallel:"∦",nparsl:"⫽⃥",npart:"∂̸",npolint:"⨔",npr:"⊀",nprcue:"⋠",npre:"⪯̸",nprec:"⊀",npreceq:"⪯̸",nrArr:"⇏",nrarr:"↛",nrarrc:"⤳̸",nrarrw:"↝̸",nRightarrow:"⇏",nrightarrow:"↛",nrtri:"⋫",nrtrie:"⋭",nsc:"⊁",nsccue:"⋡",nsce:"⪰̸",Nscr:"𝒩",nscr:"𝓃",nshortmid:"∤",nshortparallel:"∦",nsim:"≁",nsime:"≄",nsimeq:"≄",nsmid:"∤",nspar:"∦",nsqsube:"⋢",nsqsupe:"⋣",nsub:"⊄",nsubE:"⫅̸",nsube:"⊈",nsubset:"⊂⃒",nsubseteq:"⊈",nsubseteqq:"⫅̸",nsucc:"⊁",nsucceq:"⪰̸",nsup:"⊅",nsupE:"⫆̸",nsupe:"⊉",nsupset:"⊃⃒",nsupseteq:"⊉",nsupseteqq:"⫆̸",ntgl:"≹",Ntilde:"Ñ",ntilde:"ñ",ntlg:"≸",ntriangleleft:"⋪",ntrianglelefteq:"⋬",ntriangleright:"⋫",ntrianglerighteq:"⋭",Nu:"Ν",nu:"ν",num:"#",numero:"№",numsp:" 
",nvap:"≍⃒",nVDash:"⊯",nVdash:"⊮",nvDash:"⊭",nvdash:"⊬",nvge:"≥⃒",nvgt:">⃒",nvHarr:"⤄",nvinfin:"⧞",nvlArr:"⤂",nvle:"≤⃒",nvlt:"<⃒",nvltrie:"⊴⃒",nvrArr:"⤃",nvrtrie:"⊵⃒",nvsim:"∼⃒",nwarhk:"⤣",nwArr:"⇖",nwarr:"↖",nwarrow:"↖",nwnear:"⤧",Oacute:"Ó",oacute:"ó",oast:"⊛",ocir:"⊚",Ocirc:"Ô",ocirc:"ô",Ocy:"О",ocy:"о",odash:"⊝",Odblac:"Ő",odblac:"ő",odiv:"⨸",odot:"⊙",odsold:"⦼",OElig:"Œ",oelig:"œ",ofcir:"⦿",Ofr:"𝔒",ofr:"𝔬",ogon:"˛",Ograve:"Ò",ograve:"ò",ogt:"⧁",ohbar:"⦵",ohm:"Ω",oint:"∮",olarr:"↺",olcir:"⦾",olcross:"⦻",oline:"‾",olt:"⧀",Omacr:"Ō",omacr:"ō",Omega:"Ω",omega:"ω",Omicron:"Ο",omicron:"ο",omid:"⦶",ominus:"⊖",Oopf:"𝕆",oopf:"𝕠",opar:"⦷",OpenCurlyDoubleQuote:"“",OpenCurlyQuote:"‘",operp:"⦹",oplus:"⊕",Or:"⩔",or:"∨",orarr:"↻",ord:"⩝",order:"ℴ",orderof:"ℴ",ordf:"ª",ordm:"º",origof:"⊶",oror:"⩖",orslope:"⩗",orv:"⩛",oS:"Ⓢ",Oscr:"𝒪",oscr:"ℴ",Oslash:"Ø",oslash:"ø",osol:"⊘",Otilde:"Õ",otilde:"õ",Otimes:"⨷",otimes:"⊗",otimesas:"⨶",Ouml:"Ö",ouml:"ö",ovbar:"⌽",OverBar:"‾",OverBrace:"⏞",OverBracket:"⎴",OverParenthesis:"⏜",par:"∥",para:"¶",parallel:"∥",parsim:"⫳",parsl:"⫽",part:"∂",PartialD:"∂",Pcy:"П",pcy:"п",percnt:"%",period:".",permil:"‰",perp:"⊥",pertenk:"‱",Pfr:"𝔓",pfr:"𝔭",Phi:"Φ",phi:"φ",phiv:"ϕ",phmmat:"ℳ",phone:"☎",Pi:"Π",pi:"π",pitchfork:"⋔",piv:"ϖ",planck:"ℏ",planckh:"ℎ",plankv:"ℏ",plus:"+",plusacir:"⨣",plusb:"⊞",pluscir:"⨢",plusdo:"∔",plusdu:"⨥",pluse:"⩲",PlusMinus:"±",plusmn:"±",plussim:"⨦",plustwo:"⨧",pm:"±",Poincareplane:"ℌ",pointint:"⨕",Popf:"ℙ",popf:"𝕡",pound:"£",Pr:"⪻",pr:"≺",prap:"⪷",prcue:"≼",prE:"⪳",pre:"⪯",prec:"≺",precapprox:"⪷",preccurlyeq:"≼",Precedes:"≺",PrecedesEqual:"⪯",PrecedesSlantEqual:"≼",PrecedesTilde:"≾",preceq:"⪯",precnapprox:"⪹",precneqq:"⪵",precnsim:"⋨",precsim:"≾",Prime:"″",prime:"′",primes:"ℙ",prnap:"⪹",prnE:"⪵",prnsim:"⋨",prod:"∏",Product:"∏",profalar:"⌮",profline:"⌒",profsurf:"⌓",prop:"∝",Proportion:"∷",Proportional:"∝",propto:"∝",prsim:"≾",prurel:"⊰",Pscr:"𝒫",pscr:"𝓅",Psi:"Ψ",psi:"ψ",puncsp:" 
",Qfr:"𝔔",qfr:"𝔮",qint:"⨌",Qopf:"ℚ",qopf:"𝕢",qprime:"⁗",Qscr:"𝒬",qscr:"𝓆",quaternions:"ℍ",quatint:"⨖",quest:"?",questeq:"≟",QUOT:'"',quot:'"',rAarr:"⇛",race:"∽̱",Racute:"Ŕ",racute:"ŕ",radic:"√",raemptyv:"⦳",Rang:"⟫",rang:"⟩",rangd:"⦒",range:"⦥",rangle:"⟩",raquo:"»",Rarr:"↠",rArr:"⇒",rarr:"→",rarrap:"⥵",rarrb:"⇥",rarrbfs:"⤠",rarrc:"⤳",rarrfs:"⤞",rarrhk:"↪",rarrlp:"↬",rarrpl:"⥅",rarrsim:"⥴",Rarrtl:"⤖",rarrtl:"↣",rarrw:"↝",rAtail:"⤜",ratail:"⤚",ratio:"∶",rationals:"ℚ",RBarr:"⤐",rBarr:"⤏",rbarr:"⤍",rbbrk:"❳",rbrace:"}",rbrack:"]",rbrke:"⦌",rbrksld:"⦎",rbrkslu:"⦐",Rcaron:"Ř",rcaron:"ř",Rcedil:"Ŗ",rcedil:"ŗ",rceil:"⌉",rcub:"}",Rcy:"Р",rcy:"р",rdca:"⤷",rdldhar:"⥩",rdquo:"”",rdquor:"”",rdsh:"↳",Re:"ℜ",real:"ℜ",realine:"ℛ",realpart:"ℜ",reals:"ℝ",rect:"▭",REG:"®",reg:"®",ReverseElement:"∋",ReverseEquilibrium:"⇋",ReverseUpEquilibrium:"⥯",rfisht:"⥽",rfloor:"⌋",Rfr:"ℜ",rfr:"𝔯",rHar:"⥤",rhard:"⇁",rharu:"⇀",rharul:"⥬",Rho:"Ρ",rho:"ρ",rhov:"ϱ",RightAngleBracket:"⟩",RightArrow:"→",Rightarrow:"⇒",rightarrow:"→",RightArrowBar:"⇥",RightArrowLeftArrow:"⇄",rightarrowtail:"↣",RightCeiling:"⌉",RightDoubleBracket:"⟧",RightDownTeeVector:"⥝",RightDownVector:"⇂",RightDownVectorBar:"⥕",RightFloor:"⌋",rightharpoondown:"⇁",rightharpoonup:"⇀",rightleftarrows:"⇄",rightleftharpoons:"⇌",rightrightarrows:"⇉",rightsquigarrow:"↝",RightTee:"⊢",RightTeeArrow:"↦",RightTeeVector:"⥛",rightthreetimes:"⋌",RightTriangle:"⊳",RightTriangleBar:"⧐",RightTriangleEqual:"⊵",RightUpDownVector:"⥏",RightUpTeeVector:"⥜",RightUpVector:"↾",RightUpVectorBar:"⥔",RightVector:"⇀",RightVectorBar:"⥓",ring:"˚",risingdotseq:"≓",rlarr:"⇄",rlhar:"⇌",rlm:"‏",rmoust:"⎱",rmoustache:"⎱",rnmid:"⫮",roang:"⟭",roarr:"⇾",robrk:"⟧",ropar:"⦆",Ropf:"ℝ",ropf:"𝕣",roplus:"⨮",rotimes:"⨵",RoundImplies:"⥰",rpar:")",rpargt:"⦔",rppolint:"⨒",rrarr:"⇉",Rrightarrow:"⇛",rsaquo:"›",Rscr:"ℛ",rscr:"𝓇",Rsh:"↱",rsh:"↱",rsqb:"]",rsquo:"’",rsquor:"’",rthree:"⋌",rtimes:"⋊",rtri:"▹",rtrie:"⊵",rtrif:"▸",rtriltri:"⧎",RuleDelayed:"⧴",ruluhar:"⥨",rx:"℞",Sacute:"Ś",sacute:"ś",sbquo:"‚",Sc:"⪼",sc:"≻",scap:"⪸",Scaron:"Š",scaron:"š",sccue:"≽",scE:"⪴",sce:"⪰",Scedil:"Ş",scedil:"ş",Scirc:"Ŝ",scirc:"ŝ",scnap:"⪺",scnE:"⪶",scnsim:"⋩",scpolint:"⨓",scsim:"≿",Scy:"С",scy:"с",sdot:"⋅",sdotb:"⊡",sdote:"⩦",searhk:"⤥",seArr:"⇘",searr:"↘",searrow:"↘",sect:"§",semi:";",seswar:"⤩",setminus:"∖",setmn:"∖",sext:"✶",Sfr:"𝔖",sfr:"𝔰",sfrown:"⌢",sharp:"♯",SHCHcy:"Щ",shchcy:"щ",SHcy:"Ш",shcy:"ш",ShortDownArrow:"↓",ShortLeftArrow:"←",shortmid:"∣",shortparallel:"∥",ShortRightArrow:"→",ShortUpArrow:"↑",shy:"­",Sigma:"Σ",sigma:"σ",sigmaf:"ς",sigmav:"ς",sim:"∼",simdot:"⩪",sime:"≃",simeq:"≃",simg:"⪞",simgE:"⪠",siml:"⪝",simlE:"⪟",simne:"≆",simplus:"⨤",simrarr:"⥲",slarr:"←",SmallCircle:"∘",smallsetminus:"∖",smashp:"⨳",smeparsl:"⧤",smid:"∣",smile:"⌣",smt:"⪪",smte:"⪬",smtes:"⪬︀",SOFTcy:"Ь",softcy:"ь",sol:"/",solb:"⧄",solbar:"⌿",Sopf:"𝕊",sopf:"𝕤",spades:"♠",spadesuit:"♠",spar:"∥",sqcap:"⊓",sqcaps:"⊓︀",sqcup:"⊔",sqcups:"⊔︀",Sqrt:"√",sqsub:"⊏",sqsube:"⊑",sqsubset:"⊏",sqsubseteq:"⊑",sqsup:"⊐",sqsupe:"⊒",sqsupset:"⊐",sqsupseteq:"⊒",squ:"□",Square:"□",square:"□",SquareIntersection:"⊓",SquareSubset:"⊏",SquareSubsetEqual:"⊑",SquareSuperset:"⊐",SquareSupersetEqual:"⊒",SquareUnion:"⊔",squarf:"▪",squf:"▪",srarr:"→",Sscr:"𝒮",sscr:"𝓈",ssetmn:"∖",ssmile:"⌣",sstarf:"⋆",Star:"⋆",star:"☆",starf:"★",straightepsilon:"ϵ",straightphi:"ϕ",strns:"¯",Sub:"⋐",sub:"⊂",subdot:"⪽",subE:"⫅",sube:"⊆",subedot:"⫃",submult:"⫁",subnE:"⫋",subne:"⊊",subplus:"⪿",subrarr:"⥹",Subset:"⋐",subset:"⊂",subseteq:"⊆",subseteqq:"⫅",SubsetEqual:"⊆",subsetneq:"⊊",subsetneqq:"⫋",
subsim:"⫇",subsub:"⫕",subsup:"⫓",succ:"≻",succapprox:"⪸",succcurlyeq:"≽",Succeeds:"≻",SucceedsEqual:"⪰",SucceedsSlantEqual:"≽",SucceedsTilde:"≿",succeq:"⪰",succnapprox:"⪺",succneqq:"⪶",succnsim:"⋩",succsim:"≿",SuchThat:"∋",Sum:"∑",sum:"∑",sung:"♪",Sup:"⋑",sup:"⊃",sup1:"¹",sup2:"²",sup3:"³",supdot:"⪾",supdsub:"⫘",supE:"⫆",supe:"⊇",supedot:"⫄",Superset:"⊃",SupersetEqual:"⊇",suphsol:"⟉",suphsub:"⫗",suplarr:"⥻",supmult:"⫂",supnE:"⫌",supne:"⊋",supplus:"⫀",Supset:"⋑",supset:"⊃",supseteq:"⊇",supseteqq:"⫆",supsetneq:"⊋",supsetneqq:"⫌",supsim:"⫈",supsub:"⫔",supsup:"⫖",swarhk:"⤦",swArr:"⇙",swarr:"↙",swarrow:"↙",swnwar:"⤪",szlig:"ß",Tab:"\t",target:"⌖",Tau:"Τ",tau:"τ",tbrk:"⎴",Tcaron:"Ť",tcaron:"ť",Tcedil:"Ţ",tcedil:"ţ",Tcy:"Т",tcy:"т",tdot:"⃛",telrec:"⌕",Tfr:"𝔗",tfr:"𝔱",there4:"∴",Therefore:"∴",therefore:"∴",Theta:"Θ",theta:"θ",thetasym:"ϑ",thetav:"ϑ",thickapprox:"≈",thicksim:"∼",ThickSpace:"  ",thinsp:" ",ThinSpace:" ",thkap:"≈",thksim:"∼",THORN:"Þ",thorn:"þ",Tilde:"∼",tilde:"˜",TildeEqual:"≃",TildeFullEqual:"≅",TildeTilde:"≈",times:"×",timesb:"⊠",timesbar:"⨱",timesd:"⨰",tint:"∭",toea:"⤨",top:"⊤",topbot:"⌶",topcir:"⫱",Topf:"𝕋",topf:"𝕥",topfork:"⫚",tosa:"⤩",tprime:"‴",TRADE:"™",trade:"™",triangle:"▵",triangledown:"▿",triangleleft:"◃",trianglelefteq:"⊴",triangleq:"≜",triangleright:"▹",trianglerighteq:"⊵",tridot:"◬",trie:"≜",triminus:"⨺",TripleDot:"⃛",triplus:"⨹",trisb:"⧍",tritime:"⨻",trpezium:"⏢",Tscr:"𝒯",tscr:"𝓉",TScy:"Ц",tscy:"ц",TSHcy:"Ћ",tshcy:"ћ",Tstrok:"Ŧ",tstrok:"ŧ",twixt:"≬",twoheadleftarrow:"↞",twoheadrightarrow:"↠",Uacute:"Ú",uacute:"ú",Uarr:"↟",uArr:"⇑",uarr:"↑",Uarrocir:"⥉",Ubrcy:"Ў",ubrcy:"ў",Ubreve:"Ŭ",ubreve:"ŭ",Ucirc:"Û",ucirc:"û",Ucy:"У",ucy:"у",udarr:"⇅",Udblac:"Ű",udblac:"ű",udhar:"⥮",ufisht:"⥾",Ufr:"𝔘",ufr:"𝔲",Ugrave:"Ù",ugrave:"ù",uHar:"⥣",uharl:"↿",uharr:"↾",uhblk:"▀",ulcorn:"⌜",ulcorner:"⌜",ulcrop:"⌏",ultri:"◸",Umacr:"Ū",umacr:"ū",uml:"¨",UnderBar:"_",UnderBrace:"⏟",UnderBracket:"⎵",UnderParenthesis:"⏝",Union:"⋃",UnionPlus:"⊎",Uogon:"Ų",uogon:"ų",Uopf:"𝕌",uopf:"𝕦",UpArrow:"↑",Uparrow:"⇑",uparrow:"↑",UpArrowBar:"⤒",UpArrowDownArrow:"⇅",UpDownArrow:"↕",Updownarrow:"⇕",updownarrow:"↕",UpEquilibrium:"⥮",upharpoonleft:"↿",upharpoonright:"↾",uplus:"⊎",UpperLeftArrow:"↖",UpperRightArrow:"↗",Upsi:"ϒ",upsi:"υ",upsih:"ϒ",Upsilon:"Υ",upsilon:"υ",UpTee:"⊥",UpTeeArrow:"↥",upuparrows:"⇈",urcorn:"⌝",urcorner:"⌝",urcrop:"⌎",Uring:"Ů",uring:"ů",urtri:"◹",Uscr:"𝒰",uscr:"𝓊",utdot:"⋰",Utilde:"Ũ",utilde:"ũ",utri:"▵",utrif:"▴",uuarr:"⇈",Uuml:"Ü",uuml:"ü",uwangle:"⦧",vangrt:"⦜",varepsilon:"ϵ",varkappa:"ϰ",varnothing:"∅",varphi:"ϕ",varpi:"ϖ",varpropto:"∝",vArr:"⇕",varr:"↕",varrho:"ϱ",varsigma:"ς",varsubsetneq:"⊊︀",varsubsetneqq:"⫋︀",varsupsetneq:"⊋︀",varsupsetneqq:"⫌︀",vartheta:"ϑ",vartriangleleft:"⊲",vartriangleright:"⊳",Vbar:"⫫",vBar:"⫨",vBarv:"⫩",Vcy:"В",vcy:"в",VDash:"⊫",Vdash:"⊩",vDash:"⊨",vdash:"⊢",Vdashl:"⫦",Vee:"⋁",vee:"∨",veebar:"⊻",veeeq:"≚",vellip:"⋮",Verbar:"‖",verbar:"|",Vert:"‖",vert:"|",VerticalBar:"∣",VerticalLine:"|",VerticalSeparator:"❘",VerticalTilde:"≀",VeryThinSpace:" 
",Vfr:"𝔙",vfr:"𝔳",vltri:"⊲",vnsub:"⊂⃒",vnsup:"⊃⃒",Vopf:"𝕍",vopf:"𝕧",vprop:"∝",vrtri:"⊳",Vscr:"𝒱",vscr:"𝓋",vsubnE:"⫋︀",vsubne:"⊊︀",vsupnE:"⫌︀",vsupne:"⊋︀",Vvdash:"⊪",vzigzag:"⦚",Wcirc:"Ŵ",wcirc:"ŵ",wedbar:"⩟",Wedge:"⋀",wedge:"∧",wedgeq:"≙",weierp:"℘",Wfr:"𝔚",wfr:"𝔴",Wopf:"𝕎",wopf:"𝕨",wp:"℘",wr:"≀",wreath:"≀",Wscr:"𝒲",wscr:"𝓌",xcap:"⋂",xcirc:"◯",xcup:"⋃",xdtri:"▽",Xfr:"𝔛",xfr:"𝔵",xhArr:"⟺",xharr:"⟷",Xi:"Ξ",xi:"ξ",xlArr:"⟸",xlarr:"⟵",xmap:"⟼",xnis:"⋻",xodot:"⨀",Xopf:"𝕏",xopf:"𝕩",xoplus:"⨁",xotime:"⨂",xrArr:"⟹",xrarr:"⟶",Xscr:"𝒳",xscr:"𝓍",xsqcup:"⨆",xuplus:"⨄",xutri:"△",xvee:"⋁",xwedge:"⋀",Yacute:"Ý",yacute:"ý",YAcy:"Я",yacy:"я",Ycirc:"Ŷ",ycirc:"ŷ",Ycy:"Ы",ycy:"ы",yen:"¥",Yfr:"𝔜",yfr:"𝔶",YIcy:"Ї",yicy:"ї",Yopf:"𝕐",yopf:"𝕪",Yscr:"𝒴",yscr:"𝓎",YUcy:"Ю",yucy:"ю",Yuml:"Ÿ",yuml:"ÿ",Zacute:"Ź",zacute:"ź",Zcaron:"Ž",zcaron:"ž",Zcy:"З",zcy:"з",Zdot:"Ż",zdot:"ż",zeetrf:"ℨ",ZeroWidthSpace:"​",Zeta:"Ζ",zeta:"ζ",Zfr:"ℨ",zfr:"𝔷",ZHcy:"Ж",zhcy:"ж",zigrarr:"⇝",Zopf:"ℤ",zopf:"𝕫",Zscr:"𝒵",zscr:"𝓏",zwj:"‍",zwnj:"‌"}},function(e,t,n){"use strict";var r=n(123),o=n(9).unescapeMd;e.exports=function(e,t){var n,a,i,s=t,l=e.posMax;if(60===e.src.charCodeAt(t)){for(t++;t1)break;if(41===n&&--a<0)break;t++}return s!==t&&(i=o(e.src.slice(s,t)),!!e.parser.validateLink(i)&&(e.linkContent=i,e.pos=t,!0))}},function(e,t,n){"use strict";var r=n(9).replaceEntities;e.exports=function(e){var t=r(e);try{t=decodeURI(t)}catch(e){}return encodeURI(t)}},function(e,t,n){"use strict";var r=n(9).unescapeMd;e.exports=function(e,t){var n,o=t,a=e.posMax,i=e.src.charCodeAt(t);if(34!==i&&39!==i&&40!==i)return!1;for(t++,40===i&&(i=41);t=0||(o[n]=e[n]);return o}var a=n(3),i=n.n(a),s=n(0),l=n.n(s),u=n(5),c=n.n(u),f=function(){};function p(e,t){return void 0!==e[t]}function d(e){return"default"+e.charAt(0).toUpperCase()+e.substr(1)}function h(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var n=e[Symbol.toPrimitive];if(void 0!==n){var r=n.call(e,t||"default");if("object"!=typeof r)return r;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function m(e,t){return Object.keys(t).reduce((function(n,a){var i,l=n,u=l[d(a)],c=l[a],f=o(l,[d(a),a].map(h)),p=t[a],m=function(e,t,n){var r=Object(s.useRef)(void 0!==e),o=Object(s.useState)(t),a=o[0],i=o[1],l=void 0!==e,u=r.current;return r.current=l,!l&&u&&a!==t&&i(t),[l?e:a,Object(s.useCallback)((function(e){for(var t=arguments.length,r=new Array(t>1?t-1:0),o=1;oft(t.props.children)-1){if(!r)return;o=0}t.select(o,e,"next")}},t.handlePrev=function(e){if(!t._isSliding){var n=t.props,r=n.wrap,o=n.activeIndex-1;if(o<0){if(!r)return;o=ft(t.props.children)-1}t.select(o,e,"prev")}},t}v(t,e);var n=t.prototype;return n.componentDidMount=function(){this.cycle()},t.getDerivedStateFromProps=function(e,t){var n=t.activeIndex;if(e.activeIndex!==n){var r=ft(e.children)-1,o=Math.max(0,Math.min(e.activeIndex,r));return{direction:0===o&&n>=r||n<=o?"next":"prev",previousActiveIndex:n,activeIndex:o}}return null},n.componentDidUpdate=function(e,t){var n=this,r=this.props,o=r.bsPrefix,a=r.slide,s=r.onSlideEnd;if(a&&this.state.activeIndex!==t.activeIndex&&!this._isSliding){var l,u,c=this.state,f=c.activeIndex,p=c.direction;"next"===p?(l=o+"-item-next",u=o+"-item-left"):"prev"===p&&(l=o+"-item-prev",u=o+"-item-right"),this._isSliding=!0,this.pause(),this.safeSetState({prevClasses:"active",currentClasses:l},(function(){var 
e=n.carousel.current.children[f];se(e),n.safeSetState({prevClasses:i()("active",u),currentClasses:i()(l,u)},(function(){return Y(e,(function(){n.safeSetState({prevClasses:"",currentClasses:"active"},n.handleSlideEnd),s&&s()}))}))}))}},n.componentWillUnmount=function(){clearTimeout(this.timeout),this.isUnmounted=!0},n.safeSetState=function(e,t){var n=this;this.isUnmounted||this.setState(e,(function(){return!n.isUnmounted&&t()}))},n.pause=function(){this._isPaused=!0,clearInterval(this._interval),this._interval=null},n.cycle=function(){this._isPaused=!1,clearInterval(this._interval),this._interval=null,this.props.interval&&!this._isPaused&&(this._interval=setInterval(document.visibilityState?this.handleNextWhenVisible:this.handleNext,this.props.interval))},n.to=function(e,t){var n=this.props.children;e<0||e>ft(n)-1||(this._isSliding?this._pendingIndex=e:this.select(e,t))},n.select=function(e,t,n){var r=this;clearTimeout(this.selectThrottle),t&&t.persist&&t.persist(),this.selectThrottle=setTimeout((function(){clearTimeout(r.timeout);var o=r.props,a=o.activeIndex,i=o.onSelect;e===a||r._isSliding||r.isUnmounted||i(e,n||(e1?i-1:0),l=1;l *"},Vt.Menu=Lt,Vt.Toggle=Bt;var $t=Vt,Wt=l.a.createContext(null),Kt={as:Re,disabled:!1},Gt=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.eventKey,f=e.disabled,p=e.href,d=e.onClick,h=e.onSelect,m=e.active,v=e.as,g=o(e,["bsPrefix","className","children","eventKey","disabled","href","onClick","onSelect","active","as"]),b=_(n,"dropdown-item"),y=Object(s.useContext)(j),w=(Object(s.useContext)(Wt)||{}).activeKey,x=T(c,p),E=null==m&&null!=x?T(w)===x:m,k=be((function(e){f||(d&&d(e),y&&y(x,e),h&&h(x,e))}));return l.a.createElement(v,Object(r.a)({},g,{ref:t,href:p,disabled:f,className:i()(a,b,E&&"active",f&&"disabled"),onClick:k}),u)}));Gt.displayName="DropdownItem",Gt.defaultProps=Kt;var Yt=Gt,Qt=function(e){return e&&"function"!=typeof e?function(t){e.current=t}:e};var Zt=function(e,t){return Object(s.useMemo)((function(){return function(e,t){var n=Qt(e),r=Qt(t);return function(e){n&&n(e),r&&r(e)}}(e,t)}),[e,t])},Xt=l.a.createContext(null);function Jt(e,t){return e}var en=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.alignRight,c=e.rootCloseEvent,f=e.flip,p=e.popperConfig,d=e.show,h=e.as,m=void 0===h?"div":h,v=o(e,["bsPrefix","className","alignRight","rootCloseEvent","flip","popperConfig","show","as"]),g=Object(s.useContext)(Xt),b=_(n,"dropdown-menu"),y=Ft({flip:f,popperConfig:p,rootCloseEvent:c,show:d,alignEnd:u,usePopper:!g}),w=y.hasShown,x=y.placement,E=y.show,k=y.alignEnd,O=y.close,S=y.props;if(S.ref=Zt(S.ref,Jt(t)),!w)return null;"string"!=typeof m&&(S.show=E,S.close=O,S.alignRight=k);var C=v.style;return x&&(C=Object(r.a)({},C,{},S.style),v["x-placement"]=x),l.a.createElement(m,Object(r.a)({},v,S,{style:C,className:i()(a,b,E&&"show",k&&b+"-right")}))}));en.displayName="DropdownMenu",en.defaultProps={alignRight:!1,flip:!0};var tn=en,nn=(n(41),l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.split,s=e.className,u=e.children,c=e.childBsPrefix,f=e.as,p=void 0===f?$e:f,d=o(e,["bsPrefix","split","className","children","childBsPrefix","as"]),h=_(n,"dropdown-toggle");void 0!==c&&(d.bsPrefix=c);var m=Ut(),v=m[0],g=m[1].toggle;return v.ref=Zt(v.ref,Jt(t)),l.a.createElement(p,Object(r.a)({onClick:g,className:i()(s,h,a&&h+"-split")},v,d),u)})));nn.displayName="DropdownToggle";var rn=nn,on=l.a.forwardRef((function(e,t){var 
n=m(e,{show:"onToggle"}),a=n.bsPrefix,u=n.drop,c=n.show,f=n.className,p=n.alignRight,d=n.onSelect,h=n.onToggle,v=n.focusFirstItemOnShow,g=n.as,b=void 0===g?"div":g,y=(n.navbar,o(n,["bsPrefix","drop","show","className","alignRight","onSelect","onToggle","focusFirstItemOnShow","as","navbar"])),w=Object(s.useContext)(j),x=_(a,"dropdown"),E=be((function(e,t,n){void 0===n&&(n=t.type),t.currentTarget===document&&(n="rootClose"),h(e,t,{source:n})})),k=be((function(e,t){w&&w(e,t),d&&d(e,t),E(!1,t,"select")}));return l.a.createElement(j.Provider,{value:k},l.a.createElement($t,{drop:u,show:c,alignEnd:p,onToggle:E,focusFirstItemOnShow:v,itemSelector:"."+x+"-item:not(.disabled):not(:disabled)"},(function(e){var n=e.props;return l.a.createElement(b,Object(r.a)({},y,n,{ref:t,className:i()(f,c&&"show",(!u||"down"===u)&&x,"up"===u&&"dropup","right"===u&&"dropright","left"===u&&"dropleft")}))})))}));on.displayName="Dropdown",on.defaultProps={navbar:!1},on.Toggle=rn,on.Menu=tn,on.Item=Yt,on.Header=xe("dropdown-header",{defaultProps:{role:"heading"}}),on.Divider=xe("dropdown-divider",{defaultProps:{role:"separator"}});var an=on,sn={id:Z.a.any,href:Z.a.string,onClick:Z.a.func,title:Z.a.node.isRequired,disabled:Z.a.bool,menuRole:Z.a.string,rootCloseEvent:Z.a.string,bsPrefix:Z.a.string,variant:Z.a.string,size:Z.a.string},ln=l.a.forwardRef((function(e,t){var n=e.title,a=e.children,i=e.bsPrefix,s=e.rootCloseEvent,u=e.variant,c=e.size,f=e.menuRole,p=e.disabled,d=e.href,h=e.id,m=o(e,["title","children","bsPrefix","rootCloseEvent","variant","size","menuRole","disabled","href","id"]);return l.a.createElement(an,Object(r.a)({ref:t},m),l.a.createElement(an.Toggle,{id:h,href:d,size:c,variant:u,disabled:p,childBsPrefix:i},n),l.a.createElement(an.Menu,{role:f,rootCloseEvent:s},a))}));ln.displayName="DropdownButton",ln.propTypes=sn;var un=ln,cn=(n(120),{type:Z.a.string.isRequired,as:Z.a.elementType}),fn=l.a.forwardRef((function(e,t){var n=e.as,a=void 0===n?"div":n,s=e.className,u=e.type,c=o(e,["as","className","type"]);return l.a.createElement(a,Object(r.a)({},c,{ref:t,className:i()(s,u&&u+"-feedback")}))}));fn.displayName="Feedback",fn.propTypes=cn,fn.defaultProps={type:"valid"};var pn=fn,dn=l.a.createContext({controlId:void 0}),hn=l.a.forwardRef((function(e,t){var n=e.id,a=e.bsPrefix,u=e.bsCustomPrefix,c=e.className,f=e.isValid,p=e.isInvalid,d=e.isStatic,h=e.as,m=void 0===h?"input":h,v=o(e,["id","bsPrefix","bsCustomPrefix","className","isValid","isInvalid","isStatic","as"]),g=Object(s.useContext)(dn),b=g.controlId;return a=g.custom?_(u,"custom-control-input"):_(a,"form-check-input"),l.a.createElement(m,Object(r.a)({},v,{ref:t,id:n||b,className:i()(c,a,f&&"is-valid",p&&"is-invalid",d&&"position-static")}))}));hn.displayName="FormCheckInput",hn.defaultProps={type:"checkbox"};var mn=hn,vn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.bsCustomPrefix,u=e.className,c=e.htmlFor,f=o(e,["bsPrefix","bsCustomPrefix","className","htmlFor"]),p=Object(s.useContext)(dn),d=p.controlId;return n=p.custom?_(a,"custom-control-label"):_(n,"form-check-label"),l.a.createElement("label",Object(r.a)({},f,{ref:t,htmlFor:c||d,className:i()(u,n)}))}));vn.displayName="FormCheckLabel";var gn=vn,bn=l.a.forwardRef((function(e,t){var n=e.id,a=e.bsPrefix,u=e.bsCustomPrefix,c=e.inline,f=e.disabled,p=e.isValid,d=e.isInvalid,h=e.feedback,m=e.className,v=e.style,g=e.title,b=e.type,y=e.label,w=e.children,x=e.custom,E=e.as,k=void 
0===E?"input":E,O=o(e,["id","bsPrefix","bsCustomPrefix","inline","disabled","isValid","isInvalid","feedback","className","style","title","type","label","children","custom","as"]),S="switch"===b||x;a=S?_(u,"custom-control"):_(a,"form-check");var C=Object(s.useContext)(dn).controlId,T=Object(s.useMemo)((function(){return{controlId:n||C,custom:S}}),[C,S,n]),j=null!=y&&!1!==y&&!w,P=l.a.createElement(mn,Object(r.a)({},O,{type:"switch"===b?"checkbox":b,ref:t,isValid:p,isInvalid:d,isStatic:!j,disabled:f,as:k}));return l.a.createElement(dn.Provider,{value:T},l.a.createElement("div",{style:v,className:i()(m,a,S&&"custom-"+b,c&&a+"-inline")},w||l.a.createElement(l.a.Fragment,null,P,j&&l.a.createElement(gn,{title:g},y),(p||d)&&l.a.createElement(pn,{type:p?"valid":"invalid"},h))))}));bn.displayName="FormCheck",bn.defaultProps={type:"checkbox",inline:!1,disabled:!1,isValid:!1,isInvalid:!1,title:""},bn.Input=mn,bn.Label=gn;var yn=bn,wn=l.a.forwardRef((function(e,t){var n,a,u=e.bsPrefix,c=e.type,f=e.size,p=e.id,d=e.className,h=e.isValid,m=e.isInvalid,v=e.plaintext,g=e.readOnly,b=e.as,y=void 0===b?"input":b,w=o(e,["bsPrefix","type","size","id","className","isValid","isInvalid","plaintext","readOnly","as"]),x=Object(s.useContext)(dn).controlId;if(u=_(u,"form-control"),v)(a={})[u+"-plaintext"]=!0,n=a;else if("file"===c){var E;(E={})[u+"-file"]=!0,n=E}else{var k;(k={})[u]=!0,k[u+"-"+f]=f,n=k}return l.a.createElement(y,Object(r.a)({},w,{type:c,ref:t,readOnly:g,id:p||x,className:i()(d,n,h&&"is-valid",m&&"is-invalid")}))}));wn.displayName="FormControl",wn.Feedback=pn;var xn=wn,En=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.controlId,f=e.as,p=void 0===f?"div":f,d=o(e,["bsPrefix","className","children","controlId","as"]);n=_(n,"form-group");var h=Object(s.useMemo)((function(){return{controlId:c}}),[c]);return l.a.createElement(dn.Provider,{value:h},l.a.createElement(p,Object(r.a)({},d,{ref:t,className:i()(a,n)}),u))}));En.displayName="FormGroup";var kn=En,_n=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.column,u=e.srOnly,c=e.className,f=e.htmlFor,p=o(e,["bsPrefix","column","srOnly","className","htmlFor"]),d=Object(s.useContext)(dn).controlId;n=_(n,"form-label");var h="col-form-label";"string"==typeof a&&(h=h+"-"+a);var m=i()(c,n,u&&"sr-only",a&&h);return f=f||d,a?l.a.createElement(yt,Object(r.a)({as:"label",className:m,htmlFor:f},p)):l.a.createElement("label",Object(r.a)({ref:t,className:m,htmlFor:f},p))}));_n.displayName="FormLabel",_n.defaultProps={column:!1,srOnly:!1};var On=_n,Sn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=void 0===s?"small":s,c=e.muted,f=o(e,["bsPrefix","className","as","muted"]);return n=_(n,"form-text"),l.a.createElement(u,Object(r.a)({},f,{ref:t,className:i()(a,n,c&&"text-muted")}))}));Sn.displayName="FormText";var Cn=Sn,Tn=l.a.forwardRef((function(e,t){return l.a.createElement(yn,Object(r.a)({},e,{ref:t,type:"switch"}))}));Tn.displayName="Switch",Tn.Input=yn.Input,Tn.Label=yn.Label;var jn=Tn,Pn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.inline,s=e.className,u=e.validated,c=e.as,f=void 0===c?"form":c,p=o(e,["bsPrefix","inline","className","validated","as"]);return n=_(n,"form"),l.a.createElement(f,Object(r.a)({},p,{ref:t,className:i()(s,u&&"was-validated",a&&n+"-inline")}))}));Pn.displayName="Form",Pn.defaultProps={inline:!1},Pn.Row=xe("form-row"),Pn.Group=kn,Pn.Control=xn,Pn.Check=yn,Pn.Switch=jn,Pn.Label=On,Pn.Text=Cn;var Nn=Pn,Rn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.fluid,s=e.as,u=void 
0===s?"div":s,c=e.className,f=o(e,["bsPrefix","fluid","as","className"]),p=_(n,"container"),d="string"==typeof a?"-"+a:"-fluid";return l.a.createElement(u,Object(r.a)({ref:t},f,{className:i()(c,a?""+p+d:p)}))}));Rn.displayName="Container",Rn.defaultProps={fluid:!1};var An=Rn,Mn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.fluid,u=e.rounded,c=e.roundedCircle,f=e.thumbnail,p=o(e,["bsPrefix","className","fluid","rounded","roundedCircle","thumbnail"]);n=_(n,"img");var d=i()(s&&n+"-fluid",u&&"rounded",c&&"rounded-circle",f&&n+"-thumbnail");return l.a.createElement("img",Object(r.a)({ref:t},p,{className:i()(a,d)}))}));Mn.displayName="Image",Mn.defaultProps={fluid:!1,rounded:!1,roundedCircle:!1,thumbnail:!1};var Fn=Mn,Dn={bsPrefix:Z.a.string,fluid:Z.a.bool,rounded:Z.a.bool,roundedCircle:Z.a.bool,thumbnail:Z.a.bool},In=l.a.forwardRef((function(e,t){var n=e.className,a=o(e,["className"]);return l.a.createElement(Fn,Object(r.a)({ref:t},a,{className:i()(n,"figure-img")}))}));In.displayName="FigureImage",In.propTypes=Dn,In.defaultProps={fluid:!0};var Ln=In,Un=xe("figure-caption",{Component:"figcaption"}),qn=xe("figure",{Component:"figure"});qn.Image=Ln,qn.Caption=Un;var zn=qn,Bn=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.size,s=e.className,u=e.as,c=void 0===u?"div":u,f=o(e,["bsPrefix","size","className","as"]);return n=_(n,"input-group"),l.a.createElement(c,Object(r.a)({ref:t},f,{className:i()(s,n,a&&n+"-"+a)}))})),Hn=xe("input-group-append"),Vn=xe("input-group-prepend"),$n=xe("input-group-text",{Component:"span"});Bn.displayName="InputGroup",Bn.Text=$n,Bn.Radio=function(e){return l.a.createElement($n,null,l.a.createElement("input",Object(r.a)({type:"radio"},e)))},Bn.Checkbox=function(e){return l.a.createElement($n,null,l.a.createElement("input",Object(r.a)({type:"checkbox"},e)))},Bn.Append=Hn,Bn.Prepend=Vn;var Wn=Bn,Kn=l.a.forwardRef((function(e,t){var n,a=e.as,s=void 0===a?"div":a,u=e.className,c=e.fluid,f=e.bsPrefix,p=o(e,["as","className","fluid","bsPrefix"]),d=((n={})[f=_(f,"jumbotron")]=!0,n[f+"-fluid"]=c,n);return l.a.createElement(s,Object(r.a)({ref:t},p,{className:i()(u,d)}))}));Kn.defaultProps={fluid:!1},Kn.displayName="Jumbotron";var Gn=Kn,Yn=l.a.createContext(null),Qn=function(){},Zn=l.a.forwardRef((function(e,t){var n,a,i=e.as,u=void 0===i?"ul":i,c=e.onSelect,f=e.activeKey,p=e.role,d=e.onKeyDown,h=o(e,["as","onSelect","activeKey","role","onKeyDown"]),m=kt(),v=Object(s.useRef)(!1),g=Object(s.useContext)(j),b=Object(s.useContext)(Yn);b&&(p=p||"tablist",f=b.activeKey,n=b.getControlledId,a=b.getControllerId);var y=Object(s.useRef)(null),w=function(e){if(!y.current)return null;var t=xt(y.current,"[data-rb-event-key]:not(.disabled)"),n=y.current.querySelector(".active"),r=t.indexOf(n);if(-1===r)return null;var o=r+e;return o>=t.length&&(o=0),o<0&&(o=t.length-1),t[o]},x=function(e,t){null!=e&&(c&&c(e,t),g&&g(e,t))};Object(s.useEffect)((function(){if(y.current&&v.current){var e=y.current.querySelector("[data-rb-event-key].active");e&&e.focus()}v.current=!1}));var E=Zt(t,y);return l.a.createElement(j.Provider,{value:x},l.a.createElement(Wt.Provider,{value:{role:p,activeKey:T(f),getControlledId:n||Qn,getControllerId:a||Qn}},l.a.createElement(u,Object(r.a)({},h,{onKeyDown:function(e){var t;switch(d&&d(e),e.key){case"ArrowLeft":case"ArrowUp":t=w(-1);break;case"ArrowRight":case"ArrowDown":t=w(1);break;default:return}t&&(e.preventDefault(),x(t.dataset.rbEventKey,e),v.current=!0,m())},ref:E,role:p}))))})),Xn=l.a.forwardRef((function(e,t){var 
n=e.active,a=e.className,u=e.tabIndex,c=e.eventKey,f=e.onSelect,p=e.onClick,d=e.as,h=o(e,["active","className","tabIndex","eventKey","onSelect","onClick","as"]),m=T(c,h.href),v=Object(s.useContext)(j),g=Object(s.useContext)(Wt),b=n;g&&(h.role||"tablist"!==g.role||(h.role="tab"),h["data-rb-event-key"]=m,h.id=g.getControllerId(m),h["aria-controls"]=g.getControlledId(m),b=null==n&&null!=m?g.activeKey===m:n),"tab"===h.role&&(h.tabIndex=b?u:-1,h["aria-selected"]=b);var y=be((function(e){p&&p(e),null!=m&&(f&&f(m,e),v&&v(m,e))}));return l.a.createElement(d,Object(r.a)({},h,{ref:t,onClick:y,className:i()(a,b&&"active")}))}));Xn.defaultProps={disabled:!1};var Jn=Xn,er=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.active,u=e.disabled,c=e.className,f=e.variant,p=e.action,d=e.as,h=e.eventKey,m=e.onClick,v=o(e,["bsPrefix","active","disabled","className","variant","action","as","eventKey","onClick"]);n=_(n,"list-group-item");var g=Object(s.useCallback)((function(e){if(u)return e.preventDefault(),void e.stopPropagation();m&&m(e)}),[u,m]);return l.a.createElement(Jn,Object(r.a)({ref:t},v,{eventKey:T(h,v.href),as:d||(p?v.href?"a":"button":"div"),onClick:g,className:i()(c,n,a&&"active",u&&"disabled",f&&n+"-"+f,p&&n+"-action")}))}));er.defaultProps={variant:null,active:!1,disabled:!1},er.displayName="ListGroupItem";var tr=er,nr=l.a.forwardRef((function(e,t){var n,a=m(e,{activeKey:"onSelect"}),s=a.className,u=a.bsPrefix,c=a.variant,f=a.horizontal,p=a.as,d=void 0===p?"div":p,h=o(a,["className","bsPrefix","variant","horizontal","as"]);return u=_(u,"list-group"),n=f?!0===f?"horizontal":"horizontal-"+f:null,l.a.createElement(Zn,Object(r.a)({ref:t},h,{as:d,className:i()(s,u,c&&u+"-"+c,n&&u+"-"+n)}))}));nr.defaultProps={variant:null,horizontal:null},nr.displayName="ListGroup",nr.Item=tr;var rr=nr,or=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=void 0===s?"div":s,c=o(e,["bsPrefix","className","as"]),f=_(n,"media");return l.a.createElement(u,Object(r.a)({},c,{ref:t,className:i()(a,f)}))}));or.displayName="Media",or.Body=xe("media-body");var ar,ir=or;function sr(e){if((!ar&&0!==ar||e)&&q){var t=document.createElement("div");t.style.position="absolute",t.style.top="-9999px",t.style.width="50px",t.style.height="50px",t.style.overflow="scroll",document.body.appendChild(t),ar=t.offsetWidth-t.clientWidth,document.body.removeChild(t)}return ar}function lr(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function ur(e){void 0===e&&(e=A());try{var t=e.activeElement;return t&&t.nodeName?t:null}catch(t){return e.body}}function cr(e,t){e.classList?e.classList.add(t):function(e,t){return e.classList?!!t&&e.classList.contains(t):-1!==(" "+(e.className.baseVal||e.className)+" ").indexOf(" "+t+" ")}(e,t)||("string"==typeof e.className?e.className=e.className+" "+t:e.setAttribute("class",(e.className&&e.className.baseVal||"")+" "+t))}function fr(e,t){return e.replace(new RegExp("(^|\\s)"+t+"(?:\\s|$)","g"),"$1").replace(/\s+/g," ").replace(/^\s*|\s*$/g,"")}function pr(e,t){e.classList?e.classList.remove(t):"string"==typeof e.className?e.className=fr(e.className,t):e.setAttribute("class",fr(e.className&&e.className.baseVal||"",t))}function dr(e){return"window"in e&&e.window===e?e:"nodeType"in(t=e)&&t.nodeType===document.DOCUMENT_NODE&&e.defaultView||!1;var t}function hr(e){var t;return dr(e)||(t=e)&&"body"===t.tagName.toLowerCase()?function(e){var t=A(e),n=dr(t);return t.body.clientWidthe.clientHeight}var 
mr=["template","script","style"],vr=function(e,t,n){t=[].concat(t),[].forEach.call(e.children,(function(e){var r,o,a;-1===t.indexOf(e)&&(o=(r=e).nodeType,a=r.tagName,1===o&&-1===mr.indexOf(a.toLowerCase()))&&n(e)}))};function gr(e,t){t&&(e?t.setAttribute("aria-hidden","true"):t.removeAttribute("aria-hidden"))}var br,yr=function(){function e(e){var t=void 0===e?{}:e,n=t.hideSiblingNodes,r=void 0===n||n,o=t.handleContainerOverflow,a=void 0===o||o;this.hideSiblingNodes=r,this.handleContainerOverflow=a,this.modals=[],this.containers=[],this.data=[],this.scrollbarSize=sr()}var t=e.prototype;return t.isContainerOverflowing=function(e){var t=this.data[this.containerIndexFromModal(e)];return t&&t.overflowing},t.containerIndexFromModal=function(e){return t=this.data,n=function(t){return-1!==t.modals.indexOf(e)},r=-1,t.some((function(e,t){return!!n(e,t)&&(r=t,!0)})),r;var t,n,r},t.setContainerStyle=function(e,t){var n={overflow:"hidden"};e.style={overflow:t.style.overflow,paddingRight:t.style.paddingRight},e.overflowing&&(n.paddingRight=parseInt(U(t,"paddingRight")||0,10)+this.scrollbarSize+"px"),U(t,n)},t.removeContainerStyle=function(e,t){var n=e.style;Object.keys(n).forEach((function(e){t.style[e]=n[e]}))},t.add=function(e,t,n){var r=this.modals.indexOf(e),o=this.containers.indexOf(t);if(-1!==r)return r;if(r=this.modals.length,this.modals.push(e),this.hideSiblingNodes&&function(e,t){var n=t.dialog,r=t.backdrop;vr(e,[n,r],(function(e){return gr(!0,e)}))}(t,e),-1!==o)return this.data[o].modals.push(e),r;var a={modals:[e],classes:n?n.split(/\s+/):[],overflowing:hr(t)};return this.handleContainerOverflow&&this.setContainerStyle(a,t),a.classes.forEach(cr.bind(null,t)),this.containers.push(t),this.data.push(a),r},t.remove=function(e){var t=this.modals.indexOf(e);if(-1!==t){var n=this.containerIndexFromModal(e),r=this.data[n],o=this.containers[n];if(r.modals.splice(r.modals.indexOf(e),1),this.modals.splice(t,1),0===r.modals.length)r.classes.forEach(pr.bind(null,o)),this.handleContainerOverflow&&this.removeContainerStyle(r,o),this.hideSiblingNodes&&function(e,t){var n=t.dialog,r=t.backdrop;vr(e,[n,r],(function(e){return gr(!1,e)}))}(o,e),this.containers.splice(n,1),this.data.splice(n,1);else if(this.hideSiblingNodes){var a=r.modals[r.modals.length-1],i=a.backdrop;gr(!1,a.dialog),gr(!1,i)}}},t.isTopModal=function(e){return!!this.modals.length&&this.modals[this.modals.length-1]===e},e}(),wr=function(e){if("undefined"!=typeof document)return null==e?A().body:("function"==typeof e&&(e=e()),e&&e.current&&(e=e.current),e&&e.nodeType?e:null)};function xr(e,t){var n=Object(s.useState)((function(){return wr(e)})),r=n[0],o=n[1];if(!r){var a=wr(e);a&&o(a)}return Object(s.useEffect)((function(){t&&r&&t(r)}),[t,r]),Object(s.useEffect)((function(){var t=wr(e);t!==r&&o(t)}),[e,r]),r}var Er=function(e){function t(){for(var t,n=arguments.length,r=new Array(n),o=0;o1?r-1:0),a=1;a1?r-1:0),a=1;a1?r-1:0),a=1;aA(e).documentElement.clientHeight;this.setState({style:{paddingRight:t&&!n?sr():void 0,paddingLeft:!t&&n?sr():void 0}})}},n.render=function(){var 
e=this.props,t=e.bsPrefix,n=e.className,a=e.style,s=e.dialogClassName,u=e.children,c=e.dialogAs,f=e.show,p=e.animation,d=e.backdrop,h=e.keyboard,m=e.onEscapeKeyDown,v=e.onShow,g=e.onHide,b=e.container,y=e.autoFocus,w=e.enforceFocus,x=e.restoreFocus,E=e.restoreFocusOptions,k=e.onEntered,_=e.onExit,O=e.onExiting,S=(e.onExited,e.onEntering,e.onEnter,e.onEntering,e.backdropClassName,o(e,["bsPrefix","className","style","dialogClassName","children","dialogAs","show","animation","backdrop","keyboard","onEscapeKeyDown","onShow","onHide","container","autoFocus","enforceFocus","restoreFocus","restoreFocusOptions","onEntered","onExit","onExiting","onExited","onEntering","onEnter","onEntering","backdropClassName"])),C=!0===d?this.handleClick:null,T=Object(r.a)({},a,{},this.state.style);return p||(T.display="block"),l.a.createElement(Rr.Provider,{value:this.modalContext},l.a.createElement(Sr,{show:f,backdrop:d,container:b,keyboard:h,autoFocus:y,enforceFocus:w,restoreFocus:x,restoreFocusOptions:E,onEscapeKeyDown:m,onShow:v,onHide:g,onEntered:k,onExit:_,onExiting:O,manager:this.getModalManager(),ref:this.setModalRef,style:T,className:i()(n,t),containerClassName:t+"-open",transition:p?zr:void 0,backdropTransition:p?Br:void 0,renderBackdrop:this.renderBackdrop,onClick:C,onMouseUp:this.handleMouseUp,onEnter:this.handleEnter,onEntering:this.handleEntering,onExited:this.handleExited},l.a.createElement(c,Object(r.a)({},S,{onMouseDown:this.handleDialogMouseDown,className:s}),u)))},t}(l.a.Component);Hr.defaultProps=qr;var Vr=O(Hr,"modal");Vr.Body=Nr,Vr.Header=Lr,Vr.Title=Ur,Vr.Footer=Fr,Vr.Dialog=Mr,Vr.TRANSITION_DURATION=300,Vr.BACKDROP_TRANSITION_DURATION=150;var $r=Vr,Wr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.children,u=e.as,c=void 0===u?"div":u,f=o(e,["bsPrefix","className","children","as"]);return n=_(n,"nav-item"),l.a.createElement(c,Object(r.a)({},f,{ref:t,className:i()(a,n)}),s)}));Wr.displayName="NavItem";var Kr=Wr,Gr={disabled:!1,as:Re},Yr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.disabled,s=e.className,u=e.href,c=e.eventKey,f=e.onSelect,p=e.as,d=o(e,["bsPrefix","disabled","className","href","eventKey","onSelect","as"]);return n=_(n,"nav-link"),l.a.createElement(Jn,Object(r.a)({},d,{href:u,ref:t,eventKey:c,as:p,disabled:a,onSelect:f,className:i()(s,n,a&&"disabled")}))}));Yr.displayName="NavLink",Yr.defaultProps=Gr;var Qr=Yr,Zr=l.a.forwardRef((function(e,t){var n,a,u,c=m(e,{activeKey:"onSelect"}),f=c.as,p=void 0===f?"div":f,d=c.bsPrefix,h=c.variant,v=c.fill,g=c.justify,b=c.navbar,y=c.className,w=c.children,x=c.activeKey,E=o(c,["as","bsPrefix","variant","fill","justify","navbar","className","children","activeKey"]);d=_(d,"nav");var k=Object(s.useContext)(Xt),O=Object(s.useContext)(Qe);return k?(a=k.bsPrefix,b=null==b||b):O&&(u=O.cardHeaderBsPrefix),l.a.createElement(Zn,Object(r.a)({as:p,ref:t,activeKey:x,className:i()(y,(n={},n[d]=!b,n[a+"-nav"]=b,n[u+"-"+h]=!!u,n[d+"-"+h]=!!h,n[d+"-fill"]=v,n[d+"-justified"]=g,n))},E),w)}));Zr.displayName="Nav",Zr.defaultProps={justify:!1,fill:!1},Zr.Item=Kr,Zr.Link=Qr;var Xr=Zr,Jr=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,s=e.as,u=o(e,["bsPrefix","className","as"]);n=_(n,"navbar-brand");var c=s||(u.href?"a":"span");return l.a.createElement(c,Object(r.a)({},u,{ref:t,className:i()(a,n)}))}));Jr.displayName="NavbarBrand";var eo=Jr,to=l.a.forwardRef((function(e,t){var n=e.children,a=e.bsPrefix,i=o(e,["children","bsPrefix"]);return a=_(a,"navbar-collapse"),l.a.createElement(Xt.Consumer,null,(function(e){return 
l.a.createElement(pe,Object(r.a)({in:!(!e||!e.expanded)},i),l.a.createElement("div",{ref:t,className:a},n))}))}));to.displayName="NavbarCollapse";var no=to,ro=l.a.forwardRef((function(e,t){var n=e.bsPrefix,a=e.className,u=e.children,c=e.label,f=e.as,p=void 0===f?"button":f,d=e.onClick,h=o(e,["bsPrefix","className","children","label","as","onClick"]);n=_(n,"navbar-toggler");var m=Object(s.useContext)(Xt)||{},v=m.onToggle,g=m.expanded,b=be((function(e){d&&d(e),v&&v()}));return"button"===p&&(h.type="button"),l.a.createElement(p,Object(r.a)({},h,{ref:t,onClick:b,"aria-label":c,className:i()(a,n,!g&&"collapsed")}),u||l.a.createElement("span",{className:n+"-icon"}))}));ro.displayName="NavbarToggle",ro.defaultProps={label:"Toggle navigation"};var oo=ro,ao=l.a.forwardRef((function(e,t){var n=m(e,{expanded:"onToggle"}),a=n.bsPrefix,u=n.expand,c=n.variant,f=n.bg,p=n.fixed,d=n.sticky,h=n.className,v=n.children,g=n.as,b=void 0===g?"nav":g,y=n.expanded,w=n.onToggle,x=n.onSelect,E=n.collapseOnSelect,k=o(n,["bsPrefix","expand","variant","bg","fixed","sticky","className","children","as","expanded","onToggle","onSelect","collapseOnSelect"]);a=_(a,"navbar");var O=Object(s.useCallback)((function(){x&&x.apply(void 0,arguments),E&&y&&w(!1)}),[x,E,y,w]);void 0===k.role&&"nav"!==b&&(k.role="navigation");var S=a+"-expand";"string"==typeof u&&(S=S+"-"+u);var C=Object(s.useMemo)((function(){return{onToggle:function(){return w(!y)},bsPrefix:a,expanded:y}}),[a,y,w]);return l.a.createElement(Xt.Provider,{value:C},l.a.createElement(j.Provider,{value:O},l.a.createElement(b,Object(r.a)({ref:t},k,{className:i()(h,a,u&&S,c&&a+"-"+c,f&&"bg-"+f,d&&"sticky-"+d,p&&"fixed-"+p)}),v)))}));ao.defaultProps={expand:!0,variant:"light",collapseOnSelect:!1},ao.displayName="Navbar",ao.Brand=eo,ao.Toggle=oo,ao.Collapse=no,ao.Text=xe("navbar-text",{Component:"span"});var io=ao,so={id:Z.a.any,onClick:Z.a.func,title:Z.a.node.isRequired,disabled:Z.a.bool,active:Z.a.bool,menuRole:Z.a.string,rootCloseEvent:Z.a.string,bsPrefix:Z.a.string},lo=l.a.forwardRef((function(e,t){var n=e.id,a=e.title,i=e.children,s=e.bsPrefix,u=e.rootCloseEvent,c=e.menuRole,f=e.disabled,p=e.active,d=o(e,["id","title","children","bsPrefix","rootCloseEvent","menuRole","disabled","active"]);return l.a.createElement(an,Object(r.a)({ref:t},d,{as:Kr}),l.a.createElement(an.Toggle,{id:n,eventKey:null,active:p,disabled:f,childBsPrefix:s,as:Qr},a),l.a.createElement(an.Menu,{role:c,rootCloseEvent:u},i))}));lo.displayName="NavDropdown",lo.propTypes=so,lo.Item=an.Item,lo.Divider=an.Divider,lo.Header=an.Header;var uo=lo,co=l.a.forwardRef((function(e,t){var n=e.flip,a=e.placement,i=e.containerPadding,u=e.popperConfig,c=void 0===u?{}:u,f=e.transition,p=Et(),d=p[0],h=p[1],m=Et(),v=m[0],g=m[1],b=Zt(h,t),y=xr(e.container),w=xr(e.target),x=Object(s.useState)(!e.show),E=x[0],k=x[1],_=c.modifiers,O=void 0===_?{}:_,S=Tt(w,d,Object(r.a)({},c,{placement:a||"bottom",enableEvents:e.show,modifiers:Object(r.a)({},O,{preventOverflow:Object(r.a)({padding:i||5},O.preventOverflow),arrow:Object(r.a)({},O.arrow,{enabled:!!v,element:v}),flip:Object(r.a)({enabled:!!n},O.preventOverflow)})})),C=S.styles,T=S.arrowStyles,j=o(S,["styles","arrowStyles"]);e.show?E&&k(!1):e.transition||E||k(!0);var P=e.show||f&&!E;if(Mt(d,e.onHide,{disabled:!e.rootClose||e.rootCloseDisabled,clickTrigger:e.rootCloseEvent}),!P)return null;var N=e.children(Object(r.a)({},j,{show:e.show,props:{style:C,ref:b},arrowProps:{style:T,ref:g}}));if(f){var 
[Minified JavaScript bundle content: a truncated webpack build artifact containing react-bootstrap Overlay, lodash helpers, and Formik internals; no diff context or human-readable source is recoverable from this span.]