diff --git a/.idea/tidbcloudy.iml b/.idea/tidbcloudy.iml
index ec63674..6d2b87d 100644
--- a/.idea/tidbcloudy.iml
+++ b/.idea/tidbcloudy.iml
@@ -3,5 +3,8 @@
+
+
+
\ No newline at end of file
diff --git a/README.md b/README.md
index 0aaedab..40a779a 100644
--- a/README.md
+++ b/README.md
@@ -71,6 +71,23 @@ You can use this SDK to access [TiDB Cloud](https://tidbcloud.com) and manage yo
+
+ 1.1.0
+ ✅
+ ❌
+ ✅
+ ✅
+ ✅
+ ✅
+ ❌
+ ✅
+ ✅
+ ✅
+ ✅
+ ✅
+ ✅
+ ✅
+
1.0.10
✅
diff --git a/mock_server/mock_config.json b/mock_server/mock_config.json
new file mode 100644
index 0000000..97b5d89
--- /dev/null
+++ b/mock_server/mock_config.json
@@ -0,0 +1,455 @@
+{
+ "projects": [
+ {
+ "id": "1",
+ "org_id": "1",
+ "name": "default_project",
+ "cluster_count": 4,
+ "user_count": 10,
+ "create_timestamp": "1656991448",
+ "aws_cmek_enabled": false
+ },
+ {
+ "id": "2",
+ "org_id": "1",
+ "name": "default_project_1",
+ "cluster_count": 5,
+ "user_count": 1,
+ "create_timestamp": "1650091448",
+ "aws_cmek_enabled": true
+ }
+ ],
+ "clusters": [
+ {
+ "id": "1",
+ "project_id": "1",
+ "name": "Cluster0",
+ "cluster_type": "DEDICATED",
+ "cloud_provider": "AWS",
+ "region": "us-west-2",
+ "create_timestamp": "1656991448",
+ "config": {
+ "port": 4000,
+ "components": {
+ "tidb": {
+ "node_size": "8C16G",
+ "node_quantity": 2
+ },
+ "tikv": {
+ "node_size": "8C32G",
+ "storage_size_gib": 1024,
+ "node_quantity": 3
+ }
+ }
+ },
+ "status": {
+ "tidb_version": "v6.1.0",
+ "cluster_status": "AVAILABLE",
+ "node_map": {
+ "tidb": [
+ {
+ "node_name": "tidb-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C16G",
+ "vcpu_num": 8,
+ "ram_bytes": "17179869184",
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tidb-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C16G",
+ "vcpu_num": 8,
+ "ram_bytes": "17179869184",
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ],
+ "tikv": [
+ {
+ "node_name": "tikv-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C32G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tikv-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tikv-2",
+ "availability_zone": "us-west-2c",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ],
+ "tiflash": [
+ {
+ "node_name": "tiflash-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tiflash-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ]
+ },
+ "connection_strings": {
+ "default_user": "root",
+ "standard": {
+ "host": "tidb.us-east-1.shared.aws.tidbcloud.com",
+ "port": 4000
+ },
+ "vpc_peering": {
+ "host": "private-tidb.us-east-1.shared.aws.tidbcloud.com",
+ "port": 4000
+ }
+ }
+ }
+ },
+ {
+ "id": "2",
+ "project_id": "2",
+ "name": "Cluster1",
+ "cluster_type": "DEDICATED",
+ "cloud_provider": "AWS",
+ "region": "us-west-1",
+ "create_timestamp": "1656991448",
+ "config": {
+ "port": 4000,
+ "components": {
+ "tidb": {
+ "node_size": "8C16G",
+ "node_quantity": 2
+ },
+ "tikv": {
+ "node_size": "8C32G",
+ "storage_size_gib": 1024,
+ "node_quantity": 3
+ }
+ }
+ },
+ "status": {
+ "tidb_version": "v7.1.0",
+ "cluster_status": "AVAILABLE",
+ "node_map": {
+ "tidb": [
+ {
+ "node_name": "tidb-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C16G",
+ "vcpu_num": 8,
+ "ram_bytes": "17179869184",
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tidb-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C16G",
+ "vcpu_num": 8,
+ "ram_bytes": "17179869184",
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ],
+ "tikv": [
+ {
+ "node_name": "tikv-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C32G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tikv-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tikv-2",
+ "availability_zone": "us-west-2c",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ],
+ "tiflash": [
+ {
+ "node_name": "tiflash-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ },
+ {
+ "node_name": "tiflash-1",
+ "availability_zone": "us-west-2b",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ ]
+ },
+ "connection_strings": {
+ "default_user": "root",
+ "standard": {
+ "host": "tidb.test.us-east-1.shared.aws.tidbcloud.com",
+ "port": 4000
+ },
+ "vpc_peering": {
+ "host": "private-tidb.test.us-east-1.shared.aws.tidbcloud.com",
+ "port": 4000
+ }
+ }
+ }
+ },
+ {
+ "id": "3456",
+ "project_id": "2",
+ "name": "serverless-0",
+ "cluster_type": "DEVELOPER",
+ "cloud_provider": "AWS",
+ "region": "us-west-2",
+ "create_timestamp": "1606472018",
+ "config": {
+ "port": 4000,
+ "components": {
+ "tidb": {
+ "node_size": "Shared0",
+ "node_quantity": 1
+ },
+ "tikv": {
+ "node_size": "Shared0",
+ "node_quantity": 1,
+ "storage_size_gib": 0
+ },
+ "tiflash": {
+ "node_size": "Shared0",
+ "node_quantity": 1,
+ "storage_size_gib": 0
+ }
+ },
+ "ip_access_list": []
+ },
+ "status": {
+ "tidb_version": "v7.1.0",
+ "cluster_status": "AVAILABLE",
+ "node_map": {
+ "tidb": [],
+ "tikv": [],
+ "tiflash": []
+ },
+ "connection_strings": {
+ "default_user": "test.root",
+ "standard": {
+ "host": "gateway01.prod.aws.tidbcloud.com",
+ "port": 4000
+ },
+ "vpc_peering": {
+ "host": "gateway01-privatelink.prod.aws.tidbcloud.com",
+ "port": 4000
+ }
+ }
+ }
+ }
+ ],
+ "org_id": "1",
+ "provider_regions": [
+ {
+ "cluster_type": "DEDICATED",
+ "cloud_provider": "AWS",
+ "region": "us-west-2",
+ "tidb": [
+ {
+ "node_size": "8C16G",
+ "node_quantity_range": {
+ "min": 1,
+ "step": 1
+ }
+ }
+ ],
+ "tikv": [
+ {
+ "node_size": "8C32G",
+ "node_quantity_range": {
+ "min": 3,
+ "step": 3
+ },
+ "storage_size_gib_range": {
+ "min": 500,
+ "max": 4096
+ }
+ }
+ ],
+ "tiflash": [
+ {
+ "node_size": "8C64G",
+ "node_quantity_range": {
+ "min": 0,
+ "step": 1
+ },
+ "storage_size_gib_range": {
+ "min": 500,
+ "max": 2048
+ }
+ }
+ ]
+ },
+ {
+ "cluster_type": "DEVELOPER",
+ "cloud_provider": "AWS",
+ "region": "us-west-2",
+ "tidb": [
+ {
+ "node_size": "Shared0",
+ "node_quantity_range": {
+ "min": 1,
+ "step": 1
+ }
+ }
+ ],
+ "tikv": [
+ {
+ "node_size": "Shared0",
+ "node_quantity_range": {
+ "min": 1,
+ "step": 1
+ },
+ "storage_size_gib_range": {
+ "min": 1,
+ "max": 1
+ }
+ }
+ ],
+ "tiflash": [
+ {
+ "node_size": "Shared0",
+ "node_quantity_range": {
+ "min": 1,
+ "step": 1
+ },
+ "storage_size_gib_range": {
+ "min": 1,
+ "max": 1
+ }
+ }
+ ]
+ }
+ ],
+ "billings": [
+ {
+ "overview": {
+ "billedMonth": "2023-10",
+ "credits": "1.00",
+ "discounts": "2.00",
+ "runningTotal": "3.00",
+ "totalCost": "4.00"
+ },
+ "summaryByProject": {
+ "otherCharges": [
+ {
+ "chargeName": "Support Plan",
+ "credits": "0.10",
+ "discounts": "0.20",
+ "runningTotal": "0.30",
+ "totalCost": "0.40"
+ }
+ ],
+ "projects": [
+ {
+ "credits": "3.00",
+ "discounts": "0.50",
+ "projectName": "prod-project",
+ "runningTotal": "1.00",
+ "totalCost": "4.00"
+ }
+ ]
+ },
+ "summaryByService": [
+ {
+ "credits": "2.00",
+ "discounts": "3.00",
+ "runningTotal": "5.00",
+ "serviceCosts": [
+ {}
+ ],
+ "serviceName": "TiDB Dedicated",
+ "totalCost": "4.00"
+ }
+ ]
+ },
+ {
+ "overview": {
+ "billedMonth": "2023-09",
+ "credits": "1.10",
+ "discounts": "2.10",
+ "runningTotal": "3.10",
+ "totalCost": "4.10"
+ },
+ "summaryByProject": {
+ "otherCharges": [
+ {
+ "chargeName": "Support Plan",
+ "credits": "0.11",
+ "discounts": "0.21",
+ "runningTotal": "0.31",
+ "totalCost": "0.41"
+ }
+ ],
+ "projects": [
+ {
+ "credits": "3.01",
+ "discounts": "0.50",
+ "projectName": "prod-project",
+ "runningTotal": "1.01",
+ "totalCost": "4.01"
+ }
+ ]
+ },
+ "summaryByService": [
+ {
+ "credits": "2.10",
+ "discounts": "3.10",
+ "runningTotal": "5.10",
+ "serviceCosts": [
+ {}
+ ],
+ "serviceName": "TiDB Dedicated",
+ "totalCost": "4.10"
+ }
+ ]
+ }
+ ]
+}
+
diff --git a/mock_server/models/billing.py b/mock_server/models/billing.py
new file mode 100644
index 0000000..a7f25cc
--- /dev/null
+++ b/mock_server/models/billing.py
@@ -0,0 +1,29 @@
+from flask import Blueprint, jsonify, Response
+from httpx import HTTPStatusError
+
+from mock_server.server_state import CONFIG
+from mock_server.services.org_service import OrgService
+from tidbcloudy.context import Context
+from tidbcloudy.specification import BillingMonthSummary
+
+
+def create_billing_blueprint():
+ bp = Blueprint("billing", __name__)
+
+ org_service = OrgService()
+    context = Context("", "", {})
+
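+    # Convert HTTPStatusError raised by the service layer into a JSON error response.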
+ @bp.errorhandler(HTTPStatusError)
+ def handle_status_error(exc: HTTPStatusError):
+ return jsonify({
+ "error": exc.response.text
+ }), exc.response.status_code
+
+    @bp.route("/<month>", methods=["GET"])
+ def tidbcloudy_get_monthly_bill(month: str) -> [Response, int]:
+        billings = [BillingMonthSummary.from_object(context, item) for item in CONFIG["billings"]]
+ billing = org_service.get_monthly_bill(billings, month)
+ resp = billing.to_object()
+ return resp, 200
+
+ return bp
diff --git a/mock_server/models/clusters.py b/mock_server/models/clusters.py
new file mode 100644
index 0000000..a834a79
--- /dev/null
+++ b/mock_server/models/clusters.py
@@ -0,0 +1,31 @@
+from flask import Blueprint, jsonify, Response
+from httpx import HTTPStatusError
+
+from mock_server.server_state import CONFIG
+from mock_server.services.project_service import ProjectService
+from tidbcloudy.context import Context
+from tidbcloudy.specification import CloudSpecification
+
+
+def create_clusters_blueprint():
+ bp = Blueprint("clusters", __name__)
+
+ pro_service = ProjectService()
+    context = Context("", "", {})
+
+ @bp.errorhandler(HTTPStatusError)
+ def handle_status_error(exc: HTTPStatusError):
+ return jsonify({
+ "error": exc.response.text
+ }), exc.response.status_code
+
+ @bp.route("/provider/regions", methods=["GET"])
+ def tidbcloudy_provider() -> [Response, int]:
+        provider_regions = [CloudSpecification.from_object(context, item) for item in CONFIG["provider_regions"]]
+ provider_regions_obj = pro_service.list_provider_regions(provider_regions)
+ resp = {
+ "items": [item.to_object() for item in provider_regions_obj]
+ }
+ return resp, 200
+
+ return bp
diff --git a/mock_server/models/projects.py b/mock_server/models/projects.py
new file mode 100644
index 0000000..8ec6925
--- /dev/null
+++ b/mock_server/models/projects.py
@@ -0,0 +1,106 @@
+from flask import Blueprint, jsonify, request, Response
+from httpx import HTTPStatusError
+
+from mock_server.server_state import CONFIG
+from mock_server.services.org_service import OrgService
+from mock_server.services.project_service import ProjectService
+from tidbcloudy.cluster import Cluster
+from tidbcloudy.context import Context
+from tidbcloudy.project import Project
+
+
+def create_projects_blueprint():
+ bp = Blueprint("projects", __name__)
+
+ org_service = OrgService()
+ pro_service = ProjectService()
+    context = Context("", "", {})
+
+ @bp.errorhandler(HTTPStatusError)
+ def handle_status_error(exc: HTTPStatusError):
+ return jsonify({
+ "error": exc.response.text
+ }), exc.response.status_code
+
+ @bp.route("", methods=["GET"])
+ def tidbcloudy_list_projects() -> [Response, int]:
+        projects = [Project.from_object(context, item) for item in CONFIG["projects"]]
+ page = request.args.get("page", default=1, type=int)
+ page_size = request.args.get("page_size", default=10, type=int)
+ return_projects = org_service.list_projects(projects, page, page_size)
+ resp = jsonify({
+ "items": [item.to_object() for item in return_projects],
+ "total": len(projects)
+ })
+ return resp, 200
+
+ @bp.route("", methods=["POST"])
+ def tidbcloudy_create_project() -> [Response, int]:
+ new_project = org_service.create_project(request.json)
+ CONFIG["projects"].append(new_project.to_object())
+ resp = jsonify({
+ "id": new_project.id
+ })
+ return resp, 200
+
+    @bp.route("/<project_id>/aws-cmek", methods=["GET"])
+ def tidbcloudy_list_project_aws_cmeks(project_id) -> [Response, int]:
+ projects = CONFIG["projects"]
+ project_cmeks = pro_service.list_project_aws_cmeks(projects, project_id)
+ resp = jsonify({
+ "items": project_cmeks
+ })
+ return resp, 200
+
+    @bp.route("/<project_id>/aws-cmek", methods=["POST"])
+ def tidbcloudy_create_project_aws_cmek(project_id) -> [Response, int]:
+ projects = CONFIG["projects"]
+ body = request.json
+ pro_service.create_project_aws_cmek(projects, project_id, body)
+ return {}, 200
+
+    @bp.route("/<project_id>/clusters", methods=["GET"])
+ def tidbcloudy_list_clusters(project_id) -> [Response, int]:
+        clusters = [Cluster.from_object(context, item) for item in CONFIG["clusters"]]
+ page = request.args.get("page", default=1, type=int)
+ page_size = request.args.get("page_size", default=10, type=int)
+ return_clusters, total = pro_service.list_clusters(clusters, project_id, page, page_size)
+ resp = jsonify(
+ {
+ "items": [item.to_object() for item in return_clusters],
+ "total": total
+ }
+ )
+ return resp, 200
+
+    @bp.route("/<project_id>/clusters", methods=["POST"])
+ def tidbcloudy_create_cluster(project_id) -> [Response, int]:
+ new_cluster = pro_service.create_cluster(project_id, request.json)
+ CONFIG["clusters"].append(new_cluster.to_object())
+ resp = jsonify({
+ "id": new_cluster.id
+ })
+ return resp, 200
+
+    @bp.route("/<project_id>/clusters/<cluster_id>", methods=["GET"])
+ def tidbcloudy_get_cluster(project_id, cluster_id) -> [Response, int]:
+        clusters = [Cluster.from_object(context, item) for item in CONFIG["clusters"]]
+ cluster = pro_service.get_cluster(clusters, project_id, cluster_id)
+ resp = jsonify(cluster.to_object())
+ return resp, 200
+
+    @bp.route("/<project_id>/clusters/<cluster_id>", methods=["DELETE"])
+ def tidbcloudy_delete_cluster(project_id, cluster_id) -> [Response, int]:
+        clusters = [Cluster.from_object(context, item) for item in CONFIG["clusters"]]
+ current_clusters = pro_service.delete_cluster(clusters, project_id, cluster_id)
+ CONFIG["clusters"] = [item.to_object() for item in current_clusters]
+ return {}, 200
+
+    @bp.route("/<project_id>/clusters/<cluster_id>", methods=["PATCH"])
+ def tidbcloudy_update_cluster(project_id, cluster_id) -> [Response, int]:
+        clusters = [Cluster.from_object(context, item) for item in CONFIG["clusters"]]
+ current_clusters = pro_service.update_cluster(clusters, project_id, cluster_id, request.json)
+ CONFIG["clusters"] = [item.to_object() for item in current_clusters]
+ return {}, 200
+
+ return bp
diff --git a/mock_server/run.py b/mock_server/run.py
new file mode 100644
index 0000000..650e2f8
--- /dev/null
+++ b/mock_server/run.py
@@ -0,0 +1,19 @@
+from flask import Flask
+
+app = Flask(__name__)
+app.config["SERVER_NAME"] = "127.0.0.1:5000"
+
+from mock_server.models.projects import create_projects_blueprint
+from mock_server.models.clusters import create_clusters_blueprint
+from mock_server.models.billing import create_billing_blueprint
+
+project_bp = create_projects_blueprint()
+cluster_bp = create_clusters_blueprint()
+billing_bp = create_billing_blueprint()
+
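+# Mount the mocked endpoints under the same URL prefixes as the TiDB Cloud API.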
+app.register_blueprint(project_bp, url_prefix="/api/v1beta/projects")
+app.register_blueprint(cluster_bp, url_prefix="/api/v1beta/clusters")
+app.register_blueprint(billing_bp, url_prefix="/billing/v1beta1/bills")
+
+if __name__ == "__main__":
+ app.run(debug=True)
diff --git a/mock_server/server_state.py b/mock_server/server_state.py
new file mode 100644
index 0000000..c2554ca
--- /dev/null
+++ b/mock_server/server_state.py
@@ -0,0 +1,27 @@
+import json
+import os
+from typing import Any, Dict
+
+
+def load_config(filename: str = "mock_config.json") -> Dict[str, Any]:
+ """
+ Load a configuration file in JSON format.
+
+ Args:
+ filename (str): The name of the configuration file.
+
+ Returns:
+ dict: A dictionary containing the configuration parameters.
+ """
+ try:
+ with open(f"{os.path.dirname(__file__)}/{filename}", "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ raise FileNotFoundError(
+ f"Configuration file '{filename}' not found in '{os.path.dirname(__file__)}'."
+ )
+ except json.JSONDecodeError:
+        raise ValueError(f"Failed to decode {filename}")
+
+
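+# In-memory state shared by all mock endpoints; loaded once at import time.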
+CONFIG = load_config()
diff --git a/mock_server/services/org_service.py b/mock_server/services/org_service.py
new file mode 100644
index 0000000..a5a433b
--- /dev/null
+++ b/mock_server/services/org_service.py
@@ -0,0 +1,42 @@
+import uuid
+from datetime import datetime
+from typing import List
+
+from httpx import HTTPStatusError, Request, Response
+
+from mock_server.server_state import CONFIG
+from tidbcloudy.context import Context
+from tidbcloudy.project import Project
+from tidbcloudy.specification import BillingMonthSummary
+
+
+class OrgService:
+ def __init__(self):
+ self.org_id = CONFIG["org_id"]
+ self._context = Context("", "", {})
+
+ @staticmethod
+ def list_projects(projects: List[Project], page: int, page_size: int) -> List[Project]:
+ return_projects = projects[(page - 1) * page_size: page * page_size]
+ return return_projects
+
+ def create_project(self, body: dict) -> Project:
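+        # Build a project with a random numeric id, the current timestamp, and default counts.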
+ new_project = Project.from_object(self._context, {
+ "id": str(uuid.uuid4().int % (10 ** 19)),
+ "org_id": self.org_id,
+ "name": body["name"],
+            "aws_cmek_enabled": body.get("aws_cmek_enabled", False),
+ "cluster_count": 0,
+ "user_count": 1,
+ "create_timestamp": str(int(datetime.now().timestamp()))
+ })
+ return new_project
+
+ @staticmethod
+ def get_monthly_bill(billings: List[BillingMonthSummary], month: str) -> BillingMonthSummary:
+ for billing in billings:
+ if billing.overview.billedMonth == month:
+ return billing
+ raise HTTPStatusError("",
+ request=Request("GET", ""),
+ response=Response(400, text="The billing month is not found"))
diff --git a/mock_server/services/project_service.py b/mock_server/services/project_service.py
new file mode 100644
index 0000000..c76f265
--- /dev/null
+++ b/mock_server/services/project_service.py
@@ -0,0 +1,172 @@
+import uuid
+from datetime import datetime
+from typing import List, Union
+
+from httpx import HTTPStatusError, Request, Response
+
+from tidbcloudy.cluster import Cluster
+from tidbcloudy.context import Context
+from tidbcloudy.specification import CloudSpecification, ClusterStatus, TiDBComponent, TiFlashComponent, TiKVComponent
+
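+# Cluster components the mock accepts, with the attributes allowed for each component.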
+VALID_COMPONENTS = {
+ "tidb": {
+ "class": TiDBComponent,
+ "attributes": {"node_size", "node_quantity"}
+ },
+ "tikv": {
+ "class": TiKVComponent,
+ "attributes": {"node_size", "node_quantity", "storage_size_gib"}
+ },
+ "tiflash": {
+ "class": TiFlashComponent,
+ "attributes": {"node_size", "node_quantity", "storage_size_gib"}
+ }
+}
+
+
+class ProjectService:
+ def __init__(self):
+ self._context = Context("", "", {})
+
+ @staticmethod
+ def _get_project_by_id(projects: List[dict], project_id: str) -> dict:
+ for project in projects:
+ if project["id"] == project_id:
+ return project
+ return {}
+
+ @staticmethod
+ def _get_project_index_by_id(projects: List[dict], project_id: str) -> Union[int, None]:
+ for index, project in enumerate(projects):
+ if project["id"] == project_id:
+ return index
+ return None
+
+ @staticmethod
+    def list_project_aws_cmeks(projects: List[dict], project_id: str) -> List[dict]:
+        project = ProjectService._get_project_by_id(projects, project_id)
+        return project.get("aws_cmek", [])
+
+ @staticmethod
+ def create_project_aws_cmek(projects: List[dict], project_id: str, body: dict) -> None:
+ project_index = ProjectService._get_project_index_by_id(projects, project_id)
+ if project_index is None or projects[project_index].get("aws_cmek_enabled") is False:
+ raise HTTPStatusError("",
+ request=Request("POST", ""),
+ response=Response(400, text="aws cmek is not enabled"))
+ project_cmek = projects[project_index].get("aws_cmek", [])
+ for create_cmek in body.get("specs", []):
+ current_cmek = {
+ "region": create_cmek["region"],
+ "kms_arn": create_cmek["kms_arn"],
+ }
+ project_cmek.append(current_cmek)
+ projects[project_index].update({"aws_cmek": project_cmek})
+
+ @staticmethod
+ def list_provider_regions(provider_regions: List[CloudSpecification]) -> List[CloudSpecification]:
+ return provider_regions
+
+ @staticmethod
+ def list_clusters(clusters: List[Cluster], project_id: str, page: int, page_size: int) -> [List[Cluster], int]:
+ current_clusters = []
+ for cluster in clusters:
+ if cluster.project_id == project_id:
+ current_clusters.append(cluster)
+ return_clusters = current_clusters[page_size * (page - 1): page_size * page]
+ total = len(current_clusters)
+ return return_clusters, total
+
+ def create_cluster(self, project_id: str, body: dict) -> Cluster:
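+        # Assign a random numeric id, the current timestamp, and a ready-made AVAILABLE status with default connection strings.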
+ body["id"] = str(uuid.uuid4().int % (10 ** 19))
+ body["project_id"] = project_id
+ body["create_timestamp"] = str(int(datetime.now().timestamp()))
+ body["status"] = {
+ "tidb_version": "v0.0.0",
+ "cluster_status": ClusterStatus.AVAILABLE.value,
+ "connection_strings": {
+ "default_user": "root",
+ "standard": {
+ "host": "gateway01.prod.aws.tidbcloud.com",
+ "port": 4000
+ },
+ "vpc_peering": {
+ "host": "gateway01-privatelink.prod.aws.tidbcloud.com",
+ "port": 4000
+ }
+ }
+ }
+ if body["config"].get("port") is None:
+ body["config"]["port"] = 4000
+ new_cluster = Cluster.from_object(self._context, body)
+ return new_cluster
+
+ @staticmethod
+ def get_cluster(clusters: List[Cluster], project_id: str, cluster_id: str) -> Cluster:
+ for cluster in clusters:
+ if cluster.project_id == project_id and cluster.id == cluster_id:
+ return cluster
+ raise HTTPStatusError("",
+ request=Request("GET", ""),
+ response=Response(400, text=f"Cluster {cluster_id} not found"))
+
+ def delete_cluster(self, clusters: List[Cluster], project_id: str, cluster_id: str) -> List[Cluster]:
+        cluster_to_delete = self.get_cluster(clusters, project_id, cluster_id)
+        clusters.remove(cluster_to_delete)
+ return clusters
+
+ @staticmethod
+ def _get_component(cluster: Cluster, component_name: str):
+ if component_name not in VALID_COMPONENTS:
+ raise HTTPStatusError("",
+ request=Request("GET", ""),
+ response=Response(400, text=f"Component {component_name} is not supported"))
+ component = getattr(cluster.config.components, component_name)
+ if component is None:
+ init_component = VALID_COMPONENTS[component_name]["class"]
+ component = init_component()
+ setattr(cluster.config.components, component_name, component)
+ return component
+
+ @staticmethod
+ def _update_components(cluster: Cluster, components_config: dict) -> Cluster:
+ for component, config in components_config.items():
+ valid_attrs = VALID_COMPONENTS.get(component, {}).get("attributes", set())
+ for attribute, value in config.items():
+ if attribute not in valid_attrs:
+ raise HTTPStatusError("",
+ request=Request("POST", ""),
+                                          response=Response(400, text=f"Attribute {attribute} is not supported"))
+ setattr(ProjectService._get_component(cluster, component), attribute, value)
+ return cluster
+
+ @staticmethod
+ def _pause_resume_cluster(cluster: Cluster, config) -> Cluster:
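+        # Toggle between AVAILABLE and PAUSED; any other transition or a non-boolean value yields a 400.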
+ if not isinstance(config, bool):
+ raise HTTPStatusError("",
+ request=Request("POST", ""),
+ response=Response(400, text="The paused config must be a boolean"))
+ current_status = cluster.status.cluster_status
+ if config and current_status == ClusterStatus.AVAILABLE:
+ cluster.status.cluster_status = ClusterStatus.PAUSED
+ elif not config and current_status == ClusterStatus.PAUSED:
+ cluster.status.cluster_status = ClusterStatus.AVAILABLE
+ else:
+ raise HTTPStatusError("", request=Request("POST", ""),
+ response=Response(400, text="The cluster cannot be paused or resumed"))
+ return cluster
+
+ def update_cluster(self, clusters: List[Cluster], project_id: str, cluster_id: str, body: dict) -> List[Cluster]:
+        cluster_to_update = self.get_cluster(clusters, project_id, cluster_id)
+ config = body.get("config", {})
+ components = config.get("components", {})
+        cluster_to_update = ProjectService._update_components(cluster_to_update, components)
+ is_paused_config = config.get("paused")
+ if is_paused_config is not None:
+            ProjectService._pause_resume_cluster(cluster_to_update, is_paused_config)
+ for index, cluster in enumerate(clusters):
+ if cluster.project_id == project_id and cluster.id == cluster_id:
+                clusters[index] = cluster_to_update
+ break
+ return clusters
diff --git a/poetry.lock b/poetry.lock
index 56c4027..aac658b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -33,6 +33,21 @@ files = [
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -72,6 +87,28 @@ files = [
[package.extras]
test = ["pytest (>=6)"]
+[[package]]
+name = "flask"
+version = "2.2.5"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"},
+ {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"},
+]
+
+[package.dependencies]
+click = ">=8.0"
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
+itsdangerous = ">=2.0"
+Jinja2 = ">=3.0"
+Werkzeug = ">=2.2.2"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
[[package]]
name = "h11"
version = "0.14.0"
@@ -172,6 +209,103 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
+[[package]]
+name = "itsdangerous"
+version = "2.1.2"
+description = "Safely pass data to untrusted environments and back."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
+ {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.3"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
+ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+]
+
[[package]]
name = "mysqlclient"
version = "2.1.1"
@@ -273,6 +407,23 @@ files = [
{file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
]
+[[package]]
+name = "werkzeug"
+version = "2.2.3"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"},
+ {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog"]
+
[[package]]
name = "zipp"
version = "3.15.0"
@@ -291,4 +442,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.7"
-content-hash = "2c30ca981cd781e352df4cf5d718ac8ab04560489201748914f2cfa1856aba6f"
+content-hash = "8ba13f05231baf53d8d7dd3862dd4030fd5904224fef3f9eadd2b7154dcaede9"
diff --git a/pyproject.toml b/pyproject.toml
index 5a8d88a..87725ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tidbcloudy"
-version = "1.0.10"
+version = "1.1.0"
description = "(Unofficial) Python SDK for TiDB Cloud"
readme = "README.md"
authors = ["Aolin "]
@@ -15,6 +15,7 @@ httpx = "^0.24.1"
[tool.poetry.dev-dependencies]
pytest = "^7.4"
+flask = "^2.2.5"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/test/.gitignore b/test/.gitignore
new file mode 100644
index 0000000..3ce1d24
--- /dev/null
+++ b/test/.gitignore
@@ -0,0 +1 @@
+.pytest_cache
diff --git a/test/test_server_config.py b/test/test_server_config.py
new file mode 100644
index 0000000..a9230b0
--- /dev/null
+++ b/test/test_server_config.py
@@ -0,0 +1,20 @@
+TEST_SERVER_CONFIG = {
+ "v1beta": "http://127.0.0.1:5000/api/v1beta/",
+ "billing": "http://127.0.0.1:5000/billing/v1beta1/"
+}
+
+TEST_CLUSTER_CONFIG = {
+ "cloud_provider": "AWS",
+ "cluster_type": "DEDICATED",
+ "config": {
+ "components": {"tidb": {"node_size": "4C16G", "node_quantity": 1},
+ "tikv": {"node_size": "4C16G", "node_quantity": 2, "storage_size_gib": 200},
+ "tiflash": {"node_size": "4C16G", "node_quantity": 3, "storage_size_gib": 500}},
+ "ip_access_list": [{"cidr": "0.0.0.0/0", "description": "test 0"},
+ {"cidr": "1.1.1.1/1", "description": "test 1"}],
+ "port": 4000,
+ "root_password": "root",
+ },
+ "name": "test",
+ "region": "us-west-1"
+}
diff --git a/test/test_tidbcloudy_base.py b/test/test_tidbcloudy_base.py
new file mode 100644
index 0000000..f8f7dc6
--- /dev/null
+++ b/test/test_tidbcloudy_base.py
@@ -0,0 +1,28 @@
+from test_server_config import TEST_CLUSTER_CONFIG
+from tidbcloudy.cluster import Cluster
+from tidbcloudy.context import Context
+
+
+class TestTiDBCloudyBase:
+ def test_from_to_object(self):
+ context = Context("", "", {})
+ cluster = Cluster.from_object(context, TEST_CLUSTER_CONFIG)
+ assert cluster.cloud_provider.value == "AWS"
+ assert cluster.cluster_type.value == "DEDICATED"
+ assert cluster.config.components.tidb.node_size == "4C16G"
+ assert cluster.config.components.tidb.node_quantity == 1
+ assert cluster.config.components.tikv.node_size == "4C16G"
+ assert cluster.config.components.tikv.node_quantity == 2
+ assert cluster.config.components.tikv.storage_size_gib == 200
+ assert cluster.config.components.tiflash.node_size == "4C16G"
+ assert cluster.config.components.tiflash.node_quantity == 3
+ assert cluster.config.components.tiflash.storage_size_gib == 500
+ assert cluster.config.ip_access_list[0].cidr == "0.0.0.0/0"
+ assert cluster.config.ip_access_list[0].description == "test 0"
+ assert cluster.config.ip_access_list[1].cidr == "1.1.1.1/1"
+ assert cluster.config.ip_access_list[1].description == "test 1"
+ assert cluster.config.port == 4000
+ assert cluster.config.root_password == "root"
+ assert cluster.name == "test"
+ assert cluster.region == "us-west-1"
+ assert cluster.to_object() == TEST_CLUSTER_CONFIG
diff --git a/test/test_tidbcloudy_project.py b/test/test_tidbcloudy_project.py
new file mode 100644
index 0000000..c76057e
--- /dev/null
+++ b/test/test_tidbcloudy_project.py
@@ -0,0 +1,245 @@
+import pytest
+
+import tidbcloudy
+from test_server_config import TEST_SERVER_CONFIG
+from tidbcloudy.cluster import Cluster
+from tidbcloudy.exception import TiDBCloudResponseException
+from tidbcloudy.specification import CreateClusterConfig, ProjectAWSCMEK, UpdateClusterConfig
+from tidbcloudy.util.page import Page
+from tidbcloudy.util.timestamp import timestamp_to_string
+
+api = tidbcloudy.TiDBCloud(public_key="", private_key="", server_config=TEST_SERVER_CONFIG)
+project = api.get_project(project_id="2", update_from_server=True)
+
+
+class TestAWSCMEK:
+ @staticmethod
+ def assert_awscmek_properties(awscmek: ProjectAWSCMEK):
+ assert isinstance(awscmek, ProjectAWSCMEK)
+ assert isinstance(awscmek.region, str)
+ assert isinstance(awscmek.kms_arn, str)
+
+ @staticmethod
+ def assert_awscmek_1(awscmek: ProjectAWSCMEK):
+ TestAWSCMEK.assert_awscmek_properties(awscmek)
+ assert awscmek.region == "us-east-1"
+ assert awscmek.kms_arn == "arn:aws:kms:us-east-1:123456789"
+
+ @staticmethod
+ def assert_awscmek_2(awscmek: ProjectAWSCMEK):
+ TestAWSCMEK.assert_awscmek_properties(awscmek)
+ assert awscmek.region == "us-west-2"
+ assert awscmek.kms_arn == "arn:aws:kms:us-west-2:123456789"
+
+ def test_list_aws_cmek(self):
+ cmeks = project.list_aws_cmek()
+ assert isinstance(cmeks, Page)
+ assert cmeks.page == 1
+ assert len(cmeks.items) == 0
+ assert cmeks.total == 0
+ assert cmeks.page_size == cmeks.total
+
+ def test_create_aws_cmek(self):
+ project.create_aws_cmek(
+ [("us-east-1", "arn:aws:kms:us-east-1:123456789"),
+ ("us-west-2", "arn:aws:kms:us-west-2:123456789")])
+ cmeks = project.list_aws_cmek()
+ assert isinstance(cmeks, Page)
+ assert cmeks.page == 1
+ assert len(cmeks.items) == 2
+ assert cmeks.total == 2
+ assert cmeks.page_size == cmeks.total
+ self.assert_awscmek_1(cmeks.items[0])
+ self.assert_awscmek_2(cmeks.items[1])
+
+ def test_iter_aws_cmek(self):
+ for cmek in project.iter_aws_cmek():
+ self.assert_awscmek_properties(cmek)
+ if cmek.region == "us-east-1":
+ self.assert_awscmek_1(cmek)
+ elif cmek.region == "us-west-2":
+ self.assert_awscmek_2(cmek)
+ else:
+ assert False
+
+
+class TestCluster:
+ @staticmethod
+ def assert_cluster_dedicated_properties(cluster: Cluster):
+ assert cluster.id == "2"
+ assert cluster.name == "Cluster1"
+ assert cluster.create_timestamp == 1656991448
+ assert cluster.config.port == 4000
+ assert cluster.config.components.tidb.node_size == "8C16G"
+ assert cluster.config.components.tidb.node_quantity == 2
+ assert cluster.config.components.tikv.node_size == "8C32G"
+ assert cluster.config.components.tikv.node_quantity == 3
+ assert cluster.config.components.tikv.storage_size_gib == 1024
+ assert cluster.status.tidb_version == "v7.1.0"
+ assert cluster.status.cluster_status.value == "AVAILABLE"
+ assert cluster.status.node_map.tidb[0].to_object() == {
+ "node_name": "tidb-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C16G",
+ "vcpu_num": 8,
+ "ram_bytes": "17179869184",
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ assert cluster.status.node_map.tiflash[0].to_object() == {
+ "node_name": "tiflash-0",
+ "availability_zone": "us-west-2a",
+ "node_size": "8C64G",
+ "vcpu_num": 8,
+ "ram_bytes": "68719476736",
+ "storage_size_gib": 1024,
+ "status": "NODE_STATUS_AVAILABLE"
+ }
+ assert cluster.status.connection_strings.default_user == "root"
+ assert cluster.status.connection_strings.standard.host == "tidb.test.us-east-1.shared.aws.tidbcloud.com"
+ assert cluster.status.connection_strings.standard.port == cluster.status.connection_strings.vpc_peering.port \
+ == 4000
+ assert cluster.status.connection_strings.vpc_peering.host \
+ == "private-tidb.test.us-east-1.shared.aws.tidbcloud.com"
+ assert repr(cluster) == f""
+
+ @staticmethod
+ def assert_cluster_developer_properties(cluster: Cluster):
+ assert cluster.id == "3456"
+ assert cluster.name == "serverless-0"
+ assert cluster.create_timestamp == 1606472018
+ assert cluster.config.port == cluster.status.connection_strings.standard.port \
+ == cluster.status.connection_strings.vpc_peering.port == 4000
+ assert cluster.config.components.tidb.node_size == cluster.config.components.tikv.node_size \
+ == cluster.config.components.tiflash.node_size == "Shared0"
+ assert cluster.config.components.tidb.node_quantity == cluster.config.components.tikv.node_quantity \
+ == cluster.config.components.tiflash.node_quantity == 1
+ assert cluster.config.components.tikv.storage_size_gib \
+ == cluster.config.components.tiflash.storage_size_gib == 0
+ assert cluster.status.tidb_version == "v7.1.0"
+ assert cluster.status.cluster_status.value == "AVAILABLE"
+ assert cluster.status.node_map.tidb == cluster.status.node_map.tikv == cluster.status.node_map.tiflash == []
+ assert cluster.status.connection_strings.default_user == "test.root"
+ assert cluster.status.connection_strings.standard.host == "gateway01.prod.aws.tidbcloud.com"
+ assert cluster.status.connection_strings.vpc_peering.host == "gateway01-privatelink.prod.aws.tidbcloud.com"
+
+ def test_iter_clusters(self):
+ for cluster in project.iter_clusters():
+ assert isinstance(cluster, Cluster)
+ if cluster.cluster_type.value == "DEDICATED":
+ TestCluster.assert_cluster_dedicated_properties(cluster)
+ elif cluster.cluster_type.value == "DEVELOPER":
+ TestCluster.assert_cluster_developer_properties(cluster)
+ else:
+ assert False
+
+ def test_list_clusters(self):
+ clusters = project.list_clusters()
+ assert isinstance(clusters, Page)
+ assert len(clusters.items) == clusters.total == 2
+ assert clusters.page == 1
+ assert clusters.page_size == 10
+ for cluster in clusters.items:
+ assert isinstance(cluster, Cluster)
+ assert cluster.project_id == "2"
+ if cluster.cluster_type.value == "DEDICATED":
+ TestCluster.assert_cluster_dedicated_properties(cluster)
+ elif cluster.cluster_type.value == "DEVELOPER":
+ TestCluster.assert_cluster_developer_properties(cluster)
+ else:
+ assert False
+
+ def test_get_cluster(self):
+ cluster = project.get_cluster(cluster_id="2")
+ assert isinstance(cluster, Cluster)
+ TestCluster.assert_cluster_dedicated_properties(cluster)
+
+ def test_create_cluster(self):
+ config = CreateClusterConfig()
+ config \
+ .set_name("test-serverless") \
+ .set_cluster_type("DEVELOPER") \
+ .set_cloud_provider("aws") \
+ .set_region("us-west-2") \
+ .set_root_password("password") \
+ .add_ip_access(cidr="0.0.0.0/0") \
+ .add_ip_access(cidr="1.1.1.1/1")
+ cluster = project.create_cluster(config=config)
+ assert isinstance(cluster, Cluster)
+ assert repr(cluster) == f""
+ cluster.wait_for_available(interval_sec=1)
+ assert cluster.status.cluster_status.value == "AVAILABLE"
+ assert cluster.name == "test-serverless"
+ assert cluster.cluster_type.value == "DEVELOPER"
+ assert cluster.cloud_provider.value == "AWS"
+ assert cluster.region == "us-west-2"
+ assert cluster.config.port == cluster.status.connection_strings.standard.port \
+ == cluster.status.connection_strings.vpc_peering.port == 4000
+ assert cluster.status.tidb_version == "v0.0.0"
+ assert repr(cluster) == f""
+ assert project.get_cluster(cluster_id=cluster.id).to_object() == cluster.to_object()
+
+ def test_delete_cluster(self):
+ delete_cluster_id = "3456"
+ init_total = project.list_clusters().total
+ project.delete_cluster(cluster_id=delete_cluster_id)
+ current_total = project.list_clusters().total
+ assert current_total == init_total - 1
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.get_cluster(cluster_id=delete_cluster_id)
+ assert exc_info.value.status == 400
+
+ def test_update_cluster_pause_resume(self):
+ cluster_id = "2"
+ pause_config = {"config": {"paused": True}}
+ resume_config = {"config": {"paused": False}}
+ project.update_cluster(cluster_id=cluster_id, config=pause_config)
+ assert project.get_cluster(cluster_id=cluster_id).status.cluster_status.value == "PAUSED"
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.update_cluster(cluster_id=cluster_id, config=pause_config)
+ assert exc_info.value.status == 400
+ project.update_cluster(cluster_id=cluster_id, config=resume_config)
+ assert project.get_cluster(cluster_id=cluster_id).status.cluster_status.value == "AVAILABLE"
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.update_cluster(cluster_id=cluster_id, config=resume_config)
+ assert exc_info.value.status == 400
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.update_cluster(cluster_id=cluster_id, config={"config": {"paused": "true"}})
+ assert exc_info.value.status == 400
+ cluster = project.get_cluster(cluster_id=cluster_id)
+ cluster.pause()
+ assert cluster.status.cluster_status.value == "PAUSED"
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ cluster.pause()
+ assert exc_info.value.status == 400
+ cluster.resume()
+ assert cluster.status.cluster_status.value == "AVAILABLE"
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ cluster.resume()
+ assert exc_info.value.status == 400
+
+ def test_update_cluster_config(self):
+ cluster_id = "2"
+ config = UpdateClusterConfig()
+ config.update_component("tidb", 6, "4C32G")
+ config.update_component("tikv", 9, "8C16G", 400)
+ config.update_component("tiflash", 12, "8C64G", 500)
+ project.update_cluster(cluster_id=cluster_id, config=config)
+ cluster = project.get_cluster(cluster_id=cluster_id)
+ assert cluster.config.components.tidb.node_quantity == 6
+ assert cluster.config.components.tidb.node_size == "4C32G"
+ assert cluster.config.components.tikv.node_quantity == 9
+ assert cluster.config.components.tikv.node_size == "8C16G"
+ assert cluster.config.components.tikv.storage_size_gib == 400
+ assert cluster.config.components.tiflash.node_quantity == 12
+ assert cluster.config.components.tiflash.node_size == "8C64G"
+ assert cluster.config.components.tiflash.storage_size_gib == 500
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.update_cluster(cluster_id=cluster_id,
+ config={"config": {"components": {"pd": {"node_quantity": 1}}}})
+ assert exc_info.value.status == 400
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ project.update_cluster(cluster_id=cluster_id,
+ config={"config": {"components": {"tidb": {"storage_size_gib": 100}}}})
+ assert exc_info.value.status == 400
diff --git a/test/test_tidbcloudy_specification.py b/test/test_tidbcloudy_specification.py
new file mode 100644
index 0000000..250ace4
--- /dev/null
+++ b/test/test_tidbcloudy_specification.py
@@ -0,0 +1,70 @@
+from test_server_config import TEST_CLUSTER_CONFIG
+from tidbcloudy.specification import CreateClusterConfig, UpdateClusterConfig
+
+
+class TestCreateClusterConfig:
+ def test_default(self):
+ cluster_config = CreateClusterConfig()
+ assert (cluster_config.to_object() == {
+ "cloud_provider": None,
+ "cluster_type": None,
+ "config": {
+ "components": {"tidb": None, "tiflash": None, "tikv": None},
+ "ip_access_list": [],
+ "port": None,
+ "root_password": None,
+ },
+ "name": "",
+ "region": None})
+
+ def test_set_value(self):
+ cluster_config = CreateClusterConfig()
+ cluster_config \
+ .set_name("test") \
+ .set_cluster_type("dEdicatEd") \
+ .set_cloud_provider("aWs") \
+ .set_region("us-west-1") \
+ .set_root_password("root") \
+ .set_port(4000) \
+ .set_component("tidb", "4C16G", 1) \
+ .set_component("tikv", "4C16G", 2, 200) \
+ .set_component("tiflash", "4C16G", 3, 500) \
+ .add_ip_access("0.0.0.0/0", "test 0") \
+ .add_ip_access("1.1.1.1/1", "test 1")
+ assert cluster_config.to_object() == TEST_CLUSTER_CONFIG
+
+
+class TestUpdateClusterConfig:
+ def test_default(self):
+ cluster_config = UpdateClusterConfig()
+ assert cluster_config.to_object() == {
+ "config": {
+ "components": {}
+ }
+ }
+
+ def test_update_component(self):
+ cluster_config = UpdateClusterConfig()
+ cluster_config.update_component("tidb", 2, "8C16G")
+ cluster_config.update_component("tikv", 1, "8C32G", 400)
+ cluster_config.update_component("tiflash", 3, "8C64G", 500)
+ assert cluster_config.to_object() == {
+ "config": {
+ "components": {
+ "tidb": {
+ "node_quantity": 2,
+ "node_size": "8C16G"
+ },
+ "tikv": {
+ "node_quantity": 1,
+ "node_size": "8C32G",
+ "storage_size_gib": 400
+ },
+ "tiflash": {
+ "node_quantity": 3,
+ "node_size": "8C64G",
+ "storage_size_gib": 500
+ }
+ }
+ }
+ }
diff --git a/test/test_tidbcloudy_tidbcloud.py b/test/test_tidbcloudy_tidbcloud.py
new file mode 100644
index 0000000..eb15c1d
--- /dev/null
+++ b/test/test_tidbcloudy_tidbcloud.py
@@ -0,0 +1,156 @@
+import pytest
+
+import tidbcloudy
+from test_server_config import TEST_SERVER_CONFIG
+from tidbcloudy.exception import TiDBCloudResponseException
+from tidbcloudy.project import Project
+from tidbcloudy.specification import BillingMonthSummary, CloudSpecification
+from tidbcloudy.util.page import Page
+from tidbcloudy.util.timestamp import timestamp_to_string
+
+api = tidbcloudy.TiDBCloud(public_key="", private_key="", server_config=TEST_SERVER_CONFIG)
+
+
+class TestProject:
+ project_init_num = 2
+ page = 1
+ page_size = 1
+
+ @staticmethod
+ def assert_project_properties(project: Project):
+ assert isinstance(project, Project)
+ assert isinstance(project.id, str)
+ assert project.id.isdigit() and int(project.id) > 0
+ assert isinstance(project.org_id, str)
+ assert isinstance(project.name, str)
+ assert isinstance(project.cluster_count, int)
+ assert isinstance(project.user_count, int)
+ assert isinstance(project.create_timestamp, int)
+ assert project.create_timestamp > 0 and len(str(project.create_timestamp)) == 10
+ assert isinstance(project.aws_cmek_enabled, bool)
+
+ @staticmethod
+ def assert_project_1(project: Project):
+ TestProject.assert_project_properties(project)
+ assert repr(project) \
+ == ""
+ assert project.id == "1"
+ assert project.org_id == "1"
+ assert project.name == "default_project"
+ assert project.cluster_count == 4
+ assert project.user_count == 10
+ assert project.create_timestamp == 1656991448
+ assert project.aws_cmek_enabled is False
+
+ def test_list_projects_init(self):
+ projects = api.list_projects(page=TestProject.page, page_size=TestProject.page_size)
+ assert isinstance(projects, Page)
+ assert projects.page == TestProject.page
+ assert projects.page_size == TestProject.page_size
+ assert projects.total == TestProject.project_init_num
+ assert len(projects.items) == TestProject.page * TestProject.page_size
+ for project in projects.items:
+ self.assert_project_1(project)
+
+ def test_create_project(self):
+ project = api.create_project(name="test_project", aws_cmek_enabled=True, update_from_server=True)
+ self.assert_project_properties(project)
+ assert repr(project) == \
+ (f"")
+ assert project.org_id == "1"
+ assert project.name == "test_project"
+ assert project.cluster_count == 0
+ assert project.user_count == 1
+ assert project.aws_cmek_enabled is True
+ current_projects = api.list_projects(page=TestProject.page, page_size=TestProject.page_size)
+ assert current_projects.total == TestProject.project_init_num + TestProject.page * TestProject.page_size
+
+ def test_get_project(self):
+ project_id = "1"
+ project = api.get_project(project_id=project_id, update_from_server=False)
+ assert repr(project) == ""
+ project = api.get_project(project_id=project_id, update_from_server=True)
+ self.assert_project_1(project)
+
+ def test_iter_projects(self):
+ for project in api.iter_projects():
+ self.assert_project_properties(project)
+
+
+class TestProviderRegions:
+ @staticmethod
+ def assert_provider_regions_dedicated(spec: CloudSpecification):
+ assert repr(spec) == ""
+
+ @staticmethod
+ def assert_provider_regions_developer(spec: CloudSpecification):
+ assert repr(spec) == ""
+
+ def test_list_provider_regions(self):
+ provider_regions = api.list_provider_regions()
+ assert len(provider_regions) == 2
+ for spec in provider_regions:
+ assert isinstance(spec, CloudSpecification)
+ if spec.cluster_type.value == "DEDICATED":
+ TestProviderRegions.assert_provider_regions_dedicated(spec)
+ elif spec.cluster_type.value == "DEVELOPER":
+ TestProviderRegions.assert_provider_regions_developer(spec)
+ else:
+ assert False
+
+
+class TestBilling:
+ @staticmethod
+ def assert_billing(billing: BillingMonthSummary):
+ assert isinstance(billing, BillingMonthSummary)
+ assert repr(billing) == ""
+ assert billing.overview.to_object() == {
+ "billedMonth": "2023-10",
+ "credits": "1.00",
+ "discounts": "2.00",
+ "runningTotal": "3.00",
+ "totalCost": "4.00"
+ }
+ assert billing.summaryByProject.otherCharges[0].to_object() == {
+ "chargeName": "Support Plan",
+ "credits": "0.10",
+ "discounts": "0.20",
+ "runningTotal": "0.30",
+ "totalCost": "0.40"
+ }
+ assert billing.summaryByProject.projects[0].to_object() == {
+ "credits": "3.00",
+ "discounts": "0.50",
+ "projectName": "prod-project",
+ "runningTotal": "1.00",
+ "totalCost": "4.00"
+ }
+ assert billing.summaryByService[0].to_object() == {
+ "credits": "2.00",
+ "discounts": "3.00",
+ "runningTotal": "5.00",
+ "serviceCosts": [
+ {}
+ ],
+ "serviceName": "TiDB Dedicated",
+ "totalCost": "4.00"
+ }
+
+ def test_get_monthly_bill_properties(self):
+ current_bill = api.get_monthly_bill(month="202310")
+ assert current_bill.overview.billedMonth == "2023-10"
+ TestBilling.assert_billing(current_bill)
+
+ def test_get_monthly_bill_1(self):
+ current_bill = api.get_monthly_bill(month="2023-10")
+ assert current_bill.overview.billedMonth == "2023-10"
+
+ def test_get_monthly_bill_2(self):
+ current_bill = api.get_monthly_bill(month="202309")
+ assert current_bill.overview.billedMonth == "2023-09"
+
+ def test_get_monthly_bill_exc(self):
+ with pytest.raises(TiDBCloudResponseException) as exc_info:
+ api.get_monthly_bill(month="202308")
+ assert exc_info.value.status == 400
diff --git a/test/test_tidbcloudy_util_timestamp.py b/test/test_tidbcloudy_util_timestamp.py
new file mode 100644
index 0000000..d19133c
--- /dev/null
+++ b/test/test_tidbcloudy_util_timestamp.py
@@ -0,0 +1,13 @@
+from datetime import datetime, timezone
+
+from tidbcloudy.util.timestamp import get_current_year_month, timestamp_to_string
+
+
+def test_timestamp_to_string():
+ assert timestamp_to_string(None) == ""
+ assert timestamp_to_string(0, timezone=timezone.utc) == "1970-01-01 00:00:00"
+
+
+def test_get_current_year_month():
+ current_date = datetime.now(tz=timezone.utc)
+    assert get_current_year_month(timezone=timezone.utc) == f"{current_date.year}-{current_date.month:02d}"
diff --git a/tidbcloudy/project.py b/tidbcloudy/project.py
index a986100..8eaa8fe 100644
--- a/tidbcloudy/project.py
+++ b/tidbcloudy/project.py
@@ -143,7 +143,7 @@ def iter_clusters(self, page_size: int = 10) -> Iterator[Cluster]:
yield cluster
page += 1
- def list_clusters(self, page: int = None, page_size: int = None) -> Page[Cluster]:
+ def list_clusters(self, page: int = 1, page_size: int = 10) -> Page[Cluster]:
"""
List all clusters in the project.
Args:
diff --git a/tidbcloudy/specification.py b/tidbcloudy/specification.py
index d92b300..3459e69 100644
--- a/tidbcloudy/specification.py
+++ b/tidbcloudy/specification.py
@@ -423,8 +423,8 @@ class BillingServiceCost(TiDBCloudyBase):
__slots__ = []
-class BillingMonthSummaryByService(TiDBCloudyBase):
- __slots__ = ["_serviceCosts", "_serviceName"]
+class BillingMonthSummaryByService(BillingBase):
+ __slots__ = ["_serviceCosts", "_serviceName"] + BillingBase.__slots__
serviceCosts: List[dict] = TiDBCloudyListField(BillingServiceCost)
serviceName: str = TiDBCloudyField(str)
@@ -437,7 +437,7 @@ class BillingMonthSummary(TiDBCloudyBase):
__slots__ = ["_overview", "_summaryByProject", "_summaryByService"]
overview: BillingMonthOverview = TiDBCloudyField(BillingMonthOverview)
summaryByProject: BillingMonthSummaryByProject = TiDBCloudyField(BillingMonthSummaryByProject)
- summaryByService: BillingMonthSummaryByService = TiDBCloudyListField(BillingMonthSummaryByService)
+ summaryByService: List[BillingMonthSummaryByService] = TiDBCloudyListField(BillingMonthSummaryByService)
def __repr__(self):
return "".format(self.overview.billedMonth)
diff --git a/tidbcloudy/util/timestamp.py b/tidbcloudy/util/timestamp.py
index ab29deb..7e0a756 100644
--- a/tidbcloudy/util/timestamp.py
+++ b/tidbcloudy/util/timestamp.py
@@ -1,26 +1,31 @@
-import datetime
+from datetime import datetime, tzinfo
+from typing import Union
-def timestamp_to_string(timestamp: int) -> str:
+def timestamp_to_string(timestamp: Union[int, None], timezone: tzinfo = None) -> str:
"""
Convert timestamp to datetime string.
Args:
- timestamp:
+ timestamp: the timestamp to convert.
+ timezone: the timezone to use.
Returns:
- the datetime string.
+ the datetime string in format of YYYY-MM-DD HH:MM:SS.
"""
if timestamp is None:
return ""
- return datetime.datetime.fromtimestamp(timestamp).isoformat()
+ return datetime.fromtimestamp(timestamp, tz=timezone).strftime("%Y-%m-%d %H:%M:%S")
-def get_current_year_month() -> str:
+
+def get_current_year_month(timezone: tzinfo = None) -> str:
"""
Get current year and month.
+ Args:
+ timezone: the timezone to use.
Returns:
the year and month string in format of YYYY-MM.
"""
- return datetime.datetime.now().strftime("%Y-%m")
+ return datetime.now(tz=timezone).strftime("%Y-%m")