Skip to content

Commit

Permalink
Merge pull request #63 from ExpediaGroup/feature/add_elastic_search_online_store
Browse files Browse the repository at this point in the history

Add elasticsearch online store - update method
  • Loading branch information
piket authored Oct 24, 2023
2 parents 8597890 + 53748e5 commit 1ddee5f
Show file tree
Hide file tree
Showing 7 changed files with 548 additions and 0 deletions.
170 changes: 170 additions & 0 deletions sdk/python/feast/expediagroup/vectordb/elasticsearch_online_store.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
import json
import logging
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple

from bidict import bidict
from elasticsearch import Elasticsearch
from pydantic.typing import Literal

from feast import Entity, FeatureView, RepoConfig
from feast.infra.online_stores.online_store import OnlineStore
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.repo_config import FeastConfigBaseModel
from feast.types import (
Bool,
Bytes,
ComplexFeastType,
FeastType,
Float32,
Float64,
Int32,
Int64,
String,
UnixTimestamp,
)

logger = logging.getLogger(__name__)

# Two-way mapping between Feast primitive types and Elasticsearch field types.
# A bidict is used so the Feast -> ES direction serves index creation
# (_get_data_type) while the inverse lookup remains available for a future
# read path that must translate ES documents back into Feast values.
TYPE_MAPPING = bidict(
    {
        Bytes: "binary",
        Int32: "integer",
        Int64: "long",
        Float32: "float",
        Float64: "double",
        Bool: "boolean",
        String: "text",
        UnixTimestamp: "date_nanos",
    }
)


class ElasticsearchOnlineStoreConfig(FeastConfigBaseModel):
    """Online store config for the Elasticsearch online store.

    One of two authentication mechanisms is expected: a bearer ``token``,
    or a ``username``/``password`` pair. The connection manager uses the
    token whenever it is non-empty and falls back to basic auth otherwise,
    so the unused credentials may be left blank.
    """

    type: Literal["elasticsearch"] = "elasticsearch"
    """Online store type selector"""

    endpoint: str
    """ the http endpoint URL """

    username: str = ""
    """ username to connect to Elasticsearch; ignored when a token is provided """

    password: str = ""
    """ password to connect to Elasticsearch; ignored when a token is provided """

    token: str = ""
    """ bearer token for authentication; takes precedence over basic auth """


class ElasticsearchConnectionManager:
    """Context manager owning the lifecycle of an Elasticsearch client.

    Accepts either a full ``RepoConfig`` (in which case its nested
    ``online_store`` section is used) or an
    ``ElasticsearchOnlineStoreConfig`` directly. On enter it builds an
    :class:`Elasticsearch` client authenticated with the configured bearer
    token when one is set, otherwise with basic auth; on exit it closes
    the client's transport.
    """

    def __init__(self, online_config: RepoConfig):
        # Callers in this module pass the RepoConfig itself, but the
        # attributes read in __enter__ (.endpoint, .token, ...) live on the
        # online_store section — unwrap so both call styles work.
        self.online_config = getattr(online_config, "online_store", online_config)

    def __enter__(self):
        # Connecting to Elasticsearch
        logger.info(
            f"Connecting to Elasticsearch with endpoint {self.online_config.endpoint}"
        )
        if len(self.online_config.token) > 0:
            # Bearer token takes precedence over basic auth.
            self.client = Elasticsearch(
                self.online_config.endpoint, bearer_auth=self.online_config.token
            )
        else:
            self.client = Elasticsearch(
                self.online_config.endpoint,
                basic_auth=(self.online_config.username, self.online_config.password),
            )
        return self.client

    def __exit__(self, exc_type, exc_value, traceback):
        # Disconnecting from Elasticsearch
        logger.info("Closing the connection to Elasticsearch")
        self.client.transport.close()


class ElasticsearchOnlineStore(OnlineStore):
    """Feast online store backed by Elasticsearch.

    Each feature view maps to one Elasticsearch index named after the view.
    Features tagged with ``index_type`` are stored as ``dense_vector``
    fields for similarity search; all other features use the scalar type
    mapping in ``TYPE_MAPPING``. The read and write paths are not
    implemented yet — only ``update`` (index reconciliation) is functional.
    """

    def online_write_batch(
        self,
        config: RepoConfig,
        table: FeatureView,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
    ) -> None:
        """Write a batch of feature rows.

        Not implemented yet: currently only opens (and closes) a
        connection without writing anything.
        """
        with ElasticsearchConnectionManager(config):
            pass

    def online_read(
        self,
        config: RepoConfig,
        table: FeatureView,
        entity_keys: List[EntityKeyProto],
        requested_features: Optional[List[str]] = None,
    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
        """Read features for the given entity keys.

        Not implemented yet; returns ``None`` (implicitly) for now.
        """
        pass

    def update(
        self,
        config: RepoConfig,
        tables_to_delete: Sequence[FeatureView],
        tables_to_keep: Sequence[FeatureView],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
        """Reconcile Elasticsearch indices with the feature registry.

        Drops the index of every feature view scheduled for deletion and
        creates a missing index for every retained view. Indices that
        already exist are left untouched (no mapping migration).
        """
        with ElasticsearchConnectionManager(config) as es:
            for fv in tables_to_delete:
                if es.indices.exists(index=fv.name).body:
                    es.indices.delete(index=fv.name)
            for fv in tables_to_keep:
                if not es.indices.exists(index=fv.name).body:
                    self._create_index(es, fv)

    def teardown(
        self,
        config: RepoConfig,
        tables: Sequence[FeatureView],
        entities: Sequence[Entity],
    ):
        """Tear down all state for the given feature views. Not implemented yet."""
        pass

    def _create_index(self, es, fv):
        """Create the Elasticsearch index for *fv*, deriving its mapping
        from the feature view schema.

        Features whose tags include ``index_type`` become ``dense_vector``
        fields, with ``dimensions``/``metric_type``/``index_params`` read
        from the tags. All other features use the scalar mapping; join
        keys that would map to ``text`` are stored as ``keyword`` so exact
        lookups work, and join keys are always indexed.
        """
        index_mapping: Dict[str, Any] = {"properties": {}}
        for feature in fv.schema:
            is_primary = feature.name in fv.join_keys
            if "index_type" in feature.tags:
                # Vector feature: dims default to 0 when untagged —
                # presumably callers always tag real dimensions; verify.
                dimensions = int(feature.tags.get("dimensions", "0"))
                metric_type = feature.tags.get("metric_type", "l2_norm")
                index_mapping["properties"][feature.name] = {
                    "type": "dense_vector",
                    "dims": dimensions,
                    "index": True,
                    "similarity": metric_type,
                }
                # Optional ANN tuning parameters, JSON-encoded in the tag.
                index_params = json.loads(feature.tags.get("index_params", "{}"))
                if index_params:
                    index_params["type"] = feature.tags.get(
                        "index_type", "hnsw"
                    ).lower()
                    index_mapping["properties"][feature.name][
                        "index_options"
                    ] = index_params
            else:
                t = self._get_data_type(feature.dtype)
                # "text" fields are analyzed; join keys need exact match.
                t = "keyword" if is_primary and t == "text" else t
                index_mapping["properties"][feature.name] = {"type": t}
                if is_primary:
                    index_mapping["properties"][feature.name]["index"] = True
        es.indices.create(index=fv.name, mappings=index_mapping)

    def _get_data_type(self, t: FeastType) -> str:
        """Map a Feast type to an Elasticsearch field type, defaulting to
        ``text`` for complex or unknown types."""
        if isinstance(t, ComplexFeastType):
            return "text"
        return TYPE_MAPPING.get(t, "text")
4 changes: 4 additions & 0 deletions sdk/python/requirements/py3.10-ci-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,10 @@ docker==6.1.3
# testcontainers
docutils==0.19
# via sphinx
elastic-transport==8.4.1
# via elasticsearch
elasticsearch==8.8.0
# via eg-feast (setup.py)
entrypoints==0.4
# via altair
environs==9.5.0
Expand Down
4 changes: 4 additions & 0 deletions sdk/python/requirements/py3.8-ci-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,10 @@ docker==6.1.3
# testcontainers
docutils==0.19
# via sphinx
elastic-transport==8.4.1
# via elasticsearch
elasticsearch==8.8.0
# via eg-feast (setup.py)
entrypoints==0.4
# via altair
exceptiongroup==1.1.1
Expand Down
4 changes: 4 additions & 0 deletions sdk/python/requirements/py3.9-ci-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,10 @@ docker==6.1.3
# testcontainers
docutils==0.19
# via sphinx
elastic-transport==8.4.1
# via elasticsearch
elasticsearch==8.8.0
# via eg-feast (setup.py)
entrypoints==0.4
# via altair
environs==9.5.0
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import logging

from testcontainers.elasticsearch import ElasticSearchContainer

from tests.integration.feature_repos.universal.online_store_creator import (
OnlineStoreCreator,
)

# Configure logging
# NOTE(review): logging.basicConfig configures the ROOT logger as an
# import-time side effect; acceptable in test tooling, but avoid this
# pattern in library modules.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class ElasticsearchOnlineCreator(OnlineStoreCreator):
    """Spins up a disposable Elasticsearch container for integration tests."""

    def __init__(self, project_name: str, es_port: int):
        super().__init__(project_name)
        # Remember the container-internal port so start-up and port lookup
        # agree on the same value.
        self.es_port = es_port
        self.elasticsearch_container = ElasticSearchContainer(
            image="docker.elastic.co/elasticsearch/elasticsearch:8.8.2",
            port_to_expose=es_port,
        )

    def create_online_store(self):
        """Start the container and return connection details.

        Credentials are blank — the test image runs with security
        disabled (presumably; verify against the container config).
        """
        # Start the container
        self.elasticsearch_container.start()
        elasticsearch_host = self.elasticsearch_container.get_container_host_ip()
        # Fix: resolve the host-mapped port for the port we actually
        # exposed, rather than hard-coding 9200 (the two only coincide
        # when es_port == 9200).
        elasticsearch_http_port = self.elasticsearch_container.get_exposed_port(
            self.es_port
        )
        return {
            "host": elasticsearch_host,
            "port": elasticsearch_http_port,
            "username": "",
            "password": "",
            "token": "",
        }

    def teardown(self):
        """Stop and remove the Elasticsearch container."""
        self.elasticsearch_container.stop()
Loading

0 comments on commit 1ddee5f

Please sign in to comment.