Merge pull request #973 from superagent-ai/feat/native-function-calling
Native Function Calling
elisalimli authored Apr 29, 2024
2 parents 3ee2776 + a0f4560 commit 50b96de
Showing 24 changed files with 469 additions and 96 deletions.
33 changes: 31 additions & 2 deletions fern/apis/prod/openapi/openapi.yaml
@@ -2,7 +2,7 @@ openapi: 3.0.2
 info:
   title: Superagent
   description: 🥷 Run AI-agents with an API
-  version: 0.2.29
+  version: 0.2.32
 servers:
   - url: https://api.beta.superagent.sh
 paths:
@@ -195,8 +195,8 @@ paths:
                 $ref: '#/components/schemas/HTTPValidationError'
       security:
         - HTTPBearer: []
-      x-fern-sdk-group-name: agent
       x-fern-sdk-method-name: invoke
+      x-fern-sdk-group-name: agent
   /api/v1/agents/{agent_id}/llms:
     post:
       tags:
@@ -1507,6 +1507,33 @@ paths:
                 $ref: '#/components/schemas/HTTPValidationError'
       security:
         - HTTPBearer: []
+    delete:
+      tags:
+        - Vector Database
+      summary: Delete
+      description: Delete a Vector Database
+      operationId: delete_api_v1_vector_dbs__vector_db_id__delete
+      parameters:
+        - required: true
+          schema:
+            title: Vector Db Id
+            type: string
+          name: vector_db_id
+          in: path
+      responses:
+        '200':
+          description: Successful Response
+          content:
+            application/json:
+              schema: {}
+        '422':
+          description: Validation Error
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPValidationError'
+      security:
+        - HTTPBearer: []
     patch:
       tags:
         - Vector Database
@@ -1783,6 +1810,8 @@ components:
         - TOGETHER_AI
         - ANTHROPIC
         - BEDROCK
+        - GROQ
+        - MISTRAL
       type: string
       description: An enumeration.
     OpenAiAssistantParameters:
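The spec changes above add a DELETE operation for vector databases and extend the provider enumeration with GROQ and MISTRAL. Below is a minimal sketch of calling the new operation with plain `requests`; the path is inferred from the operationId shown in the diff (the hunk does not display the path key itself), and the API key and vector database id are placeholders. The base URL comes from the spec's `servers` entry and the bearer header from its `HTTPBearer` security scheme.

```python
import requests

# Sketch only. The path below is inferred from the operationId
# "delete_api_v1_vector_dbs__vector_db_id__delete"; the API key and
# vector_db_id values are placeholders.
BASE_URL = "https://api.beta.superagent.sh"  # from the spec's `servers` entry
API_KEY = "YOUR_SUPERAGENT_API_KEY"
VECTOR_DB_ID = "your-vector-db-id"

response = requests.delete(
    f"{BASE_URL}/api/v1/vector-dbs/{VECTOR_DB_ID}",
    headers={"Authorization": f"Bearer {API_KEY}"},  # HTTPBearer security
    timeout=30,
)
response.raise_for_status()  # 200 on success, 422 on validation errors
print(response.json())
```
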
52 changes: 46 additions & 6 deletions libs/superagent/app/agents/base.py
@@ -6,7 +6,7 @@
 
 from app.models.request import LLMParams as LLMParamsRequest
 from app.utils.callbacks import CustomAsyncIteratorCallbackHandler
-from prisma.enums import AgentType
+from prisma.enums import AgentType, LLMProvider
 from prisma.models import LLM, Agent
 
 
@@ -21,9 +21,21 @@ class LLMParams(BaseModel):
 class LLMData(BaseModel):
     llm: LLM
     params: LLMParams
+    model: str
 
 
 class AgentBase(ABC):
+    _input: str
+    _messages: list = []
+    prompt: Any
+    tools: Any
+    session_id: str
+    enable_streaming: bool
+    output_schema: str
+    callbacks: List[CustomAsyncIteratorCallbackHandler]
+    agent_data: Agent
+    llm_data: LLMData
+
     def __init__(
         self,
         session_id: str,
@@ -40,10 +52,6 @@ def __init__(
         self.llm_data = llm_data
         self.agent_data = agent_data
 
-    _input: str
-    prompt: Any
-    tools: Any
-
     @property
     def input(self):
         return self._input
@@ -52,6 +60,14 @@ def input(self):
     def input(self, value: str):
         self._input = value
 
+    @property
+    def messages(self):
+        return self._messages
+
+    @messages.setter
+    def messages(self, value: list):
+        self._messages = value
+
     @property
     @abstractmethod
     def prompt(self) -> Any:
@@ -95,7 +111,31 @@ def llm_data(self):
             **(params),
         }
 
-        return LLMData(llm=llm, params=LLMParams.parse_obj(options))
+        params = LLMParams(
+            temperature=options.get("temperature"),
+            max_tokens=options.get("max_tokens"),
+            aws_access_key_id=(
+                options.get("aws_access_key_id")
+                if llm.provider == LLMProvider.BEDROCK
+                else None
+            ),
+            aws_secret_access_key=(
+                options.get("aws_secret_access_key")
+                if llm.provider == LLMProvider.BEDROCK
+                else None
+            ),
+            aws_region_name=(
+                options.get("aws_region_name")
+                if llm.provider == LLMProvider.BEDROCK
+                else None
+            ),
+        )
+
+        return LLMData(
+            llm=llm,
+            params=LLMParams.parse_obj(options),
+            model=self.agent_data.llmModel or self.agent_data.metadata.get("model"),
+        )
 
     async def get_agent(self):
         if self.agent_data.type == AgentType.OPENAI_ASSISTANT:
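The reworked `llm_data` property now builds `LLMParams` with the AWS fields populated only for Bedrock and records the resolved model on the new `LLMData.model` field, falling back to the agent's metadata when `llmModel` is unset. The following is a simplified sketch of that logic, with hypothetical values standing in for the Prisma `Agent` and `LLM` records:

```python
from typing import Optional

BEDROCK = "BEDROCK"  # stands in for prisma.enums.LLMProvider.BEDROCK


def resolve_model(llm_model: Optional[str], metadata: dict) -> Optional[str]:
    # Mirrors LLMData.model above: prefer the llmModel column, then metadata["model"].
    return llm_model or metadata.get("model")


def aws_options(provider: str, options: dict) -> dict:
    # AWS credentials are forwarded only when the provider is Bedrock.
    keys = ("aws_access_key_id", "aws_secret_access_key", "aws_region_name")
    return {k: (options.get(k) if provider == BEDROCK else None) for k in keys}


# Hypothetical usage (model identifier is a made-up example):
print(resolve_model(None, {"model": "GPT_4_0613"}))             # -> "GPT_4_0613"
print(aws_options("OPENAI", {"aws_region_name": "us-east-1"}))  # all values None
```
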
2 changes: 1 addition & 1 deletion libs/superagent/app/agents/langchain.py
@@ -46,7 +46,7 @@ def _get_llm(self):
 
         if llm_data.llm.provider == LLMProvider.OPENAI:
             return ChatOpenAI(
-                model=LLM_MAPPING[self.agent_data.llmModel],
+                model=LLM_MAPPING[self.llm_data.model],
                 openai_api_key=llm_data.llm.apiKey,
                 streaming=self.enable_streaming,
                 callbacks=self.callbacks,
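With the model identifier now carried on `LLMData`, the LangChain adapter looks it up via `self.llm_data.model` instead of reading `agent_data.llmModel` directly, so agents whose model is stored only in metadata still resolve to a concrete OpenAI model name. A hedged illustration of the lookup; the `LLM_MAPPING` entry below is a made-up stand-in, not the repository's actual table:

```python
# Illustrative only: LLM_MAPPING exists in the Superagent codebase, but this
# entry is a hypothetical stand-in showing the shape of the lookup.
LLM_MAPPING = {"GPT_3_5_TURBO_16K_0613": "gpt-3.5-turbo-16k-0613"}

# Before this change the key came from agent_data.llmModel, which can be None
# for agents configured through metadata; LLMData.model now always carries a key.
llm_model_key = "GPT_3_5_TURBO_16K_0613"
print(LLM_MAPPING[llm_model_key])  # -> "gpt-3.5-turbo-16k-0613"
```
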