-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathhf_agents_components.py
165 lines (129 loc) · 5.34 KB
/
hf_agents_components.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
from xai_components.base import InArg, OutArg, InCompArg,secret, BaseComponent, Component, xai_component
from transformers import Tool, AutoModelForCausalLM, AutoTokenizer, pipeline
from io import BytesIO
from PIL import Image
import torch
import os
# NOTE(review): this module-level hf_token is never read — HfAgentInit.execute
# computes its own local value from the same env var; confirm and consider removing.
hf_token = os.getenv("HF_TOKEN")
@xai_component
class HfAgentMakeTool(Component):
    """Create a custom Hugging Face ``Tool`` whose ``forward`` answers a text
    prompt by calling the pipeline stored in ``ctx['hf_agent']``.

    ##### inPorts:
    - name: The name of the custom tool.
    - description: A description of what the tool does.
    - input_ref: Reference to the input data required by the tool.

    ##### outPorts:
    - tool_ref: The created custom tool object.
    - output_ref: Reference to the output of the tool.
    """
    # NOTE(review): run_tool, input_ref and output_ref are declared but never
    # touched inside execute() — confirm whether other components wire into them.
    run_tool: BaseComponent
    name: InCompArg[str]
    description: InCompArg[str]
    input_ref: InArg[str]
    tool_ref: OutArg[Tool]
    output_ref: OutArg[str]

    def execute(self, ctx) -> None:
        # Capture the component instance: a class body has its own scope and
        # cannot see `self` from the enclosing method directly.
        other_self = self

        class CustomTool(Tool):
            # Tool metadata is read once from the input ports at class-creation
            # time, i.e. when execute() runs.
            name = other_self.name.value
            description = other_self.description.value
            inputs = {"text": {"type": "string", "description": "The input question to answer."}}
            output_type = "string"

            def forward(self, text):
                # Delegate generation to the pipeline registered by HfAgentInit;
                # `ctx` is captured by closure, so the agent must exist by the
                # time the tool is invoked, not when it is built.
                response = ctx['hf_agent'](
                    text,
                    max_length=50,
                    temperature=0.5,
                    top_p=0.9,
                    truncation=True
                )
                # text-generation pipelines return a list of dicts containing
                # 'generated_text'; any other shape is reported as a failure.
                if isinstance(response, list) and len(response) > 0 and 'generated_text' in response[0]:
                    return response[0]['generated_text']
                else:
                    return "No valid response received."

        self.tool_ref.value = CustomTool()
@xai_component
class HfAgentInit(Component):
    """Initialize a Hugging Face text-generation pipeline and store it in
    ``ctx['hf_agent']`` for downstream components (HfAgentMakeTool, HfAgentRun).

    ##### inPorts:
    - agent_type: The model checkpoint to load (defaults to "gpt2").
    - tools: A single Tool or a list of Tools to assist the language model.
      May be left unconnected.
    - token: The Hugging Face API token (secret).
    - from_env: Whether to retrieve the token from the environment variable `HF_TOKEN`.
    """
    agent_type: InCompArg[str]
    tools: InArg[Tool]
    token: InArg[secret]
    from_env: InArg[bool]

    def execute(self, ctx) -> None:
        # Token either comes from the environment or from the secret port.
        hf_token = os.getenv("HF_TOKEN") if self.from_env.value else self.token.value

        # Normalize the tools port: accept None (port unconnected), a single
        # Tool, or a list of Tools. Previously an unconnected port produced
        # [None] and raised a confusing ValueError.
        if self.tools.value is None:
            tools = []
        elif isinstance(self.tools.value, list):
            tools = self.tools.value
        else:
            tools = [self.tools.value]
        if not all(isinstance(tool, Tool) for tool in tools):
            raise ValueError("All items in tools must be instances of Tool.")

        model_name = self.agent_type.value or "gpt2"
        tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
        model = AutoModelForCausalLM.from_pretrained(model_name, token=hf_token)
        llm_engine = pipeline(
            "text-generation", model=model, tokenizer=tokenizer,
            # First CUDA device when available, otherwise CPU (-1).
            device=0 if torch.cuda.is_available() else -1,
            # GPT-2 style models have no pad token; reuse EOS to silence warnings.
            pad_token_id=tokenizer.eos_token_id
        )
        ctx['hf_agent'] = llm_engine
@xai_component
class HfAgentRun(Component):
    """Run a prompt through the initialized Hugging Face agent and retrieve a response.

    ##### inPorts:
    - prompt: The text prompt to pass to the agent.
    - document: Optional document input for context. Raw bytes are decoded
      into a PIL Image before generation.

    ##### outPorts:
    - response_text: The generated response text.
    - response_file: Path to a saved response file, if applicable.
    """
    # NOTE(review): response_file is never assigned in execute() — confirm
    # whether any downstream component expects it.
    prompt: InCompArg[str]
    document: InArg[any]
    response_text: OutArg[str]
    response_file: OutArg[str]

    def execute(self, ctx) -> None:
        agent = ctx['hf_agent']
        prompt_text = f" '{self.prompt.value}'"

        # If a document arrived as raw bytes, decode it into a PIL Image and
        # store it back on the port. NOTE(review): the image is never passed
        # to the pipeline below — a plain text-generation pipeline cannot
        # consume it; confirm the intended use of this port.
        if self.document.value and isinstance(self.document.value, bytes):
            self.document.value = Image.open(BytesIO(self.document.value))

        # Single generation path: the original code duplicated this call and
        # the response handling verbatim in both document/no-document branches.
        ret = agent(
            prompt_text,
            max_length=50,
            temperature=0.7,
            top_p=0.9,
            truncation=True
        )
        # text-generation pipelines return a list of dicts with 'generated_text'.
        if isinstance(ret, list) and len(ret) > 0 and 'generated_text' in ret[0]:
            self.response_text.value = ret[0]['generated_text']
            print("Response Text:", self.response_text.value)
        else:
            print("No valid response received.")
@xai_component
class HfReadImage(Component):
    """Load an image from disk and expose it as a PIL Image object.

    ##### inPorts:
    - file_path: The path to the image file.

    ##### outPorts:
    - out_image: The loaded PIL Image object.
    """
    file_path: InCompArg[str]
    out_image: OutArg[Image.Image]

    def execute(self, ctx) -> None:
        # Image.open defers decoding until the pixel data is first accessed.
        source_path = self.file_path.value
        self.out_image.value = Image.open(source_path)