Skip to content

Commit fac1656

Browse files
authored
Merge pull request #1937 from Giskard-AI/fix/bedrock_client
Fixed system prompt extraction inside the Bedrock client
2 parents c4cad71 + 6011d1f commit fac1656

File tree

1 file changed

+5
-9
lines changed

1 file changed

+5
-9
lines changed

giskard/llm/client/bedrock.py

+5-9
Original file line numberDiff line numberDiff line change
@@ -39,17 +39,13 @@ def complete(
3939
if "claude-3" not in self.model:
4040
raise LLMConfigurationError(f"Only claude-3 models are supported as of now, got {self.model}")
4141

42-
# extract system prompt from messages
43-
system_prompt = ""
44-
if len(messages) > 1:
45-
if messages[0].role.lower() == "user" and messages[1].role.lower() == "user":
46-
system_prompt = messages[0].content
47-
messages = messages[1:]
48-
4942
# Create the messages format needed for bedrock specifically
5043
input_msg_prompt = []
44+
system_prompts = []
5145
for msg in messages:
52-
if msg.role.lower() == "assistant":
46+
if msg.role.lower() == "system":
47+
system_prompts.append(msg.content)
48+
elif msg.role.lower() == "assistant":
5349
input_msg_prompt.append({"role": "assistant", "content": [{"type": "text", "text": msg.content}]})
5450
else:
5551
input_msg_prompt.append({"role": "user", "content": [{"type": "text", "text": msg.content}]})
@@ -60,7 +56,7 @@ def complete(
6056
"anthropic_version": "bedrock-2023-05-31",
6157
"max_tokens": max_tokens,
6258
"temperature": temperature,
63-
"system": system_prompt,
59+
"system": "\n".join(system_prompts),
6460
"messages": input_msg_prompt,
6561
}
6662
)

0 commit comments

Comments (0)