@@ -39,17 +39,13 @@ def complete(
         if "claude-3" not in self.model:
             raise LLMConfigurationError(f"Only claude-3 models are supported as of now, got {self.model}")
 
-        # extract system prompt from messages
-        system_prompt = ""
-        if len(messages) > 1:
-            if messages[0].role.lower() == "user" and messages[1].role.lower() == "user":
-                system_prompt = messages[0].content
-                messages = messages[1:]
-
         # Create the messages format needed for bedrock specifically
         input_msg_prompt = []
+        system_prompts = []
         for msg in messages:
-            if msg.role.lower() == "assistant":
+            if msg.role.lower() == "system":
+                system_prompts.append(msg.content)
+            elif msg.role.lower() == "assistant":
                 input_msg_prompt.append({"role": "assistant", "content": [{"type": "text", "text": msg.content}]})
             else:
                 input_msg_prompt.append({"role": "user", "content": [{"type": "text", "text": msg.content}]})
@@ -60,7 +56,7 @@ def complete(
                 "anthropic_version": "bedrock-2023-05-31",
                 "max_tokens": max_tokens,
                 "temperature": temperature,
-                "system": system_prompt,
+                "system": "\n".join(system_prompts),
                 "messages": input_msg_prompt,
             }
         )
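
For illustration, here is a minimal standalone sketch of what the rewritten loop does: system messages are collected into the top-level "system" field (joined with newlines) instead of being peeled off the front of the message list. ChatMessage and build_claude_request are hypothetical stand-ins for the project's own types; only the request-body structure is taken from the diff above.

import json
from dataclasses import dataclass


@dataclass
class ChatMessage:
    role: str
    content: str


def build_claude_request(messages, max_tokens=512, temperature=0.0):
    input_msg_prompt = []
    system_prompts = []
    for msg in messages:
        if msg.role.lower() == "system":
            # System messages go into the top-level "system" field,
            # not into the "messages" list.
            system_prompts.append(msg.content)
        elif msg.role.lower() == "assistant":
            input_msg_prompt.append(
                {"role": "assistant", "content": [{"type": "text", "text": msg.content}]}
            )
        else:
            input_msg_prompt.append(
                {"role": "user", "content": [{"type": "text", "text": msg.content}]}
            )
    return json.dumps(
        {
            "anthropic_version": "bedrock-2023-05-31",
            "max_tokens": max_tokens,
            "temperature": temperature,
            "system": "\n".join(system_prompts),
            "messages": input_msg_prompt,
        }
    )


# Example: a system message plus a user message.
body = build_claude_request(
    [ChatMessage("system", "You are concise."), ChatMessage("user", "Hello!")]
)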