Skip to content

Commit 750db5c

Browse files
authored
Merge pull request #2042 from Giskard-AI/feature/gsk-3835-update-setting-up-llm-client-doc-to-include-embedding
[GSK-3835] Update setting up LLM doc
2 parents e3d3548 + bb45c5b commit 750db5c

File tree

3 files changed

+185
-65
lines changed

3 files changed

+185
-65
lines changed

docs/open_source/scan/scan_llm/index.md

+60-20
Original file line numberDiff line numberDiff line change
@@ -55,15 +55,19 @@ like this:
5555
::::::{tab-item} OpenAI
5656

5757
```python
58-
import giskard
5958
import os
59+
import giskard
6060
from giskard.llm.client.openai import OpenAIClient
6161

62+
# Set the OpenAI API key
6263
os.environ["OPENAI_API_KEY"] = "sk-…"
6364

65+
# Create a giskard OpenAI client
66+
openai_client = OpenAIClient(model="gpt-4o")
67+
68+
# Set the default client
6469
giskard.llm.set_llm_api("openai")
65-
oc = OpenAIClient(model="gpt-4-turbo-preview")
66-
giskard.llm.set_default_client(oc)
70+
giskard.llm.set_default_client(openai_client)
6771
```
6872

6973
::::::
@@ -73,30 +77,38 @@ Require `openai>=1.0.0`
7377

7478
```python
7579
import os
76-
from giskard.llm import set_llm_model
77-
from giskard.llm.embeddings.openai import set_embedding_model
80+
import giskard
7881

82+
# Set the Azure OpenAI API key and endpoint
7983
os.environ['AZURE_OPENAI_API_KEY'] = '...'
8084
os.environ['AZURE_OPENAI_ENDPOINT'] = 'https://xxx.openai.azure.com'
8185
os.environ['OPENAI_API_VERSION'] = '2023-07-01-preview'
8286

8387
# You'll need to provide the name of the model that you've deployed
8488
# Beware, the model provided must be capable of using function calls
85-
set_llm_model('my-gpt-4-model')
86-
set_embedding_model('my-embedding-model') # Optional
89+
giskard.llm.set_llm_model('my-gpt-4-model')
90+
giskard.llm.embeddings.set_embedding_model('my-embedding-model')
8791
```
8892

8993
::::::
9094
::::::{tab-item} Mistral
9195

9296
```python
9397
import os
98+
import giskard
9499
from giskard.llm.client.mistral import MistralClient
95100

96-
os.environ["MISTRAL_API_KEY"] = "sk-…"
101+
# Set the Mistral API key
102+
os.environ["MISTRAL_API_KEY"] = ""
97103

98-
mc = MistralClient()
99-
giskard.llm.set_default_client(mc)
104+
# Create a giskard Mistral client
105+
mistral_client = MistralClient()
106+
107+
# Set the default client
108+
giskard.llm.set_default_client(mistral_client)
109+
110+
# You may also want to set the default embedding model
111+
# Check the Custom Client code snippet for more details
100112
```
101113

102114
::::::
@@ -106,11 +118,18 @@ giskard.llm.set_default_client(mc)
106118
import giskard
107119
from openai import OpenAI
108120
from giskard.llm.client.openai import OpenAIClient
121+
from giskard.llm.embeddings.openai import OpenAIEmbedding
109122

110-
# Setup the Ollama client with API key and base URL
123+
# Setup the OpenAI client with API key and base URL for Ollama
111124
_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama")
112-
oc = OpenAIClient(model="gemma:2b", client=_client)
113-
giskard.llm.set_default_client(oc)
125+
126+
# Wrap the original OpenAI client with giskard OpenAI client and embedding
127+
llm_client = OpenAIClient(model="llama3.2", client=_client)
128+
embed_client = OpenAIEmbedding(model="nomic-embed-text", client=_client)
129+
130+
# Set the default client and embedding
131+
giskard.llm.set_default_client(llm_client)
132+
giskard.llm.embeddings.set_default_embedding(embed_client)
114133
```
115134

116135
::::::
@@ -123,13 +142,17 @@ import giskard
123142

124143
from giskard.llm.client.bedrock import ClaudeBedrockClient
125144
from giskard.llm.embeddings.bedrock import BedrockEmbedding
126-
from giskard.llm.embeddings import set_default_embedding
127145

146+
# Create a Bedrock client
128147
bedrock_runtime = boto3.client("bedrock-runtime", region_name=os.environ["AWS_DEFAULT_REGION"])
148+
149+
# Wrap the Bedrock client with giskard Bedrock client and embedding
129150
claude_client = ClaudeBedrockClient(bedrock_runtime, model="anthropic.claude-3-haiku-20240307-v1:0")
130151
embed_client = BedrockEmbedding(bedrock_runtime, model="amazon.titan-embed-text-v1")
152+
153+
# Set the default client and embedding
131154
giskard.llm.set_default_client(claude_client)
132-
set_default_embedding(embed_client)
155+
giskard.llm.embeddings.set_default_embedding(embed_client)
133156
```
134157

135158
::::::
@@ -138,14 +161,23 @@ set_default_embedding(embed_client)
138161
```python
139162
import os
140163
import giskard
141-
142164
import google.generativeai as genai
143-
144165
from giskard.llm.client.gemini import GeminiClient
145166

167+
# Set the Gemini API key
168+
os.environ["GEMINI_API_KEY"] = ""
169+
170+
# Configure the Gemini API
146171
genai.configure(api_key=os.environ["GEMINI_API_KEY"])
147172

148-
giskard.llm.set_default_client(GeminiClient())
173+
# Create a giskard Gemini client
174+
gemini_client = GeminiClient()
175+
176+
# Set the default client
177+
giskard.llm.set_default_client(gemini_client)
178+
179+
# You may also want to set the default embedding model
180+
# Check the Custom Client code snippet for more details
149181
```
150182

151183
::::::
@@ -157,7 +189,7 @@ from typing import Sequence, Optional
157189
from giskard.llm.client import set_default_client
158190
from giskard.llm.client.base import LLMClient, ChatMessage
159191

160-
192+
# Create a custom client by extending the LLMClient class
161193
class MyLLMClient(LLMClient):
162194
def __init__(self, my_client):
163195
self._client = my_client
@@ -202,9 +234,17 @@ class MyLLMClient(LLMClient):
202234

203235
return ChatMessage(role="assistant", message=data["completion"])
204236

237+
# Create an instance of the custom client
238+
llm_client = MyLLMClient()
205239

206-
set_default_client(MyLLMClient())
240+
# Set the default client
241+
set_default_client(llm_client)
207242

243+
# It's also possible to create a custom embedding class extending BaseEmbedding
244+
# Or you can use FastEmbed for a pre-built embedding model:
245+
from giskard.llm.embeddings.fastembed import try_get_fastembed_embeddings
246+
embed_client = try_get_fastembed_embeddings()
247+
giskard.llm.embeddings.set_default_embedding(embed_client)
208248
```
209249

210250
::::::

docs/open_source/setting_up/index.md

+63-22
Original file line numberDiff line numberDiff line change
@@ -11,60 +11,78 @@ This guide focuses primarily on configuring and using various LLM clients suppor
1111
## OpenAI GPT-4 Client Setup
1212

1313
```python
14-
import giskard
1514
import os
15+
import giskard
1616
from giskard.llm.client.openai import OpenAIClient
1717

18+
# Set the OpenAI API key
1819
os.environ["OPENAI_API_KEY"] = "sk-…"
1920

21+
# Create a giskard OpenAI client
22+
openai_client = OpenAIClient(model="gpt-4o")
23+
24+
# Set the default client
2025
giskard.llm.set_llm_api("openai")
21-
oc = OpenAIClient(model="gpt-4-turbo-preview")
22-
giskard.llm.set_default_client(oc)
26+
giskard.llm.set_default_client(openai_client)
2327
```
2428

2529
## Azure OpenAI Client Setup
2630

2731
```python
2832
import os
29-
from giskard.llm import set_llm_model
30-
from giskard.llm.embeddings.openai import set_embedding_model
33+
import giskard
3134

35+
# Set the Azure OpenAI API key and endpoint
3236
os.environ['AZURE_OPENAI_API_KEY'] = '...'
3337
os.environ['AZURE_OPENAI_ENDPOINT'] = 'https://xxx.openai.azure.com'
3438
os.environ['OPENAI_API_VERSION'] = '2023-07-01-preview'
3539

36-
3740
# You'll need to provide the name of the model that you've deployed
3841
# Beware, the model provided must be capable of using function calls
39-
set_llm_model('my-gpt-4-model')
40-
set_embedding_model('my-embedding-model') # Optional
42+
giskard.llm.set_llm_model('my-gpt-4-model')
43+
giskard.llm.embeddings.set_embedding_model('my-embedding-model')
4144
```
4245

4346
## Mistral Client Setup
4447

4548
```python
4649
import os
50+
import giskard
4751
from giskard.llm.client.mistral import MistralClient
4852

49-
os.environ["MISTRAL_API_KEY"] = "sk-…"
53+
# Set the Mistral API key
54+
os.environ["MISTRAL_API_KEY"] = ""
5055

51-
mc = MistralClient()
52-
giskard.llm.set_default_client(mc)
56+
# Create a giskard Mistral client
57+
mistral_client = MistralClient()
58+
59+
# Set the default client
60+
giskard.llm.set_default_client(mistral_client)
61+
62+
# You may also want to set the default embedding model
63+
# Check the Custom Client Setup section for more details
5364
```
5465

5566
## Ollama Client Setup
5667

5768
The Ollama setup involves configuring an OpenAI client customized for the Ollama API:
5869

5970
```python
71+
import giskard
6072
from openai import OpenAI
6173
from giskard.llm.client.openai import OpenAIClient
62-
from giskard.llm.client.mistral import MistralClient
74+
from giskard.llm.embeddings.openai import OpenAIEmbedding
6375

64-
# Setup the Ollama client with API key and base URL
76+
# Setup the OpenAI client with API key and base URL for Ollama
6577
_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama")
66-
oc = OpenAIClient(model="gemma:2b", client=_client)
67-
giskard.llm.set_default_client(oc)
78+
79+
# Wrap the original OpenAI client with giskard OpenAI client and embedding
80+
llm_client = OpenAIClient(model="llama3.2", client=_client)
81+
embed_client = OpenAIEmbedding(model="nomic-embed-text", client=_client)
82+
83+
# Set the default client and embedding
84+
giskard.llm.set_default_client(llm_client)
85+
giskard.llm.embeddings.set_default_embedding(embed_client)
6886
```
6987

7088
## Claude 3 Client Setup
@@ -78,28 +96,41 @@ import giskard
7896

7997
from giskard.llm.client.bedrock import ClaudeBedrockClient
8098
from giskard.llm.embeddings.bedrock import BedrockEmbedding
81-
from giskard.llm.embeddings import set_default_embedding
8299

100+
# Create a Bedrock client
83101
bedrock_runtime = boto3.client("bedrock-runtime", region_name=os.environ["AWS_DEFAULT_REGION"])
102+
103+
# Wrap the Bedrock client with giskard Bedrock client and embedding
84104
claude_client = ClaudeBedrockClient(bedrock_runtime, model="anthropic.claude-3-haiku-20240307-v1:0")
85105
embed_client = BedrockEmbedding(bedrock_runtime, model="amazon.titan-embed-text-v1")
106+
107+
# Set the default client and embedding
86108
giskard.llm.set_default_client(claude_client)
87-
set_default_embedding(embed_client)
109+
giskard.llm.embeddings.set_default_embedding(embed_client)
88110
```
89111

90112
## Gemini Client Setup
91113

92114
```python
93115
import os
94116
import giskard
95-
96117
import google.generativeai as genai
97-
98118
from giskard.llm.client.gemini import GeminiClient
99119

120+
# Set the Gemini API key
121+
os.environ["GEMINI_API_KEY"] = ""
122+
123+
# Configure the Gemini API
100124
genai.configure(api_key=os.environ["GEMINI_API_KEY"])
101125

102-
giskard.llm.set_default_client(GeminiClient())
126+
# Create a giskard Gemini client
127+
gemini_client = GeminiClient()
128+
129+
# Set the default client
130+
giskard.llm.set_default_client(gemini_client)
131+
132+
# You may also want to set the default embedding model
133+
# Check the Custom Client Setup section for more details
103134
```
104135

105136
## Custom Client Setup
@@ -110,7 +141,7 @@ from typing import Sequence, Optional
110141
from giskard.llm.client import set_default_client
111142
from giskard.llm.client.base import LLMClient, ChatMessage
112143

113-
144+
# Create a custom client by extending the LLMClient class
114145
class MyLLMClient(LLMClient):
115146
def __init__(self, my_client):
116147
self._client = my_client
@@ -155,7 +186,17 @@ class MyLLMClient(LLMClient):
155186

156187
return ChatMessage(role="assistant", message=data["completion"])
157188

158-
set_default_client(MyLLMClient())
189+
# Create an instance of the custom client
190+
llm_client = MyLLMClient()
191+
192+
# Set the default client
193+
set_default_client(llm_client)
194+
195+
# It's also possible to create a custom embedding class extending BaseEmbedding
196+
# Or you can use FastEmbed for a pre-built embedding model:
197+
from giskard.llm.embeddings.fastembed import try_get_fastembed_embeddings
198+
embed_client = try_get_fastembed_embeddings()
199+
giskard.llm.embeddings.set_default_embedding(embed_client)
159200
```
160201

161202
If you run into any issues configuring the LLM client, don't hesitate to [ask us on Discord](https://discord.com/invite/ABvfpbu69R) or open a new issue on [our GitHub repo](https://github.com/Giskard-AI/giskard).

0 commit comments

Comments
 (0)