From edb701232d958bbba02917cc2b4d9c41a5b3c3bb Mon Sep 17 00:00:00 2001
From: PriNova <31413214+PriNova@users.noreply.github.com>
Date: Tue, 27 Aug 2024 12:39:17 +0100
Subject: [PATCH] Update install-vscode.mdx

This update reflects the configuration parameters for the dev models
according to the interface:
https://sourcegraph.com/github.com/sourcegraph/cody@main/-/blob/vscode/src/models/sync.ts?L98-105
---
 docs/cody/clients/install-vscode.mdx | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/docs/cody/clients/install-vscode.mdx b/docs/cody/clients/install-vscode.mdx
index 32b30b5ec..61815e2f7 100644
--- a/docs/cody/clients/install-vscode.mdx
+++ b/docs/cody/clients/install-vscode.mdx
@@ -394,14 +394,15 @@
     {
       "provider": "google",
       "model": "gemini-1.5-pro-latest",
-      "tokens": 1000000,
+      "inputTokens": 1000000,
+      "outputTokens": 8192,
       "apiKey": "xyz"
     },
     // Groq (e.g. llama2 70b)
     {
       "provider": "groq",
       "model": "llama2-70b-4096",
-      "tokens": 4096,
+      "inputTokens": 4096,
       "apiKey": "xyz"
     },
     // OpenAI & OpenAI-compatible APIs
@@ -427,8 +428,10 @@ Example VS Code user settings JSON configuration:
   - The LLM provider type.
 - `model`: `string`
   - The ID of the model, e.g. `"gemini-1.5-pro-latest"`
-- `tokens`: `number` - optional
+- `inputTokens`: `number` - optional
   - The context window size of the model. Default: `7000`.
+- `outputTokens`: `number` - optional
+  - The maximum number of tokens the model may generate in its response. Default: `1000`.
 - `apiKey`: `string` - optional
   - The API key for the endpoint. Required if the provider is `"google"` or `"groq"`.
 - `apiEndpoint`: `string` - optional