diff --git a/CHANGELOG.md b/CHANGELOG.md
index 334efa8..f5afc35 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p
 
 ### Added
 
-- Adds a `gitlens.experimental.openAIModel` setting to specify the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command (defaults to `gpt-3.5-turbo`) — closes [#2636](https://github.com/gitkraken/vscode-gitlens/issues/2636) thanks to [PR #2637](https://github.com/gitkraken/vscode-gitlens/pull/2637) by Daniel Rodríguez ([@sadasant](https://github.com/sadasant))
+- Adds ability to choose the OpenAI model used for GitLens' experimental AI features — closes [#2636](https://github.com/gitkraken/vscode-gitlens/issues/2636) thanks to [PR #2637](https://github.com/gitkraken/vscode-gitlens/pull/2637) by Daniel Rodríguez ([@sadasant](https://github.com/sadasant))
+  - Adds a `gitlens.ai.experimental.openai.model` setting to specify the OpenAI model (defaults to `gpt-3.5-turbo`)
 
 ## [13.6.0] - 2023-05-11
 
diff --git a/README.md b/README.md
index eb77b94..95656cf 100644
--- a/README.md
+++ b/README.md
@@ -1194,6 +1194,7 @@ A big thanks to the people that have contributed to this project:
 - David Rees ([@studgeek](https://github.com/studgeek)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=studgeek)
 - Rickard ([@rickardp](https://github.com/rickardp)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=rickardp)
 - Johannes Rieken ([@jrieken](https://github.com/jrieken)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=jrieken)
+- Daniel Rodríguez ([@sadasant](https://github.com/sadasant)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=sadasant)
 - Guillaume Rozan ([@rozangu1](https://github.com/rozangu1)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=rozangu1)
 - ryenus ([@ryenus](https://github.com/ryenus)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=ryenus)
 - Felipe Santos ([@felipecrs](https://github.com/felipecrs)) — [contributions](https://github.com/gitkraken/vscode-gitlens/commits?author=felipecrs)
diff --git a/package.json b/package.json
index c1bb65b..2038151 100644
--- a/package.json
+++ b/package.json
@@ -2806,6 +2806,43 @@
 				}
 			},
 			{
+				"id": "ai",
+				"title": "AI",
+				"order": 113,
+				"properties": {
+					"gitlens.experimental.generateCommitMessagePrompt": {
+						"type": "string",
+						"default": "Commit messages must have a short description that is less than 50 chars followed by a newline and a more detailed description.\n- Write concisely using an informal tone and avoid specific names from the code",
+						"markdownDescription": "Specifies the prompt to use to tell OpenAI how to structure or format the generated commit message",
+						"scope": "window",
+						"order": 1
+					},
+					"gitlens.ai.experimental.openai.model": {
+						"type": "string",
+						"default": "gpt-3.5-turbo",
+						"enum": [
+							"gpt-3.5-turbo",
+							"gpt-3.5-turbo-0301",
+							"gpt-4",
+							"gpt-4-0314",
+							"gpt-4-32k",
+							"gpt-4-32k-0314"
+						],
+						"enumDescriptions": [
+							"GPT 3.5 Turbo",
+							"GPT 3.5 Turbo (March 1, 2023)",
+							"GPT 4",
+							"GPT 4 (March 14, 2023)",
+							"GPT 4 32k",
+							"GPT 4 32k (March 14, 2023)"
+						],
+						"markdownDescription": "Specifies the OpenAI model to use for GitLens' experimental AI features",
+						"scope": "window",
+						"order": 100
+					}
+				}
+			},
+			{
 				"id": "date-times",
 				"title": "Date & Times",
 				"order": 120,
 				"properties": {
@@ -3660,20 +3697,6 @@
"scope": "window", "order": 50 }, - "gitlens.experimental.generateCommitMessagePrompt": { - "type": "string", - "default": "Commit messages must have a short description that is less than 50 chars followed by a newline and a more detailed description.\n- Write concisely using an informal tone and avoid specific names from the code", - "markdownDescription": "Specifies the prompt to use to tell OpenAI how to structure or format the generated commit message", - "scope": "window", - "order": 55 - }, - "gitlens.experimental.openAIModel": { - "type": "string", - "default": "gpt-3.5-turbo", - "markdownDescription": "Specifies the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command", - "scope": "window", - "order": 56 - }, "gitlens.advanced.externalDiffTool": { "type": [ "string", diff --git a/src/ai/openaiProvider.ts b/src/ai/openaiProvider.ts index 020738b..4fc4a3d 100644 --- a/src/ai/openaiProvider.ts +++ b/src/ai/openaiProvider.ts @@ -7,23 +7,25 @@ import type { Storage } from '../system/storage'; import { supportedInVSCodeVersion } from '../system/utils'; import type { AIProvider } from './aiProviderService'; -const maxCodeCharacters = 12000; - export class OpenAIProvider implements AIProvider { readonly id = 'openai'; readonly name = 'OpenAI'; - private model: OpenAIChatCompletionModels = 'gpt-3.5-turbo'; + + private get model(): OpenAIModels { + return configuration.get('ai.experimental.openai.model') || 'gpt-3.5-turbo'; + } constructor(private readonly container: Container) {} dispose() {} async generateCommitMessage(diff: string, options?: { context?: string }): Promise { - this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo'; - const openaiApiKey = await getApiKey(this.container.storage); if (openaiApiKey == null) return undefined; + const model = this.model; + const maxCodeCharacters = getMaxCharacters(model); + const code = diff.substring(0, maxCodeCharacters); if (diff.length > maxCodeCharacters) { void window.showWarningMessage( @@ -37,7 +39,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: this.model, + model: model, messages: [ { role: 'system', @@ -82,11 +84,12 @@ export class OpenAIProvider implements AIProvider { } async explainChanges(message: string, diff: string): Promise { - this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo'; - const openaiApiKey = await getApiKey(this.container.storage); if (openaiApiKey == null) return undefined; + const model = this.model; + const maxCodeCharacters = getMaxCharacters(model); + const code = diff.substring(0, maxCodeCharacters); if (diff.length > maxCodeCharacters) { void window.showWarningMessage( @@ -95,7 +98,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: this.model, + model: model, messages: [ { role: 'system', @@ -200,10 +203,23 @@ async function getApiKey(storage: Storage): Promise { return openaiApiKey; } -export type OpenAIChatCompletionModels = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314'; +function getMaxCharacters(model: OpenAIModels): number { + if (model === 'gpt-4-32k' || model === 'gpt-4-32k-0314') { + return 43000; + } + return 12000; +} + +export type OpenAIModels = + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-0301' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-32k' + | 'gpt-4-32k-0314'; interface OpenAIChatCompletionRequest { - model: 
+	model: OpenAIModels;
 	messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
 	temperature?: number;
 	top_p?: number;
diff --git a/src/config.ts b/src/config.ts
index 68f260a..d8e7f9c 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -1,7 +1,15 @@
+import type { OpenAIModels } from './ai/openaiProvider';
 import type { DateTimeFormat } from './system/date';
 import { LogLevel } from './system/logger.constants';
 
 export interface Config {
+	ai: {
+		experimental: {
+			openai: {
+				model?: OpenAIModels;
+			};
+		};
+	};
 	autolinks: AutolinkReference[] | null;
 	blame: {
 		avatars: boolean;
@@ -49,7 +57,6 @@ export interface Config {
 	detectNestedRepositories: boolean;
 	experimental: {
 		generateCommitMessagePrompt: string;
-		openAIModel?: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314';
 	};
 	fileAnnotations: {
 		command: string | null;
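
Reviewer note: a minimal standalone TypeScript sketch of how the pieces of this patch fit together. The `OpenAIModels` type and `getMaxCharacters` are copied from the diff above; `truncateDiffForModel` and the example values are hypothetical, added only to illustrate how the configured model now drives the character budget that `generateCommitMessage` and `explainChanges` apply before building the chat-completion request.

```ts
// Model identifiers accepted by the new `gitlens.ai.experimental.openai.model` setting (from the diff).
type OpenAIModels =
	| 'gpt-3.5-turbo'
	| 'gpt-3.5-turbo-0301'
	| 'gpt-4'
	| 'gpt-4-0314'
	| 'gpt-4-32k'
	| 'gpt-4-32k-0314';

// Per-model character budget, as introduced in src/ai/openaiProvider.ts:
// the 32k-context models get a larger budget, everything else keeps the previous 12000 limit.
function getMaxCharacters(model: OpenAIModels): number {
	if (model === 'gpt-4-32k' || model === 'gpt-4-32k-0314') {
		return 43000;
	}
	return 12000;
}

// Hypothetical helper (not part of the patch): clamps a diff to the selected model's budget,
// mirroring what generateCommitMessage/explainChanges do before building the request payload.
function truncateDiffForModel(diff: string, model: OpenAIModels): { code: string; truncated: boolean } {
	const maxCodeCharacters = getMaxCharacters(model);
	return {
		code: diff.substring(0, maxCodeCharacters),
		truncated: diff.length > maxCodeCharacters,
	};
}

// Example: with the default model the budget stays at 12000 characters.
const { code, truncated } = truncateDiffForModel('diff --git a/file.ts b/file.ts ...', 'gpt-3.5-turbo');
console.log(code.length, truncated, getMaxCharacters('gpt-4-32k'));
```

In the provider itself, the truncation warning still fires whenever the diff exceeds the budget; selecting one of the 32k models simply makes that warning less likely for large diffs.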