Browse Source

Closes #2636 - Adds experimental.OpenAIModel

main
Daniel Rodriguez 1 year ago
committed by Eric Amodio
parent
commit
bd0e1f310e
4 changed files with 22 additions and 3 deletions
  1. +4
    -0
      CHANGELOG.md
  2. +7
    -0
      package.json
  3. +10
    -3
      src/ai/openaiProvider.ts
  4. +1
    -0
      src/config.ts

+ 4
- 0
CHANGELOG.md View File

@@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p
## [Unreleased]
### Added
- Adds a `gitlens.experimental.openAIModel` setting to specify the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command (defaults to `gpt-3.5-turbo`) — closes [#2636](https://github.com/gitkraken/vscode-gitlens/issues/2636) thanks to [PR #2637](https://github.com/gitkraken/vscode-gitlens/pull/2637) by Daniel Rodríguez ([@sadasant](https://github.com/sadasant))
## [13.6.0] - 2023-05-11
### Added

+ 7
- 0
package.json View File

@@ -3667,6 +3667,13 @@
"scope": "window",
"order": 55
},
"gitlens.experimental.openAIModel": {
"type": "string",
"default": "gpt-3.5-turbo",
"markdownDescription": "Specifies the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command",
"scope": "window",
"order": 56
},
"gitlens.advanced.externalDiffTool": {
"type": [
"string",

+ 10
- 3
src/ai/openaiProvider.ts View File

@@ -12,12 +12,15 @@ const maxCodeCharacters = 12000;
export class OpenAIProvider implements AIProvider {
readonly id = 'openai';
readonly name = 'OpenAI';
private model: OpenAIChatCompletionModels = 'gpt-3.5-turbo';
constructor(private readonly container: Container) {}
dispose() {}
async generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined> {
this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo';
const openaiApiKey = await getApiKey(this.container.storage);
if (openaiApiKey == null) return undefined;
@@ -34,7 +37,7 @@ export class OpenAIProvider implements AIProvider {
}
const data: OpenAIChatCompletionRequest = {
model: 'gpt-3.5-turbo',
model: this.model,
messages: [
{
role: 'system',
@@ -79,6 +82,8 @@ export class OpenAIProvider implements AIProvider {
}
async explainChanges(message: string, diff: string): Promise<string | undefined> {
this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo';
const openaiApiKey = await getApiKey(this.container.storage);
if (openaiApiKey == null) return undefined;
@@ -90,7 +95,7 @@ export class OpenAIProvider implements AIProvider {
}
const data: OpenAIChatCompletionRequest = {
model: 'gpt-3.5-turbo',
model: this.model,
messages: [
{
role: 'system',
@@ -195,8 +200,10 @@ async function getApiKey(storage: Storage): Promise<string | undefined> {
return openaiApiKey;
}
export type OpenAIChatCompletionModels = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314';
interface OpenAIChatCompletionRequest {
model: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301';
model: OpenAIChatCompletionModels;
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
temperature?: number;
top_p?: number;

+ 1
- 0
src/config.ts View File

@@ -49,6 +49,7 @@ export interface Config {
detectNestedRepositories: boolean;
experimental: {
generateCommitMessagePrompt: string;
openAIModel?: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314';
};
fileAnnotations: {
command: string | null;

Loading…
Cancel
Save