Browse Source

Adds Claude 2.1 support and improves model choice

main
Eric Amodio 1 year ago
parent
commit
b9c0fa5e53
8 changed files with 155 additions and 78 deletions
  1. +20
    -9
      package.json
  2. +46
    -20
      src/ai/aiProviderService.ts
  3. +31
    -16
      src/ai/anthropicProvider.ts
  4. +21
    -7
      src/ai/openaiProvider.ts
  5. +1
    -7
      src/commands/switchAIModel.ts
  6. +5
    -4
      src/config.ts
  7. +7
    -0
      src/constants.ts
  8. +24
    -15
      src/quickpicks/aiModelPicker.ts

+ 20
- 9
package.json View File

@ -3183,8 +3183,11 @@
"order": 2
},
"gitlens.ai.experimental.provider": {
"type": "string",
"default": "openai",
"type": [
"string",
"null"
],
"default": null,
"enum": [
"openai",
"anthropic"
@ -3198,8 +3201,11 @@
"order": 100
},
"gitlens.ai.experimental.openai.model": {
"type": "string",
"default": "gpt-3.5-turbo",
"type": [
"string",
"null"
],
"default": null,
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
@ -3237,15 +3243,20 @@
"order": 102
},
"gitlens.ai.experimental.anthropic.model": {
"type": "string",
"default": "claude-instant-1",
"type": [
"string",
"null"
],
"default": null,
"enum": [
"claude-instant-1",
"claude-2"
"claude-2",
"claude-2.1"
],
"enumDescriptions": [
"Claude Instant",
"Claude"
"Claude Instant 1.2",
"Claude 2",
"Claude 2.1"
],
"markdownDescription": "Specifies the Anthropic model to use for GitLens' experimental AI features",
"scope": "window",

+ 46
- 20
src/ai/aiProviderService.ts View File

@ -8,6 +8,7 @@ import { uncommitted, uncommittedStaged } from '../git/models/constants';
import type { GitRevisionReference } from '../git/models/reference';
import type { Repository } from '../git/models/repository';
import { isRepository } from '../git/models/repository';
import { showAIModelPicker } from '../quickpicks/aiModelPicker';
import { configuration } from '../system/configuration';
import type { Storage } from '../system/storage';
import type { AnthropicModels } from './anthropicProvider';
@ -15,8 +16,8 @@ import { AnthropicProvider } from './anthropicProvider';
import type { OpenAIModels } from './openaiProvider';
import { OpenAIProvider } from './openaiProvider';
export interface AIProvider extends Disposable {
readonly id: AIProviders;
export interface AIProvider<Provider extends AIProviders = AIProviders> extends Disposable {
readonly id: Provider;
readonly name: string;
generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined>;
@ -26,21 +27,6 @@ export interface AIProvider extends Disposable {
export class AIProviderService implements Disposable {
private _provider: AIProvider | undefined;
private get provider() {
const providerId = configuration.get('ai.experimental.provider');
if (providerId === this._provider?.id) return this._provider;
this._provider?.dispose();
if (providerId === 'anthropic') {
this._provider = new AnthropicProvider(this.container);
} else {
this._provider = new OpenAIProvider(this.container);
}
return this._provider;
}
constructor(private readonly container: Container) {}
dispose() {
@ -48,7 +34,7 @@ export class AIProviderService implements Disposable {
}
get providerId() {
return this.provider?.id;
return this._provider?.id;
}
public async generateCommitMessage(
@ -86,7 +72,8 @@ export class AIProviderService implements Disposable {
changes = diff.contents;
}
const provider = this.provider;
const provider = await this.getOrChooseProvider();
if (provider == null) return undefined;
const confirmed = await confirmAIProviderToS(provider, this.container.storage);
if (!confirmed) return undefined;
@ -132,7 +119,8 @@ export class AIProviderService implements Disposable {
const diff = await this.container.git.getDiff(commit.repoPath, commit.sha);
if (!diff?.contents) throw new Error('No changes found to explain.');
const provider = this.provider;
const provider = await this.getOrChooseProvider();
if (provider == null) return undefined;
const confirmed = await confirmAIProviderToS(provider, this.container.storage);
if (!confirmed) return undefined;
@ -159,6 +147,41 @@ export class AIProviderService implements Disposable {
void this.container.storage.delete(`confirm:ai:tos:${providerId}`);
void this.container.storage.deleteWorkspace(`confirm:ai:tos:${providerId}`);
}
/** Whether the given id names a known AI provider ('openai' or 'anthropic'). */
supports(provider: AIProviders | string) {
	switch (provider) {
		case 'anthropic':
		case 'openai':
			return true;
		default:
			return false;
	}
}
async switchProvider() {
void (await this.getOrChooseProvider(true));
}
/**
 * Returns the active AI provider, prompting the user to pick one when none is
 * configured or the configured id is unsupported (or always, when `force` is set).
 * Persists the chosen provider and model to configuration, and disposes and
 * replaces the cached provider instance when the selection changes.
 *
 * @param force When true, ignores the configured provider and always shows the picker.
 * @returns The resolved provider, or undefined if the user dismissed the picker.
 */
private async getOrChooseProvider(force?: boolean): Promise<AIProvider | undefined> {
	let providerId = force ? undefined : configuration.get('ai.experimental.provider') || undefined;

	if (providerId == null || !this.supports(providerId)) {
		const pick = await showAIModelPicker();
		if (pick == null) return undefined;

		providerId = pick.provider;
		await configuration.updateEffective('ai.experimental.provider', providerId);
		await configuration.updateEffective(`ai.experimental.${providerId}.model`, pick.model);
	}

	if (providerId !== this._provider?.id) {
		this._provider?.dispose();

		switch (providerId) {
			case 'anthropic':
				this._provider = new AnthropicProvider(this.container);
				break;
			default:
				this._provider = new OpenAIProvider(this.container);
				// Defensive: normalize any unexpected id back to 'openai' in settings
				if (providerId !== 'openai') {
					await configuration.updateEffective('ai.experimental.provider', 'openai');
				}
				break;
		}
	}

	return this._provider;
}
}
async function confirmAIProviderToS(provider: AIProvider, storage: Storage): Promise<boolean> {
@ -220,6 +243,9 @@ export function getMaxCharacters(model: OpenAIModels | AnthropicModels, outputLe
case 'gpt-3.5-turbo': // Will point to gpt-3.5-turbo-1106 starting Dec 11, 2023
tokens = 4096;
break;
case 'claude-2.1': // 200,000 tokens
tokens = 200000;
break;
case 'claude-2': // 100,000 tokens
case 'claude-instant-1':
tokens = 100000;

+ 31
- 16
src/ai/anthropicProvider.ts View File

@ -2,29 +2,42 @@ import type { Disposable, QuickInputButton } from 'vscode';
import { env, ThemeIcon, Uri, window } from 'vscode';
import { fetch } from '@env/fetch';
import type { Container } from '../container';
import { showAIModelPicker } from '../quickpicks/aiModelPicker';
import { configuration } from '../system/configuration';
import type { Storage } from '../system/storage';
import { supportedInVSCodeVersion } from '../system/utils';
import type { AIProvider } from './aiProviderService';
import { getMaxCharacters } from './aiProviderService';
export class AnthropicProvider implements AIProvider {
export class AnthropicProvider implements AIProvider<'anthropic'> {
readonly id = 'anthropic';
readonly name = 'Anthropic';
private get model(): AnthropicModels {
return configuration.get('ai.experimental.anthropic.model') || 'claude-instant-1';
}
constructor(private readonly container: Container) {}
dispose() {}
/** The configured Anthropic model, or null when none is set. */
private get model(): AnthropicModels | null {
	const configured = configuration.get('ai.experimental.anthropic.model');
	return configured ? configured : null;
}
/**
 * Gets the configured Anthropic model, prompting the user to pick one (and
 * persisting the choice to configuration) when none is configured.
 *
 * @returns The model to use, or undefined if the user dismissed the picker.
 */
private async getOrChooseModel(): Promise<AnthropicModels | undefined> {
	const current = this.model;
	if (current != null) return current;

	const pick = await showAIModelPicker(this.id);
	if (pick == null) return undefined;

	await configuration.updateEffective(`ai.experimental.${pick.provider}.model`, pick.model);
	return pick.model;
}
async generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined> {
const apiKey = await getApiKey(this.container.storage);
if (apiKey == null) return undefined;
const model = this.model;
const model = await this.getOrChooseModel();
if (model == null) return undefined;
let retries = 0;
let maxCodeCharacters = getMaxCharacters(model, 2600);
@ -65,7 +78,6 @@ Assistant:`;
prompt: prompt,
stream: false,
max_tokens_to_sample: 5000,
stop_sequences: ['\n\nHuman:'],
};
const rsp = await this.fetch(apiKey, request);
if (!rsp.ok) {
@ -108,19 +120,23 @@ Assistant:`;
const apiKey = await getApiKey(this.container.storage);
if (apiKey == null) return undefined;
const model = this.model;
const model = await this.getOrChooseModel();
if (model == null) return undefined;
let retries = 0;
let maxCodeCharacters = getMaxCharacters(model, 3000);
while (true) {
const code = diff.substring(0, maxCodeCharacters);
const prompt = `\n\nHuman: You are an advanced AI programming assistant tasked with summarizing code changes into an explanation that is both easy to understand and meaningful. Construct an explanation that:
- Concisely synthesizes meaningful information from the provided code diff
- Incorporates any additional context provided by the user to understand the rationale behind the code changes
- Places the emphasis on the 'why' of the change, clarifying its benefits or addressing the problem that necessitated the change, beyond just detailing the 'what' has changed
// FYI, only Claude 2.1 supports system prompts
const prompt = `${
model !== 'claude-2.1' ? '\n\nHuman: ' : ''
}You are an advanced AI programming assistant tasked with summarizing code changes into an explanation that is both easy to understand and meaningful. Construct an explanation that:
- Concisely synthesizes meaningful information from the provided code diff
- Incorporates any additional context provided by the user to understand the rationale behind the code changes
- Places the emphasis on the 'why' of the change, clarifying its benefits or addressing the problem that necessitated the change, beyond just detailing the 'what' has changed
Do not make any assumptions or invent details that are not supported by the code diff or the user-provided context.
Do not make any assumptions or invent details that are not supported by the code diff or the user-provided context.
Human: Here is additional context provided by the author of the changes, which should provide some explanation to why these changes where made. Please strongly consider this information when generating your explanation:
@ -139,7 +155,6 @@ Assistant:`;
prompt: prompt,
stream: false,
max_tokens_to_sample: 5000,
stop_sequences: ['\n\nHuman:'],
};
const rsp = await this.fetch(apiKey, request);
@ -255,7 +270,7 @@ async function getApiKey(storage: Storage): Promise<string | undefined> {
return apiKey;
}
export type AnthropicModels = 'claude-instant-1' | 'claude-2';
export type AnthropicModels = 'claude-instant-1' | 'claude-2' | 'claude-2.1';
interface AnthropicCompletionRequest {
model: string;
@ -263,7 +278,7 @@ interface AnthropicCompletionRequest {
stream: boolean;
max_tokens_to_sample: number;
stop_sequences: string[];
stop_sequences?: string[];
temperature?: number;
top_k?: number;

+ 21
- 7
src/ai/openaiProvider.ts View File

@ -2,33 +2,46 @@ import type { Disposable, QuickInputButton } from 'vscode';
import { env, ThemeIcon, Uri, window } from 'vscode';
import { fetch } from '@env/fetch';
import type { Container } from '../container';
import { showAIModelPicker } from '../quickpicks/aiModelPicker';
import { configuration } from '../system/configuration';
import type { Storage } from '../system/storage';
import { supportedInVSCodeVersion } from '../system/utils';
import type { AIProvider } from './aiProviderService';
import { getMaxCharacters } from './aiProviderService';
export class OpenAIProvider implements AIProvider {
export class OpenAIProvider implements AIProvider<'openai'> {
readonly id = 'openai';
readonly name = 'OpenAI';
private get model(): OpenAIModels {
return configuration.get('ai.experimental.openai.model') || 'gpt-3.5-turbo';
}
constructor(private readonly container: Container) {}
dispose() {}
/** The configured OpenAI model, or null when none is set. */
private get model(): OpenAIModels | null {
	const configured = configuration.get('ai.experimental.openai.model');
	return configured || null;
}
/** The endpoint to call, falling back to the official OpenAI chat completions API. */
private get url(): string {
	const custom = configuration.get('ai.experimental.openai.url');
	return custom || 'https://api.openai.com/v1/chat/completions';
}
/**
 * Gets the configured OpenAI model, prompting the user to pick one (and
 * persisting the choice to configuration) when none is configured.
 *
 * @returns The model to use, or undefined if the user dismissed the picker.
 */
private async getOrChooseModel(): Promise<OpenAIModels | undefined> {
	let model = this.model ?? undefined;
	if (model == null) {
		const pick = await showAIModelPicker(this.id);
		if (pick == null) return undefined;

		await configuration.updateEffective(`ai.experimental.${pick.provider}.model`, pick.model);
		model = pick.model;
	}
	return model;
}
async generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined> {
const apiKey = await getApiKey(this.container.storage);
if (apiKey == null) return undefined;
const model = this.model;
const model = await this.getOrChooseModel();
if (model == null) return undefined;
let retries = 0;
let maxCodeCharacters = getMaxCharacters(model, 2600);
@ -122,7 +135,8 @@ Follow the user's instructions carefully, don't repeat yourself, don't include t
const apiKey = await getApiKey(this.container.storage);
if (apiKey == null) return undefined;
const model = this.model;
const model = await this.getOrChooseModel();
if (model == null) return undefined;
let retries = 0;
let maxCodeCharacters = getMaxCharacters(model, 3000);

+ 1
- 7
src/commands/switchAIModel.ts View File

@ -1,8 +1,6 @@
import { Commands } from '../constants';
import type { Container } from '../container';
import { showAIModelPicker } from '../quickpicks/aiModelPicker';
import { command } from '../system/command';
import { configuration } from '../system/configuration';
import { Command } from './base';
@command()
@ -12,10 +10,6 @@ export class SwitchAIModelCommand extends Command {
}
async execute() {
const pick = await showAIModelPicker();
if (pick == null) return;
await configuration.updateEffective('ai.experimental.provider', pick.provider);
await configuration.updateEffective(`ai.experimental.${pick.provider}.model`, pick.model);
await this.container.ai.switchProvider();
}
}

+ 5
- 4
src/config.ts View File

@ -1,5 +1,6 @@
import type { AnthropicModels } from './ai/anthropicProvider';
import type { OpenAIModels } from './ai/openaiProvider';
import type { AIProviders } from './constants';
import type { DateTimeFormat } from './system/date';
import type { LogLevel } from './system/logger.constants';
@ -9,13 +10,13 @@ export interface Config {
readonly generateCommitMessage: {
readonly enabled: boolean;
};
readonly provider: 'openai' | 'anthropic';
readonly provider: AIProviders | null;
readonly openai: {
readonly model?: OpenAIModels;
readonly url?: string | null;
readonly model: OpenAIModels | null;
readonly url: string | null;
};
readonly anthropic: {
readonly model?: AnthropicModels;
readonly model: AnthropicModels | null;
};
};
};

+ 7
- 0
src/constants.ts View File

@ -1,3 +1,5 @@
import type { AnthropicModels } from './ai/anthropicProvider';
import type { OpenAIModels } from './ai/openaiProvider';
import type { ViewShowBranchComparison } from './config';
import type { Environment } from './container';
import type { StoredSearchQuery } from './git/search';
@ -782,6 +784,11 @@ export type TelemetryEvents =
| 'usage/track';
export type AIProviders = 'anthropic' | 'openai';
// Maps an AI provider id to its model union; when the provider is left as the
// default (unspecified), this resolves to the union of all known models.
export type AIModels<Provider extends AIProviders = AIProviders> = Provider extends 'openai'
? OpenAIModels
: Provider extends 'anthropic'
? AnthropicModels
: OpenAIModels | AnthropicModels;
export type SecretKeys =
| `gitlens.integration.auth:${string}`

+ 24
- 15
src/quickpicks/aiModelPicker.ts View File

@ -1,35 +1,44 @@
import type { QuickPickItem } from 'vscode';
import { QuickPickItemKind, window } from 'vscode';
import type { AnthropicModels } from '../ai/anthropicProvider';
import type { OpenAIModels } from '../ai/openaiProvider';
import type { AIProviders } from '../constants';
import type { AIModels, AIProviders } from '../constants';
import { configuration } from '../system/configuration';
export interface ModelQuickPickItem extends QuickPickItem {
provider: AIProviders;
model: OpenAIModels | AnthropicModels;
// A quick pick item representing a selectable AI model; the generic parameters
// tie the `model` type to the chosen `provider` via the AIModels mapping.
export interface ModelQuickPickItem<
Provider extends AIProviders = AIProviders,
Model extends AIModels<Provider> = AIModels<Provider>,
> extends QuickPickItem {
provider: Provider;
model: Model;
}
export async function showAIModelPicker(): Promise<ModelQuickPickItem | undefined> {
const provider = configuration.get('ai.experimental.provider') ?? 'openai';
let model = configuration.get(`ai.experimental.${provider}.model`);
if (model == null) {
model = provider === 'anthropic' ? 'claude-instant-1' : 'gpt-3.5-turbo';
}
export async function showAIModelPicker(): Promise<ModelQuickPickItem | undefined>;
export async function showAIModelPicker<T extends AIProviders>(provider: T): Promise<ModelQuickPickItem<T> | undefined>;
export async function showAIModelPicker(provider?: AIProviders): Promise<ModelQuickPickItem | undefined> {
type QuickPickSeparator = { label: string; kind: QuickPickItemKind.Separator };
const items: (ModelQuickPickItem | QuickPickSeparator)[] = [
let items: (ModelQuickPickItem | QuickPickSeparator)[] = [
{ label: 'OpenAI', kind: QuickPickItemKind.Separator },
{ label: 'OpenAI', description: 'GPT-4 Turbo', provider: 'openai', model: 'gpt-4-1106-preview' },
{ label: 'OpenAI', description: 'GPT-4', provider: 'openai', model: 'gpt-4' },
{ label: 'OpenAI', description: 'GPT-4 32k', provider: 'openai', model: 'gpt-4-32k' },
{ label: 'OpenAI', description: 'GPT-3.5 Turbo', provider: 'openai', model: 'gpt-3.5-turbo-1106' },
{ label: 'Anthropic', kind: QuickPickItemKind.Separator },
{ label: 'Anthropic', description: 'Claude', provider: 'anthropic', model: 'claude-2' },
{ label: 'Anthropic', description: 'Claude 2.1', provider: 'anthropic', model: 'claude-2.1' },
{ label: 'Anthropic', description: 'Claude 2.0', provider: 'anthropic', model: 'claude-2' },
{ label: 'Anthropic', description: 'Claude Instant', provider: 'anthropic', model: 'claude-instant-1' },
];
if (provider != null) {
items = items.filter(i => i.kind !== QuickPickItemKind.Separator && i.provider === provider);
} else {
provider = configuration.get('ai.experimental.provider') ?? 'openai';
}
let model = configuration.get(`ai.experimental.${provider}.model`);
if (model == null) {
model = provider === 'anthropic' ? 'claude-2.1' : 'gpt-4-1106-preview';
}
for (const item of items) {
if (item.kind === QuickPickItemKind.Separator) continue;

Loading…
Cancel
Save