Caleb Owens 2024-05-22 13:46:10 +02:00
parent 660d296745
commit b97339dae9
19 changed files with 376 additions and 24 deletions

View File

@ -1,6 +1,6 @@
import { SHORT_DEFAULT_COMMIT_TEMPLATE, SHORT_DEFAULT_BRANCH_TEMPLATE } from '$lib/ai/prompts';
import { fetch, Body } from '@tauri-apps/api/http';
-import type { AIClient, AnthropicModelName, PromptMessage } from '$lib/ai/types';
+import type { AIClient, AnthropicModelName, Prompt } from '$lib/ai/types';
type AnthropicAPIResponse = { content: { text: string }[] };
@ -13,7 +13,7 @@ export class AnthropicAIClient implements AIClient {
private modelName: AnthropicModelName
) {}
-async evaluate(prompt: PromptMessage[]) {
+async evaluate(prompt: Prompt) {
const body = Body.json({
messages: prompt,
max_tokens: 1024,
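
For orientation, since this is the first file touched: the commit replaces the PromptMessage[] parameter type with a Prompt alias (declared in the $lib/ai/types hunk further down), so call sites keep passing the same array of role/content messages. A minimal caller sketch, assuming an already-constructed client instance (Anthropic, OpenAI, Ollama and the Butler cloud client all implement AIClient):

import { MessageRole, type AIClient, type Prompt } from '$lib/ai/types';

// Assumption for the sketch: some client instance already exists.
declare const client: AIClient;

const prompt: Prompt = [
	{ role: MessageRole.User, content: 'Please write a commit message for this diff: ...' }
];

const message: string = await client.evaluate(prompt);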

View File

@ -1,5 +1,5 @@
import { SHORT_DEFAULT_BRANCH_TEMPLATE, SHORT_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
-import type { AIClient, ModelKind, PromptMessage } from '$lib/ai/types';
+import type { AIClient, ModelKind, Prompt } from '$lib/ai/types';
import type { HttpClient } from '$lib/backend/httpClient';
export class ButlerAIClient implements AIClient {
@ -12,7 +12,7 @@ export class ButlerAIClient implements AIClient {
private modelKind: ModelKind
) {}
-async evaluate(prompt: PromptMessage[]) {
+async evaluate(prompt: Prompt) {
const response = await this.cloud.post<{ message: string }>('evaluate_prompt/predict.json', {
body: {
messages: prompt,

View File

@ -1,5 +1,5 @@
import { LONG_DEFAULT_BRANCH_TEMPLATE, LONG_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
-import { MessageRole, type PromptMessage, type AIClient } from '$lib/ai/types';
+import { MessageRole, type PromptMessage, type AIClient, type Prompt } from '$lib/ai/types';
import { isNonEmptyObject } from '$lib/utils/typeguards';
import { fetch, Body, Response } from '@tauri-apps/api/http';
@ -22,7 +22,7 @@ interface OllamaRequestOptions {
interface OllamaChatRequest {
model: string;
-messages: PromptMessage[];
+messages: Prompt;
stream: boolean;
format?: 'json';
options?: OllamaRequestOptions;
@ -81,7 +81,7 @@ export class OllamaClient implements AIClient {
private modelName: string
) {}
-async evaluate(prompt: PromptMessage[]) {
+async evaluate(prompt: Prompt) {
const messages = this.formatPrompt(prompt);
const response = await this.chat(messages);
const rawResponse = JSON.parse(response.message.content);
@ -96,7 +96,7 @@ export class OllamaClient implements AIClient {
* Appends a system message which instructs the model to respond using a particular JSON schema
* Modifies the prompt's Assistant messages to make use of the correct schema
*/
-private formatPrompt(prompt: PromptMessage[]) {
+private formatPrompt(prompt: Prompt) {
const withFormattedResponses = prompt.map((promptMessage) => {
if (promptMessage.role == MessageRole.Assistant) {
return {
@ -146,7 +146,7 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
* @returns A Promise that resolves to an LLMResponse object representing the response from the LLM model.
*/
private async chat(
-messages: PromptMessage[],
+messages: Prompt,
options?: OllamaRequestOptions
): Promise<OllamaChatResponse> {
const result = await this.fetchChat({

View File

@ -1,5 +1,5 @@
import { SHORT_DEFAULT_BRANCH_TEMPLATE, SHORT_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
-import type { OpenAIModelName, PromptMessage, AIClient } from '$lib/ai/types';
+import type { OpenAIModelName, Prompt, AIClient } from '$lib/ai/types';
import type OpenAI from 'openai';
export class OpenAIClient implements AIClient {
@ -11,7 +11,7 @@ export class OpenAIClient implements AIClient {
private openAI: OpenAI
) {}
-async evaluate(prompt: PromptMessage[]) {
+async evaluate(prompt: Prompt) {
const response = await this.openAI.chat.completions.create({
messages: prompt,
model: this.modelName,

View File

@ -0,0 +1,38 @@
import { LONG_DEFAULT_BRANCH_TEMPLATE, LONG_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
import { persisted, type Persisted } from '$lib/persisted/persisted';
import type { Prompts, UserPrompt } from '$lib/ai/types';
enum PromptPersistedKey {
Branch = 'aiBranchPrompts',
Commit = 'aiCommitPrompts'
}
export class PromptService {
get branchPrompts(): Prompts {
return {
defaultPrompt: LONG_DEFAULT_BRANCH_TEMPLATE,
userPrompts: persisted<UserPrompt[]>([], PromptPersistedKey.Branch)
};
}
get commitPrompts(): Prompts {
return {
defaultPrompt: LONG_DEFAULT_COMMIT_TEMPLATE,
userPrompts: persisted<UserPrompt[]>([], PromptPersistedKey.Commit)
};
}
selectedBranchPrompt(projectId: string): Persisted<UserPrompt | undefined> {
return persisted<UserPrompt | undefined>(
undefined,
`${PromptPersistedKey.Branch}-${projectId}`
);
}
selectedCommitPrompt(projectId: string): Persisted<UserPrompt | undefined> {
return persisted<UserPrompt | undefined>(
undefined,
`${PromptPersistedKey.Commit}-${projectId}`
);
}
}
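
PromptService is new in this commit: each use case (branch names, commit messages) gets a built-in default template, a user-editable prompt list stored through persisted(), and a per-project pointer to the currently selected prompt. The persisted() implementation is not part of this diff; the sketch below assumes, as the Svelte components later in the commit do, that it returns a writable-store-compatible Persisted<T> with set() and subscribe():

import { get } from 'svelte/store';
import { PromptService } from '$lib/ai/promptService';
import type { UserPrompt } from '$lib/ai/types';

const promptService = new PromptService();
const { userPrompts } = promptService.commitPrompts;

// Append a new user prompt; get()/set() work because Persisted<T> is
// assumed to behave like a Svelte writable store that also saves its value.
const mine: UserPrompt = { name: 'My Prompt', prompt: [] };
userPrompts.set([...get(userPrompts), mine]);

// Remember the selection for one project ('project-123' is a made-up id).
promptService.selectedCommitPrompt('project-123').set(mine);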

View File

@ -1,6 +1,6 @@
-import { type PromptMessage, MessageRole } from '$lib/ai/types';
+import { type Prompt, MessageRole } from '$lib/ai/types';
-export const SHORT_DEFAULT_COMMIT_TEMPLATE: PromptMessage[] = [
+export const SHORT_DEFAULT_COMMIT_TEMPLATE: Prompt = [
{
role: MessageRole.User,
content: `Please could you write a commit message for my changes.
@ -20,7 +20,7 @@ Here is my git diff:
}
];
-export const LONG_DEFAULT_COMMIT_TEMPLATE: PromptMessage[] = [
+export const LONG_DEFAULT_COMMIT_TEMPLATE: Prompt = [
{
role: MessageRole.User,
content: `Please could you write a commit message for my changes.
@ -59,7 +59,7 @@ Added an utility function to check whether a given value is an array of a specif
...SHORT_DEFAULT_COMMIT_TEMPLATE
];
-export const SHORT_DEFAULT_BRANCH_TEMPLATE: PromptMessage[] = [
+export const SHORT_DEFAULT_BRANCH_TEMPLATE: Prompt = [
{
role: MessageRole.User,
content: `Please could you write a branch name for my changes.
@ -73,7 +73,7 @@ Here is my git diff:
}
];
-export const LONG_DEFAULT_BRANCH_TEMPLATE: PromptMessage[] = [
+export const LONG_DEFAULT_BRANCH_TEMPLATE: Prompt = [
{
role: MessageRole.User,
content: `Please could you write a branch name for my changes.

View File

@ -8,7 +8,7 @@ import {
ModelKind,
OpenAIModelName,
type AIClient,
-type PromptMessage
+type Prompt
} from '$lib/ai/types';
import { HttpClient } from '$lib/backend/httpClient';
import * as toasts from '$lib/utils/toasts';
@ -51,7 +51,7 @@ class DummyAIClient implements AIClient {
defaultBranchTemplate = SHORT_DEFAULT_BRANCH_TEMPLATE;
constructor(private response = 'lorem ipsum') {}
-async evaluate(_prompt: PromptMessage[]) {
+async evaluate(_prompt: Prompt) {
return this.response;
}
}
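
With evaluate() and the template properties now typed against Prompt, generation code can be written purely against the AIClient interface, which is what makes this dummy client a drop-in stand-in. A small illustrative helper (not part of the commit):

import type { AIClient } from '$lib/ai/types';

// Ask whichever client is configured for a commit message using its own
// default template. The real AIService presumably also splices the git diff
// into the prompt before evaluating; that step is omitted here.
async function generateCommitMessage(client: AIClient): Promise<string> {
	return await client.evaluate(client.defaultCommitTemplate);
}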

View File

@ -1,3 +1,5 @@
+import type { Persisted } from '$lib/persisted/persisted';
export enum ModelKind {
OpenAI = 'openai',
Anthropic = 'anthropic',
@ -28,9 +30,21 @@ export interface PromptMessage {
role: MessageRole;
}
-export interface AIClient {
-evaluate(prompt: PromptMessage[]): Promise<string>;
-defaultBranchTemplate: PromptMessage[];
-defaultCommitTemplate: PromptMessage[];
-}
+export type Prompt = PromptMessage[];
+export interface AIClient {
+evaluate(prompt: Prompt): Promise<string>;
+defaultBranchTemplate: Prompt;
+defaultCommitTemplate: Prompt;
+}
+export interface UserPrompt {
+name: string;
+prompt: Prompt;
+}
+export interface Prompts {
+defaultPrompt: Prompt;
+userPrompts: Persisted<UserPrompt[]>;
+}
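
Taken together, the new types layer like this: Prompt is the raw message array the clients consume, UserPrompt gives a prompt a user-visible name, and Prompts pairs a built-in default with the persisted list of user prompts. A small sketch (the storage key is illustrative, not one the app uses):

import { persisted } from '$lib/persisted/persisted';
import { MessageRole, type Prompt, type Prompts, type UserPrompt } from '$lib/ai/types';

const terse: Prompt = [
	{ role: MessageRole.User, content: 'Write a one-line commit message for this diff: ...' }
];

const userPrompt: UserPrompt = { name: 'Terse commits', prompt: terse };

const commitPrompts: Prompts = {
	defaultPrompt: terse,
	userPrompts: persisted<UserPrompt[]>([userPrompt], 'exampleCommitPrompts')
};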

View File

@ -0,0 +1,119 @@
<script lang="ts">
import { PromptService } from '$lib/ai/promptService';
import Content from '$lib/components/AIPromptEdit/Content.svelte';
import Expandable from '$lib/components/AIPromptEdit/Expandable.svelte';
import Button from '$lib/components/Button.svelte';
import TextBox from '$lib/components/TextBox.svelte';
import { getContext } from '$lib/utils/context';
import { get } from 'svelte/store';
import type { Prompts, UserPrompt } from '$lib/ai/types';
export let promptUse: 'commits' | 'branches';
const promptService = getContext(PromptService);
let prompts: Prompts;
if (promptUse == 'commits') {
prompts = promptService.commitPrompts;
} else {
prompts = promptService.branchPrompts;
}
$: userPrompts = prompts.userPrompts;
function createNewPrompt() {
prompts.userPrompts.set([
...get(prompts.userPrompts),
{
name: 'My Prompt',
prompt: []
}
]);
console.log($userPrompts);
}
function deletePrompt(targetPrompt: UserPrompt) {
const filteredPrompts = get(prompts.userPrompts).filter((prompt) => prompt != targetPrompt);
prompts.userPrompts.set(filteredPrompts);
}
function preventBubbling(e: Event) {
e.stopPropagation();
}
</script>
{#if prompts && $userPrompts}
<div class="container">
<h3 class="text-head-20 text-bold">
{promptUse == 'commits' ? 'Commit Prompts' : 'Branch Prompts'}
</h3>
<div>
{#if prompts.defaultPrompt}
<!-- svelte-ignore a11y-no-static-element-interactions -->
<Expandable position="top">
<svelte:fragment slot="header">
<div class="prompt-name">
<p class="text-base-15 text-bold default">Default Prompt</p>
</div>
</svelte:fragment>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div on:click={preventBubbling} class="not-clickable">
<Content displayMode="readOnly" bind:promptMessages={prompts.defaultPrompt} />
</div>
</Expandable>
{/if}
{#each $userPrompts as prompt, index}
<Expandable position={$userPrompts.length == index + 1 ? 'bottom' : 'middle'}>
<svelte:fragment slot="header">
<div class="prompt-name">
<TextBox bind:value={prompt.name} wide on:click={preventBubbling} />
<Button
on:click={(e) => {
preventBubbling(e);
deletePrompt(prompt);
}}
on:input={preventBubbling}
icon="bin"
/>
</div>
</svelte:fragment>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div on:click={preventBubbling} class="not-clickable">
<Content displayMode="writable" bind:promptMessages={prompt.prompt} />
</div>
</Expandable>
{/each}
</div>
<div>
<Button style="pop" on:click={createNewPrompt}>Create new prompt</Button>
</div>
</div>
{/if}
<style lang="postcss">
.container {
display: flex;
flex-direction: column;
gap: var(--size-8);
}
.prompt-name {
display: flex;
align-items: center;
gap: var(--size-8);
& .default {
opacity: 50%;
}
}
.not-clickable {
cursor: default;
}
</style>

View File

@ -0,0 +1,102 @@
<script lang="ts">
import { MessageRole, type Prompt } from '$lib/ai/types';
import Button from '$lib/components/Button.svelte';
import Tag from '$lib/components/Tag.svelte';
import TextArea from '$lib/components/TextArea.svelte';
export let displayMode: 'readOnly' | 'writable' = 'writable';
export let promptMessages: Prompt;
// Ensure the prompt messages have a default user prompt
if (promptMessages.length == 0) {
promptMessages = [
...promptMessages,
{
role: MessageRole.User,
content: ''
}
];
}
function addExample() {
promptMessages = [
...promptMessages,
{
role: MessageRole.Assistant,
content: ''
},
{
role: MessageRole.User,
content: ''
}
];
console.log(promptMessages);
}
function removeLastExample() {
promptMessages = promptMessages.slice(0, -2);
}
</script>
<div class="cards">
{#each promptMessages as promptMessage, index}
<div class="content-card">
<div class="actions">
{#if promptMessage.role == MessageRole.User}
<Tag kind="soft" style="pop" shrinkable>User</Tag>
{:else}
<Tag kind="soft" style="neutral" shrinkable>Assistant</Tag>
{/if}
{#if index + 1 == promptMessages.length && promptMessages.length > 1 && displayMode == 'writable'}
<Button icon="bin" on:click={removeLastExample} />
{/if}
</div>
{#if displayMode == 'writable'}
<TextArea bind:value={promptMessage.content} resizeable />
{:else}
<pre>{promptMessage.content}</pre>
{/if}
</div>
{/each}
{#if displayMode == 'writable'}
<div class="content-card">
<Tag kind="soft" style="neutral" shrinkable>Assistant</Tag>
<div>
<Button on:click={addExample}>Add an example</Button>
</div>
</div>
{/if}
</div>
<style lang="postcss">
.cards {
display: grid;
grid-template-columns: 100%;
gap: 8px;
}
.content-card {
display: flex;
flex-direction: column;
gap: 8px;
background-color: #fafafa;
border: 1px solid #efefef;
border-radius: var(--radius-s);
padding: var(--size-8);
}
.actions {
display: flex;
align-items: center;
justify-content: space-between;
}
pre {
text-wrap: wrap;
user-select: text;
}
</style>
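
A note on the shape this editor maintains: a prompt always starts from a single User message, and examples are only ever added or removed as Assistant/User pairs, so the stored Prompt alternates roles and ends on the User message the model is asked to answer. One way to state that invariant (illustrative, not part of the commit):

import { MessageRole, type Prompt } from '$lib/ai/types';

// True when roles alternate User, Assistant, User, ... and the prompt ends
// with a User message, which is the shape the editor above produces.
function isWellFormedPrompt(prompt: Prompt): boolean {
	if (prompt.length === 0 || prompt.length % 2 === 0) return false;
	return prompt.every(
		(message, index) => message.role === (index % 2 === 0 ? MessageRole.User : MessageRole.Assistant)
	);
}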

View File

@ -0,0 +1,22 @@
<script lang="ts">
import SectionCard from '$lib/components/SectionCard.svelte';
export let position: 'top' | 'middle' | 'bottom';
let expanded = false;
</script>
<SectionCard
roundedTop={position == 'top'}
roundedBottom={position == 'bottom'}
clickable
on:click={() => (expanded = !expanded)}
>
<svelte:fragment slot="title">
<slot name="header"></slot>
</svelte:fragment>
{#if expanded}
<slot></slot>
{/if}
</SectionCard>

View File

@ -0,0 +1,34 @@
<script lang="ts">
import { PromptService } from '$lib/ai/promptService';
import { Project } from '$lib/backend/projects';
import SectionCard from '$lib/components/SectionCard.svelte';
import { getContext } from '$lib/utils/context';
import type { Prompts } from '$lib/ai/types';
export let promptUse: 'commits' | 'branches';
const project = getContext(Project);
const promptService = getContext(PromptService);
let prompts: Prompts;
if (promptUse == 'commits') {
prompts = promptService.commitPrompts;
} else {
prompts = promptService.branchPrompts;
}
console.log(promptService);
console.log(prompts);
$: userPrompts = prompts.userPrompts;
</script>
<h3 class="text-base-15 text-bold">
{promptUse == 'commits' ? 'Commit Prompts' : 'Branch Prompts'}
</h3>
{#each $userPrompts as userPrompt, index}
<SectionCard roundedTop={index == 0} roundedBottom={index + 1 == $userPrompts.length}
></SectionCard>
{/each}

View File

@ -2,6 +2,7 @@
import SectionCard from './SectionCard.svelte';
import WelcomeSigninAction from './WelcomeSigninAction.svelte';
import { Project, ProjectService } from '$lib/backend/projects';
+import AiPromptSelect from '$lib/components/AIPromptSelect.svelte';
import Link from '$lib/components/Link.svelte';
import Spacer from '$lib/components/Spacer.svelte';
import Toggle from '$lib/components/Toggle.svelte';
@ -104,6 +105,8 @@
</svelte:fragment>
</SectionCard>
</div>
+<AiPromptSelect promptUse="commits" />
+<AiPromptSelect promptUse="branches" />
</Section>
{#if $user?.role === 'admin'}

View File

@ -16,12 +16,15 @@
export let noBorder = false;
export let labelFor = '';
export let disabled = false;
+export let clickable = false;
const SLOTS = $$props.$$slots;
const dispatch = createEventDispatcher<{ hover: boolean }>();
</script>
<!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<label
for={labelFor}
class="section-card"
@ -36,8 +39,9 @@
class:loading={background == 'loading'}
class:success={background == 'success'}
class:error={background == 'error'}
-class:clickable={labelFor !== ''}
+class:clickable={labelFor !== '' || clickable}
class:disabled
+on:click
on:mouseenter={() => dispatch('hover', true)}
on:mouseleave={() => dispatch('hover', false)}
>

View File

@ -229,6 +229,7 @@
.shrinkable {
overflow: hidden;
width: fit-content;
& span {
overflow: hidden;

View File

@ -11,6 +11,7 @@
export let autocorrect: string | undefined = undefined;
export let spellcheck = false;
export let label: string | undefined = undefined;
+export let resizeable = false;
const dispatch = createEventDispatcher<{ input: string; change: string }>();
</script>
@ -23,6 +24,7 @@
{/if}
<textarea
class="text-input text-base-body-13 textarea"
+class:resizeable
bind:value
{disabled}
{id}
@ -52,6 +54,10 @@
padding-bottom: var(--size-12);
}
+.resizeable {
+resize: vertical;
+}
.textbox__label {
color: var(--clr-scale-ntrl-50);
}

View File

@ -1,6 +1,7 @@
<script lang="ts">
import '../styles/main.postcss';
+import { PromptService as AIPromptService } from '$lib/ai/promptService';
import { AIService } from '$lib/ai/service';
import { AuthService } from '$lib/backend/auth';
import { GitConfigService } from '$lib/backend/gitConfigService';
@ -42,6 +43,7 @@
setContext(AuthService, data.authService);
setContext(HttpClient, data.cloud);
setContext(User, data.userService.user);
+setContext(AIPromptService, data.aiPromptService);
let shareIssueModal: ShareIssueModal;

View File

@ -1,3 +1,4 @@
+import { PromptService as AIPromptService } from '$lib/ai/promptService';
import { AIService } from '$lib/ai/service';
import { initAnalyticsIfEnabled } from '$lib/analytics/analytics';
import { AuthService } from '$lib/backend/auth';
@ -52,6 +53,7 @@ export async function load() {
const gitConfig = new GitConfigService();
const aiService = new AIService(gitConfig, httpClient);
+const aiPromptService = new AIPromptService();
return {
authService,
@ -64,6 +66,7 @@ export async function load() {
// These observables are provided for convenience
remoteUrl$,
gitConfig,
-aiService
+aiService,
+aiPromptService
};
}

View File

@ -2,6 +2,7 @@
import { AIService, GitAIConfigKey, KeyOption } from '$lib/ai/service';
import { OpenAIModelName, AnthropicModelName, ModelKind } from '$lib/ai/types';
import { GitConfigService } from '$lib/backend/gitConfigService';
+import AIPromptEdit from '$lib/components/AIPromptEdit/index.svelte';
import InfoMessage from '$lib/components/InfoMessage.svelte';
import RadioButton from '$lib/components/RadioButton.svelte';
import SectionCard from '$lib/components/SectionCard.svelte';
@ -322,6 +323,9 @@
</svelte:fragment>
</SectionCard>
+<AIPromptEdit promptUse="commits" />
+<AIPromptEdit promptUse="branches" />
<style>
.ai-settings__text {
color: var(--clr-text-2);