feat: refactor prompt refresh (#7605)

darkskygit 2024-07-26 04:51:07 +00:00
parent 54da85ec62
commit 3f0e4c04d7
GPG Key ID: 97B7D036B1566E9D
29 changed files with 208 additions and 515 deletions

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class Prompts1712068777394 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class RefreshPrompt1713185798895 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompt1713522040090 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1713777617122 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompt1713864641056 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714021969665 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714386922280 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714454280973 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714982671938 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714992100105 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1714998654392 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class AddMakeItRealWithTextPrompt1715149980782 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,22 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1715672224087 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(db: PrismaClient) {
await db.aiPrompt.updateMany({
where: {
model: 'gpt-4o',
},
data: {
model: 'gpt-4-vision-preview',
},
});
}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1715936358947 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1716451792364 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1716800288136 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1716882419364 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1717139930406 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1717140940966 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1717490700326 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1720413813993 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1720600411073 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,13 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { refreshPrompts } from './utils/prompts';
export class UpdatePrompts1721814446774 {
// do the migration
static async up(db: PrismaClient) {
await refreshPrompts(db);
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@ -1,16 +1,14 @@
import { type Tokenizer } from '@affine/server-native';
import { Injectable, Logger } from '@nestjs/common';
import { AiPrompt, PrismaClient } from '@prisma/client';
import { Logger } from '@nestjs/common';
import { AiPrompt } from '@prisma/client';
import Mustache from 'mustache';
import {
getTokenEncoder,
PromptConfig,
PromptConfigSchema,
PromptMessage,
PromptMessageSchema,
PromptParams,
} from './types';
} from '../types';
// disable escaping
Mustache.escape = (text: string) => text;
@ -151,139 +149,3 @@ export class ChatPrompt {
);
}
}
@Injectable()
export class PromptService {
private readonly cache = new Map<string, ChatPrompt>();
constructor(private readonly db: PrismaClient) {}
/**
* list prompt names
* @returns prompt names
*/
async listNames() {
return this.db.aiPrompt
.findMany({ select: { name: true } })
.then(prompts => Array.from(new Set(prompts.map(p => p.name))));
}
async list() {
return this.db.aiPrompt.findMany({
select: {
name: true,
action: true,
model: true,
config: true,
messages: {
select: {
role: true,
content: true,
params: true,
},
orderBy: {
idx: 'asc',
},
},
},
});
}
/**
* get prompt messages by prompt name
* @param name prompt name
* @returns prompt messages
*/
async get(name: string): Promise<ChatPrompt | null> {
const cached = this.cache.get(name);
if (cached) return cached;
const prompt = await this.db.aiPrompt.findUnique({
where: {
name,
},
select: {
name: true,
action: true,
model: true,
config: true,
messages: {
select: {
role: true,
content: true,
params: true,
},
orderBy: {
idx: 'asc',
},
},
},
});
const messages = PromptMessageSchema.array().safeParse(prompt?.messages);
const config = PromptConfigSchema.safeParse(prompt?.config);
if (prompt && messages.success && config.success) {
const chatPrompt = ChatPrompt.createFromPrompt({
...prompt,
config: config.data,
messages: messages.data,
});
this.cache.set(name, chatPrompt);
return chatPrompt;
}
return null;
}
async set(
name: string,
model: string,
messages: PromptMessage[],
config?: PromptConfig | null
) {
return await this.db.aiPrompt
.create({
data: {
name,
model,
config: config || undefined,
messages: {
create: messages.map((m, idx) => ({
idx,
...m,
attachments: m.attachments || undefined,
params: m.params || undefined,
})),
},
},
})
.then(ret => ret.id);
}
async update(name: string, messages: PromptMessage[], config?: PromptConfig) {
const { id } = await this.db.aiPrompt.update({
where: { name },
data: {
config: config || undefined,
messages: {
// cleanup old messages
deleteMany: {},
create: messages.map((m, idx) => ({
idx,
...m,
attachments: m.attachments || undefined,
params: m.params || undefined,
})),
},
},
});
this.cache.delete(name);
return id;
}
async delete(name: string) {
const { id } = await this.db.aiPrompt.delete({ where: { name } });
this.cache.delete(name);
return id;
}
}

View File

@ -0,0 +1,3 @@
export { ChatPrompt } from './chat-prompt';
export { prompts } from './prompts';
export { PromptService } from './service';
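
With the new barrel file, callers pull the seed data and the service from a single path instead of reaching into the data-migration utilities. A minimal usage sketch; the relative path matches the test imports later in this commit and is otherwise illustrative:

// e.g. in a copilot e2e test or another server-side module
import { prompts, PromptService } from '../src/plugins/copilot/prompt';

// `prompts` is the built-in seed list; `PromptService` manages the aiPrompt table
console.log(`seeding ${prompts.length} built-in prompts`);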

View File

@ -1,32 +1,14 @@
import { AiPromptRole, PrismaClient } from '@prisma/client';
import { AiPrompt, PrismaClient } from '@prisma/client';
type PromptMessage = {
role: AiPromptRole;
content: string;
params?: Record<string, string | string[]>;
};
import { PromptConfig, PromptMessage } from '../types';
type PromptConfig = {
jsonMode?: boolean;
frequencyPenalty?: number;
presencePenalty?: number;
temperature?: number;
topP?: number;
maxTokens?: number;
// fal
modelName?: string;
loras?: { path: string; scale?: number }[];
};
type Prompt = {
name: string;
type Prompt = Omit<AiPrompt, 'id' | 'createdAt' | 'action' | 'config'> & {
action?: string;
model: string;
config?: PromptConfig;
messages: PromptMessage[];
config?: PromptConfig;
};
const workflow: Prompt[] = [
const workflows: Prompt[] = [
{
name: 'debug:action:fal-teed',
action: 'fal-teed',
@ -295,29 +277,7 @@ const workflow: Prompt[] = [
},
];
export const prompts: Prompt[] = [
{
name: 'debug:chat:gpt4',
model: 'gpt-4o',
messages: [
{
role: 'system',
content:
"You are AFFiNE AI, a professional and humorous copilot within AFFiNE. You are powered by latest GPT model from OpenAI and AFFiNE. AFFiNE is an open source general purposed productivity tool that contains unified building blocks that users can use on any interfaces, including block-based docs editor, infinite canvas based edgeless graphic mode, or multi-dimensional table with multiple transformable views. Your mission is always to try your very best to assist users to use AFFiNE to write docs, draw diagrams or plan things with these abilities. You always think step-by-step and describe your plan for what to build, using well-structured and clear markdown, written out in great detail. Unless otherwise specified, where list, JSON, or code blocks are required for giving the output. Minimize any other prose so that your responses can be directly used and inserted into the docs. You are able to access to API of AFFiNE to finish your job. You always respect the users' privacy and would not leak their info to anyone else. AFFiNE is made by Toeverything .Pte .Ltd, a company registered in Singapore with a diverse and international team. The company also open sourced blocksuite and octobase for building tools similar to Affine. The name AFFiNE comes from the idea of AFFiNE transform, as blocks in affine can all transform in page, edgeless or database mode. AFFiNE team is now having 25 members, an open source company driven by engineers.",
},
],
},
{
name: 'chat:gpt4',
model: 'gpt-4o',
messages: [
{
role: 'system',
content:
"You are AFFiNE AI, a professional and humorous copilot within AFFiNE. You are powered by latest GPT model from OpenAI and AFFiNE. AFFiNE is an open source general purposed productivity tool that contains unified building blocks that users can use on any interfaces, including block-based docs editor, infinite canvas based edgeless graphic mode, or multi-dimensional table with multiple transformable views. Your mission is always to try your very best to assist users to use AFFiNE to write docs, draw diagrams or plan things with these abilities. You always think step-by-step and describe your plan for what to build, using well-structured and clear markdown, written out in great detail. Unless otherwise specified, where list, JSON, or code blocks are required for giving the output. Minimize any other prose so that your responses can be directly used and inserted into the docs. You are able to access to API of AFFiNE to finish your job. You always respect the users' privacy and would not leak their info to anyone else. AFFiNE is made by Toeverything .Pte .Ltd, a company registered in Singapore with a diverse and international team. The company also open sourced blocksuite and octobase for building tools similar to Affine. The name AFFiNE comes from the idea of AFFiNE transform, as blocks in affine can all transform in page, edgeless or database mode. AFFiNE team is now having 25 members, an open source company driven by engineers.",
},
],
},
const actions: Prompt[] = [
{
name: 'debug:action:gpt4',
action: 'text',
@ -873,23 +833,49 @@ content: {{content}}`,
},
],
},
...workflow,
];
const chat: Prompt[] = [
{
name: 'debug:chat:gpt4',
model: 'gpt-4o',
messages: [
{
role: 'system',
content:
"You are AFFiNE AI, a professional and humorous copilot within AFFiNE. You are powered by latest GPT model from OpenAI and AFFiNE. AFFiNE is an open source general purposed productivity tool that contains unified building blocks that users can use on any interfaces, including block-based docs editor, infinite canvas based edgeless graphic mode, or multi-dimensional table with multiple transformable views. Your mission is always to try your very best to assist users to use AFFiNE to write docs, draw diagrams or plan things with these abilities. You always think step-by-step and describe your plan for what to build, using well-structured and clear markdown, written out in great detail. Unless otherwise specified, where list, JSON, or code blocks are required for giving the output. Minimize any other prose so that your responses can be directly used and inserted into the docs. You are able to access to API of AFFiNE to finish your job. You always respect the users' privacy and would not leak their info to anyone else. AFFiNE is made by Toeverything .Pte .Ltd, a company registered in Singapore with a diverse and international team. The company also open sourced blocksuite and octobase for building tools similar to Affine. The name AFFiNE comes from the idea of AFFiNE transform, as blocks in affine can all transform in page, edgeless or database mode. AFFiNE team is now having 25 members, an open source company driven by engineers.",
},
],
},
{
name: 'chat:gpt4',
model: 'gpt-4o',
messages: [
{
role: 'system',
content:
"You are AFFiNE AI, a professional and humorous copilot within AFFiNE. You are powered by latest GPT model from OpenAI and AFFiNE. AFFiNE is an open source general purposed productivity tool that contains unified building blocks that users can use on any interfaces, including block-based docs editor, infinite canvas based edgeless graphic mode, or multi-dimensional table with multiple transformable views. Your mission is always to try your very best to assist users to use AFFiNE to write docs, draw diagrams or plan things with these abilities. You always think step-by-step and describe your plan for what to build, using well-structured and clear markdown, written out in great detail. Unless otherwise specified, where list, JSON, or code blocks are required for giving the output. Minimize any other prose so that your responses can be directly used and inserted into the docs. You are able to access to API of AFFiNE to finish your job. You always respect the users' privacy and would not leak their info to anyone else. AFFiNE is made by Toeverything .Pte .Ltd, a company registered in Singapore with a diverse and international team. The company also open sourced blocksuite and octobase for building tools similar to Affine. The name AFFiNE comes from the idea of AFFiNE transform, as blocks in affine can all transform in page, edgeless or database mode. AFFiNE team is now having 25 members, an open source company driven by engineers.",
},
],
},
];
export const prompts: Prompt[] = [...actions, ...chat, ...workflows];
export async function refreshPrompts(db: PrismaClient) {
for (const prompt of prompts) {
await db.aiPrompt.upsert({
create: {
name: prompt.name,
action: prompt.action,
config: prompt.config,
config: prompt.config || undefined,
model: prompt.model,
messages: {
create: prompt.messages.map((message, idx) => ({
idx,
role: message.role,
content: message.content,
params: message.params,
params: message.params || undefined,
})),
},
},
@ -903,7 +889,7 @@ export async function refreshPrompts(db: PrismaClient) {
idx,
role: message.role,
content: message.content,
params: message.params,
params: message.params || undefined,
})),
},
},
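
The hunks above only show fragments of refreshPrompts. A minimal sketch of the overall upsert shape, assuming the unique key is the prompt name (as the service's findUnique({ where: { name } }) suggests) and that the update branch mirrors create; the elided parts of the real hunk may differ:

import { PrismaClient } from '@prisma/client';

// `prompts` is the array assembled above in the same file.
export async function refreshPrompts(db: PrismaClient) {
  for (const prompt of prompts) {
    await db.aiPrompt.upsert({
      where: { name: prompt.name }, // assumed unique key
      create: {
        name: prompt.name,
        action: prompt.action,
        config: prompt.config || undefined,
        model: prompt.model,
        messages: {
          create: prompt.messages.map((message, idx) => ({
            idx,
            role: message.role,
            content: message.content,
            params: message.params || undefined,
          })),
        },
      },
      update: {
        action: prompt.action,
        config: prompt.config || undefined,
        model: prompt.model,
        messages: {
          // drop stale messages before re-creating them, as the service's update() does
          deleteMany: {},
          create: prompt.messages.map((message, idx) => ({
            idx,
            role: message.role,
            content: message.content,
            params: message.params || undefined,
          })),
        },
      },
    });
  }
}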

View File

@ -0,0 +1,151 @@
import { Injectable, OnModuleInit } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import {
PromptConfig,
PromptConfigSchema,
PromptMessage,
PromptMessageSchema,
} from '../types';
import { ChatPrompt } from './chat-prompt';
import { refreshPrompts } from './prompts';
@Injectable()
export class PromptService implements OnModuleInit {
private readonly cache = new Map<string, ChatPrompt>();
constructor(private readonly db: PrismaClient) {}
async onModuleInit() {
await refreshPrompts(this.db);
}
/**
* list prompt names
* @returns prompt names
*/
async listNames() {
return this.db.aiPrompt
.findMany({ select: { name: true } })
.then(prompts => Array.from(new Set(prompts.map(p => p.name))));
}
async list() {
return this.db.aiPrompt.findMany({
select: {
name: true,
action: true,
model: true,
config: true,
messages: {
select: {
role: true,
content: true,
params: true,
},
orderBy: {
idx: 'asc',
},
},
},
});
}
/**
* get prompt messages by prompt name
* @param name prompt name
* @returns prompt messages
*/
async get(name: string): Promise<ChatPrompt | null> {
const cached = this.cache.get(name);
if (cached) return cached;
const prompt = await this.db.aiPrompt.findUnique({
where: {
name,
},
select: {
name: true,
action: true,
model: true,
config: true,
messages: {
select: {
role: true,
content: true,
params: true,
},
orderBy: {
idx: 'asc',
},
},
},
});
const messages = PromptMessageSchema.array().safeParse(prompt?.messages);
const config = PromptConfigSchema.safeParse(prompt?.config);
if (prompt && messages.success && config.success) {
const chatPrompt = ChatPrompt.createFromPrompt({
...prompt,
config: config.data,
messages: messages.data,
});
this.cache.set(name, chatPrompt);
return chatPrompt;
}
return null;
}
async set(
name: string,
model: string,
messages: PromptMessage[],
config?: PromptConfig | null
) {
return await this.db.aiPrompt
.create({
data: {
name,
model,
config: config || undefined,
messages: {
create: messages.map((m, idx) => ({
idx,
...m,
attachments: m.attachments || undefined,
params: m.params || undefined,
})),
},
},
})
.then(ret => ret.id);
}
async update(name: string, messages: PromptMessage[], config?: PromptConfig) {
const { id } = await this.db.aiPrompt.update({
where: { name },
data: {
config: config || undefined,
messages: {
// cleanup old messages
deleteMany: {},
create: messages.map((m, idx) => ({
idx,
...m,
attachments: m.attachments || undefined,
params: m.params || undefined,
})),
},
},
});
this.cache.delete(name);
return id;
}
async delete(name: string) {
const { id } = await this.db.aiPrompt.delete({ where: { name } });
this.cache.delete(name);
return id;
}
}
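
Because PromptService now implements OnModuleInit, the built-in prompts are refreshed on every application boot, which is what allows the per-timestamp data migrations above to be deleted. A hedged sketch of how the service might be wired up; the module name and provider list are illustrative, and the real copilot module may differ:

import { Module } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';

import { PromptService } from './prompt';

// Illustrative wiring only: Nest instantiates PrismaClient and PromptService,
// then invokes PromptService.onModuleInit(), which runs refreshPrompts(db).
@Module({
  providers: [PrismaClient, PromptService],
  exports: [PromptService],
})
export class CopilotPromptModule {}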

View File

@ -9,10 +9,9 @@ import Sinon from 'sinon';
import { AuthService } from '../src/core/auth';
import { WorkspaceModule } from '../src/core/workspaces';
import { prompts } from '../src/data/migrations/utils/prompts';
import { ConfigModule } from '../src/fundamentals/config';
import { CopilotModule } from '../src/plugins/copilot';
import { PromptService } from '../src/plugins/copilot/prompt';
import { prompts, PromptService } from '../src/plugins/copilot/prompt';
import {
CopilotProviderService,
FalProvider,
@ -95,10 +94,6 @@ test.beforeEach(async t => {
await prompt.set(promptName, 'test', [
{ role: 'system', content: 'hello {{word}}' },
]);
for (const p of prompts) {
await prompt.set(p.name, p.model, p.messages, p.config);
}
});
test.afterEach.always(async t => {

View File

@ -7,10 +7,9 @@ import Sinon from 'sinon';
import { AuthService } from '../src/core/auth';
import { QuotaModule } from '../src/core/quota';
import { prompts } from '../src/data/migrations/utils/prompts';
import { ConfigModule } from '../src/fundamentals/config';
import { CopilotModule } from '../src/plugins/copilot';
import { PromptService } from '../src/plugins/copilot/prompt';
import { prompts, PromptService } from '../src/plugins/copilot/prompt';
import {
CopilotProviderService,
OpenAIProvider,
@ -115,13 +114,18 @@ test.beforeEach(async t => {
test('should be able to manage prompt', async t => {
const { prompt } = t.context;
t.is((await prompt.listNames()).length, 0, 'should have no prompt');
const internalPromptCount = (await prompt.listNames()).length;
t.is(internalPromptCount, prompts.length, 'should list names');
await prompt.set('test', 'test', [
{ role: 'system', content: 'hello' },
{ role: 'user', content: 'hello' },
]);
t.is((await prompt.listNames()).length, 1, 'should have one prompt');
t.is(
(await prompt.listNames()).length,
internalPromptCount + 1,
'should have one prompt'
);
t.is(
(await prompt.get('test'))!.finish({}).length,
2,
@ -136,7 +140,11 @@ test('should be able to manage prompt', async t => {
);
await prompt.delete('test');
t.is((await prompt.listNames()).length, 0, 'should have no prompt');
t.is(
(await prompt.listNames()).length,
internalPromptCount,
'should delete the prompt'
);
t.is(await prompt.get('test'), null, 'should not have the prompt');
});
@ -795,7 +803,7 @@ test('should be able to run pre defined workflow', async t => {
});
test('should be able to run workflow', async t => {
const { prompt, workflow, executors } = t.context;
const { workflow, executors } = t.context;
executors.text.register();
unregisterCopilotProvider(OpenAIProvider.type);
@ -803,10 +811,6 @@ test('should be able to run workflow', async t => {
const executor = Sinon.spy(executors.text, 'next');
for (const p of prompts) {
await prompt.set(p.name, p.model, p.messages, p.config);
}
const graphName = 'presentation';
const graph = WorkflowGraphList.find(g => g.name === graphName);
t.truthy(graph, `graph ${graphName} not defined`);