Skip to content

Commit

Permalink
ai fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
YousefED committed Oct 11, 2023
1 parent b058b74 commit f914978
Show file tree
Hide file tree
Showing 11 changed files with 207 additions and 77 deletions.
2 changes: 2 additions & 0 deletions packages/editor/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@
"@typecell-org/parsers": "^0.0.3",
"@typecell-org/frame": "^0.0.3",
"@typecell-org/y-penpal": "^0.0.3",
"openai": "^4.11.1",
"ai": "2.2.14",
"speakingurl": "^14.0.1",
"classnames": "^2.3.1",
"fractional-indexing": "^2.0.0",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import { IframeBridgeMethods } from "@typecell-org/shared";
import { HostBridgeMethods, IframeBridgeMethods } from "@typecell-org/shared";
import { ContainedElement, useResource } from "@typecell-org/util";
import { PenPalProvider } from "@typecell-org/y-penpal";
import { AsyncMethodReturns, connectToChild } from "penpal";
import { useRef } from "react";
import * as awarenessProtocol from "y-protocols/awareness";
import { parseIdentifier } from "../../../identifiers";
import { queryOpenAI } from "../../../integrations/ai/openai";
import { DocumentResource } from "../../../store/DocumentResource";
import { DocumentResourceModelProvider } from "../../../store/DocumentResourceModelProvider";
import { SessionStore } from "../../../store/local/SessionStore";
Expand Down Expand Up @@ -64,7 +65,7 @@ export function FrameHost(props: {
{ provider: DocumentResourceModelProvider; forwarder: ModelForwarder }
>();

const methods = {
const methods: HostBridgeMethods = {
processYjsMessage: async (message: ArrayBuffer) => {
provider.onMessage(message, "penpal");
},
Expand Down Expand Up @@ -110,6 +111,7 @@ export function FrameHost(props: {
moduleManager.forwarder.dispose();
moduleManagers.delete(identifierStr);
},
queryLLM: queryOpenAI,
};

const iframe = document.createElement("iframe");
Expand Down
42 changes: 42 additions & 0 deletions packages/editor/src/integrations/ai/openai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import { OpenAIStream, StreamingTextResponse } from "ai";
import { ChatCompletionCreateParamsBase, OpenAI } from "openai";

/**
 * Queries the OpenAI chat-completions API (model "gpt-4") with the given
 * messages and optional function definitions, and returns the complete
 * streamed response collected into a single string.
 *
 * The API key is read from localStorage under "oai-key"; if absent, the user
 * is prompted for it once and the value is persisted for subsequent calls.
 *
 * @param parameters - messages (required), plus optional `functions` and
 *   `function_call` forwarded verbatim to the chat-completions request.
 * @returns `{ status: "ok", result }` with the concatenated response text,
 *   or `{ status: "error", error: "no-key" }` when the user declines to
 *   provide an API key.
 */
export async function queryOpenAI(parameters: {
  messages: ChatCompletionCreateParamsBase["messages"];
  functions?: ChatCompletionCreateParamsBase["functions"];
  // Was `function_key`, which is not a field of ChatCompletionCreateParamsBase
  // (an invalid indexed-access type and an unknown request field at runtime).
  // The OpenAI API's field for steering function invocation is `function_call`.
  function_call?: ChatCompletionCreateParamsBase["function_call"];
}) {
  // Get the key from localStorage, prompting the user once if it is missing.
  let key = localStorage.getItem("oai-key");
  if (!key) {
    key = prompt(
      "Please enter your OpenAI key (not shared with TypeCell, stored in your browser):",
    );
    if (!key) {
      return {
        status: "error",
        error: "no-key",
      } as const;
    }
    localStorage.setItem("oai-key", key);
  }

  // Browser-side usage is deliberate: the key is the user's own and goes
  // directly from their browser to OpenAI (hence dangerouslyAllowBrowser).
  const openai = new OpenAI({
    apiKey: key,
    dangerouslyAllowBrowser: true,
  });

  const response = await openai.chat.completions.create({
    model: "gpt-4",
    stream: true,
    ...parameters,
  });

  // Drain the stream into one string before returning: callers on the other
  // side of the penpal bridge expect a complete result, not a stream.
  const stream = OpenAIStream(response);
  // Respond with the stream
  const ret = new StreamingTextResponse(stream);
  const data = await ret.text();
  return {
    status: "ok",
    result: data,
  } as const;
}
6 changes: 3 additions & 3 deletions packages/frame/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@
"mobx": "^6.2.0",
"mobx-react-lite": "^3.2.0",
"mobx-utils": "^6.0.8",
"openai": "^4.11.1",
"ai": "2.2.14",
"prosemirror-model": "^1.19.3",
"prosemirror-view": "^1.31.7",
"prosemirror-state": "^1.4.3",
Expand All @@ -48,7 +46,9 @@
"@vitest/coverage-v8": "^0.33.0",
"@vitejs/plugin-react": "^4.1.0",
"@types/prettier": "^3.0.0",
"chai": "^4.3.7"
"chai": "^4.3.7",
"openai": "^4.11.1",
"ai": "2.2.14"
},
"type": "module",
"source": "src/index.ts",
Expand Down
8 changes: 7 additions & 1 deletion packages/frame/src/Frame.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,13 @@ export const Frame: React.FC<Props> = observer((props) => {
execute: async (editor: BlockNoteEditor<any>) => {
const p = prompt("AI");

const commands = await getAICode(p!, tools.newExecutionHost, editor);
const commands = await getAICode(
p!,
tools.newExecutionHost,
editor,
editorStore.current!,
connectionMethods.current!.queryLLM,
);
// debugger;
// const commands = [
// {
Expand Down
159 changes: 100 additions & 59 deletions packages/frame/src/ai/ai.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
// import LocalExecutionHost from "../../../runtime/executor/executionHosts/local/LocalExecutionHost"
import "@blocknote/core/style.css";
import { OpenAIStream, StreamingTextResponse } from "ai";
import * as mobx from "mobx";
import * as monaco from "monaco-editor";
import { OpenAI } from "openai";
import { ChatCompletionMessageParam } from "openai";

import { BlockNoteEditor } from "@blocknote/core";
import { HostBridgeMethods } from "@typecell-org/shared";
import { uri } from "vscode-lib";
import { EditorStore } from "../EditorStore";
import { compile } from "../runtime/compiler/compilers/MonacoCompiler";
import { ExecutionHost } from "../runtime/executor/executionHosts/ExecutionHost";
import { customStringify } from "../stringify";
Expand Down Expand Up @@ -114,35 +115,61 @@ but instead:
$.complexObject.newProperty = 5;
`;

const openai = new OpenAI({
apiKey: "",
dangerouslyAllowBrowser: true,
});

export async function getAICode(
prompt: string,
executionHost: ExecutionHost,
editor: BlockNoteEditor<any>,
editorStore: EditorStore,
queryLLM: HostBridgeMethods["queryLLM"],
) {
const models = monaco.editor.getModels();
const typeCellModels = models.filter((m) =>
m.uri.path.startsWith("/!typecell:typecell.org"),
);
const blocks = editor.topLevelBlocks;

let blockContexts: any[] = [];
const iterateBlocks = (blocks: any[]) => {
for (const block of blocks) {
const b = editorStore.getBlock(block.id);
if (b?.context?.default) {
blockContexts.push(b.context.default);
}
iterateBlocks(block.children);
}
};
iterateBlocks(blocks);

blockContexts = blockContexts.map((output) =>
Object.fromEntries(
Object.getOwnPropertyNames(output).map((key) => [
key,
mobx.toJS(output[key]),
]),
),
);

const tmpModel = monaco.editor.createModel(
"",
"typescript",
uri.URI.parse("file:///tmp.tsx"),
);
tmpModel.setValue(`import React from "!typecell:typecell.org/dqBLFEyFuSUu1";
import * as $ from "!typecell:typecell.org/dqBLFEyFuSUu1";
tmpModel.setValue(`import * as React from "react";
import * as $ from "!typecell:typecell.org/dVeeYvbKcq2Nz";
// expands object types one level deep
type Expand<T> = T extends infer O ? { [K in keyof O]: O[K] extends { Key: React.Key | null } ? "[REACT]" : O[K] } : never;
// expands object types recursively
type ExpandRecursively<T> = T extends object
? T extends infer O ? { [K in keyof O]: O[K] extends { key: React.Key } ? "[REACT ELEMENT]" : ExpandRecursively<O[K]> } : never
? T extends (...args: any[]) => any
? T
: T extends infer O
? {
[K in keyof O]: O[K] extends { key: React.Key }
? "[REACT ELEMENT]"
: ExpandRecursively<O[K]>;
}
: never
: T;
// ? T extends infer O ? { [K in keyof O]: ExpandRecursively<O[K]> } : never
Expand All @@ -158,7 +185,6 @@ type ExpandRecursively<T> = T extends object
const def2 = await ts.getQuickInfoAtPosition(tmpModel.uri.toString(), pos);
const contextType = def2.displayParts.map((x: any) => x.text).join("");
// const def3 = await ts.get(tmpModel.uri.toString(), pos, {});

tmpModel.dispose();

const codeInfoPromises = typeCellModels.map(async (m) => {
Expand Down Expand Up @@ -212,7 +238,9 @@ type ExpandRecursively<T> = T extends object
if (block.children) {
block.children = block.children.map(cleanBlock);
}
block.content = block.content.map((x: any) => x.text).join("");
if (Array.isArray(block.content)) {
block.content = block.content.map((x: any) => x.text).join("");
}
return block;
}
// console.log("request", JSON.stringify(blocks).length);
Expand All @@ -225,48 +253,60 @@ type ExpandRecursively<T> = T extends object
contextType.replace("type ContextType = ", "const $: ") +
" = " +
JSON.stringify(outputJS);
// Ask OpenAI for a streaming chat completion given the prompt
const response = await openai.chat.completions.create({
// model: "gpt-3.5-turbo-16k",
model: "gpt-4",
stream: true,
messages: [
{
role: "system",
content: TYPECELL_PROMPT,
},
{
role: "user",
content: `This is my document data:

const blockContextInfo = blockContexts.length
? `typecell.editor.findBlocks = (predicate: (context) => boolean) {
return (${JSON.stringify(blockContexts)}).find(predicate);
}`
: undefined;

const messages: Array<ChatCompletionMessageParam> = [
{
role: "system",
content: TYPECELL_PROMPT,
},
{
role: "user",
content: `This is my document data:
"""${JSON.stringify(sanitized)}"""`,
},
{
role: "user",
content:
"This is the type and runtime data available under the reactive $ variable for read / write access. If you need to change / read some information from the live document, it's likely you need to access it from here using $.<variable name> \n" +
contextInfo,
},
// codeInfos.length
// ? {
// role: "user",
// content: `This is the runtime / compiler data of the Code Blocks (CodeBlockRuntimeInfo[]):
// """${JSON.stringify(codeInfos)}"""`,
// }
// : {
// role: "user",
// content: `There are no code blocks in the document, so there's no runtime / compiler data for these (CodeBlockRuntimeInfo[]).`,
// },
{
role: "system",
content: `You are an AI assistant helping user to modify his document. This means changes can either be code related (in that case, you'll need to add / modify Code Blocks),
},
{
role: "user",
content:
"This is the type and runtime data available under the reactive $ variable for read / write access. If you need to change / read some information from the live document, it's likely you need to access it from here using $.<variable name> \n" +
contextInfo +
(blockContextInfo
? "\n" +
`We also have this function "typecell.editor.findBlocks" to extract runtime data from blocks \n` +
blockContextInfo
: ""),
},

// codeInfos.length
// ? {
// role: "user",
// content: `This is the runtime / compiler data of the Code Blocks (CodeBlockRuntimeInfo[]):
// """${JSON.stringify(codeInfos)}"""`,
// }
// : {
// role: "user",
// content: `There are no code blocks in the document, so there's no runtime / compiler data for these (CodeBlockRuntimeInfo[]).`,
// },
{
role: "system",
content: `You are an AI assistant helping user to modify his document. This means changes can either be code related (in that case, you'll need to add / modify Code Blocks),
or not at all (in which case you'll need to add / modify regular blocks), or a mix of both.`,
},
{
role: "user",
content: prompt, // +
// " . \n\nRemember to reply ONLY with OperationsResponse JSON (DO NOT add any further comments). So start with [{ and end with }]",
},
],
},
{
role: "user",
content: prompt, // +
// " . \n\nRemember to reply ONLY with OperationsResponse JSON (DO NOT add any further comments). So start with [{ and end with }]",
},
];

// Ask OpenAI for a streaming chat completion given the prompt
const response = await queryLLM({
messages,
functions: [
{
name: "updateDocument",
Expand Down Expand Up @@ -372,12 +412,13 @@ type ExpandRecursively<T> = T extends object
},
});

const stream = OpenAIStream(response);

// Respond with the stream
const ret = new StreamingTextResponse(stream);
const data = await ret.json();
console.log(data);
console.log(messages);

return JSON.parse(data.function_call.arguments).operations;
if (response.status === "ok") {
const data = JSON.parse(response.result);
return JSON.parse(data.function_call.arguments).operations;
} else {
console.error("queryLLM error", response.error);
}
return undefined;
}
Loading

0 comments on commit f914978

Please sign in to comment.