Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Steve/pre-generated-preview-2 #1068

Merged
merged 3 commits into from
Jul 31, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
170 changes: 144 additions & 26 deletions bricks/ai/src/chat-agent/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@ const { defineElement, property, event, method } = createDecorators();

// Public properties of the `<ai.chat-agent>` brick.
export interface ChatAgentProps {
// ID of the AI agent to converse with; sent in the chat request body.
agentId?: string;
// ID of the robot; sent alongside agentId in the chat request body.
// NOTE(review): presumably selects a robot persona server-side — confirm with the API.
robotId?: string;
// Existing conversation to continue. When unset, the backend assigns one
// and it is captured from the first response chunk.
conversationId?: string;
// When true, every request starts a fresh conversation: prior messages are
// cleared and no conversationId is sent or retained.
alwaysUseNewConversation?: boolean;
}

export interface Message extends BaseMessage {
Expand All @@ -38,6 +40,17 @@ export interface MessageChunk {
partial?: boolean;
}

// One SSE chunk from the low-level chat endpoint. The shape mirrors the
// OpenAI chat-completions streaming response (see lowLevelSendRequest).
export interface LowLevelMessageChunk {
choices: LowLevelChoice[];
}

// A single streamed choice; only `choices[0]` is consumed by this brick.
export interface LowLevelChoice {
delta: {
role: "assistant";
// Incremental assistant text; may be absent for non-content deltas.
content?: string;
};
}

export const ChatAgentComponent = forwardRef(LegacyChatAgentComponent);

/**
Expand All @@ -51,17 +64,49 @@ class ChatAgent extends ReactNextElement implements ChatAgentProps {
@property()
accessor agentId: string | undefined;

@property()
accessor robotId: string | undefined;

@property()
accessor conversationId: string | undefined;

@property()
accessor alwaysUseNewConversation: boolean | undefined;

/**
* 发送消息到默认的聊天 API
*/
@method()
postMessage(content: string) {
return this.#ref.current?.postMessage(content);
}

/**
* 发送聊天请求到指定的 URL
*/
@method()
sendRequest(content: string, url: string, options: Options<MessageChunk>) {
return this.#ref.current?.sendRequest(content, url, options);
sendRequest(
leadingMessages: string | BaseMessage[],
url: string,
options: Options<MessageChunk>
) {
return this.#ref.current?.sendRequest(leadingMessages, url, options);
}

/**
* 发送底层聊天请求到指定的 URL。接口的请求和响应的数据结构和 OpenAI 聊天接口一致。
*/
@method()
lowLevelSendRequest(
leadingMessages: string | BaseMessage[],
url: string,
options: Options<MessageChunk>
) {
return this.#ref.current?.lowLevelSendRequest(
leadingMessages,
url,
options
);
}

@method()
Expand Down Expand Up @@ -104,7 +149,9 @@ class ChatAgent extends ReactNextElement implements ChatAgentProps {
<ChatAgentComponent
ref={this.#ref}
agentId={this.agentId}
robotId={this.robotId}
conversationId={this.conversationId}
alwaysUseNewConversation={this.alwaysUseNewConversation}
// onMessageChunkPush={this.#handleMessageChunkPush}
onMessagesUpdate={this.#handleMessagesUpdate}
onBusyChange={this.#handleBusyChange}
Expand All @@ -124,7 +171,12 @@ export interface ChatAgentComponentProps extends ChatAgentProps {
export interface ChatAgentRef {
postMessage(content: string): Promise<string | null>;
sendRequest(
content: string,
leadingMessages: string | BaseMessage[],
url: string,
options: Options<MessageChunk>
): Promise<string | null>;
lowLevelSendRequest(
leadingMessages: string | BaseMessage[],
url: string,
options: Options<MessageChunk>
): Promise<string | null>;
Expand All @@ -134,7 +186,9 @@ export interface ChatAgentRef {
export function LegacyChatAgentComponent(
{
agentId,
robotId,
conversationId: propConversationId,
alwaysUseNewConversation,
onMessageChunkPush,
onMessagesUpdate,
onBusyChange,
Expand Down Expand Up @@ -181,12 +235,20 @@ export function LegacyChatAgentComponent(
[onMessageChunkPush]
);

const sendRequest = useCallback(
async (content: string, url: string, options: Options<MessageChunk>) => {
const legacySendRequest = useCallback(
async (
isLowLevel: boolean,
leadingMessages: string | BaseMessage[],
url: string,
options: Options<MessageChunk | LowLevelMessageChunk>
) => {
// Use ref instead of state to handle sync sequential calls.
if (busyRef.current) {
return null;
}
if (alwaysUseNewConversation || isLowLevel) {
setFullMessages((prev) => (prev.length === 0 ? prev : []));
}
const thisChatId = chatIdRef.current;
let newConversationError: Error | undefined;
const checkNewConversation = async () => {
Expand All @@ -201,19 +263,35 @@ export function LegacyChatAgentComponent(

const userKey = getMessageChunkKey();
const assistantKey = getMessageChunkKey();
let currentConversationId = conversationId;
let currentConversationId =
alwaysUseNewConversation || isLowLevel ? null : conversationId;

onBusyChange?.((busyRef.current = true));

try {
pushPartialMessage?.({
key: userKey,
delta: {
content: content,
role: "user",
},
});
const request = createSSEStream<MessageChunk>(
if (Array.isArray(leadingMessages)) {
for (const msg of leadingMessages) {
const isAssistant = msg.role === "assistant";
if (isAssistant || msg.role === "user") {
pushPartialMessage?.({
key: isAssistant ? assistantKey : userKey,
delta: {
role: msg.role,
content: msg.content,
},
});
}
}
} else {
pushPartialMessage?.({
key: userKey,
delta: {
content: leadingMessages,
role: "user",
},
});
}
const request = createSSEStream<MessageChunk | LowLevelMessageChunk>(
new URL(url, `${location.origin}${getBasePath()}`).toString(),
options
);
Expand Down Expand Up @@ -246,13 +324,34 @@ export function LegacyChatAgentComponent(

await checkNewConversation();

pushPartialMessage?.({
delta: value.delta,
key: assistantKey,
partial: true,
});
if (value.conversationId && !currentConversationId) {
setConversationId((currentConversationId = value.conversationId));
if (isLowLevel) {
const delta = (value as LowLevelMessageChunk).choices?.[0]?.delta;
if (delta?.content) {
pushPartialMessage({
delta: {
role: delta.role,
content: delta.content,
},
key: assistantKey,
partial: true,
});
}
} else {
pushPartialMessage?.({
delta: (value as MessageChunk).delta,
key: assistantKey,
partial: true,
});
if (
!alwaysUseNewConversation &&
(value as MessageChunk).conversationId &&
!currentConversationId
) {
setConversationId(
(currentConversationId = (value as MessageChunk)
.conversationId!)
);
}
}
}

Expand Down Expand Up @@ -304,24 +403,36 @@ export function LegacyChatAgentComponent(

return currentConversationId;
},
[conversationId, getMessageChunkKey, onBusyChange, pushPartialMessage]
[
conversationId,
alwaysUseNewConversation,
getMessageChunkKey,
onBusyChange,
pushPartialMessage,
]
);

useImperativeHandle(
ref,
() => ({
sendRequest,
lowLevelSendRequest: (...args) => legacySendRequest(true, ...args),
sendRequest: (...args) => legacySendRequest(false, ...args),
postMessage(content: string) {
return sendRequest(
return legacySendRequest(
false,
content,
"api/gateway/[email protected]/api/aiops_chat/v1/chat/completions",
{
method: "POST",
body: JSON.stringify({
agentId,
robotId,
input: content,
stream: true,
conversationId,
conversationId:
alwaysUseNewConversation || conversationId === null
? undefined
: conversationId,
}),
headers: {
"giraffe-contract-name":
Expand All @@ -339,7 +450,14 @@ export function LegacyChatAgentComponent(
}
},
}),
[agentId, conversationId, onBusyChange, sendRequest]
[
legacySendRequest,
agentId,
robotId,
alwaysUseNewConversation,
conversationId,
onBusyChange,
]
);

useEffect(() => {
Expand Down
2 changes: 2 additions & 0 deletions bricks/visual-builder/src/bootstrap.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,6 @@ import "./property-editor/index.js";
import "./data-providers/check-editor-by-name.js";
import "./data-providers/get-data-dependency.js";
import "./data-providers/get-dependency-tree.js";
import "./raw-data-preview/index.js";
import "./data-providers/parse-path.js";
import "./data-providers/batch-update-raw-data-generated-view.js";
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
// Unit tests for batchUpdateRawDataGeneratedView.
// NOTE(review): `jest` was referenced below (jest.mock) without being imported
// from "@jest/globals"; this file opts into explicit test globals, so under
// `injectGlobals: false` the bare `jest` would be undefined. Import it
// explicitly alongside the other globals.
import { describe, test, expect, jest } from "@jest/globals";
import { InstanceApi_createInstance } from "@next-api-sdk/cmdb-sdk";
import { batchUpdateRawDataGeneratedView } from "./batch-update-raw-data-generated-view.js";

// Auto-mock the CMDB SDK so no real API calls are made.
jest.mock("@next-api-sdk/cmdb-sdk");

describe("batchUpdateRawDataGeneratedView", () => {
  test("should work", async () => {
    const result = await batchUpdateRawDataGeneratedView([
      {
        attrInstanceId: "i-1",
        input: "input-1",
        output: "output-1",
        list: [],
      },
      {
        attrInstanceId: "i-2",
        input: "input-2",
        output: "output-2",
        list: [],
      },
    ]);

    // Both writes resolve (the auto-mock resolves with undefined) and the
    // settled results come back in input order.
    expect(result).toMatchInlineSnapshot(`
[
  {
    "status": "fulfilled",
    "value": undefined,
  },
  {
    "status": "fulfilled",
    "value": undefined,
  },
]
`);
    // One createInstance call per generation, with attrInstanceId moved into
    // the `attr` relation array.
    expect(InstanceApi_createInstance).toBeCalledTimes(2);
    expect(InstanceApi_createInstance).toHaveBeenNthCalledWith(
      1,
      "RAW_DATA_GENERATED_VIEW@EASYOPS",
      {
        input: "input-1",
        output: "output-1",
        list: [],
        attr: ["i-1"],
      }
    );
    expect(InstanceApi_createInstance).toHaveBeenNthCalledWith(
      2,
      "RAW_DATA_GENERATED_VIEW@EASYOPS",
      {
        input: "input-2",
        output: "output-2",
        list: [],
        attr: ["i-2"],
      }
    );
  });
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { createProviderClass } from "@next-core/utils/general";
import { InstanceApi_createInstance } from "@next-api-sdk/cmdb-sdk";

// One AI-generated view to persist as a RAW_DATA_GENERATED_VIEW instance.
export interface GeneratedView {
// instanceId of the raw-data attribute this view was generated for;
// stored on the created instance as the relation `attr: [attrInstanceId]`.
attrInstanceId: string;
// NOTE(review): presumably the generation prompt/input — confirm against the caller.
input: string;
// NOTE(review): presumably the generated view output — confirm against the caller.
output: string;
list: unknown[];
defaultVisualWeight?: number;
systemPromptVersion?: string;
}

/**
 * Persist a batch of AI-generated views, creating one
 * `RAW_DATA_GENERATED_VIEW@EASYOPS` CMDB instance per generation.
 *
 * Writes are issued in parallel and wrapped with `Promise.allSettled`, so a
 * failure on one item does not abort the others; the caller receives a
 * per-item settlement result in the same order as the input.
 *
 * @param generations - Generated views to save; may be empty (resolves to []).
 * @returns Settlement results, one per input generation, in input order.
 */
export async function batchUpdateRawDataGeneratedView(
  generations: GeneratedView[]
): Promise<PromiseSettledResult<unknown>[]> {
  return Promise.allSettled(
    generations.map(({ attrInstanceId, ...restProps }) =>
      // The attribute is linked through the `attr` relation, which expects an
      // array of instanceIds; all remaining fields are stored as-is.
      InstanceApi_createInstance("RAW_DATA_GENERATED_VIEW@EASYOPS", {
        ...restProps,
        attr: [attrInstanceId],
      })
    )
  );
}

// Register the function as a provider brick so it can be invoked by name
// ("visual-builder.batch-update-raw-data-generated-view") from brick configs.
customElements.define(
"visual-builder.batch-update-raw-data-generated-view",
createProviderClass(batchUpdateRawDataGeneratedView)
);
Loading
Loading