Skip to content

Commit

Permalink
Add audio player and fix sending feedback
Browse files Browse the repository at this point in the history
  • Loading branch information
vincelwt committed Dec 3, 2024
1 parent bf036ef commit 6b7ac70
Show file tree
Hide file tree
Showing 6 changed files with 158 additions and 34 deletions.
12 changes: 2 additions & 10 deletions packages/backend/src/api/v1/users.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import Router from "koa-router";
import { hasAccess, roles } from "shared";
import { z } from "zod";
import { signJWT } from "./auth/utils";
import { sendSlackMessage } from "@/src/utils/notifications";

const users = new Router({
prefix: "/users",
Expand Down Expand Up @@ -66,16 +67,7 @@ users.get("/me/org", async (ctx: Context) => {

users.post("/feedback", async (ctx: Context) => {
const { text } = ctx.request.body as { text: string };
await fetch(
"https://hooks.slack.com/services/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",
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ text }),
},
);
await sendSlackMessage(text, "feedback");
ctx.body = { ok: true };
});

Expand Down
3 changes: 2 additions & 1 deletion packages/backend/src/utils/notifications.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
const channels = {
billing: process.env.SLACK_BILLING_CHANNEL,
users: process.env.SLACK_USERS_CHANNEL,
feedback: process.env.SLACK_FEEDBACK_CHANNEL,
};

export const sendSlackMessage = async (
msg: string,
thread: "billing" | "users",
thread: "billing" | "users" | "feedback",
) => {
if (!process.env.SLACK_BOT_TOKEN) return;

Expand Down
92 changes: 92 additions & 0 deletions packages/frontend/components/SmartViewer/AudioPlayer.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
import { ActionIcon, Group, Paper, Slider, Text } from "@mantine/core";
import { IconPlayerPause, IconPlayerPlay } from "@tabler/icons-react";
import { useEffect, useRef, useState } from "react";
import classes from "./index.module.css";

interface AudioPlayerProps {
src: string;
compact?: boolean;
}

/**
 * Minimal audio player with a play/pause button, a seek slider and
 * (when not compact) an elapsed-time label.
 *
 * @param src     Audio source URL (may be a base64 `data:` URI).
 * @param compact Shrinks the control and hides the time label.
 */
export function AudioPlayer({ src, compact = false }: AudioPlayerProps) {
  const [isPlaying, setIsPlaying] = useState(false);
  const [currentTime, setCurrentTime] = useState(0);
  const [duration, setDuration] = useState(0);
  const audioRef = useRef<HTMLAudioElement>(null);

  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;

    const updateTime = () => setCurrentTime(audio.currentTime);
    const handleLoadedMetadata = () => {
      // duration is NaN before metadata loads and Infinity for streams;
      // keep the slider max finite so it stays usable.
      setDuration(Number.isFinite(audio.duration) ? audio.duration : 0);
    };
    // Derive isPlaying from the element's own events instead of toggling
    // it optimistically: this stays correct when play() is rejected by the
    // browser's autoplay policy or playback is paused externally.
    const handlePlay = () => setIsPlaying(true);
    const handlePause = () => setIsPlaying(false);
    const handleEnded = () => setIsPlaying(false);

    audio.addEventListener("timeupdate", updateTime);
    audio.addEventListener("loadedmetadata", handleLoadedMetadata);
    audio.addEventListener("play", handlePlay);
    audio.addEventListener("pause", handlePause);
    audio.addEventListener("ended", handleEnded);

    return () => {
      audio.removeEventListener("timeupdate", updateTime);
      audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
      audio.removeEventListener("play", handlePlay);
      audio.removeEventListener("pause", handlePause);
      audio.removeEventListener("ended", handleEnded);
    };
  }, []);

  // Reset progress when the source changes so stale position/duration
  // from the previous clip is not shown.
  useEffect(() => {
    setCurrentTime(0);
    setDuration(0);
    setIsPlaying(false);
  }, [src]);

  const togglePlay = () => {
    const audio = audioRef.current;
    if (!audio) return;

    if (isPlaying) {
      audio.pause();
    } else {
      // play() returns a promise that rejects when playback is blocked;
      // swallow the rejection — the play/pause listeners above keep
      // isPlaying in sync either way.
      void audio.play().catch(() => {});
    }
  };

  const handleSliderChange = (value: number) => {
    if (!audioRef.current) return;
    audioRef.current.currentTime = value;
    setCurrentTime(value);
  };

  // Format seconds as m:ss; guards against NaN/Infinity before metadata.
  const formatTime = (time: number) => {
    if (!Number.isFinite(time) || time < 0) return "0:00";
    const minutes = Math.floor(time / 60);
    const seconds = Math.floor(time % 60);
    return `${minutes}:${seconds.toString().padStart(2, "0")}`;
  };

  return (
    <Paper className={classes.audioPlayer} p="xs">
      <audio ref={audioRef} src={src} />
      <Group gap="xs">
        <ActionIcon
          variant="subtle"
          onClick={togglePlay}
          size={compact ? "sm" : "md"}
        >
          {isPlaying ? (
            <IconPlayerPause size={16} />
          ) : (
            <IconPlayerPlay size={16} />
          )}
        </ActionIcon>

        <Slider
          value={currentTime}
          onChange={handleSliderChange}
          max={duration}
          min={0}
          size="xs"
          style={{ flex: 1 }}
        />

        {!compact && (
          <Text size="xs" c="dimmed" w={45}>
            {formatTime(currentTime)}
          </Text>
        )}
      </Group>
    </Paper>
  );
}
45 changes: 38 additions & 7 deletions packages/frontend/components/SmartViewer/Message.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -31,16 +31,14 @@ import ProtectedText from "../blocks/ProtectedText";
import { RenderJson } from "./RenderJson";
import classes from "./index.module.css";

import { useEffect, useMemo, useState } from "react";
import { useEffect, useMemo } from "react";

import { SentimentEnrichment2 } from "@/utils/enrichment";
import { getFlagEmoji, getLanguageName } from "@/utils/format";
import { openConfirmModal } from "@mantine/modals";
import HighlightPii from "./HighlightPii";
import AppUserAvatar from "../blocks/AppUserAvatar";
import { useDisclosure, useLocalStorage } from "@mantine/hooks";
import { useAnalyticsChartData } from "@/utils/dataHooks/analytics";
import { deserializeDateRange, getDefaultDateRange } from "@/pages/analytics";
import { AudioPlayer } from "./AudioPlayer";

const ghostTextAreaStyles = {
variant: "unstyled",
Expand Down Expand Up @@ -122,6 +120,8 @@ function FunctionCallMessage({ data, color, compact, piiDetection }) {
data={data}
compact={compact}
piiDetection={piiDetection}
editable={false}
onChange={() => {}}
/>
);
}
Expand Down Expand Up @@ -266,7 +266,30 @@ function MiniatureImage({ src }) {
);
}

function ImageMessage({ data, compact }) {
// Based on OpenAI's ChatCompletionContentPart: one part of a multi-part
// chat message `content` array.
// NOTE(review): the image render path below accesses `item.imageUrl.url`,
// but this variant declares snake_case `image_url` — confirm which shape
// the stored messages actually use; one of the two looks wrong.
type ChatMessageBlock =
  | {
      // Plain text segment.
      type: "text";
      text: string;
    }
  | {
      // Image referenced by URL (may also be a data: URI).
      type: "image_url";
      image_url: { url: string };
    }
  | {
      // Inline audio payload; `data` is base64-encoded audio bytes.
      type: "input_audio";
      input_audio: { data: string; format: "wav" | "mp3" };
    };

function BlockMessage({
data,
compact,
}: {
data: {
content: ChatMessageBlock[];
};
compact: boolean;
}) {
return (
<Code className={classes.textMessage}>
<Stack gap={compact ? "5" : "md"}>
Expand All @@ -279,6 +302,14 @@ function ImageMessage({ data, compact }) {
) : (
<ResponsiveImage key={index} src={item.imageUrl.url} />
);
} else if (item.type === "input_audio") {
return (
<AudioPlayer
key={index}
src={`data:audio/${item.input_audio.format};base64,${item.input_audio.data}`}
compact={compact}
/>
);
}
return null;
})}
Expand Down Expand Up @@ -319,7 +350,7 @@ function ChatMessageContent({
}) {
const textContent = data?.text || data?.content;
const hasTextContent = typeof textContent === "string";
const hasImageContent = Array.isArray(data?.content);
const hasBlockContent = Array.isArray(data?.content);
const hasFunctionCall = data?.functionCall;
const hasToolCalls = data?.toolCalls || data?.tool_calls;

Expand Down Expand Up @@ -362,7 +393,7 @@ function ChatMessageContent({
/>
)}

{hasImageContent && <ImageMessage data={data} compact={compact} />}
{hasBlockContent && <BlockMessage data={data} compact={compact} />}

{hasFunctionCall && (
<FunctionCallMessage
Expand Down
6 changes: 6 additions & 0 deletions packages/frontend/components/SmartViewer/index.module.css
Original file line number Diff line number Diff line change
Expand Up @@ -150,3 +150,9 @@
border-radius: var(--mantine-radius-lg);
max-width: 430px;
}

/* Surface for the SmartViewer AudioPlayer component: theme-default
   background, small radius, stretched to the full message width. */
.audioPlayer {
background-color: var(--mantine-color-default);
border-radius: var(--mantine-radius-sm);
width: 100%;
}
34 changes: 18 additions & 16 deletions packages/frontend/components/layout/Empty.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,8 @@ const IntegrationButton = ({

const CODE_SAMPLES = {
openai: {
js: `import { monitorOpenAI } from "lunary/openai"
import OpenAI from "openai"
js: `import OpenAI from "openai"
import { monitorOpenAI } from "lunary/openai"
const openai = monitorOpenAI(new OpenAI())
Expand All @@ -74,9 +74,22 @@ lunary.monitor(client)
chat_completion = client.chat.completions.create(
model="gpt-4",
messages=[{"role": "user", "content": "Say this is a test"}]
messages=[{"role": "user", "content": "Hello!"}]
)`,
},
anthropic: {
js: `
import Anthropic from "anthropic"
import { monitorAnthropic } from "lunary/anthropic"
const anthropic = monitorAnthropic(new Anthropic())
const result = await anthropic.messages.create({
model: "claude-3-5-sonnet-20240620",
messages: [{ role: "user", content: "Hello!" }]
})`,
py: `coming soon`,
},
langchain: {
js: `import { ChatOpenAI } from "langchain/chat_models/openai"
import { LunaryHandler } from "lunary/langchain"
Expand All @@ -102,7 +115,7 @@ litellm.failure_callback = ["lunary"]
response = completion(
model="gpt-4",
messages=[{"role": "user", "content": "Hi - i'm openai"}]
messages=[{"role": "user", "content": "Hello!"}]
)`,
},
custom: {
Expand Down Expand Up @@ -503,18 +516,7 @@ export function EmptyOnboarding() {

const content = {
openai: <IntegrationStepper integration="openai" />,
anthropic: (
<Stack>
<Alert icon={<IconInfoCircle size={32} />} color="blue">
<Text size="md">
We're working hard on the Anthropic integration. It will be
available soon! In the meantime, you can use our LiteLLM
integration to connect to Anthropic.
</Text>
</Alert>
<RequestIntegrationForm integrationName="Anthropic" />
</Stack>
),
anthropic: <IntegrationStepper integration="anthropic" />,
langchain: <IntegrationStepper integration="langchain" />,
llamaindex: (
<Stack>
Expand Down

0 comments on commit 6b7ac70

Please sign in to comment.