Skip to content

feat: Complete chat history panel integration #44

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 46 additions & 6 deletions app/frontend/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,24 +3,27 @@ import { Mic, MicOff } from "lucide-react";
import { useTranslation } from "react-i18next";

import { Button } from "@/components/ui/button";
import { GroundingFiles } from "@/components/ui/grounding-files";
import GroundingFileView from "@/components/ui/grounding-file-view";
import StatusMessage from "@/components/ui/status-message";
import HistoryPanel from "@/components/ui/history-panel";

import useRealTime from "@/hooks/useRealtime";
import useAudioRecorder from "@/hooks/useAudioRecorder";
import useAudioPlayer from "@/hooks/useAudioPlayer";

import { GroundingFile, ToolResult } from "./types";
import { GroundingFile, HistoryItem, ToolResult } from "./types";

import logo from "./assets/logo.svg";

function App() {
const [isRecording, setIsRecording] = useState(false);
const [groundingFiles, setGroundingFiles] = useState<GroundingFile[]>([]);
const [selectedFile, setSelectedFile] = useState<GroundingFile | null>(null);
const [groundingFiles, setGroundingFiles] = useState<GroundingFile[]>([]);
const [showTranscript, setShowTranscript] = useState(false);
const [history, setHistory] = useState<HistoryItem[]>([]);

const { startSession, addUserAudio, inputAudioBufferClear } = useRealTime({
enableInputAudioTranscription: true, // Enable input audio transcription from the user to show in the history
onWebSocketOpen: () => console.log("WebSocket connection opened"),
onWebSocketClose: () => console.log("WebSocket connection closed"),
onWebSocketError: event => console.error("WebSocket error:", event),
Expand All @@ -33,12 +36,39 @@ function App() {
},
onReceivedExtensionMiddleTierToolResponse: message => {
const result: ToolResult = JSON.parse(message.tool_result);

const files: GroundingFile[] = result.sources.map(x => {
return { id: x.chunk_id, name: x.title, content: x.chunk };
});

setGroundingFiles(prev => [...prev, ...files]);
setGroundingFiles(files); // Store the grounding files for the assistant
},
onReceivedInputAudioTranscriptionCompleted: message => {
// Update history with input audio transcription when completed
const newHistoryItem: HistoryItem = {
id: message.event_id,
transcript: message.transcript,
groundingFiles: [],
sender: "user",
timestamp: new Date() // Add timestamp
};
setHistory(prev => [...prev, newHistoryItem]);
},
onReceivedResponseDone: message => {
const transcript = message.response.output.map(output => output.content?.map(content => content.transcript).join(" ")).join(" ");
if (!transcript) {
return;
}

// Update history with response done
const newHistoryItem: HistoryItem = {
id: message.event_id,
transcript: transcript,
groundingFiles: groundingFiles,
sender: "assistant",
timestamp: new Date() // Add timestamp
};
setHistory(prev => [...prev, newHistoryItem]);
setGroundingFiles([]); // Clear the assistant grounding files after use
}
});

Expand Down Expand Up @@ -91,14 +121,24 @@ function App() {
</Button>
<StatusMessage isRecording={isRecording} />
</div>
<GroundingFiles files={groundingFiles} onSelected={setSelectedFile} />
<div className="mb-4 flex space-x-4">
<button
onClick={() => setShowTranscript(!showTranscript)}
className="text-blue-500 hover:underline focus:outline-none"
aria-label={t("app.showTranscript")}
>
{t("app.showTranscript")}
</button>
</div>
</main>

<footer className="py-4 text-center">
<p>{t("app.footer")}</p>
</footer>

<GroundingFileView groundingFile={selectedFile} onClosed={() => setSelectedFile(null)} />

<HistoryPanel show={showTranscript} history={history} onClosed={() => setShowTranscript(false)} onSelectedGroundingFile={setSelectedFile} />
</div>
);
}
Expand Down
93 changes: 70 additions & 23 deletions app/frontend/src/components/ui/history-panel.tsx
Original file line number Diff line number Diff line change
@@ -1,22 +1,57 @@
import { useEffect, useRef, useState, memo } from "react";
import { AnimatePresence, motion } from "framer-motion";
import { X } from "lucide-react";
import { useTranslation } from "react-i18next";

import { Button } from "./button";
import GroundingFile from "./grounding-file";

import { GroundingFile as GroundingFileType, HistoryItem } from "@/types";

import { useTranslation } from "react-i18next";

// Props for the HistoryPanel sliding transcript panel.
type Properties = {
history: HistoryItem[]; // Transcript entries, oldest first (appended as turns complete)
show: boolean; // Whether the panel is visible
onClosed: () => void; // Invoked when the user dismisses the panel
onSelectedGroundingFile: (file: GroundingFileType) => void; // Invoked when a grounding file chip is clicked
};

export default function HistoryPanel({ show, history, onClosed, onSelectedGroundingFile }: Properties) {
const HistoryPanel = ({ show, history, onClosed, onSelectedGroundingFile }: Properties) => {
const { t } = useTranslation();
const historyEndRef = useRef<HTMLDivElement>(null);
const [currentTime, setCurrentTime] = useState(new Date());

// Scroll to the bottom whenever the history changes
useEffect(() => {
if (historyEndRef.current) {
historyEndRef.current.scrollIntoView({ behavior: "smooth" });
}
}, [history]);

// Update current time every second
useEffect(() => {
const interval = setInterval(() => {
setCurrentTime(new Date());
}, 1000);
return () => clearInterval(interval);
}, []);

// Render a Date as a locale-aware short time (hour and minute, 12-hour clock),
// using the browser's preferred language.
const formatTimestamp = (timestamp: Date) =>
    new Intl.DateTimeFormat(navigator.language, {
        hour: "numeric",
        minute: "numeric",
        hour12: true
    }).format(timestamp);

// Decide whether a timestamp separator should be rendered after a history entry:
// show one when the gap to the next entry (or to "now" for the final entry,
// via the closed-over currentTime) exceeds one minute.
const shouldShowTimestamp = (current: Date, next?: Date) => {
    const reference = next?.getTime() ?? currentTime.getTime();
    const elapsedSeconds = (reference - current.getTime()) / 1000;
    return elapsedSeconds > 60;
};

// NOTE(review): calling memo() inside the component body creates a brand-new
// component type on every render, which defeats memoization and remounts each
// rendered file chip — hoist this to module scope.
const MemoizedGroundingFile = memo(GroundingFile);

return (
<AnimatePresence>
Expand All @@ -28,27 +63,37 @@ export default function HistoryPanel({ show, history, onClosed, onSelectedGround
transition={{ type: "spring", stiffness: 300, damping: 30 }}
className="fixed inset-y-0 right-0 z-40 w-full overflow-y-auto bg-white shadow-lg sm:w-96"
>
<div className="sticky top-0 z-10 mb-4 flex items-center justify-between bg-white px-4 py-2">
<h2 className="text-xl font-bold">{t("history.transcriptHistory")}</h2>
<Button variant="ghost" size="sm" onClick={onClosed}>
<X className="h-5 w-5" />
</Button>
</div>
<div className="p-4">
<div className="mb-4 flex items-center justify-between">
<h2 className="text-xl font-bold">{t("history.answerHistory")}</h2>
<Button variant="ghost" size="sm" onClick={onClosed}>
<X className="h-5 w-5" />
</Button>
</div>
{history.length > 0 ? (
history.map((item, index) => (
<div key={index} className="mb-6 border-b border-gray-200 pb-6">
<h3 className="mb-2 font-semibold">{item.id}</h3>
<pre className="mb-2 overflow-x-auto whitespace-pre-wrap rounded-md bg-gray-100 p-3 text-sm">
<code className="block h-24 overflow-y-auto">{item.transcript}</code>
</pre>
<div className="mt-2 flex flex-wrap gap-2">
{item.groundingFiles.map((file, index) => (
<GroundingFile key={index} value={file} onClick={() => onSelectedGroundingFile(file)} />
))}
</div>
</div>
))
<div className="space-y-4">
{history.map((item, index) => {
const nextItem = history[index + 1];
const showTimestamp = shouldShowTimestamp(item.timestamp, nextItem ? nextItem.timestamp : undefined);
return (
<div key={index}>
<div
className={`rounded-lg p-4 shadow ${item.sender === "user" ? "ml-auto bg-blue-100 pl-4" : "bg-gray-100"}`}
style={{ maxWidth: "75%" }}
>
<p className="text-sm text-gray-700">{item.transcript}</p>
<div className="mt-2 flex flex-wrap gap-2">
{item.groundingFiles?.map((file, index) => (
<MemoizedGroundingFile key={index} value={file} onClick={() => onSelectedGroundingFile(file)} />
))}
</div>
</div>
{showTimestamp && <div className="mt-2 text-center text-xs text-gray-500">{formatTimestamp(item.timestamp)}</div>}
</div>
);
})}
<div ref={historyEndRef} />
</div>
) : (
<p className="text-gray-500">{t("history.noHistory")}</p>
)}
Expand All @@ -57,4 +102,6 @@ export default function HistoryPanel({ show, history, onClosed, onSelectedGround
)}
</AnimatePresence>
);
}
};

export default HistoryPanel;
3 changes: 2 additions & 1 deletion app/frontend/src/locales/en/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
"app": {
"title": "Talk to your data",
"footer": "Built with Azure AI Search + Azure OpenAI",
"showTranscript": "Show transcript",
"stopRecording": "Stop recording",
"startRecording": "Start recording",
"stopConversation": "Stop conversation"
Expand All @@ -11,7 +12,7 @@
"conversationInProgress": "Conversation in progress"
},
"history": {
"answerHistory": "Answer history",
"transcriptHistory": "Transcript history",
"noHistory": "No history yet."
},
"groundingFiles": {
Expand Down
3 changes: 2 additions & 1 deletion app/frontend/src/locales/es/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
"app": {
"title": "Habla con tus datos",
"footer": "Creado con Azure AI Search + Azure OpenAI",
"showTranscript": "Mostrar transcripción",
"stopRecording": "Detener grabación",
"startRecording": "Comenzar grabación",
"stopConversation": "Detener conversación"
Expand All @@ -11,7 +12,7 @@
"conversationInProgress": "Conversación en progreso"
},
"history": {
"answerHistory": "Historial de respuestas",
"transcriptHistory": "Historial de transcripciones",
"noHistory": "Aún no hay historial."
},
"groundingFiles": {
Expand Down
3 changes: 2 additions & 1 deletion app/frontend/src/locales/fr/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
"app": {
"title": "Parlez à vos données",
"footer": "Créée avec Azure AI Search + Azure OpenAI",
"showTranscript": "Afficher la transcription",
"stopRecording": "Arrêter l'enregistrement",
"startRecording": "Commencer l'enregistrement",
"stopConversation": "Arrêter la conversation"
Expand All @@ -11,7 +12,7 @@
"conversationInProgress": "Conversation en cours"
},
"history": {
"answerHistory": "Historique des réponses",
"transcriptHistory": "Historique de la transcription",
"noHistory": "Pas encore d'historique."
},
"groundingFiles": {
Expand Down
3 changes: 2 additions & 1 deletion app/frontend/src/locales/ja/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
"app": {
"title": "データと話す",
"footer": "Azure AI Search + Azure OpenAI で構築",
"showTranscript": "トランスクリプトを表示",
"stopRecording": "録音を停止",
"startRecording": "録音を開始",
"stopConversation": "会話を停止"
Expand All @@ -11,7 +12,7 @@
"conversationInProgress": "会話が進行中"
},
"history": {
"answerHistory": "回答履歴",
"transcriptHistory": "トランスクリプト履歴",
"noHistory": "まだ履歴はありません。"
},
"groundingFiles": {
Expand Down
21 changes: 19 additions & 2 deletions app/frontend/src/types.ts
Original file line number Diff line number Diff line change
@@ -1,50 +1,64 @@
/** A source document chunk used to ground an assistant answer. */
export type GroundingFile = {
id: string; // Chunk identifier (populated from the tool result's chunk_id)
name: string; // Display title of the source document
content: string; // Raw chunk text shown when the file is opened
};

/**
 * A single entry in the conversation transcript history.
 *
 * Note: the diff artifact that declared `groundingFiles` twice (once required,
 * once optional) is resolved here — a TypeScript object type cannot declare the
 * same property name twice; the optional form is the intended one, since user
 * turns carry no grounding files.
 */
export type HistoryItem = {
    id: string; // Event id of the realtime message that produced this entry
    transcript: string; // What the user said or the assistant answered
    groundingFiles?: GroundingFile[]; // Sources backing an assistant answer; absent for user turns
    sender: "user" | "assistant"; // Who produced the transcript
    timestamp: Date; // When the entry was added to the history
};

/**
 * Client -> server command that updates realtime session settings:
 * server-side voice activity detection and optional input transcription.
 */
export type SessionUpdateCommand = {
type: "session.update";
session: {
// Voice activity detection tuning; omit to keep server defaults.
turn_detection?: {
type: "server_vad" | "none";
threshold?: number; // presumably 0..1 VAD sensitivity — TODO confirm against the realtime API docs
prefix_padding_ms?: number; // Milliseconds of audio retained before detected speech
silence_duration_ms?: number; // Silence length (ms) that ends a turn
};
// When present, user audio is transcribed (used to populate the history panel).
input_audio_transcription?: {
model: "whisper-1";
};
};
};

/**
 * Client -> server command that appends a chunk of microphone audio to the
 * input buffer.
 *
 * Note: the diff artifact that declared `audio` twice is resolved here — a
 * TypeScript object type cannot declare the same property name twice.
 */
export type InputAudioBufferAppendCommand = {
    type: "input_audio_buffer.append";
    audio: string; // Base64-encoded audio chunk
};

/** Client -> server command that discards audio already buffered but not yet processed. */
export type InputAudioBufferClearCommand = {
type: "input_audio_buffer.clear";
};

/** Minimal shape shared by every realtime websocket message; `type` selects the concrete variant. */
export type Message = {
type: string;
};

/** Streaming message carrying a chunk of synthesized assistant audio. */
export type ResponseAudioDelta = {
type: "response.audio.delta";
delta: string; // presumably a base64-encoded audio chunk — TODO confirm against the realtime API docs
};

/** Streaming message carrying an incremental piece of the assistant's audio transcript. */
export type ResponseAudioTranscriptDelta = {
type: "response.audio_transcript.delta";
delta: string; // Incremental transcript text
};

// Represents a response indicating that input audio transcription is completed
export type ResponseInputAudioTranscriptionCompleted = {
type: "conversation.item.input_audio_transcription.completed";
event_id: string;
Expand All @@ -53,6 +67,7 @@ export type ResponseInputAudioTranscriptionCompleted = {
transcript: string;
};

// Represents a response indicating that the response is done
export type ResponseDone = {
type: "response.done";
event_id: string;
Expand All @@ -62,13 +77,15 @@ export type ResponseDone = {
};
};

/** Message emitted by the middle tier when a tool invocation completes. */
export type ExtensionMiddleTierToolResponse = {
type: "extension.middle_tier_tool.response";
previous_item_id: string;
tool_name: string;
tool_result: string; // JSON string that needs to be parsed into ToolResult
};

/** Parsed payload of ExtensionMiddleTierToolResponse.tool_result: the source chunks returned by the search tool. */
export type ToolResult = {
sources: { chunk_id: string; title: string; chunk: string }[];
};