Feature: Add voice dialogue functionality to the agent application (#11668)

### What problem does this PR solve?

Adds voice dialogue to the agent application: a microphone button in the message input records speech, transcribes it via the `/conversation/sequence2txt` endpoint, and submits the transcript as a chat message. The Message operator gains an "Auto play audio" option, and assistant messages carrying an attachment get a download button.
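
At its core, the exchange is a multipart POST of the recorded blob to the existing speech-to-text endpoint. A minimal sketch of that call, using the field names and response shape from the `AudioButton` diff below (`apiHost` and `authToken` stand in for the app's own config):

```ts
// Sketch of the speech-to-text round trip performed by the new AudioButton.
// Endpoint path, form fields, and the { code, data: { text } } response shape
// are taken from the diff; apiHost and authToken are placeholders.
async function transcribe(
  blob: Blob,
  apiHost: string,
  authToken: string,
): Promise<string | null> {
  const formData = new FormData();
  formData.append(
    'file',
    new File([blob], 'recording.webm', { type: blob.type || 'audio/webm' }),
  );
  formData.append('stream', 'false');

  const response = await fetch(`${apiHost}/conversation/sequence2txt`, {
    method: 'POST',
    headers: { Authorization: authToken }, // browser sets the multipart boundary
    body: formData,
  });

  const { code, data } = await response.json();
  return code === 0 && data?.text ? data.text : null;
}
```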

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Commit 1388c4420d by chanx, 2025-12-02 19:39:43 +08:00, committed via GitHub. Parent: 962bd5f5df. 16 changed files with 677 additions and 60 deletions.

web/package-lock.json (generated, 97 changes)

@@ -76,6 +76,7 @@
"pptx-preview": "^1.0.5",
"rc-tween-one": "^3.0.6",
"react": "^18.2.0",
"react-audio-voice-recorder": "^2.2.0",
"react-copy-to-clipboard": "^5.1.0",
"react-day-picker": "^9.8.0",
"react-dom": "^18.2.0",
@@ -2852,6 +2853,69 @@
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
},
"node_modules/@ffmpeg/ffmpeg": {
"version": "0.11.6",
"resolved": "https://registry.npmmirror.com/@ffmpeg/ffmpeg/-/ffmpeg-0.11.6.tgz",
"integrity": "sha512-uN8J8KDjADEavPhNva6tYO9Fj0lWs9z82swF3YXnTxWMBoFLGq3LZ6FLlIldRKEzhOBKnkVfA8UnFJuvGvNxcA==",
"license": "MIT",
"dependencies": {
"is-url": "^1.2.4",
"node-fetch": "^2.6.1",
"regenerator-runtime": "^0.13.7",
"resolve-url": "^0.2.1"
},
"engines": {
"node": ">=12.16.1"
}
},
"node_modules/@ffmpeg/ffmpeg/node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/@ffmpeg/ffmpeg/node_modules/regenerator-runtime": {
"version": "0.13.11",
"resolved": "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==",
"license": "MIT"
},
"node_modules/@ffmpeg/ffmpeg/node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmmirror.com/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/@ffmpeg/ffmpeg/node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/@ffmpeg/ffmpeg/node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/@floating-ui/core": {
"version": "0.6.2",
"resolved": "https://registry.npmmirror.com/@floating-ui/core/-/core-0.6.2.tgz",
@@ -21653,6 +21717,12 @@
"node": ">= 0.4"
}
},
"node_modules/is-url": {
"version": "1.2.4",
"resolved": "https://registry.npmmirror.com/is-url/-/is-url-1.2.4.tgz",
"integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==",
"license": "MIT"
},
"node_modules/is-weakmap": {
"version": "2.0.1",
"resolved": "https://registry.npmmirror.com/is-weakmap/-/is-weakmap-2.0.1.tgz",
@@ -29630,6 +29700,30 @@
"node": ">=0.10.0"
}
},
"node_modules/react-audio-visualize": {
"version": "1.2.0",
"resolved": "https://registry.npmmirror.com/react-audio-visualize/-/react-audio-visualize-1.2.0.tgz",
"integrity": "sha512-rfO5nmT0fp23gjU0y2WQT6+ZOq2ZsuPTMphchwX1PCz1Di4oaIr6x7JZII8MLrbHdG7UB0OHfGONTIsWdh67kQ==",
"license": "MIT",
"peerDependencies": {
"react": ">=16.2.0",
"react-dom": ">=16.2.0"
}
},
"node_modules/react-audio-voice-recorder": {
"version": "2.2.0",
"resolved": "https://registry.npmmirror.com/react-audio-voice-recorder/-/react-audio-voice-recorder-2.2.0.tgz",
"integrity": "sha512-Hq+143Zs99vJojT/uFvtpxUuiIKoLbMhxhA7qgxe5v8hNXrh5/qTnvYP92hFaE5V+GyoCXlESONa0ufk7t5kHQ==",
"license": "MIT",
"dependencies": {
"@ffmpeg/ffmpeg": "^0.11.6",
"react-audio-visualize": "^1.1.3"
},
"peerDependencies": {
"react": ">=16.2.0",
"react-dom": ">=16.2.0"
}
},
"node_modules/react-copy-to-clipboard": {
"version": "5.1.0",
"resolved": "https://registry.npmmirror.com/react-copy-to-clipboard/-/react-copy-to-clipboard-5.1.0.tgz",
@@ -32102,8 +32196,7 @@
"version": "0.2.1",
"resolved": "https://registry.npmmirror.com/resolve-url/-/resolve-url-0.2.1.tgz",
"integrity": "sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==",
"deprecated": "https://github.com/lydell/resolve-url#deprecated",
"dev": true
"deprecated": "https://github.com/lydell/resolve-url#deprecated"
},
"node_modules/resolve.exports": {
"version": "2.0.2",

---

@@ -89,6 +89,7 @@
"pptx-preview": "^1.0.5",
"rc-tween-one": "^3.0.6",
"react": "^18.2.0",
"react-audio-voice-recorder": "^2.2.0",
"react-copy-to-clipboard": "^5.1.0",
"react-day-picker": "^9.8.0",
"react-dom": "^18.2.0",

---

@@ -18,7 +18,9 @@ import { cn } from '@/lib/utils';
import { t } from 'i18next';
import { CircleStop, Paperclip, Send, Upload, X } from 'lucide-react';
import * as React from 'react';
import { useEffect } from 'react';
import { toast } from 'sonner';
import { AudioButton } from '../ui/audio-button';
interface IProps {
disabled: boolean;
@@ -52,6 +54,22 @@ export function NextMessageInput({
removeFile,
}: IProps) {
const [files, setFiles] = React.useState<File[]>([]);
const [audioInputValue, setAudioInputValue] = React.useState<string | null>(
null,
);
useEffect(() => {
if (audioInputValue !== null) {
onInputChange({
target: { value: audioInputValue },
} as React.ChangeEvent<HTMLTextAreaElement>);
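// Defer the send one tick so React has committed the synthetic change above
// before onPressEnter reads the updated value.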
setTimeout(() => {
onPressEnter();
setAudioInputValue(null);
}, 0);
}
}, [audioInputValue, onInputChange, onPressEnter]);
const onFileReject = React.useCallback((file: File, message: string) => {
toast(message, {
@@ -171,15 +189,24 @@ export function NextMessageInput({
<CircleStop />
</Button>
) : (
<Button
className="size-5 rounded-sm"
disabled={
sendDisabled || isUploading || sendLoading || !value.trim()
}
>
<Send />
<span className="sr-only">Send message</span>
</Button>
<div className="flex items-center gap-3">
{/* <div className="bg-bg-input rounded-md hover:bg-bg-card p-1"> */}
<AudioButton
onOk={(value) => {
setAudioInputValue(value);
}}
/>
{/* </div> */}
<Button
className="size-5 rounded-sm"
disabled={
sendDisabled || isUploading || sendLoading || !value.trim()
}
>
<Send />
<span className="sr-only">Send message</span>
</Button>
</div>
)}
</div>
</form>

---

@@ -3,6 +3,8 @@ import CopyToClipboard from '@/components/copy-to-clipboard';
import { useSetModalState } from '@/hooks/common-hooks';
import { IRemoveMessageById } from '@/hooks/logic-hooks';
import { AgentChatContext } from '@/pages/agent/context';
import { downloadFile } from '@/services/file-manager-service';
import { downloadFileFromBlob } from '@/utils/file-util';
import {
DeleteOutlined,
DislikeOutlined,
@@ -12,7 +14,7 @@ import {
SyncOutlined,
} from '@ant-design/icons';
import { Radio, Tooltip } from 'antd';
import { NotebookText } from 'lucide-react';
import { Download, NotebookText } from 'lucide-react';
import { useCallback, useContext } from 'react';
import { useTranslation } from 'react-i18next';
import { ToggleGroup, ToggleGroupItem } from '../ui/toggle-group';
@@ -28,6 +30,11 @@ interface IProps {
audioBinary?: string;
showLoudspeaker?: boolean;
showLog?: boolean;
attachment?: {
file_name: string;
doc_id: string;
format: string;
};
}
export const AssistantGroupButton = ({
@@ -38,6 +45,7 @@ export const AssistantGroupButton = ({
showLikeButton,
showLoudspeaker = true,
showLog = true,
attachment,
}: IProps) => {
const { visible, hideModal, showModal, onFeedbackOk, loading } =
useSendFeedback(messageId);
@@ -98,6 +106,27 @@ export const AssistantGroupButton = ({
<NotebookText className="size-4" />
</ToggleGroupItem>
)}
{!!attachment?.doc_id && (
<ToggleGroupItem
value="g"
onClick={async () => {
try {
const response = await downloadFile({
docId: attachment.doc_id,
ext: attachment.format,
});
const blob = new Blob([response.data], {
type: response.data.type,
});
downloadFileFromBlob(blob, attachment.file_name);
} catch (error) {
console.error('Download failed:', error);
}
}}
>
<Download size={16} />
</ToggleGroupItem>
)}
</ToggleGroup>
{visible && (
<FeedbackModal
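
`downloadFile` fetches the attachment from the server; `downloadFileFromBlob` is a helper not shown in this diff. A plausible sketch, assuming the conventional object-URL-and-anchor pattern (the same pattern appears commented out in `handleRecordingComplete` further down):

```ts
// Hypothetical sketch of downloadFileFromBlob (its implementation is not
// part of this diff): trigger a browser download from an in-memory Blob.
export function downloadFileFromBlob(blob: Blob, fileName: string): void {
  const url = URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.href = url;
  anchor.download = fileName;
  document.body.appendChild(anchor);
  anchor.click();
  anchor.remove();
  URL.revokeObjectURL(url); // release the temporary object URL
}
```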

---

@@ -21,10 +21,8 @@ import { INodeEvent, MessageEventType } from '@/hooks/use-send-message';
import { cn } from '@/lib/utils';
import { AgentChatContext } from '@/pages/agent/context';
import { WorkFlowTimeline } from '@/pages/agent/log-sheet/workflow-timeline';
import { downloadFile } from '@/services/file-manager-service';
import { downloadFileFromBlob } from '@/utils/file-util';
import { isEmpty } from 'lodash';
import { Atom, ChevronDown, ChevronUp, Download } from 'lucide-react';
import { Atom, ChevronDown, ChevronUp } from 'lucide-react';
import MarkdownContent from '../next-markdown-content';
import { RAGFlowAvatar } from '../ragflow-avatar';
import { useTheme } from '../theme-provider';
@@ -176,6 +174,7 @@ function MessageItem({
audioBinary={item.audio_binary}
showLoudspeaker={showLoudspeaker}
showLog={showLog}
attachment={item.attachment}
></AssistantGroupButton>
)}
{!isShare && (
@@ -187,6 +186,7 @@ function MessageItem({
audioBinary={item.audio_binary}
showLoudspeaker={showLoudspeaker}
showLog={showLog}
attachment={item.attachment}
></AssistantGroupButton>
)}
</>
@@ -250,7 +250,7 @@ function MessageItem({
{isUser && (
<UploadedMessageFiles files={item.files}></UploadedMessageFiles>
)}
{isAssistant && item.attachment && item.attachment.doc_id && (
{/* {isAssistant && item.attachment && item.attachment.doc_id && (
<div className="w-full flex items-center justify-end">
<Button
variant="link"
@@ -275,7 +275,7 @@ function MessageItem({
<Download size={16} />
</Button>
</div>
)}
)} */}
</section>
</div>
</section>

---

@@ -0,0 +1,422 @@
import { AudioRecorder, useAudioRecorder } from 'react-audio-voice-recorder';
import { Button } from '@/components/ui/button';
import { Authorization } from '@/constants/authorization';
import { cn } from '@/lib/utils';
import api from '@/utils/api';
import { getAuthorization } from '@/utils/authorization-util';
import { Loader2, Mic, Square } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import { useIsDarkTheme } from '../theme-provider';
import { Input } from './input';
import { Popover, PopoverContent, PopoverTrigger } from './popover';
const VoiceVisualizer = ({ isRecording }: { isRecording: boolean }) => {
const canvasRef = useRef<HTMLCanvasElement>(null);
const audioContextRef = useRef<AudioContext | null>(null);
const analyserRef = useRef<AnalyserNode | null>(null);
const animationFrameRef = useRef<number>(0);
const streamRef = useRef<MediaStream | null>(null);
const isDark = useIsDarkTheme();
const startVisualization = async () => {
try {
// Check if the browser supports getUserMedia
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
console.error('Browser does not support getUserMedia API');
return;
}
// Request microphone permission
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
streamRef.current = stream;
// Create audio context and analyzer
const audioContext = new (window.AudioContext ||
(window as any).webkitAudioContext)();
audioContextRef.current = audioContext;
const analyser = audioContext.createAnalyser();
analyserRef.current = analyser;
analyser.fftSize = 32;
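// fftSize = 32 gives frequencyBinCount = 16, so draw() renders a handful of
// wide bars: a level meter rather than a detailed spectrum.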
// Connect audio nodes
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
// Start drawing
draw();
} catch (error) {
console.error(
'Unable to access microphone for voice visualization:',
error,
);
}
};
const stopVisualization = () => {
// Stop animation frame
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
}
// Stop audio stream
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
}
// Close audio context
if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
audioContextRef.current.close();
}
// Clear canvas
const canvas = canvasRef.current;
if (canvas) {
const ctx = canvas.getContext('2d');
if (ctx) {
ctx.clearRect(0, 0, canvas.width, canvas.height);
}
}
};
useEffect(() => {
if (isRecording) {
startVisualization();
} else {
stopVisualization();
}
return () => {
stopVisualization();
};
}, [isRecording]);
const draw = () => {
const canvas = canvasRef.current;
if (!canvas) return;
const ctx = canvas.getContext('2d');
if (!ctx) return;
const analyser = analyserRef.current;
if (!analyser) return;
// Set canvas dimensions
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const centerY = height / 2;
if (canvas.width !== width || canvas.height !== height) {
canvas.width = width;
canvas.height = height;
}
// Clear canvas
ctx.clearRect(0, 0, width, height);
// Get frequency data
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
analyser.getByteFrequencyData(dataArray);
// Draw waveform
const barWidth = (width / bufferLength) * 1.5;
let x = 0;
for (let i = 0; i < bufferLength; i = i + 2) {
const barHeight = (dataArray[i] / 255) * centerY;
// Create gradient
const gradient = ctx.createLinearGradient(
0,
centerY - barHeight,
0,
centerY + barHeight,
);
gradient.addColorStop(0, '#3ba05c'); // Green
gradient.addColorStop(1, '#3ba05c'); // Same green, so the gradient is effectively a solid fill
// gradient.addColorStop(0, isDark ? '#fff' : '#000'); // Blue
// gradient.addColorStop(1, isDark ? '#eee' : '#eee'); // Light blue
ctx.fillStyle = gradient;
ctx.fillRect(x, centerY - barHeight, barWidth, barHeight * 2);
x += barWidth + 2;
}
animationFrameRef.current = requestAnimationFrame(draw);
};
return (
<div className="w-full h-6 bg-transparent flex items-center justify-center overflow-hidden ">
<canvas ref={canvasRef} className="w-full h-full" />
</div>
);
};
const VoiceInputBox = ({
isRecording,
onStop,
recordingTime,
value,
}: {
value: string;
isRecording: boolean;
onStop: () => void;
recordingTime: number;
}) => {
// Format recording time
const formatTime = (seconds: number) => {
const mins = Math.floor(seconds / 60);
const secs = seconds % 60;
return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
};
return (
<div className="w-full">
<div className=" absolute w-full h-6 translate-y-full">
<VoiceVisualizer isRecording={isRecording} />
</div>
<Input
rootClassName="w-full"
className="flex-1 "
readOnly
value={value}
suffix={
<div className="flex justify-end px-1 items-center gap-1 w-20">
<Button
variant={'ghost'}
size="sm"
className="text-text-primary p-1 border-none hover:bg-transparent"
onClick={onStop}
>
<Square className="text-text-primary" size={12} />
</Button>
<span className="text-xs text-text-secondary">
{formatTime(recordingTime)}
</span>
</div>
}
/>
</div>
);
};
export const AudioButton = ({
onOk,
}: {
onOk?: (transcript: string) => void;
}) => {
// const [showInputBox, setShowInputBox] = useState(false);
const [isRecording, setIsRecording] = useState(false);
const [isProcessing, setIsProcessing] = useState(false);
const [recordingTime, setRecordingTime] = useState(0);
const [transcript, setTranscript] = useState('');
const [popoverOpen, setPopoverOpen] = useState(false);
const recorderControls = useAudioRecorder();
const intervalRef = useRef<NodeJS.Timeout | null>(null);
// Handle logic after recording is complete
const handleRecordingComplete = async (blob: Blob) => {
setIsRecording(false);
// const url = URL.createObjectURL(blob);
// const a = document.createElement('a');
// a.href = url;
// a.download = 'recording.webm';
// document.body.appendChild(a);
// a.click();
setIsProcessing(true);
if (intervalRef.current) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
try {
const audioFile = new File([blob], 'recording.webm', {
type: blob.type || 'audio/webm',
// type: 'audio/mpeg',
});
const formData = new FormData();
formData.append('file', audioFile);
formData.append('stream', 'false');
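// 'stream=false' requests one complete JSON response rather than a streamed
// one (inferred from the non-streaming parse below).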
const response = await fetch(api.sequence2txt, {
method: 'POST',
headers: {
[Authorization]: getAuthorization(),
// 'Content-Type': blob.type || 'audio/webm',
},
body: formData,
});
// if (!response.ok) {
// throw new Error(`HTTP error! status: ${response.status}`);
// }
// if (!response.body) {
// throw new Error('ReadableStream not supported in this browser');
// }
console.log('Response:', response);
const { data, code } = await response.json();
if (code === 0 && data && data.text) {
setTranscript(data.text);
console.log('Transcript:', data.text);
onOk?.(data.text);
}
setPopoverOpen(false);
} catch (error) {
console.error('Failed to process audio:', error);
// setTranscript(t('voiceRecorder.processingError'));
} finally {
setIsProcessing(false);
}
};
// Start recording
const startRecording = () => {
recorderControls.startRecording();
setIsRecording(true);
// setShowInputBox(true);
setPopoverOpen(true);
setRecordingTime(0);
// Start timing
if (intervalRef.current) {
clearInterval(intervalRef.current);
}
intervalRef.current = setInterval(() => {
setRecordingTime((prev) => prev + 1);
}, 1000);
};
// Stop recording
const stopRecording = () => {
recorderControls.stopRecording();
setIsRecording(false);
// setShowInputBox(false);
setPopoverOpen(false);
setRecordingTime(0);
// Clear timer
if (intervalRef.current) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
};
// Clear transcription content
// const clearTranscript = () => {
// setTranscript('');
// };
useEffect(() => {
return () => {
if (intervalRef.current) {
clearInterval(intervalRef.current);
}
};
}, []);
return (
<div>
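{/* Disabled alternative UI: a popover-based voice input box, kept behind `false &&` for reference */}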
{false && (
<div className="flex flex-col items-center space-y-4">
<div className="relative">
<Popover
open={popoverOpen}
onOpenChange={(open) => {
setPopoverOpen(true);
}}
>
<PopoverTrigger asChild>
<Button
variant="outline"
size="sm"
onClick={() => {
if (isRecording) {
stopRecording();
} else {
startRecording();
}
}}
className={`w-6 h-6 p-2 rounded-full border-none bg-transparent hover:bg-transparent ${
isRecording ? 'animate-pulse' : ''
}`}
disabled={isProcessing}
>
<Mic size={16} className="text-text-primary" />
</Button>
</PopoverTrigger>
<PopoverContent
align="end"
sideOffset={-20}
className="p-0 border-none"
>
<VoiceInputBox
isRecording={isRecording}
value={transcript}
onStop={stopRecording}
recordingTime={recordingTime}
/>
</PopoverContent>
</Popover>
</div>
</div>
)}
<div className=" relative w-6 h-6 flex items-center justify-center">
{isRecording && (
<div
className={cn(
'absolute inset-0 w-full h-6 rounded-full overflow-hidden flex items-center justify-center p-1',
{ 'bg-state-success-5': isRecording },
)}
>
<VoiceVisualizer isRecording={isRecording} />
</div>
)}
{isRecording && (
<div className="absolute inset-0 rounded-full border-2 border-state-success animate-ping opacity-75"></div>
)}
<Button
variant="outline"
size="sm"
// onMouseDown={() => {
// startRecording();
// }}
// onMouseUp={() => {
// stopRecording();
// }}
onClick={() => {
if (isRecording) {
stopRecording();
} else {
startRecording();
}
}}
className={`w-6 h-6 p-2 rounded-md border-none bg-transparent hover:bg-state-success-5 ${
isRecording
? 'animate-pulse bg-state-success-5 text-state-success'
: ''
}`}
disabled={isProcessing}
>
{isProcessing ? (
<Loader2 size={16} className=" animate-spin" />
) : isRecording ? (
<></>
) : (
// <Mic size={16} className="text-text-primary" />
// <Square size={12} className="text-text-primary" />
<Mic size={16} />
)}
</Button>
</div>
{/* Hide original component */}
<div className="hidden">
<AudioRecorder
onRecordingComplete={handleRecordingComplete}
recorderControls={recorderControls}
/>
</div>
</div>
);
};

---

@@ -9,10 +9,23 @@ export interface InputProps
value?: string | number | readonly string[] | undefined;
prefix?: React.ReactNode;
suffix?: React.ReactNode;
rootClassName?: string;
}
const Input = React.forwardRef<HTMLInputElement, InputProps>(
({ className, type, value, onChange, prefix, suffix, ...props }, ref) => {
(
{
className,
rootClassName,
type,
value,
onChange,
prefix,
suffix,
...props
},
ref,
) => {
const isControlled = value !== undefined;
const { defaultValue, ...restProps } = props;
const inputValue = isControlled ? value : defaultValue;
@@ -89,7 +102,7 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
if (prefix || suffix || isPasswordInput) {
return (
<div className="relative">
<div className={cn('relative', rootClassName)}>
{prefix && (
<span
ref={prefixRef}

---

@@ -51,6 +51,7 @@ export interface IAttachment {
}
export interface IMessageData {
content: string;
audio_binary: string;
outputs: any;
start_to_think?: boolean;
end_to_think?: boolean;

---

@@ -72,7 +72,7 @@ export const useFetchTenantInfo = (
): ResponseGetType<ITenantInfo> => {
const { t } = useTranslation();
const { data, isFetching: loading } = useQuery({
queryKey: [UserSettingApiAction.TenantInfo],
queryKey: [UserSettingApiAction.TenantInfo, showEmptyModelWarn],
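// Including showEmptyModelWarn in the query key forces a refetch when the
// flag changes instead of serving the cached tenant info.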
initialData: {},
gcTime: 0,
queryFn: async () => {

---

@@ -1072,6 +1072,7 @@ Example: Virtual Hosted Style`,
pleaseUploadAtLeastOneFile: 'Please upload at least one file',
},
flow: {
autoPlay: 'Auto play audio',
downloadFileTypeTip: 'The file type to download',
downloadFileType: 'Download file type',
formatTypeError: 'Format or type error',

---

@@ -992,6 +992,7 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
pleaseUploadAtLeastOneFile: '请上传至少一个文件',
},
flow: {
autoPlay: '自动播放',
downloadFileTypeTip: '文件下载的类型',
downloadFileType: '文件类型',
formatTypeError: '格式或类型错误',

---

@@ -50,10 +50,13 @@ export function findMessageFromList(eventList: IEventList) {
let startIndex = -1;
let endIndex = -1;
let audioBinary = undefined;
messageEventList.forEach((x, idx) => {
const { data } = x;
const { content, start_to_think, end_to_think } = data;
const { content, start_to_think, end_to_think, audio_binary } = data;
if (audio_binary) {
audioBinary = audio_binary;
}
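// Later events overwrite earlier ones, so the newest audio_binary in the stream wins.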
if (start_to_think === true) {
nextContent += '<think>' + content;
startIndex = idx;
@@ -82,6 +85,7 @@ export function findMessageFromList(eventList: IEventList) {
return {
id: eventList[0]?.message_id,
content: nextContent,
audio_binary: audioBinary,
attachment: workflowFinished?.data?.outputs?.attachment || {},
};
}
@@ -393,12 +397,15 @@ export const useSendAgentMessage = ({
}, [sendMessageInTaskMode]);
useEffect(() => {
const { content, id, attachment } = findMessageFromList(answerList);
const { content, id, attachment, audio_binary } =
findMessageFromList(answerList);
const inputAnswer = findInputFromList(answerList);
const answer = content || getLatestError(answerList);
if (answerList.length > 0) {
addNewestOneAnswer({
answer: answer ?? '',
audio_binary: audio_binary,
attachment: attachment as IAttachment,
id: id,
...inputAnswer,

---

@@ -9,6 +9,7 @@ import {
FormMessage,
} from '@/components/ui/form';
import { RAGFlowSelect } from '@/components/ui/select';
import { Switch } from '@/components/ui/switch';
import { zodResolver } from '@hookform/resolvers/zod';
import { X } from 'lucide-react';
import { memo } from 'react';
@@ -36,12 +37,14 @@ function MessageForm({ node }: INextOperatorForm) {
)
.optional(),
output_format: z.string().optional(),
auto_play: z.boolean().optional(),
});
const form = useForm({
defaultValues: {
...values,
output_format: values.output_format,
auto_play: values.auto_play,
},
resolver: zodResolver(FormSchema),
});
@@ -56,40 +59,6 @@ function MessageForm({ node }: INextOperatorForm) {
return (
<Form {...form}>
<FormWrapper>
<FormContainer>
<FormItem>
<FormLabel tooltip={t('flow.downloadFileTypeTip')}>
{t('flow.downloadFileType')}
</FormLabel>
<FormField
control={form.control}
name={`output_format`}
render={({ field }) => (
<FormItem className="flex-1">
<FormControl>
<RAGFlowSelect
options={Object.keys(ExportFileType).map(
(key: string) => {
return {
value:
ExportFileType[
key as keyof typeof ExportFileType
],
label: key,
};
},
)}
{...field}
onValueChange={field.onChange}
placeholder={t('common.selectPlaceholder')}
allowClear
></RAGFlowSelect>
</FormControl>
</FormItem>
)}
/>
</FormItem>
</FormContainer>
<FormContainer>
<FormItem>
<FormLabel tooltip={t('flow.msgTip')}>{t('flow.msg')}</FormLabel>
@@ -132,6 +101,57 @@ function MessageForm({ node }: INextOperatorForm) {
<FormMessage />
</FormItem>
</FormContainer>
<FormContainer>
<FormItem>
<FormLabel tooltip={t('flow.downloadFileTypeTip')}>
{t('flow.downloadFileType')}
</FormLabel>
<FormField
control={form.control}
name={`output_format`}
render={({ field }) => (
<FormItem className="flex-1">
<FormControl>
<RAGFlowSelect
options={Object.keys(ExportFileType).map(
(key: string) => {
return {
value:
ExportFileType[
key as keyof typeof ExportFileType
],
label: key,
};
},
)}
{...field}
onValueChange={field.onChange}
placeholder={t('common.selectPlaceholder')}
allowClear
></RAGFlowSelect>
</FormControl>
</FormItem>
)}
/>
</FormItem>
<FormItem>
<FormLabel>{t('flow.autoPlay')}</FormLabel>
<FormField
control={form.control}
name={`auto_play`}
render={({ field }) => (
<FormItem className="flex-1">
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
/>
</FormControl>
</FormItem>
)}
/>
</FormItem>
</FormContainer>
</FormWrapper>
</Form>
);
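
The new `auto_play` flag presumably drives playback of the `audio_binary` payload that `findMessageFromList` now surfaces (see above). A hedged sketch of client-side auto-play, assuming the payload is base64-encoded audio, which the diff types only as `string`:

```ts
// Illustrative only: auto-play a base64-encoded audio payload. The encoding
// and MIME type are assumptions; this diff does not confirm either.
function autoPlayAudio(audioBinary: string, mimeType = 'audio/mpeg'): void {
  const bytes = Uint8Array.from(atob(audioBinary), (c) => c.charCodeAt(0));
  const url = URL.createObjectURL(new Blob([bytes], { type: mimeType }));
  const audio = new Audio(url);
  audio.addEventListener('ended', () => URL.revokeObjectURL(url));
  // Browsers may block autoplay without a prior user gesture.
  audio.play().catch(console.error);
}
```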

---

@@ -40,7 +40,7 @@ export default function RetrievalTesting() {
<Plus /> Add New Test
</Button> */}
</div>
<div className="h-[calc(100vh-241px)] overflow-auto scrollbar-thin">
<div className="h-[calc(100vh-241px)] overflow-auto scrollbar-thin px-1">
<TestingForm
loading={loading}
setValues={setValues}

---

@@ -100,9 +100,9 @@ export function TestingResult({
</>
)}
{!data.chunks?.length && !loading && (
<div className="flex justify-center items-center w-full h-[calc(100vh-241px)]">
<div className="flex justify-center items-center w-full h-[calc(100vh-280px)]">
<div>
<Empty type={EmptyType.SearchData}>
<Empty type={EmptyType.SearchData} iconWidth={80}>
{data.isRuned && (
<div className="text-text-secondary">
{t('knowledgeDetails.noTestResultsForRuned')}

---

@@ -50,6 +50,8 @@ export default {
// plugin
llm_tools: `${api_host}/plugin/llm_tools`,
sequence2txt: `${api_host}/conversation/sequence2txt`,
// knowledge base
check_embedding: `${api_host}/kb/check_embedding`,