modern frontend
This commit is contained in:
@@ -0,0 +1,23 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
/.pnp
|
||||
.pnp.js
|
||||
|
||||
# Testing
|
||||
/coverage
|
||||
|
||||
# Next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# Production
|
||||
/build
|
||||
|
||||
# Misc
|
||||
.DS_Store
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
|
||||
# Environment variables
|
||||
.env*.local
|
||||
.env
|
||||
@@ -0,0 +1,43 @@
|
||||
# Frontend Setup Instructions
|
||||
|
||||
## Installation
|
||||
|
||||
To install the dependencies and set up Tailwind CSS:
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
```
|
||||
|
||||
This will install all the required dependencies including:
|
||||
- Tailwind CSS
|
||||
- PostCSS
|
||||
- Autoprefixer
|
||||
|
||||
## Development
|
||||
|
||||
To start the development server:
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
The application will be available at `http://localhost:3000`
|
||||
|
||||
## Features
|
||||
|
||||
The new UI includes:
|
||||
- Modern gradient background with purple/slate theme
|
||||
- Animated recording button with visual feedback
|
||||
- Status indicators for recording and processing states
|
||||
- Audio visualizer for playback
|
||||
- Responsive design that works on desktop and mobile
|
||||
- User-friendly error notifications
|
||||
- Step-by-step usage instructions
|
||||
- Glassmorphism design elements
|
||||
|
||||
## Customization
|
||||
|
||||
You can customize the colors and animations by editing:
|
||||
- `tailwind.config.js` - Theme configuration
|
||||
- `styles/globals.css` - Custom CSS classes and animations
|
||||
@@ -1,6 +1,9 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
reactStrictMode: true,
|
||||
env: {
|
||||
NEXT_PUBLIC_BACKEND_URL: process.env.NEXT_PUBLIC_BACKEND_URL,
|
||||
},
|
||||
}
|
||||
|
||||
module.exports = nextConfig
|
||||
|
||||
Generated
+925
-7
File diff suppressed because it is too large
Load Diff
@@ -11,5 +11,10 @@
|
||||
"next": "14.0.0",
|
||||
"react": "18.2.0",
|
||||
"react-dom": "18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"autoprefixer": "^10.4.21",
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"tailwindcss": "^4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
import '../styles/globals.css'
|
||||
|
||||
export default function App({ Component, pageProps }) {
|
||||
return <Component {...pageProps} />
|
||||
}
|
||||
+142
-70
@@ -1,97 +1,169 @@
|
||||
import { useState, useRef } from 'react'
|
||||
import { startRecording, stopRecording, sendAudioToBackend, playAudioBlob } from '../services/audioService'
|
||||
|
||||
export default function Home() {
|
||||
const [recording, setRecording] = useState(false)
|
||||
const [playing, setPlaying] = useState(false)
|
||||
const [isProcessing, setIsProcessing] = useState(false)
|
||||
const mediaRecorderRef = useRef(null)
|
||||
const audioChunksRef = useRef([])
|
||||
const audioRef = useRef(null)
|
||||
|
||||
async function startRecording() {
|
||||
if (!navigator.mediaDevices) return alert('No microphone available')
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
|
||||
const mr = new MediaRecorder(stream)
|
||||
mr.ondataavailable = (e) => audioChunksRef.current.push(e.data)
|
||||
mr.onstop = async () => {
|
||||
const blob = new Blob(audioChunksRef.current, { type: 'audio/webm' })
|
||||
audioChunksRef.current = []
|
||||
await sendAudio(blob)
|
||||
}
|
||||
mediaRecorderRef.current = mr
|
||||
audioChunksRef.current = []
|
||||
mr.start()
|
||||
setRecording(true)
|
||||
async function handleStartRecording() {
|
||||
await startRecording(mediaRecorderRef, audioChunksRef, setRecording, handleSendAudio)
|
||||
}
|
||||
|
||||
function stopRecording() {
|
||||
if (mediaRecorderRef.current && mediaRecorderRef.current.state !== 'inactive') {
|
||||
mediaRecorderRef.current.stop()
|
||||
setRecording(false)
|
||||
}
|
||||
function handleStopRecording() {
|
||||
stopRecording(mediaRecorderRef, setRecording)
|
||||
}
|
||||
|
||||
async function sendAudio(blob) {
|
||||
async function handleSendAudio(blob) {
|
||||
try {
|
||||
const form = new FormData()
|
||||
// Convert webm to wav on the client is complex; many servers accept webm/ogg.
|
||||
form.append('file', blob, 'recording.webm')
|
||||
|
||||
console.log('Sending request to backend...')
|
||||
const res = await fetch('http://localhost:8000/chat', { method: 'POST', body: form })
|
||||
console.log('Response received:', res.status, res.statusText)
|
||||
|
||||
if (!res.ok) {
|
||||
const text = await res.text()
|
||||
alert('Error: ' + res.status + ' ' + text)
|
||||
return
|
||||
}
|
||||
|
||||
console.log('Converting response to blob...')
|
||||
const audioBlob = await res.blob()
|
||||
console.log('Audio blob created, size:', audioBlob.size)
|
||||
|
||||
const url = URL.createObjectURL(audioBlob)
|
||||
if (audioRef.current) {
|
||||
audioRef.current.src = url
|
||||
audioRef.current.play()
|
||||
setPlaying(true)
|
||||
audioRef.current.onended = () => setPlaying(false)
|
||||
}
|
||||
setIsProcessing(true)
|
||||
const audioBlob = await sendAudioToBackend(blob)
|
||||
playAudioBlob(audioBlob, audioRef, setPlaying)
|
||||
} catch (error) {
|
||||
console.error('Error in sendAudio:', error)
|
||||
alert('Failed to process audio: ' + error.message)
|
||||
// Show a more user-friendly error notification
|
||||
const errorDiv = document.createElement('div')
|
||||
errorDiv.className = 'fixed top-4 right-4 bg-red-500 text-white px-6 py-4 rounded-lg shadow-lg z-50 transform transition-all duration-300'
|
||||
errorDiv.textContent = `Error: ${error.message}`
|
||||
document.body.appendChild(errorDiv)
|
||||
setTimeout(() => {
|
||||
errorDiv.style.transform = 'translateX(100%)'
|
||||
setTimeout(() => document.body.removeChild(errorDiv), 300)
|
||||
}, 3000)
|
||||
} finally {
|
||||
setIsProcessing(false)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ padding: 24, fontFamily: 'sans-serif' }}>
|
||||
<h1>Local Voice Assistant — Frontend</h1>
|
||||
<p>Press and hold the big button to record, or click to toggle.</p>
|
||||
<div className="min-h-screen bg-gradient-to-br from-slate-900 via-purple-900 to-slate-900 text-white">
|
||||
{/* Header */}
|
||||
<div className="container mx-auto px-6 py-8">
|
||||
<div className="text-center mb-12">
|
||||
<h1 className="text-5xl font-bold bg-gradient-to-r from-blue-400 via-purple-400 to-pink-400 bg-clip-text text-transparent mb-4">
|
||||
🎤 Voice Assistant
|
||||
</h1>
|
||||
<p className="text-xl text-gray-300 max-w-2xl mx-auto">
|
||||
Experience the future of voice interaction. Press and hold to record your voice,
|
||||
or simply click to toggle recording mode.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Main Interface */}
|
||||
<div className="flex flex-col lg:flex-row items-center justify-center gap-12 max-w-6xl mx-auto">
|
||||
|
||||
{/* Recording Section */}
|
||||
<div className="flex flex-col items-center space-y-6">
|
||||
<div className="relative">
|
||||
{/* Animated rings around button when recording */}
|
||||
{recording && (
|
||||
<>
|
||||
<div className="absolute inset-0 rounded-full border-4 border-red-400 animate-ping opacity-20"></div>
|
||||
<div className="absolute inset-0 rounded-full border-4 border-red-400 animate-ping opacity-40 animation-delay-75"></div>
|
||||
</>
|
||||
)}
|
||||
|
||||
<div style={{ display: 'flex', gap: 12, alignItems: 'center' }}>
|
||||
<button
|
||||
onMouseDown={startRecording}
|
||||
onMouseUp={stopRecording}
|
||||
onTouchStart={startRecording}
|
||||
onTouchEnd={stopRecording}
|
||||
onClick={() => (recording ? stopRecording() : startRecording())}
|
||||
style={{
|
||||
width: 140,
|
||||
height: 140,
|
||||
borderRadius: 70,
|
||||
background: recording ? 'red' : '#0b84ff',
|
||||
color: 'white',
|
||||
fontSize: 18,
|
||||
border: 'none',
|
||||
}}
|
||||
onMouseDown={handleStartRecording}
|
||||
onMouseUp={handleStopRecording}
|
||||
onTouchStart={handleStartRecording}
|
||||
onTouchEnd={handleStopRecording}
|
||||
onClick={() => (recording ? handleStopRecording() : handleStartRecording())}
|
||||
disabled={isProcessing}
|
||||
className={`record-button w-40 h-40 rounded-full border-none text-white text-lg font-semibold
|
||||
${recording
|
||||
? 'bg-gradient-to-r from-red-500 to-red-600 recording-pulse shadow-red-500/50'
|
||||
: isProcessing
|
||||
? 'bg-gradient-to-r from-yellow-500 to-orange-500 shadow-yellow-500/50'
|
||||
: 'bg-gradient-to-r from-blue-500 to-purple-600 shadow-blue-500/50'
|
||||
}
|
||||
${isProcessing ? 'cursor-not-allowed opacity-75' : 'cursor-pointer hover:shadow-xl'}
|
||||
disabled:cursor-not-allowed disabled:opacity-75
|
||||
`}
|
||||
>
|
||||
{recording ? 'Recording...' : 'Hold / Click'}
|
||||
<div className="flex flex-col items-center">
|
||||
<div className="text-3xl mb-2">
|
||||
{recording ? '🎙️' : isProcessing ? '⏳' : '🎤'}
|
||||
</div>
|
||||
<div className="text-sm">
|
||||
{recording ? 'Recording...' : isProcessing ? 'Processing...' : 'Hold / Click'}
|
||||
</div>
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p>Playback:</p>
|
||||
<audio ref={audioRef} controls />
|
||||
{playing && <div>Playing...</div>}
|
||||
{/* Status indicators */}
|
||||
<div className="flex space-x-4">
|
||||
<div className={`flex items-center space-x-2 px-4 py-2 rounded-full transition-all duration-300 ${
|
||||
recording ? 'bg-red-500/20 text-red-300' : 'bg-gray-700/50 text-gray-400'
|
||||
}`}>
|
||||
<div className={`w-2 h-2 rounded-full ${recording ? 'bg-red-400 animate-pulse' : 'bg-gray-500'}`}></div>
|
||||
<span className="text-sm font-medium">Recording</span>
|
||||
</div>
|
||||
|
||||
<div className={`flex items-center space-x-2 px-4 py-2 rounded-full transition-all duration-300 ${
|
||||
isProcessing ? 'bg-yellow-500/20 text-yellow-300' : 'bg-gray-700/50 text-gray-400'
|
||||
}`}>
|
||||
<div className={`w-2 h-2 rounded-full ${isProcessing ? 'bg-yellow-400 animate-pulse' : 'bg-gray-500'}`}></div>
|
||||
<span className="text-sm font-medium">Processing</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Audio Playback Section */}
|
||||
<div className="bg-white/10 backdrop-blur-lg rounded-2xl p-8 shadow-2xl border border-white/20 min-w-[400px]">
|
||||
<div className="text-center mb-6">
|
||||
<h3 className="text-2xl font-semibold text-white mb-2">🔊 Audio Response</h3>
|
||||
<p className="text-gray-300">Your AI assistant's response will play here</p>
|
||||
</div>
|
||||
|
||||
<div className="audio-visualizer bg-gradient-to-r from-blue-500/10 to-purple-500/10 rounded-xl p-6">
|
||||
<audio
|
||||
ref={audioRef}
|
||||
controls
|
||||
className="w-full mb-4"
|
||||
style={{
|
||||
filter: 'drop-shadow(0 4px 6px rgba(0, 0, 0, 0.1))',
|
||||
}}
|
||||
/>
|
||||
|
||||
{playing && (
|
||||
<div className="flex items-center justify-center space-x-3 text-green-400">
|
||||
<div className="flex space-x-1">
|
||||
<div className="w-1 h-6 bg-green-400 rounded animate-bounce"></div>
|
||||
<div className="w-1 h-8 bg-green-400 rounded animate-bounce" style={{animationDelay: '0.1s'}}></div>
|
||||
<div className="w-1 h-6 bg-green-400 rounded animate-bounce" style={{animationDelay: '0.2s'}}></div>
|
||||
<div className="w-1 h-10 bg-green-400 rounded animate-bounce" style={{animationDelay: '0.3s'}}></div>
|
||||
<div className="w-1 h-6 bg-green-400 rounded animate-bounce" style={{animationDelay: '0.4s'}}></div>
|
||||
</div>
|
||||
<span className="font-medium">Playing response...</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Instructions */}
|
||||
<div className="mt-16 text-center">
|
||||
<div className="bg-white/5 backdrop-blur-sm rounded-xl p-6 max-w-3xl mx-auto border border-white/10">
|
||||
<h4 className="text-lg font-semibold text-white mb-4">💡 How to Use</h4>
|
||||
<div className="grid md:grid-cols-3 gap-4 text-sm text-gray-300">
|
||||
<div className="flex flex-col items-center space-y-2">
|
||||
<div className="w-12 h-12 bg-blue-500/20 rounded-full flex items-center justify-center text-2xl">1️⃣</div>
|
||||
<p><strong>Press & Hold</strong><br/>Hold the button down while speaking</p>
|
||||
</div>
|
||||
<div className="flex flex-col items-center space-y-2">
|
||||
<div className="w-12 h-12 bg-purple-500/20 rounded-full flex items-center justify-center text-2xl">2️⃣</div>
|
||||
<p><strong>Or Click Toggle</strong><br/>Click once to start, click again to stop</p>
|
||||
</div>
|
||||
<div className="flex flex-col items-center space-y-2">
|
||||
<div className="w-12 h-12 bg-pink-500/20 rounded-full flex items-center justify-center text-2xl">3️⃣</div>
|
||||
<p><strong>Listen</strong><br/>Your AI assistant will respond with audio</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
// PostCSS pipeline: Tailwind v4's PostCSS plugin runs first, then
// Autoprefixer adds vendor prefixes. Plugin order is significant.
const config = {
  plugins: {
    '@tailwindcss/postcss': {},
    autoprefixer: {},
  },
}

module.exports = config
|
||||
@@ -0,0 +1,80 @@
|
||||
const BACKEND_URL = process.env.NEXT_PUBLIC_BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
/**
 * Starts capturing microphone audio into a MediaRecorder.
 *
 * @param {object} mediaRecorderRef - React ref; receives the active MediaRecorder.
 * @param {object} audioChunksRef - React ref holding an array of recorded chunks.
 * @param {Function} setRecording - State setter; called with true on success.
 * @param {Function} [onRecordingComplete] - Awaited with the final audio Blob
 *   (type 'audio/webm') when the recorder stops.
 * @returns {Promise<boolean>} true if recording started, false otherwise.
 */
export async function startRecording(mediaRecorderRef, audioChunksRef, setRecording, onRecordingComplete) {
  // No media-devices API (insecure context or unsupported browser).
  if (!navigator.mediaDevices) {
    alert('No microphone available')
    return false
  }

  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const mr = new MediaRecorder(stream)

    mr.ondataavailable = (e) => audioChunksRef.current.push(e.data)
    mr.onstop = async () => {
      // Release the microphone tracks so the browser's recording indicator
      // turns off; otherwise the mic stays hot after every recording.
      stream.getTracks().forEach((track) => track.stop())

      const blob = new Blob(audioChunksRef.current, { type: 'audio/webm' })
      audioChunksRef.current = []
      if (onRecordingComplete) {
        await onRecordingComplete(blob)
      }
    }

    mediaRecorderRef.current = mr
    audioChunksRef.current = []  // drop any stale chunks from a prior run
    mr.start()
    setRecording(true)
    return true
  } catch (error) {
    // Typically a permission denial (NotAllowedError) or missing device.
    console.error('Error starting recording:', error)
    alert('Failed to start recording: ' + error.message)
    return false
  }
}
|
||||
|
||||
/**
 * Stops an in-progress MediaRecorder session, if any.
 *
 * @param {object} mediaRecorderRef - React ref that may hold a MediaRecorder.
 * @param {Function} setRecording - State setter; called with false on stop.
 * @returns {boolean} true if a recorder was stopped, false if there was
 *   nothing to stop (no recorder, or already inactive).
 */
export function stopRecording(mediaRecorderRef, setRecording) {
  const recorder = mediaRecorderRef.current

  // Guard clause: nothing to do without an active recorder.
  if (!recorder || recorder.state === 'inactive') {
    return false
  }

  recorder.stop()
  setRecording(false)
  return true
}
|
||||
|
||||
/**
 * Uploads a recorded audio Blob to the backend's /chat endpoint and returns
 * the backend's audio reply as a Blob.
 *
 * @param {Blob} audioBlob - Recorded audio (webm) to send.
 * @returns {Promise<Blob>} the response body as an audio Blob.
 * @throws {Error} "<status> <body text>" when the server responds non-2xx,
 *   or the underlying fetch error on network failure (logged, then rethrown).
 */
export async function sendAudioToBackend(audioBlob) {
  try {
    const form = new FormData()
    // Convert webm to wav on the client is complex; many servers accept webm/ogg.
    form.append('file', audioBlob, 'recording.webm')

    console.log('Sending request to backend...')
    const res = await fetch(`${BACKEND_URL}/chat`, { method: 'POST', body: form })
    console.log('Response received:', res.status, res.statusText)

    if (!res.ok) {
      const text = await res.text()
      throw new Error(`${res.status} ${text}`)
    }

    console.log('Converting response to blob...')
    // Named distinctly so the incoming parameter is not shadowed.
    const replyBlob = await res.blob()
    console.log('Audio blob created, size:', replyBlob.size)

    return replyBlob
  } catch (error) {
    console.error('Error in sendAudioToBackend:', error)
    throw error
  }
}
|
||||
|
||||
/**
 * Plays an audio Blob through the <audio> element held in audioRef.
 *
 * @param {Blob} audioBlob - Audio data to play.
 * @param {object} audioRef - React ref to an HTMLAudioElement (may be unset).
 * @param {Function} setPlaying - State setter; true while playing, false on end.
 */
export function playAudioBlob(audioBlob, audioRef, setPlaying) {
  const player = audioRef.current
  // Nothing to play into — bail before creating an object URL we'd leak.
  if (!player) {
    return
  }

  const url = URL.createObjectURL(audioBlob)
  player.src = url
  player.onended = () => {
    setPlaying(false)
    // Revoke the blob URL once playback finishes to free the Blob's memory.
    URL.revokeObjectURL(url)
  }

  const playback = player.play()
  // play() returns a Promise in modern browsers; an unhandled rejection
  // (e.g. autoplay policy) would otherwise leave the UI stuck in "playing".
  if (playback && typeof playback.catch === 'function') {
    playback.catch((error) => {
      console.error('Audio playback failed:', error)
      setPlaying(false)
    })
  }
  setPlaying(true)
}
|
||||
@@ -0,0 +1,39 @@
|
||||
/* Global stylesheet: Tailwind v4 entry point plus custom component classes
   used by the voice-assistant UI (record button, audio visualizer). */
@import "tailwindcss";

@layer base {
  /* Full-height dark purple gradient backdrop for every page. */
  body {
    @apply bg-gradient-to-br from-slate-900 via-purple-900 to-slate-900;
    @apply min-h-screen;
  }
}

@layer components {
  /* Large round record button: scales up on hover, down on press. */
  .record-button {
    @apply relative overflow-hidden;
    @apply transition-all duration-300 ease-in-out;
    @apply transform hover:scale-105 active:scale-95;
    @apply shadow-2xl;
  }

  /* Hidden white "sheen" overlay, parked off-screen to the left. */
  .record-button::before {
    content: '';
    @apply absolute inset-0 bg-gradient-to-r from-transparent via-white to-transparent;
    @apply opacity-0 translate-x-[-100%];
    @apply transition-all duration-700;
  }

  /* On hover the sheen sweeps across the button left-to-right. */
  .record-button:hover::before {
    @apply opacity-20 translate-x-[100%];
  }

  /* Container for the playback <audio> element and animated bars. */
  .audio-visualizer {
    @apply relative overflow-hidden rounded-xl;
  }

  /* Pulsing blue/purple glow layered over the visualizer. */
  .audio-visualizer::after {
    content: '';
    @apply absolute inset-0 bg-gradient-to-r from-blue-500/20 to-purple-500/20;
    @apply animate-pulse;
  }
}
|
||||
@@ -0,0 +1,31 @@
|
||||
/**
 * Tailwind CSS configuration for the frontend.
 * @type {import('tailwindcss').Config}
 */
const config = {
  // Every directory that can contain Tailwind class names.
  content: [
    "./pages/**/*.{js,ts,jsx,tsx,mdx}",
    "./components/**/*.{js,ts,jsx,tsx,mdx}",
    "./app/**/*.{js,ts,jsx,tsx,mdx}",
  ],
  theme: {
    extend: {
      colors: {
        // Blue "primary" palette (50 = lightest, 900 = darkest).
        primary: {
          50: '#eff6ff',
          100: '#dbeafe',
          200: '#bfdbfe',
          300: '#93c5fd',
          400: '#60a5fa',
          500: '#3b82f6',
          600: '#2563eb',
          700: '#1d4ed8',
          800: '#1e40af',
          900: '#1e3a8a',
        },
      },
      // Slower variants of the built-in pulse/bounce keyframes.
      animation: {
        'pulse-slow': 'pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite',
        'bounce-slow': 'bounce 2s infinite',
      },
    },
  },
  plugins: [],
}

module.exports = config
|
||||
Reference in New Issue
Block a user