switch to the new VN-style design
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
commit 7fcb4784e3 (parent 15d88dfa25)
4 changed files with 326 additions and 116 deletions
BIN public/tutor_f.png (new file, 860 KiB; binary file not shown)
BIN public/tutor_m.png (new file, 717 KiB; binary file not shown)
src/App.css (153 changed lines)

@@ -1,42 +1,137 @@
-#root {
-  max-width: 1280px;
-  margin: 0 auto;
-  padding: 2rem;
-  text-align: center;
-}
-
-.logo {
-  height: 6em;
-  padding: 1.5em;
-  will-change: filter;
-  transition: filter 300ms;
-}
-.logo:hover {
-  filter: drop-shadow(0 0 2em #646cffaa);
-}
-.logo.react:hover {
-  filter: drop-shadow(0 0 2em #61dafbaa);
-}
-
-@keyframes logo-spin {
-  from {
-    transform: rotate(0deg);
-  }
-  to {
-    transform: rotate(360deg);
-  }
-}
-
-@media (prefers-reduced-motion: no-preference) {
-  a:nth-of-type(2) .logo {
-    animation: logo-spin infinite 20s linear;
-  }
-}
-
-.card {
-  padding: 2em;
-}
-
-.read-the-docs {
-  color: #888;
-}
+* {
+  box-sizing: border-box;
+}
+
+:root {
+}
+
+body {
+  margin: 0;
+  padding: 0;
+  background: linear-gradient(to bottom, #bae6fd, #ffffff);
+  color: #1f2937;
+  height: 100vh;
+  overflow-x: hidden;
+}
+
+.app-container {
+  min-height: 100vh;
+  display: flex;
+  flex-direction: column;
+  justify-content: center;
+  align-items: center;
+  padding: 1rem;
+}
+
+.scene-wrapper {
+  position: relative;
+  max-width: 640px;
+  width: 100%;
+  padding: 1rem;
+}
+
+.avatar {
+  position: absolute;
+  top: -80px;
+  left: 50%;
+  transform: translateX(-50%);
+  width: 128px;
+  height: 128px;
+  border-radius: 50%;
+  border: 4px solid white;
+  box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
+  object-fit: cover;
+  animation: popIn 0.4s ease;
+}
+
+.dialogue-box {
+  background: rgba(255, 255, 255, 0.9);
+  border: 1px solid #d1d5db;
+  border-radius: 1.5rem;
+  padding: 1.5rem;
+  margin-top: 6rem;
+  box-shadow: 0 10px 20px rgba(0, 0, 0, 0.1);
+  animation: fadeIn 0.5s ease-in-out;
+}
+
+.dialogue-text {
+  font-size: 1.125rem;
+  color: #374151;
+  margin-bottom: 1rem;
+}
+
+.dialogue-text strong {
+  color: #111827;
+}
+
+.control-button {
+  margin-top: 1.5rem;
+  padding: 0.75rem 1.5rem;
+  font-size: 1.125rem;
+  font-weight: 700;
+  color: white;
+  border: none;
+  border-radius: 9999px;
+  box-shadow: 0 6px 14px rgba(0, 0, 0, 0.1);
+  transition: background 0.3s ease;
+}
+
+.control-button.recording {
+  background-color: #ef4444;
+}
+
+.control-button.idle {
+  background-color: #3b82f6;
+}
+
+.control-button.idle:hover {
+  background-color: #2563eb;
+}
+
+.recording-indicator {
+  color: red;
+  font-weight: bold;
+  margin-top: 1rem;
+  text-align: center;
+  animation: pulse 1s infinite;
+}
+
+.waveform-canvas {
+  width: 300px; /* same as element width */
+  height: 60px; /* same as element height */
+  background-color: #000;
+  border-radius: 8px;
+  margin: 10px auto;
+  display: block;
+}
+
+@keyframes pulse {
+  0% { opacity: 1; }
+  50% { opacity: 0.3; }
+  100% { opacity: 1; }
+}
+
+@keyframes fadeIn {
+  from {
+    opacity: 0;
+    transform: translateY(10px);
+  }
+  to {
+    opacity: 1;
+    transform: translateY(0);
+  }
+}
+
+@keyframes popIn {
+  0% {
+    transform: scale(0.8);
+    opacity: 0;
+  }
+  100% {
+    transform: scale(1);
+    opacity: 1;
+  }
+}
src/App.jsx (171 changed lines)

@@ -1,4 +1,5 @@
-import React, { useState, useRef } from 'react';
+// ai-tutor-poc/src/App.jsx
+import React, { useState, useRef, useEffect } from 'react';
 import './App.css';

 const OPENAI_API_KEY = import.meta.env.VITE_OPENAI_API_KEY;

@@ -9,20 +10,27 @@ function App() {
   const [isRecording, setIsRecording] = useState(false);
   const mediaRecorderRef = useRef(null);
   const audioChunksRef = useRef([]);
+  const audioStreamRef = useRef(null);
+  const silenceTimerRef = useRef(null);
+  const canvasRef = useRef(null);
+  const analyserRef = useRef(null);
+  const dataArrayRef = useRef(null);
+  const audioContextRef = useRef(null);

-  const startRecording = async () => {
-    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-    mediaRecorderRef.current = new MediaRecorder(stream);
-    audioChunksRef.current = [];
-
-    mediaRecorderRef.current.ondataavailable = (e) => {
-      audioChunksRef.current.push(e.data);
-    };
-
-    mediaRecorderRef.current.onstop = async () => {
-      const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
-      const formData = new FormData();
-      formData.append('file', audioBlob, 'input.webm');
-      formData.append('model', 'whisper-1');
-
-      const whisperRes = await fetch('https://api.openai.com/v1/audio/transcriptions', {
+  useEffect(() => {
+    const initRecording = async () => {
+      audioStreamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true });
+      mediaRecorderRef.current = new MediaRecorder(audioStreamRef.current);
+
+      mediaRecorderRef.current.ondataavailable = (e) => {
+        audioChunksRef.current.push(e.data);
+      };
+
+      mediaRecorderRef.current.onstop = async () => {
+        const inputAudioBlob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
+        audioChunksRef.current = []; // Clear for next recording
+        const formData = new FormData();
+        formData.append('file', inputAudioBlob, 'input.webm');
+        formData.append('model', 'whisper-1');
+
+        const whisperRes = await fetch('https://api.openai.com/v1/audio/transcriptions', {

@@ -44,7 +52,7 @@ function App() {
         body: JSON.stringify({
           model: 'gpt-4o',
           messages: [
-            { role: 'system', content: 'You are an English tutor that is kind, fun to be around and can teach English language lessons through adventurous stories very well. You are assigned to talk with a primary school EFL student about a movie they watched yesterday.' },
+            { role: 'system', content: 'You are an English tutor that is kind, fun to be around and can teach English language lessons through adventurous stories very well. You are assigned to talk with a primary school EFL student about a movie they watched yesterday. The conversation will be in English.' },
             { role: 'user', content: text }
           ]
         })
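The rest of this onstop handler is unchanged context that the diff does not display: it reads the transcription out of whisperRes, sends the chat request whose body is shown above, and synthesizes the tutor's reply into the outputAudioBlob that the next hunk plays back. As a rough, illustrative sketch of that last step only (the endpoint, model, and voice below are the standard OpenAI text-to-speech API, not code taken from this repository):

// Illustrative sketch, not the repository's own code.
// Turns the tutor's reply text into an audio Blob via OpenAI's speech endpoint.
async function synthesizeReply(replyText, apiKey) {
  const res = await fetch('https://api.openai.com/v1/audio/speech', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ model: 'tts-1', voice: 'alloy', input: replyText }),
  });
  if (!res.ok) throw new Error(`TTS request failed: ${res.status}`);
  return await res.blob(); // corresponds to outputAudioBlob in the hunk below
}

The chat reply itself would be read the usual way, e.g. (await chatRes.json()).choices[0].message.content, before being handed to a helper like this.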
@@ -74,44 +82,151 @@ function App() {
-      const audioUrl = URL.createObjectURL(outputAudioBlob);
-      const audio = new Audio(audioUrl);
-      audio.play();
-    };
-
-    mediaRecorderRef.current.start();
-    setIsRecording(true);
-  };
-
-  const stopRecording = () => {
-    mediaRecorderRef.current.stop();
-
-    // Stop all tracks on the media stream
-    if (mediaRecorderRef.current.stream) {
-      mediaRecorderRef.current.stream.getTracks().forEach(track => track.stop());
-    }
-
-    setIsRecording(false);
-  };
-
-  return (
-    <div className="min-h-screen bg-gray-100 flex flex-col justify-end p-4">
-      <h1>AI English Tutor</h1>
-      <div>เว็บไซต์นี้เป็นการทำ Proof of concept ของการใช้ AI ในการสอนภาษาอังกฤษ โดยเน้นไปที่การจำลองการสื่อสารด้วยเสียง</div>
-      <div>ครูมีทักษะเสริมสร้างจินตนาการ สนุก ตลก ให้ข้อคิด สามารถสอนเนื้อหาผ่านเรื่องราวการผจญภัยได้ดี เช่น คำศัพท์ หรือข้อคิด คำถามปลายเปิด เป็นต้น</div>
-      <div>AI ได้รับบทให้พูดคุยกับนักเรียนในเรื่องภาพยนตร์ที่ได้รับชมไปเมื่อวาน</div>
-
-      <h3>วิธีใช้งาน</h3>
-      <div>กดปุ่ม Speak ด้านล่าง อนุญาตให้ใช้ไมโครโฟน พูดคุยกับ AI เป็นภาษาอังกฤษ แล้วกด Stop</div>
-      <div>เริ่มจากการทักทายกับ AI แล้วเล่าให้ฟังว่าเมื่อวานรับชมภาพยนตร์เรื่องใดมา AI จะถามคุณเกี่ยวกับภาพยนตร์เรื่องนั้น</div>
-
-      <button
-        onClick={isRecording ? stopRecording : startRecording}
-        className={`mt-4 p-3 rounded-xl font-semibold text-white ${isRecording ? 'bg-red-500' : 'bg-blue-500'}`}
-      >
-        {isRecording ? 'Stop' : 'Speak'}
-      </button>
-      <div className="mb-2 bg-white rounded-2xl p-4 shadow text-sm">
-        <div><strong>You:</strong> {transcript}</div>
-        <div className="mt-2"><strong>Tutor:</strong> {aiReply}</div>
-      </div>
-    </div>
-  );
-}
+        const audioUrl = URL.createObjectURL(outputAudioBlob);
+        const audio = new Audio(audioUrl);
+        audio.play();
+
+        audio.onended = () => {
+          if (mediaRecorderRef.current && audioStreamRef.current) {
+            audioChunksRef.current = [];
+            mediaRecorderRef.current.start();
+            setIsRecording(true);
+            monitorSilence();
+          }
+        };
+      };
+    };
+
+    initRecording();
+  }, []);
+
+  const monitorSilence = () => {
+    if (!audioStreamRef.current) return;
+
+    if (audioContextRef.current) {
+      audioContextRef.current.close();
+    }
+    audioContextRef.current = new AudioContext();
+
+    // Resume AudioContext to avoid browser autoplay policy issues
+    audioContextRef.current.resume().then(() => {
+      const source = audioContextRef.current.createMediaStreamSource(audioStreamRef.current);
+      const analyser = audioContextRef.current.createAnalyser();
+      analyser.fftSize = 2048;
+      analyserRef.current = analyser;
+      const bufferLength = analyser.frequencyBinCount;
+      const dataArray = new Uint8Array(bufferLength);
+      dataArrayRef.current = dataArray;
+      source.connect(analyser);
+
+      const canvas = canvasRef.current;
+      const canvasCtx = canvas.getContext('2d');
+
+      const SILENCE_THRESHOLD = 0.02; // Adjust as needed (0 to 1 scale)
+      const SILENCE_TIMEOUT = 1500; // ms
+
+      let silenceStart = null;
+
+      const checkSilenceAndDraw = () => {
+        analyser.getByteTimeDomainData(dataArray);
+
+        // Draw waveform (same as before)
+        canvasCtx.fillStyle = '#000';
+        canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
+
+        canvasCtx.lineWidth = 2;
+        canvasCtx.strokeStyle = '#00ff00';
+        canvasCtx.beginPath();
+
+        const sliceWidth = canvas.width / bufferLength;
+        let x = 0;
+
+        for (let i = 0; i < bufferLength; i++) {
+          const v = dataArray[i] / 128.0;
+          const y = v * canvas.height / 2;
+          if (i === 0) {
+            canvasCtx.moveTo(x, y);
+          } else {
+            canvasCtx.lineTo(x, y);
+          }
+          x += sliceWidth;
+        }
+        canvasCtx.lineTo(canvas.width, canvas.height / 2);
+        canvasCtx.stroke();
+
+        // RMS calculation
+        let sumSquares = 0;
+        for (let i = 0; i < bufferLength; i++) {
+          const normalized = (dataArray[i] - 128) / 128;
+          sumSquares += normalized * normalized;
+        }
+        const rms = Math.sqrt(sumSquares / bufferLength);
+
+        if (rms < SILENCE_THRESHOLD) {
+          // Silence detected
+          if (!silenceStart) silenceStart = Date.now();
+          else if (Date.now() - silenceStart > SILENCE_TIMEOUT) {
+            if (mediaRecorderRef.current && isRecording) {
+              mediaRecorderRef.current.stop();
+              setIsRecording(false);
+              audioContextRef.current.close();
+            }
+            silenceStart = null; // reset after stopping
+            return; // stop animation loop on silence stop
+          }
+        } else {
+          // Sound detected
+          silenceStart = null;
+        }
+
+        requestAnimationFrame(checkSilenceAndDraw);
+      };
+
+
+    });
+  };
+
+  const toggleRecording = () => {
+    if (!isRecording) {
+      audioChunksRef.current = [];
+      mediaRecorderRef.current.start();
+      setIsRecording(true);
+      monitorSilence();
+    } else {
+      mediaRecorderRef.current.stop();
+      setIsRecording(false);
+    }
+  };
+
+  return (
+    <div className="app-container">
+      <div className="scene-wrapper">
+        <img src="/tutor_f.png" alt="Tutor Avatar" className="avatar" />
+
+        <div className="dialogue-box">
+          <div className="dialogue-text">
+            <strong>You:</strong> {transcript || <em>Say something…</em>}
+          </div>
+          <div className="dialogue-text">
+            <strong>Tutor:</strong> {aiReply || <em>Waiting for your question…</em>}
+          </div>
+        </div>
+
+        {isRecording && (
+          <canvas
+            ref={canvasRef}
+            width={300}
+            height={60}
+            className="waveform-canvas"
+          />
+        )}
+
+        <div className="button-container">
+          <button
+            onClick={toggleRecording}
+            className={`control-button ${isRecording ? 'recording' : 'idle'}`}
+          >
+            {isRecording ? 'Stop' : 'Speak'}
+          </button>
+        </div>
+      </div>
+    </div>
+  );
+}
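The silence detection added in monitorSilence() works on the analyser's time-domain bytes: each 0-255 sample is recentred to roughly -1..1, the root mean square over the buffer is taken as the current level, and recording stops once the level stays below SILENCE_THRESHOLD for SILENCE_TIMEOUT milliseconds. Restated as a standalone helper for reference (illustrative only; the function wrapper and example values are not from the commit):

// Same RMS check as in checkSilenceAndDraw, factored into a pure function.
// `samples` is the Uint8Array filled by analyser.getByteTimeDomainData().
function isSilent(samples, threshold = 0.02) {
  let sumSquares = 0;
  for (let i = 0; i < samples.length; i++) {
    const normalized = (samples[i] - 128) / 128; // byte 0-255 -> roughly -1..1
    sumSquares += normalized * normalized;
  }
  return Math.sqrt(sumSquares / samples.length) < threshold;
}

// A flat buffer (all 128) reads as silence; a full-scale signal does not.
console.log(isSilent(new Uint8Array(2048).fill(128))); // true
console.log(isSilent(new Uint8Array(2048).fill(255))); // false

One thing to note when reusing this hunk: checkSilenceAndDraw re-queues itself with requestAnimationFrame, but no initial call to it is visible inside monitorSilence(), so the drawing and detection loop presumably needs an explicit kick-off (for example checkSilenceAndDraw() right after the definition) to start.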