console.log("--> SERVER STARTING...");
import express from 'express';

// Bring in the message classes and the llama.cpp wrapper from the local core
// module. A failed import is logged but deliberately non-fatal: the bindings
// stay undefined, initModel() bails out early, and /info keeps reporting a
// non-ready status instead of the process crashing.
let SystemMessage, HumanMessage, AIMessage, LlamaCppLLM;
try {
  // Destructuring assignment into the already-declared `let` bindings
  // (parentheses required because the statement starts with `{`).
  ({ SystemMessage, HumanMessage, AIMessage, LlamaCppLLM } = await import('./src/index.js'));
  console.log("--> Modules loaded successfully");
} catch (e) {
  console.error("--> FATAL IMPORT ERROR:", e);
}
|
|
| import bodyParser from 'body-parser'; |
| import fs from 'fs'; |
| import path from 'path'; |
|
|
| |
// --- Server configuration ------------------------------------------------
const PORT = 7860;
const MODEL_PATH = './models/Qwen3-1.7B-Q8_0.gguf';
const MODEL_NAME = "dahanhstd-1.0";

/**
 * Read a system-prompt file, returning `fallback` when it cannot be read.
 * A plain try/catch around readFileSync replaces the previous
 * existsSync-then-readFileSync pair: it avoids the check-then-use race and
 * also covers failures existsSync cannot see (e.g. permission errors).
 *
 * @param {string} filePath - path of the prompt text file
 * @param {string} fallback - prompt used when the file is unreadable
 * @returns {string} file contents (UTF-8) or the fallback prompt
 */
function loadPromptFile(filePath, fallback) {
  try {
    return fs.readFileSync(filePath, 'utf-8');
  } catch {
    return fallback;
  }
}

// System prompt per selectable role; the key set is exposed via GET /info
// and mirrored by the <select> options in the UI. Frozen: read-only table.
const PROMPTS = Object.freeze({
  "default": "You are a helpful AI assistant. Answer concisely.",
  "talk2people": loadPromptFile(
    'MasterPrompt_Talk2People.txt',
    "You are a creative Video Director specializing in realistic Vietnamese scenes."
  ),
  "coder": "You are an expert programmer."
});
|
|
| |
// Single-page chat client served at GET /. Kept as one inline template
// literal so the server remains a single self-contained file. Only
// ${MODEL_NAME} is interpolated at module load; the embedded <script> block
// intentionally contains no ${...} sequences, so its code reaches the
// browser verbatim. The client talks to POST /chat and GET /info below.
const HTML_PAGE = `
<!DOCTYPE html>
<html lang="vi">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Dạ Hành AI Studio</title>
<style>
:root { --primary:#00ff88; --bg:#121212; --chat-bg:#1e1e1e; --user-bg:#2a2a2a; --ai-bg:#0a3a2a; }
body { font-family:'Segoe UI',sans-serif; background:var(--bg); color:white; margin:0; display:flex; height:100vh; overflow:hidden; }
.sidebar { width:260px; background:#000; padding:20px; border-right:1px solid #333; display:flex; flex-direction:column; }
.main { flex:1; display:flex; flex-direction:column; padding:20px; position:relative; }
h1 { font-size:1.2rem; color:var(--primary); margin-bottom:20px; text-transform:uppercase; letter-spacing:1px; }
select { width:100%; padding:12px; background:#333; color:white; border:1px solid #555; border-radius:8px; margin-bottom:20px; outline:none; }
#chat-box { flex:1; overflow-y:auto; background:var(--chat-bg); border-radius:12px; padding:20px; margin-bottom:20px; display:flex; flex-direction:column; gap:15px; scroll-behavior:smooth; }
.message { max-width:80%; padding:12px 18px; border-radius:18px; line-height:1.5; font-size:0.95rem; animation:fadeIn 0.3s ease; }
.message.ai { align-self:flex-start; background:var(--ai-bg); border-bottom-left-radius:4px; }
.message.user { align-self:flex-end; background:var(--user-bg); border-bottom-right-radius:4px; color:#ddd; }
.input-area { display:flex; gap:10px; }
input { flex:1; padding:15px 20px; border-radius:30px; border:1px solid #444; background:#222; color:white; outline:none; font-size:1rem; }
input:focus { border-color:var(--primary); }
button { padding:10px 30px; border-radius:30px; border:none; background:var(--primary); color:black; font-weight:bold; cursor:pointer; transition:0.2s; }
button:hover { transform:scale(1.05); box-shadow:0 0 10px var(--primary); }
.status { margin-top:auto; font-size:0.8rem; color:#666; border-top:1px solid #333; padding-top:15px; }
@keyframes fadeIn { from { opacity:0; transform:translateY(10px); } to { opacity:1; transform:translateY(0); } }
/* Scrollbar */
::-webkit-scrollbar { width:8px; }
::-webkit-scrollbar-track { background:#1e1e1e; }
::-webkit-scrollbar-thumb { background:#444; border-radius:4px; }
</style>
</head>
<body>
<div class="sidebar">
<h1>DẠ HÀNH STUDIO</h1>
<label style="font-size:0.9rem; color:#aaa; margin-bottom:5px">Chọn Role:</label>
<select id="role-select">
<option value="talk2people">🎬 Đạo diễn (Talk2People)</option>
<option value="coder">💻 Coder Expert</option>
<option value="default">🤖 Trợ lý ảo</option>
</select>
<div class="status">
Model: <b>${MODEL_NAME}</b><br>
Privacy: <b>Local Safe</b><br>
Status: <span id="status-text" style="color:yellow">Connecting...</span>
</div>
</div>
<div class="main">
<div id="chat-box"></div>
<div class="input-area">
<input type="text" id="user-input" placeholder="Nhập tin nhắn..." autocomplete="off">
<button onclick="sendMessage()">GỬI</button>
</div>
</div>
<script>
const chatBox = document.getElementById('chat-box');
const userInput = document.getElementById('user-input');
const roleSelect = document.getElementById('role-select');
const statusText = document.getElementById('status-text');
let chatHistory = [];

function addMessage(role, text) {
const div = document.createElement('div');
div.className = 'message ' + role;
div.innerText = text;
chatBox.appendChild(div);
chatBox.scrollTop = chatBox.scrollHeight;
}

async function checkHealth() {
try {
const res = await fetch('/info');
const data = await res.json();
statusText.innerText = "Online";
statusText.style.color = "#00ff88";
addMessage('ai', "Xin chào đây là hệ thống bot AI hoạt động riêng tư được đào tạo bởi Dạ Hành Studio, Không phụ thuộc vào Google");
} catch (e) {
statusText.innerText = "Offline";
statusText.style.color = "red";
}
}

async function sendMessage() {
const text = userInput.value.trim();
if (!text) return;

addMessage('user', text);
userInput.value = '';
chatHistory.push({ role: 'user', content: text });

// Loading effect
const loadingId = 'loading-' + Date.now();
const loadingDiv = document.createElement('div');
loadingDiv.className = 'message ai';
loadingDiv.id = loadingId;
loadingDiv.innerText = 'Đang suy nghĩ...';
chatBox.appendChild(loadingDiv);
chatBox.scrollTop = chatBox.scrollHeight;

try {
const response = await fetch('/chat', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
message: text,
history: chatHistory.slice(-10),
role: roleSelect.value
})
});
const data = await response.json();

document.getElementById(loadingId).remove();

if(data.error) {
addMessage('ai', 'Lỗi: ' + data.error);
} else {
addMessage('ai', data.reply);
chatHistory.push({ role: 'ai', content: data.reply });
}
} catch (error) {
document.getElementById(loadingId).remove();
addMessage('ai', 'Lỗi kết nối: ' + error.message);
}
}

userInput.addEventListener('keypress', (e) => { if (e.key === 'Enter') sendMessage(); });
roleSelect.addEventListener('change', () => {
chatHistory = [];
chatBox.innerHTML = '';
addMessage('ai', "Đã chuyển sang chế độ: " + roleSelect.options[roleSelect.selectedIndex].text);
});

checkHealth();
</script>
</body>
</html>
`;
|
|
| |
let llm = null; // shared LLM instance; stays null until initModel() fully succeeds

/**
 * Load the GGUF model from MODEL_PATH and warm it up with a single prompt.
 *
 * Fix: `llm` is now published only AFTER the warm-up invoke succeeds.
 * Previously it was assigned before the warm-up, so a construction-ok but
 * inference-broken model left a dead instance in `llm`, making /info report
 * "ready" and sending /chat requests into a guaranteed 500.
 *
 * Failures are logged and swallowed on purpose: the HTTP server keeps
 * running and /chat answers 503 while `llm` is null.
 */
async function initModel() {
  // Core module failed to import at startup; nothing we can do here.
  if (!LlamaCppLLM) return;
  try {
    console.log('--> Loading model...');
    if (!fs.existsSync(MODEL_PATH)) {
      console.error(`--> Model not found at ${MODEL_PATH}`);
      return;
    }
    const candidate = new LlamaCppLLM({
      modelPath: MODEL_PATH,
      temperature: 0.7,
      maxTokens: 1024
    });
    // Warm-up inference: proves the model actually works before publishing it.
    await candidate.invoke("Hello");
    llm = candidate;
    console.log('--> Model loaded successfully.');
  } catch (err) {
    console.error('--> FATAL MODEL ERROR:', err.message);
  }
}
|
|
| |
// HTTP app setup. express.json() (built into Express since 4.16, and backed
// by the same body-parser implementation) replaces the deprecated standalone
// bodyParser.json() middleware; the body-parser import above is left in
// place so the file's dependency list is untouched.
const app = express();
app.use(express.json());
|
|
| |
// POST /chat — run one chat turn through the local LLM.
// Body: { message: string, history?: [{role:'user'|'ai', content}], role?: string }.
// Replies { model, created, reply, error }: 503 while the model is loading,
// 400 for a missing/empty message, 500 on inference failure.
app.post('/chat', async (req, res) => {
  const apiResponse = {
    model: MODEL_NAME,
    created: Date.now(),
    reply: "",
    error: null
  };

  if (!llm) {
    apiResponse.error = "System initializing or Model missing";
    return res.status(503).json(apiResponse);
  }

  // `?? {}` tolerates requests with no JSON body at all.
  const { message, history = [], role = 'default' } = req.body ?? {};

  // Fix: previously a missing `message` flowed into `new HumanMessage(undefined)`
  // and surfaced as a misleading 500. Reject bad input explicitly instead.
  if (typeof message !== 'string' || message.trim() === '') {
    apiResponse.error = "Field 'message' must be a non-empty string";
    return res.status(400).json(apiResponse);
  }

  try {
    // Unknown roles silently fall back to the default system prompt.
    const systemInstruction = PROMPTS[role] || PROMPTS['default'];

    // Rebuild the conversation as typed message objects for the LLM;
    // entries with any other `role` value are skipped, as before.
    const messages = [new SystemMessage(systemInstruction)];
    for (const msg of history) {
      if (msg.role === 'user') messages.push(new HumanMessage(msg.content));
      else if (msg.role === 'ai') messages.push(new AIMessage(msg.content));
    }
    messages.push(new HumanMessage(message));

    const response = await llm.invoke(messages);
    apiResponse.reply = response.content;
    res.json(apiResponse);
  } catch (error) {
    apiResponse.error = error.message;
    res.status(500).json(apiResponse);
  }
});
|
|
| |
// GET /info — lightweight health/metadata probe; the UI's status badge and
// startup greeting both key off this endpoint.
app.get('/info', (req, res) => {
  const payload = {
    model: MODEL_NAME,
    status: llm ? "ready" : "loading",
    roles: Object.keys(PROMPTS)
  };
  res.json(payload);
});
|
|
| |
// GET / — serve the embedded single-page chat client.
app.get('/', (req, res) => res.send(HTML_PAGE));
|
|
// Bind on all interfaces, then kick off the model load in the background.
// initModel() is intentionally NOT awaited: the UI becomes reachable
// immediately, and /info reports "loading" until the model is ready
// (initModel handles its own errors, so the floating promise cannot reject).
app.listen(PORT, '0.0.0.0', () => {
  console.log(`--> Server listening on ${PORT}`);
  initModel();
});
|
|
|
|