Upload 2 files
- index.html +353 -0
- style.css +28 -0
index.html
ADDED
@@ -0,0 +1,353 @@
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width,initial-scale=1" />
  <title>Browser LLM (WASM, mobile)</title>
  <style>
    :root { --bg:#0b0d10; --card:#14171b; --muted:#9aa4af; --accent:#6ee7b7; --danger:#f87171; --text:#dce3ea; }
    * { box-sizing:border-box; }
    body { margin:0; background:var(--bg); color:var(--text); font:16px/1.45 system-ui, -apple-system, Segoe UI, Roboto, "Helvetica Neue", Arial, "Apple Color Emoji","Segoe UI Emoji"; }
    header { padding:14px 16px; border-bottom:1px solid #21262c; display:flex; gap:10px; align-items:center; }
    header h1 { font-size:16px; margin:0; font-weight:600; }
    header .pill { font-size:12px; color:var(--bg); background:var(--accent); padding:.2rem .55rem; border-radius:999px; font-weight:700; letter-spacing:.02em; }
    main { display:grid; grid-template-rows:auto 1fr auto; height:calc(100dvh - 58px); }
    .bar { display:flex; flex-wrap:wrap; gap:8px; padding:10px 12px; background:#0f1216; border-bottom:1px solid #21262c; align-items:center; }
    select, input[type="number"], input[type="text"] { background:var(--card); color:var(--text); border:1px solid #29313a; border-radius:10px; padding:8px 10px; }
    button { background:#1c2128; color:var(--text); border:1px solid #2a323c; border-radius:12px; padding:10px 12px; font-weight:600; cursor:pointer; }
    button.primary { background:var(--accent); color:#08261b; border:none; }
    button.ghost { background:transparent; border-color:#2a323c; }
    button:disabled { opacity:.6; cursor:not-allowed; }
    .grow { flex:1 1 auto; }
    .progress { width:160px; height:8px; background:#1a1f25; border-radius:999px; overflow:hidden; border:1px solid #25303a; }
    .progress > i { display:block; height:100%; width:0%; background:linear-gradient(90deg,#34d399,#10b981); transition:width .25s ease; }
    #stats { font-size:12px; color:var(--muted); display:flex; gap:10px; align-items:center; }
    #chat { padding:14px; overflow:auto; background:linear-gradient(#0b0d10, #0d1117); }
    .msg { max-width:820px; margin:0 auto 10px auto; display:flex; gap:10px; align-items:flex-start; }
    .msg .bubble { background:var(--card); padding:12px 14px; border-radius:16px; border:1px solid #242c35; white-space:pre-wrap; }
    .msg.user .bubble { background:#1d2330; }
    .msg.assistant .bubble { background:#151c24; }
    .role { font-size:12px; color:var(--muted); min-width:68px; text-transform:uppercase; letter-spacing:.04em; }
    .inputbar { display:flex; gap:8px; padding:10px; border-top:1px solid #21262c; background:#0f1216; }
    textarea { resize:none; height:64px; padding:10px 12px; flex:1 1 auto; border-radius:12px; border:1px solid #2a323c; background:var(--card); color:var(--text); }
    .tiny { font-size:12px; color:var(--muted); }
    .warn { color:var(--danger); font-weight:600; }
    .row { display:flex; gap:8px; align-items:center; flex-wrap:wrap; }
    .spacer { flex:1; }
    a { color:#93c5fd; }
    details { margin-left:8px; }
    .note { font-size:12px; color:var(--muted); max-width:720px; }
  </style>
</head>
<body>
<header>
  <h1>Browser LLM</h1>
  <span class="pill">WASM • CPU‑only</span>
  <span id="isoNote" class="tiny"></span>
</header>

<main>
  <div class="bar">
    <label for="model">Model:</label>
    <select id="model" class="grow">
      <option selected value='{"id":"ggml-org/gemma-3-270m-GGUF","file":"gemma-3-270m-Q8_0.gguf","label":"Gemma‑3‑270M Q8_0 (≈292 MB)"}'>Gemma‑3‑270M Q8_0 (≈292 MB)</option>
      <option value='{"id":"mradermacher/OpenELM-270M-Instruct-GGUF","file":"OpenELM-270M-Instruct.Q3_K_S.gguf","label":"OpenELM‑270M‑Instruct Q3_K_S (≈134 MB)"}'>OpenELM‑270M‑Instruct Q3_K_S (≈134 MB)</option>
      <option value='{"id":"mradermacher/OpenELM-270M-Instruct-GGUF","file":"OpenELM-270M-Instruct.Q4_K_M.gguf","label":"OpenELM‑270M‑Instruct Q4_K_M (≈175 MB)"}'>OpenELM‑270M‑Instruct Q4_K_M (≈175 MB)</option>
      <option value='{"id":"mav23/SmolLM-135M-Instruct-GGUF","file":"smollm-135m-instruct.Q3_K_S.gguf","label":"SmolLM‑135M‑Instruct Q3_K_S (≈88 MB)"}'>SmolLM‑135M‑Instruct Q3_K_S (≈88 MB)</option>
      <option value='{"id":"QuantFactory/SmolLM-360M-Instruct-GGUF","file":"SmolLM-360M-Instruct.Q3_K_S.gguf","label":"SmolLM‑360M‑Instruct Q3_K_S (≈219 MB)"}'>SmolLM‑360M‑Instruct Q3_K_S (≈219 MB)</option>
      <option value='{"id":"Qwen/Qwen2.5-0.5B-Instruct-GGUF","file":"qwen2.5-0.5b-instruct-q3_k_m.gguf","label":"Qwen2.5‑0.5B‑Instruct Q3_K_M (≈432 MB)"}'>Qwen2.5‑0.5B‑Instruct Q3_K_M (≈432 MB)</option>
      <option value='{"id":"Qwen/Qwen2.5-0.5B-Instruct-GGUF","file":"qwen2.5-0.5b-instruct-q4_k_m.gguf","label":"Qwen2.5‑0.5B‑Instruct Q4_K_M (≈491 MB)"}'>Qwen2.5‑0.5B‑Instruct Q4_K_M (≈491 MB)</option>
      <option value='{"id":"TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF","file":"tinyllama-1.1b-chat-v1.0.Q3_K_S.gguf","label":"TinyLlama‑1.1B‑Chat Q3_K_S (≈500 MB)"}'>TinyLlama‑1.1B‑Chat Q3_K_S (≈500 MB)</option>
      <option value='{"id":"QuantFactory/SmolLM-360M-GGUF","file":"SmolLM-360M.Q4_0.gguf","label":"SmolLM‑360M Q4_0 (≈229 MB)"}'>SmolLM‑360M Q4_0 (≈229 MB)</option>
      <option value='{"id":"QuantFactory/SmolLM-360M-GGUF","file":"SmolLM-360M.Q3_K_S.gguf","label":"SmolLM‑360M Q3_K_S (≈219 MB, faster)"}'>SmolLM‑360M Q3_K_S (≈219 MB, faster)</option>
      <option value='{"id":"QuantFactory/SmolLM-360M-GGUF","file":"SmolLM-360M.Q2_K.gguf","label":"SmolLM‑360M Q2_K (≈200 MB, min RAM / quality drop)"}'>SmolLM‑360M Q2_K (≈200 MB, min RAM / quality drop)</option>
      <option value='{"custom":true,"label":"Custom HF GGUF (e.g., Gemma‑3‑270M)"}'>Custom HF GGUF (e.g., Gemma‑3‑270M)</option>
    </select>

    <details id="customBox">
      <summary class="tiny">Custom GGUF (paste HF repo + file)</summary>
      <div class="row">
        <label class="tiny">HF repo id</label>
        <input id="customRepo" type="text" placeholder="e.g. google/gemma-3-270m-GGUF (when available)" style="width:280px" />
        <label class="tiny">file</label>
        <input id="customFile" type="text" placeholder="e.g. gemma-3-270m.Q4_0.gguf" style="width:240px" />
      </div>
      <div class="note">Note: official <a href="https://huggingface.co/google/gemma-3-270m" target="_blank" rel="noreferrer">Gemma‑3‑270M</a> is the base HF repo. A ready‑to‑use public GGUF is now available at <a href="https://huggingface.co/ggml-org/gemma-3-270m-GGUF" target="_blank" rel="noreferrer">ggml‑org/gemma‑3‑270m‑GGUF</a> (currently providing <code>gemma-3-270m-Q8_0.gguf</code> ≈292 MB). For maximum speed on low‑RAM phones, the OpenELM‑270M‑Instruct Q3_K_S option above is even lighter, but Gemma‑3‑270M offers strong quality for its size.</div>
    </details>

    <div class="row">
      <label>Max new tokens</label>
      <input id="nPredict" type="number" min="1" max="512" step="1" value="128" />
    </div>
    <div class="row">
      <label>Temp</label><input id="temp" type="number" min="0" max="2" step="0.1" value="0.7" style="width:80px" />
      <label>Top‑p</label><input id="topp" type="number" min="0" max="1" step="0.05" value="0.9" style="width:80px" />
      <label>Top‑k</label><input id="topk" type="number" min="1" max="100" step="1" value="40" style="width:80px" />
    </div>

    <div class="spacer"></div>

    <button id="loadBtn" class="primary">Load model</button>
    <button id="unloadBtn" class="ghost" disabled>Unload</button>

    <div class="progress" title="download progress"><i id="prog"></i></div>
    <div id="stats">idle</div>
  </div>

  <div id="chat" aria-live="polite"></div>

  <form class="inputbar" id="form">
    <textarea id="input" placeholder="Ask me anything…" required></textarea>
    <div class="row" style="flex-direction:column; gap:6px; align-items:flex-end">
      <button id="sendBtn" class="primary">Send</button>
      <button id="stopBtn" type="button" class="ghost" disabled>Stop</button>
      <div class="tiny">Context kept small for mobile perf</div>
    </div>
  </form>
</main>
<script type="module">
// ——— Imports ———
import { Wllama, LoggerWithoutDebug } from "https://cdn.jsdelivr.net/npm/@wllama/[email protected]/esm/index.js";

const CONFIG_PATHS = {
  "single-thread/wllama.wasm": "https://cdn.jsdelivr.net/npm/@wllama/[email protected]/esm/single-thread/wllama.wasm",
  "multi-thread/wllama.wasm" : "https://cdn.jsdelivr.net/npm/@wllama/[email protected]/esm/multi-thread/wllama.wasm",
};

// ——— DOM refs ———
const $model = document.getElementById('model');
const $load = document.getElementById('loadBtn');
const $unload = document.getElementById('unloadBtn');
const $prog = document.getElementById('prog');
const $stats = document.getElementById('stats');
const $chat = document.getElementById('chat');
const $form = document.getElementById('form');
const $input = document.getElementById('input');
const $send = document.getElementById('sendBtn');
const $stop = document.getElementById('stopBtn');
const $iso = document.getElementById('isoNote');
const $customBox = document.getElementById('customBox');
const $customRepo = document.getElementById('customRepo');
const $customFile = document.getElementById('customFile');

// ——— State ———
const decoder = new TextDecoder();
const wllama = new Wllama(CONFIG_PATHS, { logger: LoggerWithoutDebug });
let aborter = null;
let loaded = false;
let eotToken = -1;
let sysPrompt = "You are a helpful, concise assistant. Keep answers short and clear.";

// Keep RAM low for mobile: small context + FP16 V‑cache (WASM safe)
const LOAD_CONFIG = {
  n_ctx: 768,
  n_batch: 128,
  cache_type_k: "q4_0",
  cache_type_v: "f16",
  flash_attn: false,
  progressCallback: ({ loaded, total }) => {
    const pct = (total && total > 0) ? Math.round(loaded / total * 100) : 0;
    $prog.style.width = pct + '%';
  }
};

const messages = [ { role: "system", content: sysPrompt } ];
// ——— Chat template for Gemma IT ———
const GEMMA_JINJA = `{{ bos_token }}
{%- if messages[0]['role'] == 'system' -%}
{%- if messages[0]['content'] is string -%}
{%- set first_user_prefix = messages[0]['content'] + '\n\n' -%}
{%- else -%}
{%- set first_user_prefix = messages[0]['content'][0]['text'] + '\n\n' -%}
{%- endif -%}
{%- set loop_messages = messages[1:] -%}
{%- else -%}
{%- set first_user_prefix = "" -%}
{%- set loop_messages = messages -%}
{%- endif -%}
{%- for message in loop_messages -%}
{%- set role = (message['role'] == 'assistant') and 'model' or message['role'] -%}
<start_of_turn>{{ role }}
{{ (loop.first and first_user_prefix or '') ~ (message['content'] if message['content'] is string else message['content'][0]['text']) | trim }}<end_of_turn>
{%- endfor -%}
{%- if add_generation_prompt -%}
<start_of_turn>model
{%- endif -%}`;
// ——— UI helpers ———
const ui = {
  add(role, text) {
    const row = document.createElement('div');
    row.className = 'msg ' + role;
    row.innerHTML = `
      <div class="role">${role}</div>
      <div class="bubble"></div>
    `;
    row.querySelector('.bubble').textContent = text;
    $chat.appendChild(row);
    $chat.scrollTop = $chat.scrollHeight;
    return row.querySelector('.bubble');
  },
  setStats(txt) { $stats.textContent = txt; }
};

function noteIsolation() {
  if (!crossOriginIsolated) {
    $iso.textContent = 'Single‑thread mode (serve with COOP/COEP for multithread)';
  } else {
    $iso.textContent = 'Cross‑origin isolated: multithread on';
  }
}
noteIsolation();

function truncateHistoryForMobile(maxTokensRough = 900) {
  const maxChars = maxTokensRough * 4; // rough heuristic: ~4 chars per token
  function clip(s) { return s.length <= maxChars ? s : ('…' + s.slice(s.length - maxChars)); }
  let kept = [messages[0]]; // keep system
  const lastTurns = messages.slice(1).slice(-8); // skip system so it is not duplicated
  for (const m of lastTurns) kept.push({ role: m.role, content: clip(m.content) });
  messages.length = 0; messages.push(...kept);
}

function getSelectedModel() {
  const parsed = JSON.parse($model.value);
  if (parsed.custom) {
    const id = ($customRepo.value || '').trim();
    const file = ($customFile.value || '').trim();
    if (!id || !file) throw new Error('Enter HF repo id and GGUF file for custom model.');
    return { id, file, label: `Custom: ${id}/${file}` };
  }
  return parsed;
}

function isGemmaSelected() {
  const { id, file, label } = getSelectedModel();
  return /gemma/i.test(id) || /gemma/i.test(file) || /gemma/i.test(label);
}

async function ensureLoaded() {
  if (loaded) return;
  $prog.style.width = '0%';
  const choice = getSelectedModel();
  ui.setStats(`Fetching ${choice.file}…`);
  try {
    await wllama.loadModelFromHF(choice.id, choice.file, LOAD_CONFIG);
  } catch (e) {
    throw new Error(`Load failed for ${choice.id}/${choice.file}. If the repo is gated or lacks CORS, try a public mirror / different quant. Details: ${e?.message || e}`);
  }
  loaded = true;
  eotToken = wllama.getEOT();
  const meta = await wllama.getModelMetadata();
  const ctx = wllama.getLoadedContextInfo();
  const thr = wllama.getNumThreads?.() ?? 1;
  ui.setStats(`Loaded ${choice.file} • ${meta.n_params?.toLocaleString?.() || ''} params • ctx ${ctx.n_ctx} • threads ${thr}`);
  $load.disabled = true; $unload.disabled = false;
}

async function unloadModel() {
  try { await wllama.exit(); } catch {}
  loaded = false;
  $load.disabled = false; $unload.disabled = true;
  $prog.style.width = '0%';
  ui.setStats('idle');
}
// ——— Chat flow ———
document.getElementById('loadBtn').addEventListener('click', ensureLoaded);
document.getElementById('unloadBtn').addEventListener('click', unloadModel);
document.getElementById('stopBtn').addEventListener('click', () => aborter?.abort());

$model.addEventListener('change', () => {
  const isCustom = JSON.parse($model.value).custom === true;
  $customBox.open = isCustom;
});

$form.addEventListener('submit', async (ev) => {
  ev.preventDefault();
  const text = ($input.value || '').trim();
  if (!text) return;
  await ensureLoaded();

  messages.push({ role: 'user', content: text });
  ui.add('user', text);
  $input.value = '';

  const assistantBubble = ui.add('assistant', '');
  truncateHistoryForMobile(600);

  $send.disabled = true; $stop.disabled = true; // flipped to false once streaming starts
  aborter = new AbortController();

  const nPredict = parseInt(document.getElementById('nPredict').value, 10);
  const temp = parseFloat(document.getElementById('temp').value);
  const top_p = parseFloat(document.getElementById('topp').value);
  const top_k = parseInt(document.getElementById('topk').value, 10);

  const t0 = performance.now();
  let outText = '';

  try {
    const opts = {
      stream: true,
      useCache: true,
      nPredict,
      sampling: { temp, top_p, top_k },
      stopTokens: eotToken > 0 ? [eotToken] : undefined,
      abortSignal: aborter.signal
    };

    let stream;
    if (isGemmaSelected()) {
      // Render messages with Gemma template, then complete as plain text
      const prompt = await wllama.formatChat(messages, /* addAssistant */ true, GEMMA_JINJA);
      $stop.disabled = false;
      stream = await wllama.createCompletion(prompt, opts);
    } else {
      // Other models: rely on their embedded chat templates
      $stop.disabled = false;
      stream = await wllama.createChatCompletion(messages, opts);
    }

    for await (const chunk of stream) {
      const piece = decoder.decode(chunk.piece); // reuse the shared TextDecoder
      outText += piece;
      assistantBubble.textContent = outText;
      $chat.scrollTop = $chat.scrollHeight;
    }
    const dt = (performance.now() - t0) / 1000;
    const tokSec = Math.max(1, Math.round(outText.length / 4)) / dt;
    ui.setStats(`gen: ${tokSec.toFixed(1)} tok/s (rough)`);
    messages.push({ role: 'assistant', content: outText });
  } catch (err) {
    if (err && err.name === 'AbortError') {
      assistantBubble.textContent += '\n\n[stopped]';
    } else {
      console.error(err);
      const warn = document.createElement('span');
      warn.className = 'warn';
      warn.textContent = `\n\nError: ${String(err)}`;
      assistantBubble.appendChild(warn); // avoid injecting the error string as HTML
    }
  } finally {
    $send.disabled = false; $stop.disabled = true;
    aborter = null;
  }
});

// Enter‑to‑send on mobile; Shift+Enter for newline
$input.addEventListener('keydown', (e) => {
  if (e.key === 'Enter' && !e.shiftKey) {
    e.preventDefault();
    $send.click();
  }
});
</script>

<!--
  Changes for Gemma:
  • Added GEMMA_JINJA chat template (<start_of_turn>/<end_of_turn> with BOS).
  • When a Gemma model is selected, messages are formatted via wllama.formatChat(..., GEMMA_JINJA)
    and sent to createCompletion() to avoid the ChatML (<|im_start|>/<|im_end|>) fallback.
  • Non‑Gemma models still use createChatCompletion().
-->
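<!--
  Example (approximate): for the default system prompt plus a made-up user turn ("Hello!"),
  GEMMA_JINJA with add_generation_prompt=true should render roughly the following. BOS handling
  and exact newline placement depend on the tokenizer metadata and the Jinja renderer, so treat
  this as a sketch rather than byte-exact output:

  <bos><start_of_turn>user
  You are a helpful, concise assistant. Keep answers short and clear.

  Hello!<end_of_turn>
  <start_of_turn>model
-->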
</body>
</html>
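The isolation note in index.html ("serve with COOP/COEP for multithread") refers to cross-origin isolation: the multi-threaded wllama.wasm build needs SharedArrayBuffer, which browsers only expose when the page is served with Cross-Origin-Opener-Policy and Cross-Origin-Embedder-Policy headers. Below is a minimal sketch of such a server for local testing; it assumes Node 18+, and the file name serve.js and port 8080 are illustrative, not part of this Space. Note that COEP: require-corp also requires cross-origin resources (the jsDelivr module and the Hugging Face model files) to be fetched with CORS, so verify they still load.

serve.js (sketch)

// Minimal static server that sets the two headers needed for crossOriginIsolated.
// No path sanitization or caching; for local testing only.
import { createServer } from "node:http";
import { readFile } from "node:fs/promises";
import { extname, join } from "node:path";

const TYPES = { ".html": "text/html", ".css": "text/css", ".js": "text/javascript", ".wasm": "application/wasm" };

createServer(async (req, res) => {
  const path = req.url === "/" ? "/index.html" : req.url.split("?")[0];
  try {
    const body = await readFile(join(process.cwd(), path));
    res.writeHead(200, {
      "Content-Type": TYPES[extname(path)] || "application/octet-stream",
      // Cross-origin isolation: enables SharedArrayBuffer for WASM threads.
      "Cross-Origin-Opener-Policy": "same-origin",
      "Cross-Origin-Embedder-Policy": "require-corp",
    });
    res.end(body);
  } catch {
    res.writeHead(404);
    res.end("not found");
  }
}).listen(8080, () => console.log("http://localhost:8080"));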
style.css
ADDED
@@ -0,0 +1,28 @@
body {
  padding: 2rem;
  font-family: -apple-system, BlinkMacSystemFont, "Arial", sans-serif;
}

h1 {
  font-size: 16px;
  margin-top: 0;
}

p {
  color: rgb(107, 114, 128);
  font-size: 15px;
  margin-bottom: 10px;
  margin-top: 5px;
}

.card {
  max-width: 620px;
  margin: 0 auto;
  padding: 16px;
  border: 1px solid lightgray;
  border-radius: 16px;
}

.card p:last-child {
  margin-bottom: 0;
}