Commit 990bbcc
Parent(s): a0e5a60

add webllm with base model

Files changed:
- bun.lockb +0 -0
- package.json +1 -0
- src/pages/index.tsx +72 -13
bun.lockb CHANGED
Binary files a/bun.lockb and b/bun.lockb differ
package.json CHANGED
@@ -9,6 +9,7 @@
     "lint": "next lint"
   },
   "dependencies": {
+    "@mlc-ai/web-llm": "^0.2.60",
     "@uiw/react-codemirror": "^4.23.0",
     "codemirror-extension-inline-suggestion": "^0.0.3",
     "next": "14.2.5",
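The binary bun.lockb change above is just the lockfile side of this dependency bump; with Bun as the package manager it would typically come from running bun add @mlc-ai/web-llm, which updates package.json and regenerates bun.lockb together.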
src/pages/index.tsx CHANGED
@@ -1,8 +1,9 @@
-import Image from "next/image";
 import { Inter } from "next/font/google";
 import CodeMirror from "@uiw/react-codemirror";
 import { inlineSuggestion } from "codemirror-extension-inline-suggestion";
 import { EditorState } from "@codemirror/state";
+import * as webllm from "@mlc-ai/web-llm";
+import { useState, useEffect } from "react";
 
 const inter = Inter({ subsets: ["latin"] });
 
@@ -18,6 +19,59 @@ const fetchRandomWord = async (state: EditorState): Promise<string> => {
 };
 
 export default function Home() {
+  const [engine, setEngine] = useState<webllm.MLCEngineInterface | null>(null);
+  const [isLoading, setIsLoading] = useState(false);
+  const [loadingStatus, setLoadingStatus] = useState('');
+
+  useEffect(() => {
+    async function loadWebLLM() {
+      setIsLoading(true);
+      const initProgressCallback = (report: webllm.InitProgressReport) => {
+        setLoadingStatus(report.text);
+      };
+
+      const selectedModel = "SmolLM-360M-q016-MLC";
+      const appConfig: webllm.AppConfig = {
+        model_list: [{
+          model: `https://huggingface.co/cfahlgren1/SmolLM-360M-q016-MLC`,
+          model_id: 'SmolLM-360M-q016-MLC',
+          model_lib: `${webllm.modelLibURLPrefix}${webllm.modelVersion}/SmolLM-360M-Instruct-q0f16-ctx2k_cs1k-webgpu.wasm`,
+          overrides: { context_window_size: 2048 },
+        }],
+      };
+
+      try {
+        const newEngine = await webllm.CreateMLCEngine(selectedModel, {
+          appConfig,
+          initProgressCallback,
+          logLevel: "INFO",
+        });
+        setEngine(newEngine);
+      } catch (err) {
+        console.error(`Failed to load the model: ${(err as Error).message}`);
+      } finally {
+        setIsLoading(false);
+      }
+    }
+
+    loadWebLLM();
+  }, []);
+
+  const generateCompletion = async (content: string) => {
+    if (!engine) return;
+
+    try {
+      const response = await engine.completions.create({
+        prompt: content,
+        max_tokens: 15,
+      });
+      return response.choices[0].text || "";
+    } catch (err) {
+      console.error(`Error: ${(err as Error).message}`);
+      return "";
+    }
+  };
+
   return (
     <div>
       <h1 className="text-6xl text-slate-800 mt-28 font-bold font-sans text-center">
@@ -26,22 +80,27 @@ export default function Home()
       <p className="text-slate-800 italic text-sm mb-4 mt-2 text-center">
         What if you had a 350M parameter model in your pocket?
       </p>
-
-      <
-
-
-
-
-
+      {isLoading ? (
+        <p className="text-center mt-4">{loadingStatus}</p>
+      ) : (
+        <div className="flex justify-center mt-10">
+          <div className="w-full border-2 border-slate-200 shadow-2xl rounded-lg max-w-4xl">
+            <CodeMirror
+              placeholder="Type anything to suggest a word"
+              height="400px"
+              extensions={[
                 inlineSuggestion({
-                  fetchFn:
+                  fetchFn: async (state: EditorState) => {
+                    const content = state.doc.toString();
+                    return (await generateCompletion(content)) || "";
+                  },
                   delay: 500
                 })
-              ]
-
-
+              ]}
+            />
+          </div>
         </div>
-
+      )}
     </div>
   );
 }
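Stripped of the React state handling, the change above boils down to two WebLLM calls: webllm.CreateMLCEngine with a custom model_list entry pointing at the SmolLM build, and engine.completions.create for a short raw completion. A minimal standalone sketch of that flow, reusing the same model URL, ids, and parameters as the diff (the suggest helper name is just for illustration, and it assumes a WebGPU-capable browser where the model files are reachable):

import * as webllm from "@mlc-ai/web-llm";

// Same custom model entry as in the commit: a SmolLM 360M build plus its WebGPU model library.
const appConfig: webllm.AppConfig = {
  model_list: [{
    model: "https://huggingface.co/cfahlgren1/SmolLM-360M-q016-MLC",
    model_id: "SmolLM-360M-q016-MLC",
    model_lib: `${webllm.modelLibURLPrefix}${webllm.modelVersion}/SmolLM-360M-Instruct-q0f16-ctx2k_cs1k-webgpu.wasm`,
    overrides: { context_window_size: 2048 },
  }],
};

// Download and compile the model, then ask it for a short raw completion.
// Hypothetical helper, not part of the commit.
async function suggest(prompt: string): Promise<string> {
  const engine = await webllm.CreateMLCEngine("SmolLM-360M-q016-MLC", {
    appConfig,
    initProgressCallback: (report) => console.log(report.text),
  });
  const response = await engine.completions.create({ prompt, max_tokens: 15 });
  return response.choices[0].text || "";
}

In the component itself the engine is created once inside useEffect and kept in state, so each inlineSuggestion fetchFn call only pays for the 15-token completion, not for reloading the model.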