This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
Files changed (50)
  1. .dockerignore +11 -0
  2. .env +158 -0
  3. .env.ci +1 -0
  4. .env.template +298 -0
  5. .eslintignore +13 -0
  6. .eslintrc.cjs +44 -0
  7. .github/release.yml +16 -0
  8. .github/workflows/build-image.yml +125 -0
  9. .github/workflows/deploy-release.yml +44 -0
  10. .github/workflows/deploy-staging.yml +24 -0
  11. .github/workflows/lint-and-test.yml +56 -0
  12. .gitignore +15 -0
  13. .npmrc +1 -0
  14. .prettierignore +13 -0
  15. .prettierrc +8 -0
  16. .vscode/settings.json +8 -0
  17. Dockerfile +88 -0
  18. LICENSE +203 -0
  19. PRIVACY.md +42 -0
  20. PROMPTS.md +69 -0
  21. README.md +828 -1
  22. _app/env.js +0 -1
  23. _app/immutable/assets/0.DViICDYp.css +0 -1
  24. _app/immutable/assets/2.Dl1cvM0g.css +0 -1
  25. _app/immutable/assets/_layout.DViICDYp.css +0 -1
  26. _app/immutable/assets/_page.Dl1cvM0g.css +0 -1
  27. _app/immutable/chunks/entry.CsquK5o6.js +0 -3
  28. _app/immutable/chunks/index.C4D7lu78.js +0 -1
  29. _app/immutable/chunks/scheduler.CtbWrGNo.js +0 -1
  30. _app/immutable/entry/app.Bje1ZUR5.js +0 -2
  31. _app/immutable/entry/start.BZni4wHA.js +0 -1
  32. _app/immutable/nodes/0.dPy0WIMN.js +0 -1
  33. _app/immutable/nodes/1.BADQ-P6Z.js +0 -1
  34. _app/immutable/nodes/2.C-zSEB19.js +0 -3
  35. _app/version.json +0 -1
  36. entrypoint.sh +19 -0
  37. index.html +0 -48
  38. package-lock.json +0 -0
  39. package.json +95 -0
  40. postcss.config.js +6 -0
  41. scripts/populate.ts +269 -0
  42. scripts/updateLocalEnv.ts +20 -0
  43. scripts/updateProdEnv.ts +41 -0
  44. src/ambient.d.ts +4 -0
  45. src/app.d.ts +25 -0
  46. src/app.html +47 -0
  47. src/hooks.server.ts +210 -0
  48. src/lib/actions/clickOutside.ts +18 -0
  49. src/lib/actions/snapScrollToBottom.ts +54 -0
  50. src/lib/assistantStats/refresh-assistants-counts.ts +90 -0
.dockerignore ADDED
@@ -0,0 +1,11 @@
+ Dockerfile
+ .vscode/
+ .idea
+ .gitignore
+ LICENSE
+ README.md
+ node_modules/
+ .svelte-kit/
+ .env*
+ !.env
+ .env.local
.env ADDED
@@ -0,0 +1,158 @@
+ # Use .env.local to change these variables
+ # DO NOT EDIT THIS FILE WITH SENSITIVE DATA
+
+ MONGODB_URL=#your mongodb URL here
+ MONGODB_DB_NAME=chat-ui
+ MONGODB_DIRECT_CONNECTION=false
+
+ COOKIE_NAME=hf-chat
+ HF_TOKEN=#hf_<token> from https://huggingface.co/settings/token
+ HF_API_ROOT=https://api-inference.huggingface.co/models
+
+ OPENAI_API_KEY=#your openai api key here
+ ANTHROPIC_API_KEY=#your anthropic api key here
+ CLOUDFLARE_ACCOUNT_ID=#your cloudflare account id here
+ CLOUDFLARE_API_TOKEN=#your cloudflare api token here
+ COHERE_API_TOKEN=#your cohere api token here
+
+ HF_ACCESS_TOKEN=#LEGACY! Use HF_TOKEN instead
+
+ # used to activate search with web functionality. disabled if none are defined. choose one of the following:
+ YDC_API_KEY=#your docs.you.com api key here
+ SERPER_API_KEY=#your serper.dev api key here
+ SERPAPI_KEY=#your serpapi key here
+ SERPSTACK_API_KEY=#your serpstack api key here
+ USE_LOCAL_WEBSEARCH=#set to true to parse google results yourself, overrides other API keys
+ SEARXNG_QUERY_URL=# where '<query>' will be replaced with query keywords see https://docs.searxng.org/dev/search_api.html eg https://searxng.yourdomain.com/search?q=<query>&engines=duckduckgo,google&format=json
+
+ WEBSEARCH_ALLOWLIST=`[]` # if it's defined, allow websites from only this list.
+ WEBSEARCH_BLOCKLIST=`[]` # if it's defined, block websites from this list.
+
+ # Parameters to enable open id login
+ OPENID_CONFIG=`{
+ "PROVIDER_URL": "",
+ "CLIENT_ID": "",
+ "CLIENT_SECRET": "",
+ "SCOPES": "",
+ "NAME_CLAIM": ""
+ }`
+
+ # /!\ legacy openid settings, prefer the config above
+ OPENID_CLIENT_ID=
+ OPENID_CLIENT_SECRET=
+ OPENID_SCOPES="openid profile" # Add "email" for some providers like Google that do not provide preferred_username
+ OPENID_NAME_CLAIM="name" # Change to "username" for some providers that do not provide name
+ OPENID_PROVIDER_URL=https://huggingface.co # for Google, use https://accounts.google.com
+ OPENID_TOLERANCE=
+ OPENID_RESOURCE=
+
+ # Parameters to enable a global mTLS context for client fetch requests
+ USE_CLIENT_CERTIFICATE=false
+ CERT_PATH=#
+ KEY_PATH=#
+ CA_PATH=#
+ CLIENT_KEY_PASSWORD=#
+ REJECT_UNAUTHORIZED=true
+
+ TEXT_EMBEDDING_MODELS = `[
+ {
+ "name": "Xenova/gte-small",
+ "displayName": "Xenova/gte-small",
+ "description": "Local embedding model running on the server.",
+ "chunkCharLength": 512,
+ "endpoints": [
+ { "type": "transformersjs" }
+ ]
+ }
+ ]`
+
+ # 'name', 'userMessageToken', 'assistantMessageToken' are required
+ MODELS=`[
+ {
+ "name": "mistralai/Mistral-7B-Instruct-v0.1",
+ "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
+ "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
+ "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
+ "preprompt": "",
+ "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
+ "parameters": {
+ "temperature": 0.1,
+ "top_p": 0.95,
+ "repetition_penalty": 1.2,
+ "top_k": 50,
+ "truncate": 3072,
+ "max_new_tokens": 1024,
+ "stop": ["</s>"]
+ },
+ "promptExamples": [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ }
+ ]`
+
+ OLD_MODELS=`[]`# any removed models, `{ name: string, displayName?: string, id?: string }`
+ TASK_MODEL= # name of the model used for tasks such as summarizing title, creating query, etc.
+
+ PUBLIC_ORIGIN=#https://huggingface.co
+ PUBLIC_SHARE_PREFIX=#https://hf.co/chat
+ PUBLIC_GOOGLE_ANALYTICS_ID=#G-XXXXXXXX / Leave empty to disable
+ PUBLIC_PLAUSIBLE_SCRIPT_URL=#/js/script.js / Leave empty to disable
+ PUBLIC_ANNOUNCEMENT_BANNERS=`[
+ {
+ "title": "Code Llama 70B is available! 🦙",
+ "linkTitle": "try it",
+ "linkHref": "https://huggingface.co/chat?model=codellama/CodeLlama-70b-Instruct-hf"
+ }
+ ]`
+
+ PUBLIC_APPLE_APP_ID=#1234567890 / Leave empty to disable
+
+ PARQUET_EXPORT_DATASET=
+ PARQUET_EXPORT_HF_TOKEN=
+ ADMIN_API_SECRET=# secret to admin API calls, like computing usage stats or exporting parquet data
+
+ PARQUET_EXPORT_SECRET=#DEPRECATED, use ADMIN_API_SECRET instead
+
+ RATE_LIMIT= # /!\ Legacy definition of messages per minute. Use USAGE_LIMITS.messagesPerMinute instead
+ MESSAGES_BEFORE_LOGIN=# how many messages a user can send in a conversation before having to login. set to 0 to force login right away
+
+ APP_BASE="" # base path of the app, e.g. /chat, left blank as default
+ PUBLIC_APP_NAME=ChatUI # name used as title throughout the app
+ PUBLIC_APP_ASSETS=chatui # used to find logos & favicons in static/$PUBLIC_APP_ASSETS
+ PUBLIC_APP_COLOR=blue # can be any of tailwind colors: https://tailwindcss.com/docs/customizing-colors#default-color-palette
+ PUBLIC_APP_DESCRIPTION=# description used throughout the app (if not set, a default one will be used)
+ PUBLIC_APP_DATA_SHARING=#set to 1 to enable options & text regarding data sharing
+ PUBLIC_APP_DISCLAIMER=#set to 1 to show a disclaimer on login page
+ PUBLIC_APP_DISCLAIMER_MESSAGE="Disclaimer: AI is an area of active research with known problems such as biased generation and misinformation. Do not use this application for high-stakes decisions or advice. Do not insert your personal data, especially sensitive, like health data."
+ LLM_SUMMERIZATION=true
+
+ EXPOSE_API=true
+ # PUBLIC_APP_NAME=HuggingChat
+ # PUBLIC_APP_ASSETS=huggingchat
+ # PUBLIC_APP_COLOR=yellow
+ # PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
+ # PUBLIC_APP_DATA_SHARING=1
+ # PUBLIC_APP_DISCLAIMER=1
+
+ ENABLE_ASSISTANTS=false #set to true to enable assistants feature
+ ENABLE_ASSISTANTS_RAG=false # /!\ This will let users specify arbitrary URLs that the server will then request. Make sure you have the proper firewall rules in place.
+ REQUIRE_FEATURED_ASSISTANTS=false
+ ENABLE_LOCAL_FETCH=false #set to true to disable the blocklist for local fetches. Only enable this if you have the proper firewall rules to prevent SSRF attacks and understand the implications.
+ ALTERNATIVE_REDIRECT_URLS=`[]` #valide alternative redirect URL for OAuth
+ WEBHOOK_URL_REPORT_ASSISTANT=#provide webhook url to get notified when an assistant gets reported
+
+ ALLOWED_USER_EMAILS=`[]` # if it's defined, only these emails will be allowed to use the app
+
+ USAGE_LIMITS=`{}`
+ ALLOW_INSECURE_COOKIES=false # recommended to keep this to false but set to true if you need to run over http without tls
+ METRICS_PORT=
+ LOG_LEVEL=info
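
Note: as the header comment of this default `.env` says, machine-specific values belong in `.env.local`, which overrides it and is git-ignored (see the `.gitignore` added below). A minimal sketch of such an override, with placeholder values rather than real credentials, might look like:

```env
# .env.local — local overrides of .env; never commit real secrets
MONGODB_URL=mongodb://localhost:27017
HF_TOKEN=hf_xxx # personal token from https://huggingface.co/settings/tokens
```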
.env.ci ADDED
@@ -0,0 +1 @@
+ MONGODB_URL=mongodb://localhost:27017/
.env.template ADDED
@@ -0,0 +1,298 @@
+ # template used in production for HuggingChat.
+
+ MODELS=`[
+ {
+ "name" : "CohereForAI/c4ai-command-r-plus",
+ "tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
+ "description": "Command R+ is Cohere's latest LLM and is the first open weight model to beat GPT4 in the Chatbot Arena!",
+ "modelUrl": "https://huggingface.co/CohereForAI/c4ai-command-r-plus",
+ "websiteUrl": "https://docs.cohere.com/docs/command-r-plus",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/cohere-logo.png",
+ "parameters": {
+ "stop": ["<|END_OF_TURN_TOKEN|>"],
+ "truncate" : 28672,
+ "max_new_tokens" : 4096,
+ "temperature" : 0.3
+ },
+ "promptExamples" : [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ },
+ {
+ "name" : "meta-llama/Meta-Llama-3-70B-Instruct",
+ "description": "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
+ "modelUrl": "https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct",
+ "websiteUrl": "https://llama.meta.com/llama3/",
+ "tokenizer" : "philschmid/meta-llama-3-tokenizer",
+ "promptExamples" : [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ],
+ "parameters": {
+ "stop": ["<|eot_id|>"],
+ "truncate": 6144,
+ "max_new_tokens": 2047
+ }
+ },
+ {
+ "name" : "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+ "tokenizer": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+ "description": "Zephyr 141B-A35B is a fine-tuned version of Mistral 8x22B, trained using ORPO, a novel alignment algorithm.",
+ "modelUrl": "https://huggingface.co/HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+ "websiteUrl": "https://huggingface.co/HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/zephyr-logo.png",
+ "parameters": {
+ "truncate" : 24576,
+ "max_new_tokens" : 8192,
+ },
+ "preprompt" : "You are Zephyr, an assistant developed by KAIST AI, Argilla, and Hugging Face. You should give concise responses to very simple questions, but provide thorough responses to more complex and open-ended questions. You are happy to help with writing, analysis, question answering, math, coding, and all sorts of other tasks.",
+ "promptExamples" : [
+ {
+ "title": "Write a poem",
+ "prompt": "Write a poem to help me remember the first 10 elements on the periodic table, giving each element its own line."
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ },
+ {
+ "name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
+ "description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
+ "websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
+ "modelUrl": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
+ "tokenizer": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+ "preprompt" : "",
+ "chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
+ "parameters" : {
+ "temperature" : 0.6,
+ "top_p" : 0.95,
+ "repetition_penalty" : 1.2,
+ "top_k" : 50,
+ "truncate" : 24576,
+ "max_new_tokens" : 8192,
+ "stop" : ["</s>"]
+ },
+ "promptExamples" : [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ },
+ {
+ "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+ "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
+ "websiteUrl" : "https://nousresearch.com/",
+ "modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+ "tokenizer": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+ "chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+ "promptExamples": [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ],
+ "parameters": {
+ "temperature": 0.7,
+ "top_p": 0.95,
+ "repetition_penalty": 1,
+ "top_k": 50,
+ "truncate": 24576,
+ "max_new_tokens": 2048,
+ "stop": ["<|im_end|>"]
+ }
+ },
+ {
+ "name" : "google/gemma-1.1-7b-it",
+ "description": "Gemma 7B 1.1 is the latest release in the Gemma family of lightweight models built by Google, trained using a novel RLHF method.",
+ "websiteUrl" : "https://blog.google/technology/developers/gemma-open-models/",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/google-logo.png",
+ "modelUrl": "https://huggingface.co/google/gemma-1.1-7b-it",
+ "preprompt": "",
+ "chatPromptTemplate" : "{{#each messages}}{{#ifUser}}<start_of_turn>user\n{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<end_of_turn>\n<start_of_turn>model\n{{/ifUser}}{{#ifAssistant}}{{content}}<end_of_turn>\n{{/ifAssistant}}{{/each}}",
+ "promptExamples": [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ],
+ "parameters": {
+ "do_sample": true,
+ "truncate": 7168,
+ "max_new_tokens": 1024,
+ "stop" : ["<end_of_turn>"]
+ }
+ },
+
+ {
+ "name": "mistralai/Mistral-7B-Instruct-v0.2",
+ "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
+ "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
+ "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
+ "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
+ "tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
+ "preprompt": "",
+ "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
+ "parameters": {
+ "temperature": 0.3,
+ "top_p": 0.95,
+ "repetition_penalty": 1.2,
+ "top_k": 50,
+ "truncate": 3072,
+ "max_new_tokens": 1024,
+ "stop": ["</s>"]
+ },
+ "promptExamples": [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ },
+ {
+ "name": "microsoft/Phi-3-mini-4k-instruct",
+ "tokenizer": "microsoft/Phi-3-mini-4k-instruct",
+ "description" : "Phi-3 Mini-4K-Instruct is a 3.8B parameters, lightweight, state-of-the-art open model built upon datasets used for Phi-2.",
+ "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/microsoft-logo.png",
+ "modelUrl": "https://huggingface.co/microsoft/Phi-3-mini-4k-instruct",
+ "websiteUrl": "https://azure.microsoft.com/en-us/blog/introducing-phi-3-redefining-whats-possible-with-slms/",
+ "preprompt": "",
+ "chatPromptTemplate": "<s>{{preprompt}}{{#each messages}}{{#ifUser}}<|user|>\n{{content}}<|end|>\n<|assistant|>\n{{/ifUser}}{{#ifAssistant}}{{content}}<|end|>\n{{/ifAssistant}}{{/each}}",
+ "parameters": {
+ "stop": ["<|end|>", "<|endoftext|>", "<|assistant|>"],
+ "max_new_tokens": 1024,
+ "truncate": 3071
+ },
+ "promptExamples": [
+ {
+ "title": "Write an email from bullet list",
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+ }, {
+ "title": "Code a snake game",
+ "prompt": "Code a basic snake game in python, give explanations for each step."
+ }, {
+ "title": "Assist in a task",
+ "prompt": "How do I make a delicious lemon cheesecake?"
+ }
+ ]
+ },
+ {
+ "name": "meta-llama/Meta-Llama-3-8B-Instruct",
+ "tokenizer" : "philschmid/meta-llama-3-tokenizer",
+ "parameters": {
+ "temperature": 0.1,
+ "stop": ["<|eot_id|>"],
+ "truncate": 1024,
+ },
+ "unlisted": true
+ }
+ ]`
+
+ OLD_MODELS=`[
+ {"name":"bigcode/starcoder"},
+ {"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"},
+ {"name":"HuggingFaceH4/zephyr-7b-alpha"},
+ {"name":"openchat/openchat_3.5"},
+ {"name":"openchat/openchat-3.5-1210"},
+ {"name": "tiiuae/falcon-180B-chat"},
+ {"name": "codellama/CodeLlama-34b-Instruct-hf"},
+ {"name": "google/gemma-7b-it"},
+ {"name": "meta-llama/Llama-2-70b-chat-hf"},
+ {"name": "codellama/CodeLlama-70b-Instruct-hf"},
+ {"name": "openchat/openchat-3.5-0106"}
+ ]`
+
+ TASK_MODEL='meta-llama/Meta-Llama-3-8B-Instruct'
+
+ TEXT_EMBEDDING_MODELS = `[
+ {
+ "name": "bge-base-en-v1-5-sxa",
+ "displayName": "bge-base-en-v1-5-sxa",
+ "chunkCharLength": 512,
+ "endpoints": [
+ { "type": "tei",
+ "url" : "https://huggingchat-tei.hf.space/"
+ }
+ ]
+ }
+ ]`
+
+
+ APP_BASE="/chat"
+ PUBLIC_ORIGIN=https://huggingface.co
+ PUBLIC_SHARE_PREFIX=https://hf.co/chat
+ PUBLIC_ANNOUNCEMENT_BANNERS=`[]`
+
+ PUBLIC_APP_NAME=HuggingChat
+ PUBLIC_APP_ASSETS=huggingchat
+ PUBLIC_APP_COLOR=yellow
+ PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
+ PUBLIC_APP_DISCLAIMER_MESSAGE="Disclaimer: AI is an area of active research with known problems such as biased generation and misinformation. Do not use this application for high-stakes decisions or advice."
+ PUBLIC_APP_DATA_SHARING=0
+ PUBLIC_APP_DISCLAIMER=1
+
+ PUBLIC_PLAUSIBLE_SCRIPT_URL="/js/script.js"
+ PUBLIC_APPLE_APP_ID=6476778843
+ # Not part of the .env but set as other variables in the space
+ # ADDRESS_HEADER=X-Forwarded-For
+ # XFF_DEPTH=2
+
+ ENABLE_ASSISTANTS=true
+ ENABLE_ASSISTANTS_RAG=true
+ REQUIRE_FEATURED_ASSISTANTS=true
+ EXPOSE_API=true
+
+ ALTERNATIVE_REDIRECT_URLS=`[
+ huggingchat://login/callback
+ ]`
+
+ WEBSEARCH_BLOCKLIST=`["youtube.com", "twitter.com"]`
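
The trailing comment in this template mentions two proxy-related variables that are set on the Space itself rather than in the file. A self-hosted deployment behind a reverse proxy would set something equivalent in its own environment; the values below simply restate the commented example and are illustrative only:

```env
ADDRESS_HEADER=X-Forwarded-For
XFF_DEPTH=2
```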
.eslintignore ADDED
@@ -0,0 +1,13 @@
+ .DS_Store
+ node_modules
+ /build
+ /.svelte-kit
+ /package
+ .env
+ .env.*
+ !.env.example
+
+ # Ignore files for PNPM, NPM and YARN
+ pnpm-lock.yaml
+ package-lock.json
+ yarn.lock
.eslintrc.cjs ADDED
@@ -0,0 +1,44 @@
+ module.exports = {
+ 	root: true,
+ 	parser: "@typescript-eslint/parser",
+ 	extends: [
+ 		"eslint:recommended",
+ 		"plugin:@typescript-eslint/recommended",
+ 		"plugin:svelte/recommended",
+ 		"prettier",
+ 	],
+ 	plugins: ["@typescript-eslint"],
+ 	ignorePatterns: ["*.cjs"],
+ 	overrides: [
+ 		{
+ 			files: ["*.svelte"],
+ 			parser: "svelte-eslint-parser",
+ 			parserOptions: {
+ 				parser: "@typescript-eslint/parser",
+ 			},
+ 		},
+ 	],
+ 	parserOptions: {
+ 		sourceType: "module",
+ 		ecmaVersion: 2020,
+ 		extraFileExtensions: [".svelte"],
+ 	},
+ 	rules: {
+ 		"no-shadow": ["error"],
+ 		"@typescript-eslint/no-explicit-any": "error",
+ 		"@typescript-eslint/no-non-null-assertion": "error",
+ 		"@typescript-eslint/no-unused-vars": [
+ 			// prevent variables with a _ prefix from being marked as unused
+ 			"error",
+ 			{
+ 				argsIgnorePattern: "^_",
+ 			},
+ 		],
+ 		"object-shorthand": ["error", "always"],
+ 	},
+ 	env: {
+ 		browser: true,
+ 		es2017: true,
+ 		node: true,
+ 	},
+ };
.github/release.yml ADDED
@@ -0,0 +1,16 @@
+ changelog:
+   exclude:
+     labels:
+       - huggingchat
+       - CI/CD
+       - documentation
+   categories:
+     - title: Features
+       labels:
+         - enhancement
+     - title: Bugfixes
+       labels:
+         - bug
+     - title: Other changes
+       labels:
+         - "*"
.github/workflows/build-image.yml ADDED
@@ -0,0 +1,125 @@
+ name: Build and Publish Image
+
+ on:
+   push:
+     branches:
+       - "main"
+   pull_request:
+     branches:
+       - "*"
+     paths:
+       - "Dockerfile"
+       - "entrypoint.sh"
+   workflow_dispatch:
+   release:
+     types: [published, edited]
+
+ jobs:
+   build-and-publish-image-with-db:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+
+       - name: Extract package version
+         id: package-version
+         run: |
+           VERSION=$(jq -r .version package.json)
+           echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+           MAJOR=$(echo $VERSION | cut -d '.' -f1)
+           echo "MAJOR=$MAJOR" >> $GITHUB_OUTPUT
+           MINOR=$(echo $VERSION | cut -d '.' -f1).$(echo $VERSION | cut -d '.' -f2)
+           echo "MINOR=$MINOR" >> $GITHUB_OUTPUT
+
+       - name: Docker metadata
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: |
+             ghcr.io/huggingface/chat-ui-db
+           tags: |
+             type=raw,value=${{ steps.package-version.outputs.VERSION }},enable=${{github.event_name == 'release'}}
+             type=raw,value=${{ steps.package-version.outputs.MAJOR }},enable=${{github.event_name == 'release'}}
+             type=raw,value=${{ steps.package-version.outputs.MINOR }},enable=${{github.event_name == 'release'}}
+             type=raw,value=latest,enable={{is_default_branch}}
+             type=sha,enable={{is_default_branch}}
+
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+
+       - name: Login to GitHub Container Registry
+         if: github.event_name != 'pull_request'
+         uses: docker/login-action@v3
+         with:
+           registry: ghcr.io
+           username: ${{ github.repository_owner }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+
+       - name: Build and Publish Docker Image with DB
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           file: Dockerfile
+           push: ${{ github.event_name != 'pull_request' }}
+           tags: ${{ steps.meta.outputs.tags }}
+           labels: ${{ steps.meta.outputs.labels }}
+           platforms: linux/amd64,linux/arm64
+           build-args: |
+             INCLUDE_DB=true
+   build-and-publish-image-nodb:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+
+       - name: Extract package version
+         id: package-version
+         run: |
+           VERSION=$(jq -r .version package.json)
+           echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+           MAJOR=$(echo $VERSION | cut -d '.' -f1)
+           echo "MAJOR=$MAJOR" >> $GITHUB_OUTPUT
+           MINOR=$(echo $VERSION | cut -d '.' -f1).$(echo $VERSION | cut -d '.' -f2)
+           echo "MINOR=$MINOR" >> $GITHUB_OUTPUT
+
+       - name: Docker metadata
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: |
+             ghcr.io/huggingface/chat-ui
+           tags: |
+             type=raw,value=${{ steps.package-version.outputs.VERSION }},enable=${{github.event_name == 'release'}}
+             type=raw,value=${{ steps.package-version.outputs.MAJOR }},enable=${{github.event_name == 'release'}}
+             type=raw,value=${{ steps.package-version.outputs.MINOR }},enable=${{github.event_name == 'release'}}
+             type=raw,value=latest,enable={{is_default_branch}}
+             type=sha,enable={{is_default_branch}}
+
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+
+       - name: Login to GitHub Container Registry
+         if: github.event_name != 'pull_request'
+         uses: docker/login-action@v3
+         with:
+           registry: ghcr.io
+           username: ${{ github.repository_owner }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+
+       - name: Build and Publish Docker Image without DB
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           file: Dockerfile
+           push: ${{ github.event_name != 'pull_request' }}
+           tags: ${{ steps.meta.outputs.tags }}
+           labels: ${{ steps.meta.outputs.labels }}
+           platforms: linux/amd64,linux/arm64
+           build-args: |
+             INCLUDE_DB=false
.github/workflows/deploy-release.yml ADDED
@@ -0,0 +1,44 @@
+ name: Deploy to production
+ on:
+   # run this workflow manually from the Actions tab
+   workflow_dispatch:
+
+ jobs:
+   update-env:
+     runs-on: ubuntu-latest
+     timeout-minutes: 10
+
+     steps:
+       - uses: actions/checkout@v3
+       - uses: actions/setup-node@v3
+         with:
+           node-version: "20"
+           cache: "npm"
+       - run: npm install ci
+       - name: "Update DOTENV_LOCAL in prod"
+         env:
+           HF_TOKEN: ${{ secrets.HF_TOKEN }}
+           SERPER_API_KEY: ${{ secrets.SERPER_API_KEY }}
+           OPENID_CONFIG: ${{ secrets.OPENID_CONFIG }}
+           MONGODB_URL: ${{ secrets.MONGODB_URL }}
+           HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
+           WEBHOOK_URL_REPORT_ASSISTANT: ${{ secrets.WEBHOOK_URL_REPORT_ASSISTANT }}
+           ADMIN_API_SECRET: ${{ secrets.ADMIN_API_SECRET }}
+           USAGE_LIMITS: ${{ secrets.USAGE_LIMITS }}
+           MESSAGES_BEFORE_LOGIN: ${{ secrets.MESSAGES_BEFORE_LOGIN }}
+         run: npm run updateProdEnv
+   sync-to-hub:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Check large files
+         uses: ActionsDesk/[email protected]
+         with:
+           filesizelimit: 10485760 # this is 10MB so we can sync to HF Spaces
+       - uses: actions/checkout@v3
+         with:
+           fetch-depth: 0
+           lfs: true
+       - name: Push to hub
+         env:
+           HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
+         run: git push https://nsarrazin:[email protected]/spaces/huggingchat/chat-ui main
.github/workflows/deploy-staging.yml ADDED
@@ -0,0 +1,24 @@
+ name: Deploy to staging environment
+ on:
+   push:
+     branches: [main]
+
+   # to run this workflow manually from the Actions tab
+   workflow_dispatch:
+
+ jobs:
+   sync-to-hub:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Check large files
+         uses: ActionsDesk/[email protected]
+         with:
+           filesizelimit: 10485760 # this is 10MB so we can sync to HF Spaces
+       - uses: actions/checkout@v3
+         with:
+           fetch-depth: 0
+           lfs: true
+       - name: Push to hub
+         env:
+           HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
+         run: git push https://nsarrazin:[email protected]/spaces/huggingchat/chat-ui-staging main
.github/workflows/lint-and-test.yml ADDED
@@ -0,0 +1,56 @@
+ name: Lint and test
+ on:
+   pull_request:
+   push:
+     branches:
+       - main
+
+ jobs:
+   lint:
+     runs-on: ubuntu-latest
+     timeout-minutes: 10
+
+     steps:
+       - uses: actions/checkout@v3
+
+       - uses: actions/setup-node@v3
+         with:
+           node-version: "20"
+           cache: "npm"
+       - run: |
+           npm install ci
+       - name: "Checking lint/format errors"
+         run: |
+           npm run lint
+       - name: "Checking type errors"
+         run: |
+           npm run check
+   test:
+     runs-on: ubuntu-latest
+     timeout-minutes: 10
+
+     services:
+       mongodb:
+         image: mongo:6.0.5
+         ports:
+           - 27017:27017
+
+     steps:
+       - uses: actions/checkout@v3
+
+       - uses: actions/setup-node@v3
+         with:
+           node-version: "20"
+           cache: "npm"
+       - run: |
+           npm ci
+       - name: "Tests"
+         run: |
+           npm run test
+   build-check:
+     runs-on: ubuntu-latest
+     timeout-minutes: 10
+     steps:
+       - uses: actions/checkout@v3
+       - name: Build Docker image
+         run: docker build --secret id=DOTENV_LOCAL,src=.env.ci -t chat-ui:latest .
.gitignore ADDED
@@ -0,0 +1,15 @@
+ .DS_Store
+ node_modules
+ /build
+ /.svelte-kit
+ /package
+ .env
+ .env.*
+ vite.config.js.timestamp-*
+ vite.config.ts.timestamp-*
+ SECRET_CONFIG
+ .idea
+ !.env.ci
+ !.env
+ !.env.template
+ gcp-*.json
.npmrc ADDED
@@ -0,0 +1 @@
+ engine-strict=true
.prettierignore ADDED
@@ -0,0 +1,13 @@
+ .DS_Store
+ node_modules
+ /build
+ /.svelte-kit
+ /package
+ .env
+ .env.*
+ !.env.example
+
+ # Ignore files for PNPM, NPM and YARN
+ pnpm-lock.yaml
+ package-lock.json
+ yarn.lock
.prettierrc ADDED
@@ -0,0 +1,8 @@
+ {
+ 	"useTabs": true,
+ 	"trailingComma": "es5",
+ 	"printWidth": 100,
+ 	"plugins": ["prettier-plugin-svelte", "prettier-plugin-tailwindcss"],
+ 	"pluginSearchDirs": ["."],
+ 	"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
+ }
.vscode/settings.json ADDED
@@ -0,0 +1,8 @@
+ {
+ 	"editor.formatOnSave": true,
+ 	"editor.defaultFormatter": "esbenp.prettier-vscode",
+ 	"editor.codeActionsOnSave": {
+ 		"source.fixAll": "explicit"
+ 	},
+ 	"eslint.validate": ["javascript", "svelte"]
+ }
Dockerfile ADDED
@@ -0,0 +1,88 @@
+ # syntax=docker/dockerfile:1
+ # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+ # you will also find guides on how best to write your Dockerfile
+ ARG INCLUDE_DB=false
+
+ # stage that install the dependencies
+ FROM node:20 as builder-production
+
+ WORKDIR /app
+
+ COPY --link --chown=1000 package-lock.json package.json ./
+ RUN --mount=type=cache,target=/app/.npm \
+         npm set cache /app/.npm && \
+         npm ci --omit=dev
+
+ FROM builder-production as builder
+
+ ARG APP_BASE=
+ ARG PUBLIC_APP_COLOR=blue
+
+ RUN --mount=type=cache,target=/app/.npm \
+         npm set cache /app/.npm && \
+         npm ci
+
+ COPY --link --chown=1000 . .
+
+ RUN npm run build
+
+ # mongo image
+ FROM mongo:latest as mongo
+
+ # image to be used if INCLUDE_DB is false
+ FROM node:20-slim as local_db_false
+
+ # image to be used if INCLUDE_DB is true
+ FROM node:20-slim as local_db_true
+
+ RUN apt-get update
+ RUN apt-get install gnupg curl -y
+ # copy mongo from the other stage
+ COPY --from=mongo /usr/bin/mongo* /usr/bin/
+
+ ENV MONGODB_URL=mongodb://localhost:27017
+ RUN mkdir -p /data/db
+ RUN chown -R 1000:1000 /data/db
+
+ # final image
+ FROM local_db_${INCLUDE_DB} as final
+
+ # build arg to determine if the database should be included
+ ARG INCLUDE_DB=false
+ ENV INCLUDE_DB=${INCLUDE_DB}
+
+ # svelte requires APP_BASE at build time so it must be passed as a build arg
+ ARG APP_BASE=
+ # tailwind requires the primary theme to be known at build time so it must be passed as a build arg
+ ARG PUBLIC_APP_COLOR=blue
+
+
+ # install dotenv-cli
+ RUN npm install -g dotenv-cli
+
+ # switch to a user that works for spaces
+ RUN userdel -r node
+ RUN useradd -m -u 1000 user
+ USER user
+
+ ENV HOME=/home/user \
+         PATH=/home/user/.local/bin:$PATH
+
+ WORKDIR /app
+
+ # add a .env.local if the user doesn't bind a volume to it
+ RUN touch /app/.env.local
+
+ # get the default config, the entrypoint script and the server script
+ COPY --chown=1000 package.json /app/package.json
+ COPY --chown=1000 .env /app/.env
+ COPY --chown=1000 entrypoint.sh /app/entrypoint.sh
+ COPY --chown=1000 gcp-*.json /app/
+
+ #import the build & dependencies
+ COPY --from=builder --chown=1000 /app/build /app/build
+ COPY --from=builder --chown=1000 /app/node_modules /app/node_modules
+
+ RUN chmod +x /app/entrypoint.sh
+
+ CMD ["/bin/bash", "-c", "/app/entrypoint.sh"]
LICENSE ADDED
@@ -0,0 +1,203 @@
+ Copyright 2018- The Hugging Face team. All rights reserved.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
PRIVACY.md ADDED
@@ -0,0 +1,42 @@
+ ## Privacy
+
+ > Last updated: April 15, 2024
+
+ Users of HuggingChat are authenticated through their HF user account.
+
+ We endorse Privacy by Design. As such, your conversations are private to you and will not be shared with anyone, including model authors, for any purpose, including for research or model training purposes.
+
+ Your conversation data will only be stored to let you access past conversations. You can click on the Delete icon to delete any past conversation at any moment.
+
+ 🗓 Please also consult huggingface.co's main privacy policy at <https://huggingface.co/privacy>. To exercise any of your legal privacy rights, please send an email to <[email protected]>.
+
+ ## About available LLMs
+
+ The goal of this app is to showcase that it is now possible to build an open source alternative to ChatGPT. 💪
+
+ We aim to always provide a diverse set of state-of-the-art open LLMs, hence we rotate the available models over time.
+
+ Historically, HuggingChat has been running models such as:
+
+ - [Llama 2 70B](https://huggingface.co/meta-llama/Llama-2-70b-chat-hf)
+ - [CodeLlama 35B](https://about.fb.com/news/2023/08/code-llama-ai-for-coding/)
+ - [Falcon 180B](https://www.tii.ae/news/technology-innovation-institute-introduces-worlds-most-powerful-open-llm-falcon-180b)
+ - [Mistral 7B](https://mistral.ai/news/announcing-mistral-7b/)
+ - [Cohere Command R+](https://huggingface.co/chat/models/CohereForAI/c4ai-command-r-plus)
+ - [Google Gemma 7B](https://huggingface.co/chat/models/google/gemma-1.1-7b-it)
+
+ This is only a partial list. Check the [models](https://huggingface.co/chat/models/) page for an up-to-date list of the best available LLMs.
+
+ ## Technical details
+
+ This app is running in a [Space](https://huggingface.co/docs/hub/spaces-overview), which entails that the code for this UI is publicly visible [inside the Space repo](https://huggingface.co/spaces/huggingchat/chat-ui/tree/main).
+
+ **Further development takes place on the [huggingface/chat-ui GitHub repo](https://github.com/huggingface/chat-ui).**
+
+ The inference backend is running the optimized [text-generation-inference](https://github.com/huggingface/text-generation-inference) on HuggingFace's Inference API infrastructure.
+
+ It is therefore possible to deploy a copy of this app to a Space and customize it (swap model, add some UI elements, or store user messages according to your own Terms and conditions). You can also 1-click deploy your own instance using the [Chat UI Spaces Docker template](https://huggingface.co/new-space?template=huggingchat/chat-ui-template).
+
+ We welcome any feedback on this app: please participate in the public discussion at <https://huggingface.co/spaces/huggingchat/chat-ui/discussions>
+
+ <a target="_blank" href="https://huggingface.co/spaces/huggingchat/chat-ui/discussions"><img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-a-discussion-xl.svg" title="open a discussion"></a>
PROMPTS.md ADDED
@@ -0,0 +1,69 @@
+ # Prompt templates
+
+ These are the templates used to format the conversation history for different models used in HuggingChat. Set them in your `.env.local` [like so](https://github.com/huggingface/chat-ui#chatprompttemplate).
+
+ ## Llama 2
+
+ ```env
+ <s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}
+ ```
+
+ ## CodeLlama
+
+ ```env
+ <s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}
+ ```
+
+ ## Falcon
+
+ ```env
+ System: {{preprompt}}\nUser:{{#each messages}}{{#ifUser}}{{content}}\nFalcon:{{/ifUser}}{{#ifAssistant}}{{content}}\nUser:{{/ifAssistant}}{{/each}}
+ ```
+
+ ## Mistral
+
+ ```env
+ <s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s> {{/ifAssistant}}{{/each}}
+ ```
+
+ ## Zephyr
+
+ ```env
+ <|system|>\n{{preprompt}}</s>\n{{#each messages}}{{#ifUser}}<|user|>\n{{content}}</s>\n<|assistant|>\n{{/ifUser}}{{#ifAssistant}}{{content}}</s>\n{{/ifAssistant}}{{/each}}
+ ```
+
+ ## IDEFICS
+
+ ```env
+ {{#each messages}}{{#ifUser}}User: {{content}}{{/ifUser}}<end_of_utterance>\nAssistant: {{#ifAssistant}}{{content}}\n{{/ifAssistant}}{{/each}}
+ ```
+
+ ## OpenChat
+
+ ```env
+ <s>{{#each messages}}{{#ifUser}}GPT4 User: {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<|end_of_turn|>GPT4 Assistant: {{/ifUser}}{{#ifAssistant}}{{content}}<|end_of_turn|>{{/ifAssistant}}{{/each}}
+ ```
+
+ ## Mixtral
+
+ ```env
+ <s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}
+ ```
+
+ ## ChatML
+
+ ```env
+ {{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}
+ ```
+
+ ## CodeLlama 70B
+
+ ```env
+ <s>{{#if @root.preprompt}}Source: system\n\n {{@root.preprompt}} <step> {{/if}}{{#each messages}}{{#ifUser}}Source: user\n\n {{content}} <step> {{/ifUser}}{{#ifAssistant}}Source: assistant\n\n {{content}} <step> {{/ifAssistant}}{{/each}}Source: assistant\nDestination: user\n\n ``
+ ```
+
+ ## Gemma
+
+ ```env
+ {{#each messages}}{{#ifUser}}<start_of_turn>user\n{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<end_of_turn>\n<start_of_turn>model\n{{/ifUser}}{{#ifAssistant}}{{content}}<end_of_turn>\n{{/ifAssistant}}{{/each}}
+ ```
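
These templates are wired into a model entry through its `chatPromptTemplate` field, as the default `.env` in this change does for Mistral. A shortened, hypothetical `MODELS` entry using the ChatML template above (the model name is a placeholder, not a real deployment):

```env
MODELS=`[
  {
    "name": "example-org/example-chatml-model",
    "chatPromptTemplate": "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}"
  }
]`
```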
README.md CHANGED
@@ -3,5 +3,832 @@ title: chat-ui
3
  emoji: 🔥
4
  colorFrom: purple
5
  colorTo: purple
6
- sdk: static
6
+ sdk: docker
7
+ pinned: false
8
+ license: apache-2.0
9
+ base_path: /chat
10
+ app_port: 3000
11
+ failure_strategy: rollback
12
+ load_balancing_strategy: random
13
  ---
14
+
15
+ # Chat UI
16
+
17
+ ![Chat UI repository thumbnail](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/chatui-websearch.png)
18
+
19
+ A chat interface using open-source models, e.g. OpenAssistant or Llama. It is a SvelteKit app, and it powers the [HuggingChat app on hf.co/chat](https://huggingface.co/chat).
20
+
21
+ 0. [No Setup Deploy](#no-setup-deploy)
22
+ 1. [Setup](#setup)
23
+ 2. [Launch](#launch)
24
+ 3. [Web Search](#web-search)
25
+ 4. [Text Embedding Models](#text-embedding-models)
26
+ 5. [Extra parameters](#extra-parameters)
27
+ 6. [Common issues](#common-issues)
28
+ 7. [Deploying to a HF Space](#deploying-to-a-hf-space)
29
+ 8. [Building](#building)
30
+
31
+ ## No Setup Deploy
32
+
33
+ If you don't want to configure, set up, and launch your own Chat UI yourself, you can use this option as a fast deploy alternative.
34
+
35
+ You can deploy your own customized Chat UI instance with any supported [LLM](https://huggingface.co/models?pipeline_tag=text-generation&sort=trending) of your choice on [Hugging Face Spaces](https://huggingface.co/spaces). To do so, use the chat-ui template [available here](https://huggingface.co/new-space?template=huggingchat/chat-ui-template).
36
+
37
+ Set `HF_TOKEN` in [Space secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables) to deploy a model with gated access or a model in a private repository. It's also compatible with the [Inference for PROs](https://huggingface.co/blog/inference-pro) curated list of powerful models with higher rate limits. Make sure to create your personal token first in your [User Access Tokens settings](https://huggingface.co/settings/tokens).
38
+
39
+ Read the full tutorial [here](https://huggingface.co/docs/hub/spaces-sdks-docker-chatui#chatui-on-spaces).
40
+
41
+ ## Setup
42
+
43
+ The default config for Chat UI is stored in the `.env` file. You will need to override some values to get Chat UI to run locally. This is done in `.env.local`.
44
+
45
+ Start by creating a `.env.local` file in the root of the repository. The bare minimum config you need to get Chat UI to run locally is the following:
46
+
47
+ ```env
48
+ MONGODB_URL=<the URL to your MongoDB instance>
49
+ HF_TOKEN=<your access token>
50
+ ```
51
+
52
+ ### Database
53
+
54
+ The chat history is stored in a MongoDB instance, and a DB instance is required for Chat UI to work.
55
+
56
+ You can use a local MongoDB instance. The easiest way is to spin one up using Docker:
57
+
58
+ ```bash
59
+ docker run -d -p 27017:27017 --name mongo-chatui mongo:latest
60
+ ```
61
+
62
+ In this case, the URL of your DB will be `MONGODB_URL=mongodb://localhost:27017`.
63
+
64
+ Alternatively, you can use a [free MongoDB Atlas](https://www.mongodb.com/pricing) instance for this; Chat UI should fit comfortably within their free tier. You can then set the `MONGODB_URL` variable in `.env.local` to match your instance.
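+
+ As a rough sketch, an Atlas-based config in `.env.local` might look like the following (the cluster host, user, and password are placeholders for your own values):
+
+ ```env
+ MONGODB_URL=mongodb+srv://<user>:<password>@<your-cluster>.mongodb.net
+ ```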
65
+
66
+ ### Hugging Face Access Token
67
+
68
+ If you use a remote inference endpoint, you will need a Hugging Face access token to run Chat UI locally. You can get one from [your Hugging Face profile](https://huggingface.co/settings/tokens).
69
+
70
+ ## Launch
71
+
72
+ After you're done with the `.env.local` file you can run Chat UI locally with:
73
+
74
+ ```bash
75
+ npm install
76
+ npm run dev
77
+ ```
78
+
79
+ ## Web Search
80
+
81
+ Chat UI includes a powerful Web Search feature. It works by:
82
+
83
+ 1. Generating an appropriate search query from the user prompt.
84
+ 2. Performing web search and extracting content from webpages.
85
+ 3. Creating embeddings from texts using a text embedding model.
86
+ 4. Finding, from these embeddings, the ones that are closest to the user query using a vector similarity search (specifically, we use `inner product` distance).
87
+ 5. Getting the texts corresponding to those closest embeddings and performing [Retrieval-Augmented Generation](https://huggingface.co/papers/2005.11401) (i.e. expanding the user prompt by adding those texts so that an LLM can use this information).
88
+
89
+ ## Text Embedding Models
90
+
91
+ By default (for backward compatibility), when the `TEXT_EMBEDDING_MODELS` environment variable is not defined, [transformers.js](https://huggingface.co/docs/transformers.js) embedding models will be used for embedding tasks; specifically, the [Xenova/gte-small](https://huggingface.co/Xenova/gte-small) model.
92
+
93
+ You can customize the embedding model by setting `TEXT_EMBEDDING_MODELS` in your `.env.local` file. For example:
94
+
95
+ ```env
96
+ TEXT_EMBEDDING_MODELS = `[
97
+ {
98
+ "name": "Xenova/gte-small",
99
+ "displayName": "Xenova/gte-small",
100
+ "description": "locally running embedding",
101
+ "chunkCharLength": 512,
102
+ "endpoints": [
103
+ {"type": "transformersjs"}
104
+ ]
105
+ },
106
+ {
107
+ "name": "intfloat/e5-base-v2",
108
+ "displayName": "intfloat/e5-base-v2",
109
+ "description": "hosted embedding model",
110
+ "chunkCharLength": 768,
111
+ "preQuery": "query: ", # See https://huggingface.co/intfloat/e5-base-v2#faq
112
+ "prePassage": "passage: ", # See https://huggingface.co/intfloat/e5-base-v2#faq
113
+ "endpoints": [
114
+ {
115
+ "type": "tei",
116
+ "url": "http://127.0.0.1:8080/",
117
+ "authorization": "TOKEN_TYPE TOKEN" // optional authorization field. Example: "Basic VVNFUjpQQVNT"
118
+ }
119
+ ]
120
+ }
121
+ ]`
122
+ ```
123
+
124
+ The required fields are `name`, `chunkCharLength` and `endpoints`.
125
+ Supported text embedding backends are: [`transformers.js`](https://huggingface.co/docs/transformers.js), [`TEI`](https://github.com/huggingface/text-embeddings-inference) and [`OpenAI`](https://platform.openai.com/docs/guides/embeddings). `transformers.js` models run locally as part of `chat-ui`, whereas `TEI` models run in a different environment and are accessed through an API endpoint. `openai` models are accessed through the [OpenAI API](https://platform.openai.com/docs/guides/embeddings).
126
+
127
+ When more than one embedding model is supplied in the `.env.local` file, the first will be used by default, and the others will only be used by LLMs whose `embeddingModel` is configured to the name of that model.
128
+
129
+ ## Extra parameters
130
+
131
+ ### OpenID connect
132
+
133
+ The login feature is disabled by default and users are attributed a unique ID based on their browser. But if you want to use OpenID to authenticate your users, you can add the following to your `.env.local` file:
134
+
135
+ ```env
136
+ OPENID_CONFIG=`{
137
+ PROVIDER_URL: "<your OIDC issuer>",
138
+ CLIENT_ID: "<your OIDC client ID>",
139
+ CLIENT_SECRET: "<your OIDC client secret>",
140
+ SCOPES: "openid profile",
141
+ TOLERANCE: // optional
142
+ RESOURCE: // optional
143
+ }`
144
+ ```
145
+
146
+ These variables will enable the OpenID sign-in modal for users.
147
+
148
+ ### Theming
149
+
150
+ You can use a few environment variables to customize the look and feel of chat-ui. These are the defaults:
151
+
152
+ ```env
153
+ PUBLIC_APP_NAME=ChatUI
154
+ PUBLIC_APP_ASSETS=chatui
155
+ PUBLIC_APP_COLOR=blue
156
+ PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
157
+ PUBLIC_APP_DATA_SHARING=
158
+ PUBLIC_APP_DISCLAIMER=
159
+ ```
160
+
161
+ - `PUBLIC_APP_NAME` The name used as a title throughout the app.
162
+ - `PUBLIC_APP_ASSETS` Is used to find logos & favicons in `static/$PUBLIC_APP_ASSETS`; current options are `chatui` and `huggingchat`.
163
+ - `PUBLIC_APP_COLOR` Can be any of the [tailwind colors](https://tailwindcss.com/docs/customizing-colors#default-color-palette).
164
+ - `PUBLIC_APP_DATA_SHARING` Can be set to 1 to add a toggle in the user settings that lets your users opt in to data sharing with the model's creator.
165
+ - `PUBLIC_APP_DISCLAIMER` If set to 1, we show a disclaimer about generated outputs on login.
166
+
167
+ ### Web Search config
168
+
169
+ You can enable web search through an API by adding `YDC_API_KEY` ([docs.you.com](https://docs.you.com)), `SERPER_API_KEY` ([serper.dev](https://serper.dev/)), `SERPAPI_KEY` ([serpapi.com](https://serpapi.com/)), or `SERPSTACK_API_KEY` ([serpstack.com](https://serpstack.com/)) to your `.env.local`.
170
+
171
+ You can also simply enable local Google web search by setting `USE_LOCAL_WEBSEARCH=true` in your `.env.local`, or specify a SearXNG instance by adding its query URL to `SEARXNG_QUERY_URL`.
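+
+ For example, a minimal `.env.local` entry to enable the local search option would be (assuming you don't need any of the API-based providers above):
+
+ ```env
+ USE_LOCAL_WEBSEARCH=true
+ ```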
172
+
173
+ ### Custom models
174
+
175
+ You can customize the parameters passed to the model or even use a new model by updating the `MODELS` variable in your `.env.local`. The default one can be found in `.env` and looks like this:
176
+
177
+ ```env
178
+ MODELS=`[
179
+ {
180
+ "name": "mistralai/Mistral-7B-Instruct-v0.2",
181
+ "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
182
+ "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
183
+ "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
184
+ "preprompt": "",
185
+ "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
186
+ "parameters": {
187
+ "temperature": 0.3,
188
+ "top_p": 0.95,
189
+ "repetition_penalty": 1.2,
190
+ "top_k": 50,
191
+ "truncate": 3072,
192
+ "max_new_tokens": 1024,
193
+ "stop": ["</s>"]
194
+ },
195
+ "promptExamples": [
196
+ {
197
+ "title": "Write an email from bullet list",
198
+ "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
199
+ }, {
200
+ "title": "Code a snake game",
201
+ "prompt": "Code a basic snake game in python, give explanations for each step."
202
+ }, {
203
+ "title": "Assist in a task",
204
+ "prompt": "How do I make a delicious lemon cheesecake?"
205
+ }
206
+ ]
207
+ }
208
+ ]`
209
+
210
+ ```
211
+
212
+ You can change things like the parameters, or customize the preprompt to better suit your needs. You can also add more models by adding more objects to the array, with different preprompts for example.
213
+
214
+ #### chatPromptTemplate
215
+
216
+ When querying the model for a chat response, the `chatPromptTemplate` template is used. `messages` is an array of chat messages with the format `[{ content: string }, ...]`. To identify whether a message is a user message or an assistant message, the `ifUser` and `ifAssistant` block helpers can be used.
217
+
218
+ The following is the default `chatPromptTemplate`, although newlines and indentation have been added for readability. You can find the prompts used in production for HuggingChat [here](https://github.com/huggingface/chat-ui/blob/main/PROMPTS.md).
219
+
220
+ ```prompt
221
+ {{preprompt}}
222
+ {{#each messages}}
223
+ {{#ifUser}}{{@root.userMessageToken}}{{content}}{{@root.userMessageEndToken}}{{/ifUser}}
224
+ {{#ifAssistant}}{{@root.assistantMessageToken}}{{content}}{{@root.assistantMessageEndToken}}{{/ifAssistant}}
225
+ {{/each}}
226
+ {{assistantMessageToken}}
227
+ ```
228
+
229
+ #### Multi modal model
230
+
231
+ We currently only support IDEFICS as a multimodal model, hosted on TGI. You can enable it by using the following config (if you have a PRO HF API token):
232
+
233
+ ```env
234
+ {
235
+ "name": "HuggingFaceM4/idefics-80b-instruct",
236
+ "multimodal" : true,
237
+ "description": "IDEFICS is the new multimodal model by Hugging Face.",
238
+ "preprompt": "",
239
+ "chatPromptTemplate" : "{{#each messages}}{{#ifUser}}User: {{content}}{{/ifUser}}<end_of_utterance>\nAssistant: {{#ifAssistant}}{{content}}\n{{/ifAssistant}}{{/each}}",
240
+ "parameters": {
241
+ "temperature": 0.1,
242
+ "top_p": 0.95,
243
+ "repetition_penalty": 1.2,
244
+ "top_k": 12,
245
+ "truncate": 1000,
246
+ "max_new_tokens": 1024,
247
+ "stop": ["<end_of_utterance>", "User:", "\nUser:"]
248
+ }
249
+ }
250
+ ```
251
+
252
+ #### Running your own models using a custom endpoint
253
+
254
+ Instead of hitting models on the Hugging Face Inference API, you can run your own models locally.
255
+
256
+ A good option is to hit a [text-generation-inference](https://github.com/huggingface/text-generation-inference) endpoint. This is what is done in the official [Chat UI Spaces Docker template](https://huggingface.co/new-space?template=huggingchat/chat-ui-template) for instance: both this app and a text-generation-inference server run inside the same container.
257
+
258
+ To do this, you can add your own endpoints to the `MODELS` variable in `.env.local`, by adding an `"endpoints"` key for each model in `MODELS`.
259
+
260
+ ```env
261
+ {
262
+ // rest of the model config here
263
+ "endpoints": [{
264
+ "type" : "tgi",
265
+ "url": "https://HOST:PORT",
266
+ }]
267
+ }
268
+ ```
269
+
270
+ If `endpoints` are left unspecified, ChatUI will look for the model on the hosted Hugging Face inference API using the model name.
271
+
272
+ ##### OpenAI API compatible models
273
+
274
+ Chat UI can be used with any API server that supports OpenAI API compatibility, for example [text-generation-webui](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai), [LocalAI](https://github.com/go-skynet/LocalAI), [FastChat](https://github.com/lm-sys/FastChat/blob/main/docs/openai_api.md), [llama-cpp-python](https://github.com/abetlen/llama-cpp-python), and [ialacol](https://github.com/chenhunghan/ialacol).
275
+
276
+ The following example config makes Chat UI work with [text-generation-webui](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai). The `endpoint.baseUrl` is the URL of the OpenAI-API-compatible server; it overrides the base URL used by the OpenAI instance. The `endpoint.completion` field determines which endpoint is used: the default is `chat_completions`, which uses `v1/chat/completions`; change `endpoint.completion` to `completions` to use the `v1/completions` endpoint.
277
+
278
+ ```
279
+ MODELS=`[
280
+ {
281
+ "name": "text-generation-webui",
282
+ "id": "text-generation-webui",
283
+ "parameters": {
284
+ "temperature": 0.9,
285
+ "top_p": 0.95,
286
+ "repetition_penalty": 1.2,
287
+ "top_k": 50,
288
+ "truncate": 1000,
289
+ "max_new_tokens": 1024,
290
+ "stop": []
291
+ },
292
+ "endpoints": [{
293
+ "type" : "openai",
294
+ "baseURL": "http://localhost:8000/v1"
295
+ }]
296
+ }
297
+ ]`
298
+
299
+ ```
300
+
301
+ The `openai` type includes official OpenAI models. You can add, for example, GPT-4 or GPT-3.5 as an `openai` model:
302
+
303
+ ```
304
+ OPENAI_API_KEY=#your openai api key here
305
+ MODELS=`[{
306
+ "name": "gpt-4",
307
+ "displayName": "GPT 4",
308
+ "endpoints" : [{
309
+ "type": "openai"
310
+ }]
311
+ },
312
+ {
313
+ "name": "gpt-3.5-turbo",
314
+ "displayName": "GPT 3.5 Turbo",
315
+ "endpoints" : [{
316
+ "type": "openai"
317
+ }]
318
+ }]`
319
+ ```
320
+
321
+ You may also consume any model provider that exposes a compatible OpenAI API endpoint. For example, you may self-host the [Portkey](https://github.com/Portkey-AI/gateway) gateway and experiment with Claude or GPTs offered by Azure OpenAI. Example for Claude from Anthropic:
322
+
323
+ ```
324
+ MODELS=`[{
325
+ "name": "claude-2.1",
326
+ "displayName": "Claude 2.1",
327
+ "description": "Anthropic has been founded by former OpenAI researchers...",
328
+ "parameters": {
329
+ "temperature": 0.5,
330
+ "max_new_tokens": 4096,
331
+ },
332
+ "endpoints": [
333
+ {
334
+ "type": "openai",
335
+ "baseURL": "https://gateway.example.com/v1",
336
+ "defaultHeaders": {
337
+ "x-portkey-config": '{"provider":"anthropic","api_key":"sk-ant-abc...xyz"}'
338
+ }
339
+ }
340
+ ]
341
+ }]`
342
+ ```
343
+
344
+ Example for GPT 4 deployed on Azure OpenAI:
345
+
346
+ ```
347
+ MODELS=`[{
348
+ "id": "gpt-4-1106-preview",
349
+ "name": "gpt-4-1106-preview",
350
+ "displayName": "gpt-4-1106-preview",
351
+ "parameters": {
352
+ "temperature": 0.5,
353
+ "max_new_tokens": 4096,
354
+ },
355
+ "endpoints": [
356
+ {
357
+ "type": "openai",
358
+ "baseURL": "https://{resource-name}.openai.azure.com/openai/deployments/{deployment-id}",
359
+ "defaultHeaders": {
360
+ "api-key": "{api-key}"
361
+ },
362
+ "defaultQuery": {
363
+ "api-version": "2023-05-15"
364
+ }
365
+ }
366
+ ]
367
+ }]`
368
+ ```
369
+
370
+ Or try Mistral from [Deepinfra](https://deepinfra.com/mistralai/Mistral-7B-Instruct-v0.1/api?example=openai-http):
371
+
372
+ > Note: `apiKey` can be set either per endpoint or globally using the `OPENAI_API_KEY` variable.
373
+
374
+ ```
375
+ MODELS=`[{
376
+ "name": "mistral-7b",
377
+ "displayName": "Mistral 7B",
378
+ "description": "A 7B dense Transformer, fast-deployed and easily customisable. Small, yet powerful for a variety of use cases. Supports English and code, and a 8k context window.",
379
+ "parameters": {
380
+ "temperature": 0.5,
381
+ "max_new_tokens": 4096,
382
+ },
383
+ "endpoints": [
384
+ {
385
+ "type": "openai",
386
+ "baseURL": "https://api.deepinfra.com/v1/openai",
387
+ "apiKey": "abc...xyz"
388
+ }
389
+ ]
390
+ }]`
391
+ ```
392
+
393
+ ##### Llama.cpp API server
394
+
395
+ chat-ui also supports the llama.cpp API server directly without the need for an adapter. You can do this using the `llamacpp` endpoint type.
396
+
397
+ If you want to run chat-ui with llama.cpp, you can do the following, using Zephyr as an example model:
398
+
399
+ 1. Get [the weights](https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/tree/main) from the hub
400
+ 2. Run the server with the following command: `./server -m models/zephyr-7b-beta.Q4_K_M.gguf -c 2048 -np 3`
401
+ 3. Add the following to your `.env.local`:
402
+
403
+ ```env
404
+ MODELS=`[
405
+ {
406
+ "name": "Local Zephyr",
407
+ "chatPromptTemplate": "<|system|>\n{{preprompt}}</s>\n{{#each messages}}{{#ifUser}}<|user|>\n{{content}}</s>\n<|assistant|>\n{{/ifUser}}{{#ifAssistant}}{{content}}</s>\n{{/ifAssistant}}{{/each}}",
408
+ "parameters": {
409
+ "temperature": 0.1,
410
+ "top_p": 0.95,
411
+ "repetition_penalty": 1.2,
412
+ "top_k": 50,
413
+ "truncate": 1000,
414
+ "max_new_tokens": 2048,
415
+ "stop": ["</s>"]
416
+ },
417
+ "endpoints": [
418
+ {
419
+ "url": "http://127.0.0.1:8080",
420
+ "type": "llamacpp"
421
+ }
422
+ ]
423
+ }
424
+ ]`
425
+ ```
426
+
427
+ Start chat-ui with `npm run dev` and you should be able to chat with Zephyr locally.
428
+
429
+ #### Ollama
430
+
431
+ We also support the Ollama inference server. Spin up a model with
432
+
433
+ ```cli
434
+ ollama run mistral
435
+ ```
436
+
437
+ Then specify the endpoints like so:
438
+
439
+ ```env
440
+ MODELS=`[
441
+ {
442
+ "name": "Ollama Mistral",
443
+ "chatPromptTemplate": "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s> {{/ifAssistant}}{{/each}}",
444
+ "parameters": {
445
+ "temperature": 0.1,
446
+ "top_p": 0.95,
447
+ "repetition_penalty": 1.2,
448
+ "top_k": 50,
449
+ "truncate": 3072,
450
+ "max_new_tokens": 1024,
451
+ "stop": ["</s>"]
452
+ },
453
+ "endpoints": [
454
+ {
455
+ "type": "ollama",
456
+ "url" : "http://127.0.0.1:11434",
457
+ "ollamaName" : "mistral"
458
+ }
459
+ ]
460
+ }
461
+ ]`
462
+ ```
463
+
464
+ #### Anthropic
465
+
466
+ We also support Anthropic models through the official SDK. You may provide your API key via the `ANTHROPIC_API_KEY` env variable, or, alternatively, through `endpoints.apiKey`, as in the following example.
467
+
468
+ ```
469
+ MODELS=`[
470
+ {
471
+ "name": "claude-3-sonnet-20240229",
472
+ "displayName": "Claude 3 Sonnet",
473
+ "description": "Ideal balance of intelligence and speed",
474
+ "parameters": {
475
+ "max_new_tokens": 4096,
476
+ },
477
+ "endpoints": [
478
+ {
479
+ "type": "anthropic",
480
+ // optionals
481
+ "apiKey": "sk-ant-...",
482
+ "baseURL": "https://api.anthropic.com",
483
+ defaultHeaders: {},
484
+ defaultQuery: {}
485
+ }
486
+ ]
487
+ },
488
+ {
489
+ "name": "claude-3-opus-20240229",
490
+ "displayName": "Claude 3 Opus",
491
+ "description": "Most powerful model for highly complex tasks",
492
+ "parameters": {
493
+ "max_new_tokens": 4096
494
+ },
495
+ "endpoints": [
496
+ {
497
+ "type": "anthropic",
498
+ // optionals
499
+ "apiKey": "sk-ant-...",
500
+ "baseURL": "https://api.anthropic.com",
501
+ defaultHeaders: {},
502
+ defaultQuery: {}
503
+ }
504
+ ]
505
+ }
506
+ ]`
507
+ ```
508
+
509
+ #### Amazon
510
+
511
+ You can also specify your Amazon SageMaker instance as an endpoint for chat-ui. The config goes like this:
512
+
513
+ ```env
514
+ "endpoints": [
515
+ {
516
+ "type" : "aws",
517
+ "service" : "sagemaker"
518
+ "url": "",
519
+ "accessKey": "",
520
+ "secretKey" : "",
521
+ "sessionToken": "",
522
+ "region": "",
523
+
524
+ "weight": 1
525
+ }
526
+ ]
527
+ ```
528
+
529
+ You can also set `"service" : "lambda"` to use a lambda instance.
530
+
531
+ You can get the `accessKey` and `secretKey` from your AWS user, under programmatic access.
532
+
533
+ #### Cloudflare Workers AI
534
+
535
+ You can also use Cloudflare Workers AI to run your own models with serverless inference.
536
+
537
+ You will need to have a Cloudflare account, then get your [account ID](https://developers.cloudflare.com/fundamentals/setup/find-account-and-zone-ids/) as well as your [API token](https://developers.cloudflare.com/workers-ai/get-started/rest-api/#1-get-an-api-token) for Workers AI.
538
+
539
+ You can either specify them directly in your `.env.local` using the `CLOUDFLARE_ACCOUNT_ID` and `CLOUDFLARE_API_TOKEN` variables, or you can set them directly in the endpoint config.
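+
+ For example, to set them globally in `.env.local` (the values below are placeholders for your own account ID and token):
+
+ ```env
+ CLOUDFLARE_ACCOUNT_ID=your-account-id
+ CLOUDFLARE_API_TOKEN=your-api-token
+ ```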
540
+
541
+ You can find the list of models available on Cloudflare [here](https://developers.cloudflare.com/workers-ai/models/#text-generation).
542
+
543
+ ```env
544
+ {
545
+ "name" : "nousresearch/hermes-2-pro-mistral-7b",
546
+ "tokenizer": "nousresearch/hermes-2-pro-mistral-7b",
547
+ "parameters": {
548
+ "stop": ["<|im_end|>"]
549
+ },
550
+ "endpoints" : [
551
+ {
552
+ "type" : "cloudflare"
553
+ <!-- optionally specify these
554
+ "accountId": "your-account-id",
555
+ "authToken": "your-api-token"
556
+ -->
557
+ }
558
+ ]
559
+ }
560
+ ```
561
+
562
+ > [!NOTE]
563
+ > Cloudflare Workers AI currently does not support custom sampling parameters like temperature, top_p, etc.
564
+
565
+ #### Cohere
566
+
567
+ You can also use Cohere to run their models directly from chat-ui. You will need to have a Cohere account, then get your [API token](https://dashboard.cohere.com/api-keys). You can either specify it directly in your `.env.local` using the `COHERE_API_TOKEN` variable, or you can set it in the endpoint config.
568
+
569
+ Here is an example of a Cohere model config. You can set which model you want to use by setting the `id` field to the model name.
570
+
571
+ ```env
572
+ {
573
+ "name" : "CohereForAI/c4ai-command-r-v01",
574
+ "id": "command-r",
575
+ "description": "C4AI Command-R is a research release of a 35 billion parameter highly performant generative model",
576
+ "endpoints": [
577
+ {
578
+ "type": "cohere",
579
+ <!-- optionally specify these, or use COHERE_API_TOKEN
580
+ "apiKey": "your-api-token"
581
+ -->
582
+ }
583
+ ]
584
+ }
585
+ ```
586
+
587
+ ##### Google Vertex models
588
+
589
+ Chat UI can connect to the Google Vertex API endpoints ([list of supported models](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models)).
590
+
591
+ To enable:
592
+
593
+ 1. [Select](https://console.cloud.google.com/project) or [create](https://cloud.google.com/resource-manager/docs/creating-managing-projects#creating_a_project) a Google Cloud project.
594
+ 1. [Enable billing for your project](https://cloud.google.com/billing/docs/how-to/modify-project).
595
+ 1. [Enable the Vertex AI API](https://console.cloud.google.com/flows/enableapi?apiid=aiplatform.googleapis.com).
596
+ 1. [Set up authentication with a service account](https://cloud.google.com/docs/authentication/getting-started)
597
+ so you can access the API from your local workstation.
598
+
599
+ The service account credentials file can be imported as an environment variable:
600
+
601
+ ```env
602
+ GOOGLE_APPLICATION_CREDENTIALS = clientid.json
603
+ ```
604
+
605
+ Make sure your Docker container has access to the file and that the variable is correctly set.
606
+ Afterwards, Google Vertex endpoints can be configured as follows:
607
+
608
+ ```
609
+ MODELS=`[
610
+ //...
611
+ {
612
+ "name": "gemini-1.5-pro",
613
+ "displayName": "Vertex Gemini Pro 1.5",
614
+ "endpoints" : [{
615
+ "type": "vertex",
616
+ "project": "abc-xyz",
617
+ "location": "europe-west3",
618
+ "model": "gemini-1.5-pro-preview-0409", // model-name
619
+
620
+ // Optional
621
+ "safetyThreshold": "BLOCK_MEDIUM_AND_ABOVE",
622
+ "apiEndpoint": "", // alternative api endpoint url
623
+ }]
624
+ },
625
+ ]`
626
+
627
+ ```
628
+
629
+ ##### LangServe
630
+
631
+ LangChain applications that are deployed using LangServe can be called with the following config:
632
+
633
+ ```
634
+ MODELS=`[
635
+ //...
636
+ {
637
+ "name": "summarization-chain", //model-name
638
+ "endpoints" : [{
639
+ "type": "langserve",
640
+ "url" : "http://127.0.0.1:8100",
641
+ }]
642
+ },
643
+ ]`
644
+
645
+ ```
646
+
647
+ ### Custom endpoint authorization
648
+
649
+ #### Basic and Bearer
650
+
651
+ Custom endpoints may require authorization, depending on how you configure them. Authentication will usually be set either with `Basic` or `Bearer`.
652
+
653
+ For `Basic` we will need to generate a base64 encoding of the username and password.
654
+
655
+ `echo -n "USER:PASS" | base64`
656
+
657
+ > VVNFUjpQQVNT
658
+
659
+ For `Bearer` you can use a token, which can be grabbed from [here](https://huggingface.co/settings/tokens).
660
+
661
+ You can then add the generated information and the `authorization` parameter to your `.env.local`.
662
+
663
+ ```env
664
+ "endpoints": [
665
+ {
666
+ "url": "https://HOST:PORT",
667
+ "authorization": "Basic VVNFUjpQQVNT",
668
+ }
669
+ ]
670
+ ```
671
+
672
+ Please note that if `HF_TOKEN` is also set (i.e., not empty), it will take precedence.
673
+
674
+ #### Models hosted on multiple custom endpoints
675
+
676
+ If the model being hosted will be available on multiple servers/instances, add the `weight` parameter to your `.env.local`. The `weight` will be used to determine the probability of requesting a particular endpoint.
677
+
678
+ ```env
679
+ "endpoints": [
680
+ {
681
+ "url": "https://HOST:PORT",
682
+ "weight": 1
683
+ },
684
+ {
685
+ "url": "https://HOST:PORT",
686
+ "weight": 2
687
+ }
688
+ ...
689
+ ]
690
+ ```
691
+
692
+ #### Client Certificate Authentication (mTLS)
693
+
694
+ Custom endpoints may require client certificate authentication, depending on how you configure them. To enable mTLS between Chat UI and your custom endpoint, you will need to set the `USE_CLIENT_CERTIFICATE` to `true`, and add the `CERT_PATH` and `KEY_PATH` parameters to your `.env.local`. These parameters should point to the location of the certificate and key files on your local machine. The certificate and key files should be in PEM format. The key file can be encrypted with a passphrase, in which case you will also need to add the `CLIENT_KEY_PASSWORD` parameter to your `.env.local`.
695
+
696
+ If you're using a certificate signed by a private CA, you will also need to add the `CA_PATH` parameter to your `.env.local`. This parameter should point to the location of the CA certificate file on your local machine.
697
+
698
+ If you're using a self-signed certificate, e.g. for testing or development purposes, you can set the `REJECT_UNAUTHORIZED` parameter to `false` in your `.env.local`. This will disable certificate validation, and allow Chat UI to connect to your custom endpoint.
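+
+ As a rough sketch, an mTLS setup in `.env.local` could look like this (all paths and the passphrase are placeholders; only include the optional lines you actually need):
+
+ ```env
+ USE_CLIENT_CERTIFICATE=true
+ CERT_PATH=/path/to/client-cert.pem
+ KEY_PATH=/path/to/client-key.pem
+ # only needed if the key is encrypted with a passphrase
+ CLIENT_KEY_PASSWORD=your-passphrase
+ # only needed for a certificate signed by a private CA
+ CA_PATH=/path/to/ca-cert.pem
+ # only for self-signed certificates in testing/development
+ REJECT_UNAUTHORIZED=false
+ ```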
699
+
700
+ #### Specific Embedding Model
701
+
702
+ A model can use any of the embedding models defined in `.env.local` (currently used when web searching).
703
+ By default it will use the first embedding model, but this can be changed with the `embeddingModel` field:
704
+
705
+ ```env
706
+ TEXT_EMBEDDING_MODELS = `[
707
+ {
708
+ "name": "Xenova/gte-small",
709
+ "chunkCharLength": 512,
710
+ "endpoints": [
711
+ {"type": "transformersjs"}
712
+ ]
713
+ },
714
+ {
715
+ "name": "intfloat/e5-base-v2",
716
+ "chunkCharLength": 768,
717
+ "endpoints": [
718
+ {"type": "tei", "url": "http://127.0.0.1:8080/", "authorization": "Basic VVNFUjpQQVNT"},
719
+ {"type": "tei", "url": "http://127.0.0.1:8081/"}
720
+ ]
721
+ }
722
+ ]`
723
+
724
+ MODELS=`[
725
+ {
726
+ "name": "Ollama Mistral",
727
+ "chatPromptTemplate": "...",
728
+ "embeddingModel": "intfloat/e5-base-v2"
729
+ "parameters": {
730
+ ...
731
+ },
732
+ "endpoints": [
733
+ ...
734
+ ]
735
+ }
736
+ ]`
737
+ ```
738
+
739
+ ## Common issues
740
+
741
+ ### 403: You don't have access to this conversation
742
+
743
+ Most likely you are running chat-ui over HTTP. The recommended option is to set up something like NGINX to handle HTTPS and proxy the requests to chat-ui. If you really need to run over HTTP, you can add `ALLOW_INSECURE_COOKIES=true` to your `.env.local`.
744
+
745
+ Make sure to set your `PUBLIC_ORIGIN` in your `.env.local` to the correct URL as well.
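+
+ As a sketch, if you really do need to run over plain HTTP, the relevant `.env.local` entries would be (the origin below is a placeholder for your own URL):
+
+ ```env
+ ALLOW_INSECURE_COOKIES=true
+ PUBLIC_ORIGIN=http://localhost:5173
+ ```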
746
+
747
+ ## Deploying to a HF Space
748
+
749
+ Create a `DOTENV_LOCAL` secret in your HF Space with the content of your `.env.local`, and it will be picked up automatically when the Space runs.
750
+
751
+ ## Building
752
+
753
+ To create a production version of your app:
754
+
755
+ ```bash
756
+ npm run build
757
+ ```
758
+
759
+ You can preview the production build with `npm run preview`.
760
+
761
+ > To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
762
+
763
+ ## Config changes for HuggingChat
764
+
765
+ The config file for HuggingChat is stored in the `.env.template` file at the root of the repository. It is the single source of truth that is used to generate the actual `.env.local` file using our CI/CD pipeline. See [updateProdEnv](https://github.com/huggingface/chat-ui/blob/cdb33a9583f5339ade724db615347393ef48f5cd/scripts/updateProdEnv.ts) for more details.
766
+
767
+ > [!TIP]
768
+ > If you want to make changes to the model config used in production for HuggingChat, you should do so against `.env.template`.
769
+
770
+ We currently use the following secrets for deploying HuggingChat in addition to the `.env.template` above:
771
+
772
+ - `MONGODB_URL`
773
+ - `HF_TOKEN`
774
+ - `OPENID_CONFIG`
775
+ - `SERPER_API_KEY`
776
+
777
+ ### Running a copy of HuggingChat locally
778
+
779
+ If you want to run an exact copy of HuggingChat locally, you will need to do the following first:
780
+
781
+ 1. Create an [OAuth App on the hub](https://huggingface.co/settings/applications/new) with `openid profile email` permissions. Make sure to set the callback URL to something like `http://localhost:5173/chat/login/callback` which matches the right path for your local instance.
782
+ 2. Create a [HF Token](https://huggingface.co/settings/tokens) with your Hugging Face account. You will need a Pro account to be able to access some of the larger models available through HuggingChat.
783
+ 3. Create a free account with [serper.dev](https://serper.dev/) (you will get 2500 free search queries)
784
+ 4. Run an instance of MongoDB, however you want (local or remote).
785
+
786
+ You can then create a new `.env.SECRET_CONFIG` file with the following content:
787
+
788
+ ```env
789
+ MONGODB_URL=<link to your mongo DB from step 4>
790
+ HF_TOKEN=<your HF token from step 2>
791
+ OPENID_CONFIG=`{
792
+ PROVIDER_URL: "https://huggingface.co",
793
+ CLIENT_ID: "<your client ID from step 1>",
794
+ CLIENT_SECRET: "<your client secret from step 1>",
795
+ }`
796
+ SERPER_API_KEY=<your serper API key from step 3>
797
+ MESSAGES_BEFORE_LOGIN=<can be any numerical value, or set to 0 to require login>
798
+ ```
799
+
800
+ You can then run `npm run updateLocalEnv` in the root of chat-ui. This will create a `.env.local` file which combines the `.env.template` and the `.env.SECRET_CONFIG` file. You can then run `npm run dev` to start your local instance of HuggingChat.
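+
+ That is, from the root of the repository:
+
+ ```bash
+ npm run updateLocalEnv
+ npm run dev
+ ```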
801
+
802
+ ### Populate database
803
+
804
+ > [!WARNING]
805
+ > The `MONGODB_URL` used for this script will be fetched from `.env.local`. Make sure it's correct! The command runs directly on the database.
806
+
807
+ You can populate the database with fake data using the `populate` script:
808
+
809
+ ```bash
810
+ npm run populate <flags here>
811
+ ```
812
+
813
+ At least one flag must be specified; the following flags are available:
814
+
815
+ - `reset` - resets the database
816
+ - `all` - populates all tables
817
+ - `users` - populates the users table
818
+ - `settings` - populates the settings table for existing users
819
+ - `assistants` - populates the assistants table for existing users
820
+ - `conversations` - populates the conversations table for existing users
821
+
822
+ For example, you could use it like so:
823
+
824
+ ```bash
825
+ npm run populate reset
826
+ ```
827
+
828
+ to clear out the database. Then log in to the app to create your user and run the following command:
829
+
830
+ ```bash
831
+ npm run populate users settings assistants conversations
832
+ ```
833
+
834
+ to populate the database with fake data, including fake conversations and assistants for your user.
_app/env.js DELETED
@@ -1 +0,0 @@
1
- export const env={}
 
 
_app/immutable/assets/0.DViICDYp.css DELETED
@@ -1 +0,0 @@
1
- *,:before,:after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}:before,:after{--tw-content: ""}html,:host{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]{display:none}:root,[data-theme]{background-color:hsl(var(--b1) / var(--tw-bg-opacity, 1));color:hsl(var(--bc) / var(--tw-text-opacity, 1))}html{-webkit-tap-highlight-color:transparent}:root{color-scheme:light;--pf: 259 94% 44%;--sf: 314 100% 40%;--af: 174 75% 39%;--nf: 214 20% 14%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 259 94% 51%;--pc: 259 96% 91%;--s: 314 100% 47%;--sc: 314 100% 91%;--a: 174 75% 46%;--ac: 174 75% 11%;--n: 214 20% 21%;--nc: 212 19% 87%;--b1: 0 0% 100%;--b2: 0 0% 95%;--b3: 180 2% 90%;--bc: 215 28% 17%}@media (prefers-color-scheme: dark){:root{color-scheme:dark;--pf: 262 80% 43%;--sf: 316 70% 43%;--af: 175 70% 34%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 
11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 262 80% 50%;--pc: 0 0% 100%;--s: 316 70% 50%;--sc: 0 0% 100%;--a: 175 70% 41%;--ac: 0 0% 100%;--n: 213 18% 20%;--nf: 212 17% 17%;--nc: 220 13% 69%;--b1: 212 18% 14%;--b2: 213 18% 12%;--b3: 213 18% 10%;--bc: 220 13% 69%}}[data-theme=light]{color-scheme:light;--pf: 259 94% 44%;--sf: 314 100% 40%;--af: 174 75% 39%;--nf: 214 20% 14%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 259 94% 51%;--pc: 259 96% 91%;--s: 314 100% 47%;--sc: 314 100% 91%;--a: 174 75% 46%;--ac: 174 75% 11%;--n: 214 20% 21%;--nc: 212 19% 87%;--b1: 0 0% 100%;--b2: 0 0% 95%;--b3: 180 2% 90%;--bc: 215 28% 17%}[data-theme=dark]{color-scheme:dark;--pf: 262 80% 43%;--sf: 316 70% 43%;--af: 175 70% 34%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 262 80% 50%;--pc: 0 0% 100%;--s: 316 70% 50%;--sc: 0 0% 100%;--a: 175 70% 41%;--ac: 0 0% 100%;--n: 213 18% 20%;--nf: 212 17% 17%;--nc: 220 13% 69%;--b1: 212 18% 14%;--b2: 213 18% 12%;--b3: 213 18% 10%;--bc: 220 13% 69%}*,:before,:after{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 
#0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }@media (hover:hover){.table tr.hover:hover,.table tr.hover:nth-child(2n):hover{--tw-bg-opacity: 1;background-color:hsl(var(--b2) / var(--tw-bg-opacity))}.table-zebra tr.hover:hover,.table-zebra tr.hover:nth-child(2n):hover{--tw-bg-opacity: 1;background-color:hsl(var(--b3) / var(--tw-bg-opacity))}}.link{cursor:pointer;text-decoration-line:underline}@keyframes button-pop{0%{transform:scale(var(--btn-focus-scale, .98))}40%{transform:scale(1.02)}to{transform:scale(1)}}@keyframes checkmark{0%{background-position-y:5px}50%{background-position-y:-2px}to{background-position-y:0}}.link:focus{outline:2px solid transparent;outline-offset:2px}.link:focus-visible{outline:2px solid currentColor;outline-offset:2px}.mockup-phone .display{overflow:hidden;border-radius:40px;margin-top:-25px}@keyframes modal-pop{0%{opacity:0}}@keyframes progress-loading{50%{background-position-x:-115%}}@keyframes radiomark{0%{box-shadow:0 0 0 12px hsl(var(--b1)) inset,0 0 0 12px hsl(var(--b1)) inset}50%{box-shadow:0 0 0 3px hsl(var(--b1)) inset,0 0 0 3px hsl(var(--b1)) inset}to{box-shadow:0 0 0 4px hsl(var(--b1)) inset,0 0 0 4px hsl(var(--b1)) inset}}@keyframes rating-pop{0%{transform:translateY(-.125em)}40%{transform:translateY(-.125em)}to{transform:translateY(0)}}@keyframes toast-pop{0%{transform:scale(.9);opacity:0}to{transform:scale(1);opacity:1}}.mx-4{margin-left:1rem;margin-right:1rem}.mx-auto{margin-left:auto;margin-right:auto}.mb-12{margin-bottom:3rem}.inline{display:inline}.flex{display:flex}.contents{display:contents}.w-10{width:2.5rem}.w-fit{width:-moz-fit-content;width:fit-content}.max-w-4xl{max-width:56rem}.items-center{align-items:center}.justify-center{justify-content:center}.gap-2{gap:.5rem}.rounded-lg{border-radius:.5rem}.border{border-width:1px}.border-gray-200{--tw-border-opacity: 1;border-color:rgb(229 231 235 / var(--tw-border-opacity))}.bg-gray-900{--tw-bg-opacity: 1;background-color:rgb(17 24 39 / var(--tw-bg-opacity))}.p-4{padding:1rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.pt-10{padding-top:2.5rem}.text-center{text-align:center}.text-3xl{font-size:1.875rem;line-height:2.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.font-bold{font-weight:700}.leading-snug{line-height:1.375}.\!text-white{--tw-text-opacity: 1 !important;color:rgb(255 255 255 / var(--tw-text-opacity))!important}.text-gray-300{--tw-text-opacity: 1;color:rgb(209 213 219 / var(--tw-text-opacity))}.text-gray-400{--tw-text-opacity: 1;color:rgb(156 163 175 / var(--tw-text-opacity))}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.hover\:-translate-y-0\.5:hover{--tw-translate-y: -.125rem;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.hover\:border-gray-300:hover{--tw-border-opacity: 1;border-color:rgb(209 213 219 / var(--tw-border-opacity))}.hover\:bg-gray-800:hover{--tw-bg-opacity: 
1;background-color:rgb(31 41 55 / var(--tw-bg-opacity))}
 
 
_app/immutable/assets/2.Dl1cvM0g.css DELETED
@@ -1 +0,0 @@
1
- .link.svelte-1hc9mw2{--tw-text-opacity:1;color:rgb(156 163 175 / var(--tw-text-opacity))}.link.svelte-1hc9mw2:hover{--tw-text-opacity:1;color:rgb(209 213 219 / var(--tw-text-opacity))}h3.svelte-1hc9mw2{text-align:center;font-size:1.875rem;line-height:2.25rem;font-weight:700;--tw-text-opacity:1;color:rgb(255 255 255 / var(--tw-text-opacity))}p.svelte-1hc9mw2{padding-top:.75rem;padding-bottom:.75rem;text-align:center;font-size:1.25rem;line-height:1.75rem;line-height:1.375;--tw-text-opacity:1;color:rgb(229 231 235 / var(--tw-text-opacity))}
 
 
_app/immutable/assets/_layout.DViICDYp.css DELETED
@@ -1 +0,0 @@
1
- *,:before,:after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}:before,:after{--tw-content: ""}html,:host{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]{display:none}:root,[data-theme]{background-color:hsl(var(--b1) / var(--tw-bg-opacity, 1));color:hsl(var(--bc) / var(--tw-text-opacity, 1))}html{-webkit-tap-highlight-color:transparent}:root{color-scheme:light;--pf: 259 94% 44%;--sf: 314 100% 40%;--af: 174 75% 39%;--nf: 214 20% 14%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 259 94% 51%;--pc: 259 96% 91%;--s: 314 100% 47%;--sc: 314 100% 91%;--a: 174 75% 46%;--ac: 174 75% 11%;--n: 214 20% 21%;--nc: 212 19% 87%;--b1: 0 0% 100%;--b2: 0 0% 95%;--b3: 180 2% 90%;--bc: 215 28% 17%}@media (prefers-color-scheme: dark){:root{color-scheme:dark;--pf: 262 80% 43%;--sf: 316 70% 43%;--af: 175 70% 34%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 
11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 262 80% 50%;--pc: 0 0% 100%;--s: 316 70% 50%;--sc: 0 0% 100%;--a: 175 70% 41%;--ac: 0 0% 100%;--n: 213 18% 20%;--nf: 212 17% 17%;--nc: 220 13% 69%;--b1: 212 18% 14%;--b2: 213 18% 12%;--b3: 213 18% 10%;--bc: 220 13% 69%}}[data-theme=light]{color-scheme:light;--pf: 259 94% 44%;--sf: 314 100% 40%;--af: 174 75% 39%;--nf: 214 20% 14%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 259 94% 51%;--pc: 259 96% 91%;--s: 314 100% 47%;--sc: 314 100% 91%;--a: 174 75% 46%;--ac: 174 75% 11%;--n: 214 20% 21%;--nc: 212 19% 87%;--b1: 0 0% 100%;--b2: 0 0% 95%;--b3: 180 2% 90%;--bc: 215 28% 17%}[data-theme=dark]{color-scheme:dark;--pf: 262 80% 43%;--sf: 316 70% 43%;--af: 175 70% 34%;--in: 198 93% 60%;--su: 158 64% 52%;--wa: 43 96% 56%;--er: 0 91% 71%;--inc: 198 100% 12%;--suc: 158 100% 10%;--wac: 43 100% 11%;--erc: 0 100% 14%;--rounded-box: 1rem;--rounded-btn: .5rem;--rounded-badge: 1.9rem;--animation-btn: .25s;--animation-input: .2s;--btn-text-case: uppercase;--btn-focus-scale: .95;--border-btn: 1px;--tab-border: 1px;--tab-radius: .5rem;--p: 262 80% 50%;--pc: 0 0% 100%;--s: 316 70% 50%;--sc: 0 0% 100%;--a: 175 70% 41%;--ac: 0 0% 100%;--n: 213 18% 20%;--nf: 212 17% 17%;--nc: 220 13% 69%;--b1: 212 18% 14%;--b2: 213 18% 12%;--b3: 213 18% 10%;--bc: 220 13% 69%}*,:before,:after{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 
#0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }@media (hover:hover){.table tr.hover:hover,.table tr.hover:nth-child(2n):hover{--tw-bg-opacity: 1;background-color:hsl(var(--b2) / var(--tw-bg-opacity))}.table-zebra tr.hover:hover,.table-zebra tr.hover:nth-child(2n):hover{--tw-bg-opacity: 1;background-color:hsl(var(--b3) / var(--tw-bg-opacity))}}.link{cursor:pointer;text-decoration-line:underline}@keyframes button-pop{0%{transform:scale(var(--btn-focus-scale, .98))}40%{transform:scale(1.02)}to{transform:scale(1)}}@keyframes checkmark{0%{background-position-y:5px}50%{background-position-y:-2px}to{background-position-y:0}}.link:focus{outline:2px solid transparent;outline-offset:2px}.link:focus-visible{outline:2px solid currentColor;outline-offset:2px}.mockup-phone .display{overflow:hidden;border-radius:40px;margin-top:-25px}@keyframes modal-pop{0%{opacity:0}}@keyframes progress-loading{50%{background-position-x:-115%}}@keyframes radiomark{0%{box-shadow:0 0 0 12px hsl(var(--b1)) inset,0 0 0 12px hsl(var(--b1)) inset}50%{box-shadow:0 0 0 3px hsl(var(--b1)) inset,0 0 0 3px hsl(var(--b1)) inset}to{box-shadow:0 0 0 4px hsl(var(--b1)) inset,0 0 0 4px hsl(var(--b1)) inset}}@keyframes rating-pop{0%{transform:translateY(-.125em)}40%{transform:translateY(-.125em)}to{transform:translateY(0)}}@keyframes toast-pop{0%{transform:scale(.9);opacity:0}to{transform:scale(1);opacity:1}}.mx-4{margin-left:1rem;margin-right:1rem}.mx-auto{margin-left:auto;margin-right:auto}.mb-12{margin-bottom:3rem}.inline{display:inline}.flex{display:flex}.contents{display:contents}.w-10{width:2.5rem}.w-fit{width:-moz-fit-content;width:fit-content}.max-w-4xl{max-width:56rem}.items-center{align-items:center}.justify-center{justify-content:center}.gap-2{gap:.5rem}.rounded-lg{border-radius:.5rem}.border{border-width:1px}.border-gray-200{--tw-border-opacity: 1;border-color:rgb(229 231 235 / var(--tw-border-opacity))}.bg-gray-900{--tw-bg-opacity: 1;background-color:rgb(17 24 39 / var(--tw-bg-opacity))}.p-4{padding:1rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.pt-10{padding-top:2.5rem}.text-center{text-align:center}.text-3xl{font-size:1.875rem;line-height:2.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.font-bold{font-weight:700}.leading-snug{line-height:1.375}.\!text-white{--tw-text-opacity: 1 !important;color:rgb(255 255 255 / var(--tw-text-opacity))!important}.text-gray-300{--tw-text-opacity: 1;color:rgb(209 213 219 / var(--tw-text-opacity))}.text-gray-400{--tw-text-opacity: 1;color:rgb(156 163 175 / var(--tw-text-opacity))}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.hover\:-translate-y-0\.5:hover{--tw-translate-y: -.125rem;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.hover\:border-gray-300:hover{--tw-border-opacity: 1;border-color:rgb(209 213 219 / var(--tw-border-opacity))}.hover\:bg-gray-800:hover{--tw-bg-opacity: 
1;background-color:rgb(31 41 55 / var(--tw-bg-opacity))}
 
 
_app/immutable/assets/_page.Dl1cvM0g.css DELETED
@@ -1 +0,0 @@
1
- .link.svelte-1hc9mw2{--tw-text-opacity:1;color:rgb(156 163 175 / var(--tw-text-opacity))}.link.svelte-1hc9mw2:hover{--tw-text-opacity:1;color:rgb(209 213 219 / var(--tw-text-opacity))}h3.svelte-1hc9mw2{text-align:center;font-size:1.875rem;line-height:2.25rem;font-weight:700;--tw-text-opacity:1;color:rgb(255 255 255 / var(--tw-text-opacity))}p.svelte-1hc9mw2{padding-top:.75rem;padding-bottom:.75rem;text-align:center;font-size:1.25rem;line-height:1.75rem;line-height:1.375;--tw-text-opacity:1;color:rgb(229 231 235 / var(--tw-text-opacity))}
 
 
_app/immutable/chunks/entry.CsquK5o6.js DELETED
@@ -1,3 +0,0 @@
1
- import{n as lt,s as le,t as fe}from"./scheduler.CtbWrGNo.js";new URL("sveltekit-internal://");function ue(t,n){return t==="/"||n==="ignore"?t:n==="never"?t.endsWith("/")?t.slice(0,-1):t:n==="always"&&!t.endsWith("/")?t+"/":t}function de(t){return t.split("%25").map(decodeURI).join("%25")}function he(t){for(const n in t)t[n]=decodeURIComponent(t[n]);return t}function ft({href:t}){return t.split("#")[0]}const pe=["href","pathname","search","toString","toJSON"];function ge(t,n,e){const r=new URL(t);Object.defineProperty(r,"searchParams",{value:new Proxy(r.searchParams,{get(a,o){if(o==="get"||o==="getAll"||o==="has")return s=>(e(s),a[o](s));n();const i=Reflect.get(a,o);return typeof i=="function"?i.bind(a):i}}),enumerable:!0,configurable:!0});for(const a of pe)Object.defineProperty(r,a,{get(){return n(),t[a]},enumerable:!0,configurable:!0});return r}const me="/__data.json",_e=".html__data.json";function ye(t){return t.endsWith(".html")?t.replace(/\.html$/,_e):t.replace(/\/$/,"")+me}function we(...t){let n=5381;for(const e of t)if(typeof e=="string"){let r=e.length;for(;r;)n=n*33^e.charCodeAt(--r)}else if(ArrayBuffer.isView(e)){const r=new Uint8Array(e.buffer,e.byteOffset,e.byteLength);let a=r.length;for(;a;)n=n*33^r[--a]}else throw new TypeError("value must be a string or TypedArray");return(n>>>0).toString(36)}function ve(t){const n=atob(t),e=new Uint8Array(n.length);for(let r=0;r<n.length;r++)e[r]=n.charCodeAt(r);return e.buffer}const Vt=window.fetch;window.fetch=(t,n)=>((t instanceof Request?t.method:(n==null?void 0:n.method)||"GET")!=="GET"&&G.delete(mt(t)),Vt(t,n));const G=new Map;function be(t,n){const e=mt(t,n),r=document.querySelector(e);if(r!=null&&r.textContent){let{body:a,...o}=JSON.parse(r.textContent);const i=r.getAttribute("data-ttl");return i&&G.set(e,{body:a,init:o,ttl:1e3*Number(i)}),r.getAttribute("data-b64")!==null&&(a=ve(a)),Promise.resolve(new Response(a,o))}return window.fetch(t,n)}function ke(t,n,e){if(G.size>0){const r=mt(t,e),a=G.get(r);if(a){if(performance.now()<a.ttl&&["default","force-cache","only-if-cached",void 0].includes(e==null?void 0:e.cache))return new Response(a.body,a.init);G.delete(r)}}return window.fetch(n,e)}function mt(t,n){let r=`script[data-sveltekit-fetched][data-url=${JSON.stringify(t instanceof Request?t.url:t)}]`;if(n!=null&&n.headers||n!=null&&n.body){const a=[];n.headers&&a.push([...new Headers(n.headers)].join(",")),n.body&&(typeof n.body=="string"||ArrayBuffer.isView(n.body))&&a.push(n.body),r+=`[data-hash="${we(...a)}"]`}return r}const Ee=/^(\[)?(\.\.\.)?(\w+)(?:=(\w+))?(\])?$/;function Se(t){const n=[];return{pattern:t==="/"?/^\/$/:new RegExp(`^${Re(t).map(r=>{const a=/^\[\.\.\.(\w+)(?:=(\w+))?\]$/.exec(r);if(a)return n.push({name:a[1],matcher:a[2],optional:!1,rest:!0,chained:!0}),"(?:/(.*))?";const o=/^\[\[(\w+)(?:=(\w+))?\]\]$/.exec(r);if(o)return n.push({name:o[1],matcher:o[2],optional:!0,rest:!1,chained:!0}),"(?:/([^/]+))?";if(!r)return;const i=r.split(/\[(.+?)\](?!\])/);return"/"+i.map((c,l)=>{if(l%2){if(c.startsWith("x+"))return ut(String.fromCharCode(parseInt(c.slice(2),16)));if(c.startsWith("u+"))return ut(String.fromCharCode(...c.slice(2).split("-").map(f=>parseInt(f,16))));const u=Ee.exec(c),[,h,g,d,_]=u;return n.push({name:d,matcher:_,optional:!!h,rest:!!g,chained:g?l===1&&i[0]==="":!1}),g?"(.*?)":h?"([^/]*)?":"([^/]+?)"}return ut(c)}).join("")}).join("")}/?$`),params:n}}function Ae(t){return!/^\([^)]+\)$/.test(t)}function Re(t){return t.slice(1).split("/").filter(Ae)}function Ie(t,n,e){const 
r={},a=t.slice(1),o=a.filter(s=>s!==void 0);let i=0;for(let s=0;s<n.length;s+=1){const c=n[s];let l=a[s-i];if(c.chained&&c.rest&&i&&(l=a.slice(s-i,s+1).filter(u=>u).join("/"),i=0),l===void 0){c.rest&&(r[c.name]="");continue}if(!c.matcher||e[c.matcher](l)){r[c.name]=l;const u=n[s+1],h=a[s+1];u&&!u.rest&&u.optional&&h&&c.chained&&(i=0),!u&&!h&&Object.keys(r).length===o.length&&(i=0);continue}if(c.optional&&c.chained){i++;continue}return}if(!i)return r}function ut(t){return t.normalize().replace(/[[\]]/g,"\\$&").replace(/%/g,"%25").replace(/\//g,"%2[Ff]").replace(/\?/g,"%3[Ff]").replace(/#/g,"%23").replace(/[.*+?^${}()|\\]/g,"\\$&")}function Le({nodes:t,server_loads:n,dictionary:e,matchers:r}){const a=new Set(n);return Object.entries(e).map(([s,[c,l,u]])=>{const{pattern:h,params:g}=Se(s),d={id:s,exec:_=>{const f=h.exec(_);if(f)return Ie(f,g,r)},errors:[1,...u||[]].map(_=>t[_]),layouts:[0,...l||[]].map(i),leaf:o(c)};return d.errors.length=d.layouts.length=Math.max(d.errors.length,d.layouts.length),d});function o(s){const c=s<0;return c&&(s=~s),[c,t[s]]}function i(s){return s===void 0?s:[a.has(s),t[s]]}}function Ft(t,n=JSON.parse){try{return n(sessionStorage[t])}catch{}}function Pt(t,n,e=JSON.stringify){const r=e(n);try{sessionStorage[t]=r}catch{}}const O=[];function _t(t,n=lt){let e;const r=new Set;function a(s){if(le(t,s)&&(t=s,e)){const c=!O.length;for(const l of r)l[1](),O.push(l,t);if(c){for(let l=0;l<O.length;l+=2)O[l][0](O[l+1]);O.length=0}}}function o(s){a(s(t))}function i(s,c=lt){const l=[s,c];return r.add(l),r.size===1&&(e=n(a,o)||lt),s(t),()=>{r.delete(l),r.size===0&&e&&(e(),e=null)}}return{set:a,update:o,subscribe:i}}var Dt;const P=((Dt=globalThis.__sveltekit_1klgtbu)==null?void 0:Dt.base)??"";var Ct;const Pe=((Ct=globalThis.__sveltekit_1klgtbu)==null?void 0:Ct.assets)??P,Te="1722243467595",qt="sveltekit:snapshot",Gt="sveltekit:scroll",Mt="sveltekit:states",Ue="sveltekit:pageurl",D="sveltekit:history",H="sveltekit:navigation",J={tap:1,hover:2,viewport:3,eager:4,off:-1,false:-1},z=location.origin;function Ht(t){if(t instanceof URL)return t;let n=document.baseURI;if(!n){const e=document.getElementsByTagName("base");n=e.length?e[0].href:document.URL}return new URL(t,n)}function yt(){return{x:pageXOffset,y:pageYOffset}}function j(t,n){return t.getAttribute(`data-sveltekit-${n}`)}const Tt={...J,"":J.hover};function Bt(t){let n=t.assignedSlot??t.parentNode;return(n==null?void 0:n.nodeType)===11&&(n=n.host),n}function Kt(t,n){for(;t&&t!==n;){if(t.nodeName.toUpperCase()==="A"&&t.hasAttribute("href"))return t;t=Bt(t)}}function ht(t,n){let e;try{e=new URL(t instanceof SVGAElement?t.href.baseVal:t.href,document.baseURI)}catch{}const r=t instanceof SVGAElement?t.target.baseVal:t.target,a=!e||!!r||rt(e,n)||(t.getAttribute("rel")||"").split(/\s+/).includes("external"),o=(e==null?void 0:e.origin)===z&&t.hasAttribute("download");return{url:e,external:a,target:r,download:o}}function W(t){let n=null,e=null,r=null,a=null,o=null,i=null,s=t;for(;s&&s!==document.documentElement;)r===null&&(r=j(s,"preload-code")),a===null&&(a=j(s,"preload-data")),n===null&&(n=j(s,"keepfocus")),e===null&&(e=j(s,"noscroll")),o===null&&(o=j(s,"reload")),i===null&&(i=j(s,"replacestate")),s=Bt(s);function c(l){switch(l){case"":case"true":return!0;case"off":case"false":return!1;default:return}}return{preload_code:Tt[r??"off"],preload_data:Tt[a??"off"],keepfocus:c(n),noscroll:c(e),reload:c(o),replace_state:c(i)}}function Ut(t){const n=_t(t);let e=!0;function r(){e=!0,n.update(i=>i)}function a(i){e=!1,n.set(i)}function o(i){let 
s;return n.subscribe(c=>{(s===void 0||e&&c!==s)&&i(s=c)})}return{notify:r,set:a,subscribe:o}}function xe(){const{set:t,subscribe:n}=_t(!1);let e;async function r(){clearTimeout(e);try{const a=await fetch(`${Pe}/_app/version.json`,{headers:{pragma:"no-cache","cache-control":"no-cache"}});if(!a.ok)return!1;const i=(await a.json()).version!==Te;return i&&(t(!0),clearTimeout(e)),i}catch{return!1}}return{subscribe:n,check:r}}function rt(t,n){return t.origin!==z||!t.pathname.startsWith(n)}const Ne=-1,Oe=-2,je=-3,$e=-4,De=-5,Ce=-6;function Ve(t,n){if(typeof t=="number")return a(t,!0);if(!Array.isArray(t)||t.length===0)throw new Error("Invalid input");const e=t,r=Array(e.length);function a(o,i=!1){if(o===Ne)return;if(o===je)return NaN;if(o===$e)return 1/0;if(o===De)return-1/0;if(o===Ce)return-0;if(i)throw new Error("Invalid input");if(o in r)return r[o];const s=e[o];if(!s||typeof s!="object")r[o]=s;else if(Array.isArray(s))if(typeof s[0]=="string"){const c=s[0],l=n==null?void 0:n[c];if(l)return r[o]=l(a(s[1]));switch(c){case"Date":r[o]=new Date(s[1]);break;case"Set":const u=new Set;r[o]=u;for(let d=1;d<s.length;d+=1)u.add(a(s[d]));break;case"Map":const h=new Map;r[o]=h;for(let d=1;d<s.length;d+=2)h.set(a(s[d]),a(s[d+1]));break;case"RegExp":r[o]=new RegExp(s[1],s[2]);break;case"Object":r[o]=Object(s[1]);break;case"BigInt":r[o]=BigInt(s[1]);break;case"null":const g=Object.create(null);r[o]=g;for(let d=1;d<s.length;d+=2)g[s[d]]=a(s[d+1]);break;default:throw new Error(`Unknown type ${c}`)}}else{const c=new Array(s.length);r[o]=c;for(let l=0;l<s.length;l+=1){const u=s[l];u!==Oe&&(c[l]=a(u))}}else{const c={};r[o]=c;for(const l in s){const u=s[l];c[l]=a(u)}}return r[o]}return a(0)}const zt=new Set(["load","prerender","csr","ssr","trailingSlash","config"]);[...zt];const Fe=new Set([...zt]);[...Fe];function qe(t){return t.filter(n=>n!=null)}class at{constructor(n,e){this.status=n,typeof e=="string"?this.body={message:e}:e?this.body=e:this.body={message:`Error: ${n}`}}toString(){return JSON.stringify(this.body)}}class Yt{constructor(n,e){this.status=n,this.location=e}}class wt extends Error{constructor(n,e,r){super(r),this.status=n,this.text=e}}const Ge="x-sveltekit-invalidated",Me="x-sveltekit-trailing-slash";function X(t){return t instanceof at||t instanceof wt?t.status:500}function He(t){return t instanceof wt?t.text:"Internal Error"}const N=Ft(Gt)??{},B=Ft(qt)??{},U={url:Ut({}),page:Ut({}),navigating:_t(null),updated:xe()};function vt(t){N[t]=yt()}function Be(t,n){let e=t+1;for(;N[e];)delete N[e],e+=1;for(e=n+1;B[e];)delete B[e],e+=1}function V(t){return location.href=t.href,new Promise(()=>{})}function xt(){}let ot,pt,Z,T,gt,F;const Jt=[],Q=[];let R=null;const Wt=[],Ke=[];let $=[],y={branch:[],error:null,url:null},bt=!1,tt=!1,Nt=!0,K=!1,q=!1,Xt=!1,kt=!1,Et,S,L,I,et;const M=new Set;async function rn(t,n,e){var a,o;document.URL!==location.href&&(location.href=location.href),F=t,ot=Le(t),T=document.documentElement,gt=n,pt=t.nodes[0],Z=t.nodes[1],pt(),Z(),S=(a=history.state)==null?void 0:a[D],L=(o=history.state)==null?void 0:o[H],S||(S=L=Date.now(),history.replaceState({...history.state,[D]:S,[H]:L},""));const r=N[S];r&&(history.scrollRestoration="manual",scrollTo(r.x,r.y)),e?await tn(gt,e):Ze(location.href,{replaceState:!0}),Qe()}function ze(){Jt.length=0,kt=!1}function Zt(t){Q.some(n=>n==null?void 0:n.snapshot)&&(B[t]=Q.map(n=>{var e;return(e=n==null?void 0:n.snapshot)==null?void 0:e.capture()}))}function Qt(t){var n;(n=B[t])==null||n.forEach((e,r)=>{var a,o;(o=(a=Q[r])==null?void 
0:a.snapshot)==null||o.restore(e)})}function Ot(){vt(S),Pt(Gt,N),Zt(L),Pt(qt,B)}async function te(t,n,e,r){return Y({type:"goto",url:Ht(t),keepfocus:n.keepFocus,noscroll:n.noScroll,replace_state:n.replaceState,state:n.state,redirect_count:e,nav_token:r,accept:()=>{n.invalidateAll&&(kt=!0)}})}async function Ye(t){if(t.id!==(R==null?void 0:R.id)){const n={};M.add(n),R={id:t.id,token:n,promise:ne({...t,preload:n}).then(e=>(M.delete(n),e.type==="loaded"&&e.state.error&&(R=null),e))}}return R.promise}async function dt(t){const n=ot.find(e=>e.exec(re(t)));n&&await Promise.all([...n.layouts,n.leaf].map(e=>e==null?void 0:e[1]()))}function ee(t,n,e){var o;y=t.state;const r=document.querySelector("style[data-sveltekit]");r&&r.remove(),I=t.props.page,Et=new F.root({target:n,props:{...t.props,stores:U,components:Q},hydrate:e}),Qt(L);const a={from:null,to:{params:y.params,route:{id:((o=y.route)==null?void 0:o.id)??null},url:new URL(location.href)},willUnload:!1,type:"enter",complete:Promise.resolve()};$.forEach(i=>i(a)),tt=!0}function nt({url:t,params:n,branch:e,status:r,error:a,route:o,form:i}){let s="never";if(P&&(t.pathname===P||t.pathname===P+"/"))s="always";else for(const d of e)(d==null?void 0:d.slash)!==void 0&&(s=d.slash);t.pathname=ue(t.pathname,s),t.search=t.search;const c={type:"loaded",state:{url:t,params:n,branch:e,error:a,route:o},props:{constructors:qe(e).map(d=>d.node.component),page:I}};i!==void 0&&(c.props.form=i);let l={},u=!I,h=0;for(let d=0;d<Math.max(e.length,y.branch.length);d+=1){const _=e[d],f=y.branch[d];(_==null?void 0:_.data)!==(f==null?void 0:f.data)&&(u=!0),_&&(l={...l,..._.data},u&&(c.props[`data_${h}`]=l),h+=1)}return(!y.url||t.href!==y.url.href||y.error!==a||i!==void 0&&i!==I.form||u)&&(c.props.page={error:a,params:n,route:{id:(o==null?void 0:o.id)??null},state:{},status:r,url:new URL(t),form:i??null,data:u?l:I.data}),c}async function St({loader:t,parent:n,url:e,params:r,route:a,server_data_node:o}){var u,h,g;let i=null,s=!0;const c={dependencies:new Set,params:new Set,parent:!1,route:!1,url:!1,search_params:new Set},l=await t();if((u=l.universal)!=null&&u.load){let d=function(...f){for(const m of f){const{href:b}=new URL(m,e);c.dependencies.add(b)}};const _={route:new Proxy(a,{get:(f,m)=>(s&&(c.route=!0),f[m])}),params:new Proxy(r,{get:(f,m)=>(s&&c.params.add(m),f[m])}),data:(o==null?void 0:o.data)??null,url:ge(e,()=>{s&&(c.url=!0)},f=>{s&&c.search_params.add(f)}),async fetch(f,m){let b;f instanceof Request?(b=f.url,m={body:f.method==="GET"||f.method==="HEAD"?void 0:await f.blob(),cache:f.cache,credentials:f.credentials,headers:f.headers,integrity:f.integrity,keepalive:f.keepalive,method:f.method,mode:f.mode,redirect:f.redirect,referrer:f.referrer,referrerPolicy:f.referrerPolicy,signal:f.signal,...m}):b=f;const A=new URL(b,e);return s&&d(A.href),A.origin===e.origin&&(b=A.href.slice(e.origin.length)),tt?ke(b,A.href,m):be(b,m)},setHeaders:()=>{},depends:d,parent(){return s&&(c.parent=!0),n()},untrack(f){s=!1;try{return f()}finally{s=!0}}};i=await l.universal.load.call(null,_)??null}return{node:l,loader:t,server:o,universal:(h=l.universal)!=null&&h.load?{type:"data",data:i,uses:c}:null,data:i??(o==null?void 0:o.data)??null,slash:((g=l.universal)==null?void 0:g.trailingSlash)??(o==null?void 0:o.slash)}}function jt(t,n,e,r,a,o){if(kt)return!0;if(!a)return!1;if(a.parent&&t||a.route&&n||a.url&&e)return!0;for(const i of a.search_params)if(r.has(i))return!0;for(const i of a.params)if(o[i]!==y.params[i])return!0;for(const i of a.dependencies)if(Jt.some(s=>s(new 
URL(i))))return!0;return!1}function At(t,n){return(t==null?void 0:t.type)==="data"?t:(t==null?void 0:t.type)==="skip"?n??null:null}function Je(t,n){if(!t)return new Set(n.searchParams.keys());const e=new Set([...t.searchParams.keys(),...n.searchParams.keys()]);for(const r of e){const a=t.searchParams.getAll(r),o=n.searchParams.getAll(r);a.every(i=>o.includes(i))&&o.every(i=>a.includes(i))&&e.delete(r)}return e}function $t({error:t,url:n,route:e,params:r}){return{type:"loaded",state:{error:t,url:n,route:e,params:r,branch:[]},props:{page:I,constructors:[]}}}async function ne({id:t,invalidating:n,url:e,params:r,route:a,preload:o}){if((R==null?void 0:R.id)===t)return M.delete(R.token),R.promise;const{errors:i,layouts:s,leaf:c}=a,l=[...s,c];i.forEach(p=>p==null?void 0:p().catch(()=>{})),l.forEach(p=>p==null?void 0:p[1]().catch(()=>{}));let u=null;const h=y.url?t!==y.url.pathname+y.url.search:!1,g=y.route?a.id!==y.route.id:!1,d=Je(y.url,e);let _=!1;const f=l.map((p,v)=>{var x;const k=y.branch[v],E=!!(p!=null&&p[0])&&((k==null?void 0:k.loader)!==p[1]||jt(_,g,h,d,(x=k.server)==null?void 0:x.uses,r));return E&&(_=!0),E});if(f.some(Boolean)){try{u=await se(e,f)}catch(p){const v=await C(p,{url:e,params:r,route:{id:t}});return M.has(o)?$t({error:v,url:e,params:r,route:a}):st({status:X(p),error:v,url:e,route:a})}if(u.type==="redirect")return u}const m=u==null?void 0:u.nodes;let b=!1;const A=l.map(async(p,v)=>{var it;if(!p)return;const k=y.branch[v],E=m==null?void 0:m[v];if((!E||E.type==="skip")&&p[1]===(k==null?void 0:k.loader)&&!jt(b,g,h,d,(it=k.universal)==null?void 0:it.uses,r))return k;if(b=!0,(E==null?void 0:E.type)==="error")throw E;return St({loader:p[1],url:e,params:r,route:a,parent:async()=>{var Lt;const It={};for(let ct=0;ct<v;ct+=1)Object.assign(It,(Lt=await A[ct])==null?void 0:Lt.data);return It},server_data_node:At(E===void 0&&p[0]?{type:"skip"}:E??null,p[0]?k==null?void 0:k.server:void 0)})});for(const p of A)p.catch(()=>{});const w=[];for(let p=0;p<l.length;p+=1)if(l[p])try{w.push(await A[p])}catch(v){if(v instanceof Yt)return{type:"redirect",location:v.location};if(M.has(o))return $t({error:await C(v,{params:r,url:e,route:{id:a.id}}),url:e,params:r,route:a});let k=X(v),E;if(m!=null&&m.includes(v))k=v.status??k,E=v.error;else if(v instanceof at)E=v.body;else{if(await U.updated.check())return await V(e);E=await C(v,{params:r,url:e,route:{id:a.id}})}const x=await We(p,w,i);return x?nt({url:e,params:r,branch:w.slice(0,x.idx).concat(x.node),status:k,error:E,route:a}):await oe(e,{id:a.id},E,k)}else w.push(void 0);return nt({url:e,params:r,branch:w,status:200,error:null,route:a,form:n?void 0:null})}async function We(t,n,e){for(;t--;)if(e[t]){let r=t;for(;!n[r];)r-=1;try{return{idx:r+1,node:{node:await e[t](),loader:e[t],data:{},server:null,universal:null}}}catch{continue}}}async function st({status:t,error:n,url:e,route:r}){const a={};let o=null;if(F.server_loads[0]===0)try{const l=await se(e,[!0]);if(l.type!=="data"||l.nodes[0]&&l.nodes[0].type!=="data")throw 0;o=l.nodes[0]??null}catch{(e.origin!==z||e.pathname!==location.pathname||bt)&&await V(e)}const s=await St({loader:pt,url:e,params:a,route:r,parent:()=>Promise.resolve({}),server_data_node:At(o)}),c={node:await Z(),loader:Z,universal:null,server:null,data:null};return nt({url:e,params:a,branch:[s,c],status:t,error:n,route:null})}function Rt(t,n){if(!t||rt(t,P))return;let e;try{e=F.hooks.reroute({url:new URL(t)})??t.pathname}catch{return}const r=re(e);for(const a of ot){const 
o=a.exec(r);if(o)return{id:t.pathname+t.search,invalidating:n,route:a,params:he(o),url:t}}}function re(t){return de(t.slice(P.length)||"/")}function ae({url:t,type:n,intent:e,delta:r}){let a=!1;const o=ce(y,e,t,n);r!==void 0&&(o.navigation.delta=r);const i={...o.navigation,cancel:()=>{a=!0,o.reject(new Error("navigation cancelled"))}};return K||Wt.forEach(s=>s(i)),a?null:o}async function Y({type:t,url:n,popped:e,keepfocus:r,noscroll:a,replace_state:o,state:i={},redirect_count:s=0,nav_token:c={},accept:l=xt,block:u=xt}){const h=Rt(n,!1),g=ae({url:n,type:t,delta:e==null?void 0:e.delta,intent:h});if(!g){u();return}const d=S,_=L;l(),K=!0,tt&&U.navigating.set(g.navigation),et=c;let f=h&&await ne(h);if(!f){if(rt(n,P))return await V(n);f=await oe(n,{id:null},await C(new wt(404,"Not Found",`Not found: ${n.pathname}`),{url:n,params:{},route:{id:null}}),404)}if(n=(h==null?void 0:h.url)||n,et!==c)return g.reject(new Error("navigation aborted")),!1;if(f.type==="redirect")if(s>=20)f=await st({status:500,error:await C(new Error("Redirect loop"),{url:n,params:{},route:{id:null}}),url:n,route:{id:null}});else return te(new URL(f.location,n).href,{},s+1,c),!1;else f.props.page.status>=400&&await U.updated.check()&&await V(n);if(ze(),vt(d),Zt(_),f.props.page.url.pathname!==n.pathname&&(n.pathname=f.props.page.url.pathname),i=e?e.state:i,!e){const w=o?0:1,p={[D]:S+=w,[H]:L+=w,[Mt]:i};(o?history.replaceState:history.pushState).call(history,p,"",n),o||Be(S,L)}if(R=null,f.props.page.state=i,tt){y=f.state,f.props.page&&(f.props.page.url=n);const w=(await Promise.all(Ke.map(p=>p(g.navigation)))).filter(p=>typeof p=="function");if(w.length>0){let p=function(){$=$.filter(v=>!w.includes(v))};w.push(p),$.push(...w)}Et.$set(f.props),Xt=!0}else ee(f,gt,!1);const{activeElement:m}=document;await fe();const b=e?e.scroll:a?yt():null;if(Nt){const w=n.hash&&document.getElementById(decodeURIComponent(n.hash.slice(1)));b?scrollTo(b.x,b.y):w?w.scrollIntoView():scrollTo(0,0)}const A=document.activeElement!==m&&document.activeElement!==document.body;!r&&!A&&en(),Nt=!0,f.props.page&&(I=f.props.page),K=!1,t==="popstate"&&Qt(L),g.fulfil(void 0),$.forEach(w=>w(g.navigation)),U.navigating.set(null)}async function oe(t,n,e,r){return t.origin===z&&t.pathname===location.pathname&&!bt?await st({status:r,error:e,url:t,route:n}):await V(t)}function Xe(){let t;T.addEventListener("mousemove",o=>{const i=o.target;clearTimeout(t),t=setTimeout(()=>{r(i,2)},20)});function n(o){r(o.composedPath()[0],1)}T.addEventListener("mousedown",n),T.addEventListener("touchstart",n,{passive:!0});const e=new IntersectionObserver(o=>{for(const i of o)i.isIntersecting&&(dt(i.target.href),e.unobserve(i.target))},{threshold:0});function r(o,i){const s=Kt(o,T);if(!s)return;const{url:c,external:l,download:u}=ht(s,P);if(l||u)return;const h=W(s);if(!h.reload)if(i<=h.preload_data){const g=Rt(c,!1);g&&Ye(g)}else i<=h.preload_code&&dt(c.pathname)}function a(){e.disconnect();for(const o of T.querySelectorAll("a")){const{url:i,external:s,download:c}=ht(o,P);if(s||c)continue;const l=W(o);l.reload||(l.preload_code===J.viewport&&e.observe(o),l.preload_code===J.eager&&dt(i.pathname))}}$.push(a),a()}function C(t,n){if(t instanceof at)return t.body;const e=X(t),r=He(t);return F.hooks.handleError({error:t,event:n,status:e,message:r})??{message:r}}function Ze(t,n={}){return t=Ht(t),t.origin!==z?Promise.reject(new Error("goto: invalid URL")):te(t,n,0)}function Qe(){var n;history.scrollRestoration="manual",addEventListener("beforeunload",e=>{let r=!1;if(Ot(),!K){const a=ce(y,void 
0,null,"leave"),o={...a.navigation,cancel:()=>{r=!0,a.reject(new Error("navigation cancelled"))}};Wt.forEach(i=>i(o))}r?(e.preventDefault(),e.returnValue=""):history.scrollRestoration="auto"}),addEventListener("visibilitychange",()=>{document.visibilityState==="hidden"&&Ot()}),(n=navigator.connection)!=null&&n.saveData||Xe(),T.addEventListener("click",async e=>{var g;if(e.button||e.which!==1||e.metaKey||e.ctrlKey||e.shiftKey||e.altKey||e.defaultPrevented)return;const r=Kt(e.composedPath()[0],T);if(!r)return;const{url:a,external:o,target:i,download:s}=ht(r,P);if(!a)return;if(i==="_parent"||i==="_top"){if(window.parent!==window)return}else if(i&&i!=="_self")return;const c=W(r);if(!(r instanceof SVGAElement)&&a.protocol!==location.protocol&&!(a.protocol==="https:"||a.protocol==="http:")||s)return;if(o||c.reload){ae({url:a,type:"link"})?K=!0:e.preventDefault();return}const[u,h]=a.href.split("#");if(h!==void 0&&u===ft(location)){const[,d]=y.url.href.split("#");if(d===h){e.preventDefault(),h===""||h==="top"&&r.ownerDocument.getElementById("top")===null?window.scrollTo({top:0}):(g=r.ownerDocument.getElementById(h))==null||g.scrollIntoView();return}if(q=!0,vt(S),t(a),!c.replace_state)return;q=!1}e.preventDefault(),await new Promise(d=>{requestAnimationFrame(()=>{setTimeout(d,0)}),setTimeout(d,100)}),Y({type:"link",url:a,keepfocus:c.keepfocus,noscroll:c.noscroll,replace_state:c.replace_state??a.href===location.href})}),T.addEventListener("submit",e=>{if(e.defaultPrevented)return;const r=HTMLFormElement.prototype.cloneNode.call(e.target),a=e.submitter;if(((a==null?void 0:a.formMethod)||r.method)!=="get")return;const i=new URL((a==null?void 0:a.hasAttribute("formaction"))&&(a==null?void 0:a.formAction)||r.action);if(rt(i,P))return;const s=e.target,c=W(s);if(c.reload)return;e.preventDefault(),e.stopPropagation();const l=new FormData(s),u=a==null?void 0:a.getAttribute("name");u&&l.append(u,(a==null?void 0:a.getAttribute("value"))??""),i.search=new URLSearchParams(l).toString(),Y({type:"form",url:i,keepfocus:c.keepfocus,noscroll:c.noscroll,replace_state:c.replace_state??i.href===location.href})}),addEventListener("popstate",async e=>{var r;if((r=e.state)!=null&&r[D]){const a=e.state[D];if(et={},a===S)return;const o=N[a],i=e.state[Mt]??{},s=new URL(e.state[Ue]??location.href),c=e.state[H],l=ft(location)===ft(y.url);if(c===L&&(Xt||l)){t(s),N[S]=yt(),o&&scrollTo(o.x,o.y),i!==I.state&&(I={...I,state:i},Et.$set({page:I})),S=a;return}const h=a-S;await Y({type:"popstate",url:s,popped:{state:i,scroll:o,delta:h},accept:()=>{S=a,L=c},block:()=>{history.go(-h)},nav_token:et})}else if(!q){const a=new URL(location.href);t(a)}}),addEventListener("hashchange",()=>{q&&(q=!1,history.replaceState({...history.state,[D]:++S,[H]:L},"",location.href))});for(const e of document.querySelectorAll("link"))e.rel==="icon"&&(e.href=e.href);addEventListener("pageshow",e=>{e.persisted&&U.navigating.set(null)});function t(e){y.url=e,U.page.set({...I,url:e}),U.page.notify()}}async function tn(t,{status:n=200,error:e,node_ids:r,params:a,route:o,data:i,form:s}){bt=!0;const c=new URL(location.href);({params:a={},route:o={id:null}}=Rt(c,!1)||{});let l;try{const u=r.map(async(d,_)=>{const f=i[_];return f!=null&&f.uses&&(f.uses=ie(f.uses)),St({loader:F.nodes[d],url:c,params:a,route:o,parent:async()=>{const m={};for(let b=0;b<_;b+=1)Object.assign(m,(await u[b]).data);return m},server_data_node:At(f)})}),h=await Promise.all(u),g=ot.find(({id:d})=>d===o.id);if(g){const d=g.layouts;for(let _=0;_<d.length;_++)d[_]||h.splice(_,0,void 
0)}l=nt({url:c,params:a,branch:h,status:n,error:e,form:s,route:g??null})}catch(u){if(u instanceof Yt){await V(new URL(u.location,location.href));return}l=await st({status:X(u),error:await C(u,{url:c,params:a,route:o}),url:c,route:o})}l.props.page&&(l.props.page.state={}),ee(l,t,!0)}async function se(t,n){var a;const e=new URL(t);e.pathname=ye(t.pathname),t.pathname.endsWith("/")&&e.searchParams.append(Me,"1"),e.searchParams.append(Ge,n.map(o=>o?"1":"0").join(""));const r=await Vt(e.href);if(!r.ok){let o;throw(a=r.headers.get("content-type"))!=null&&a.includes("application/json")?o=await r.json():r.status===404?o="Not Found":r.status===500&&(o="Internal Error"),new at(r.status,o)}return new Promise(async o=>{var h;const i=new Map,s=r.body.getReader(),c=new TextDecoder;function l(g){return Ve(g,{Promise:d=>new Promise((_,f)=>{i.set(d,{fulfil:_,reject:f})})})}let u="";for(;;){const{done:g,value:d}=await s.read();if(g&&!u)break;for(u+=!d&&u?`
2
- `:c.decode(d,{stream:!0});;){const _=u.indexOf(`
3
- `);if(_===-1)break;const f=JSON.parse(u.slice(0,_));if(u=u.slice(_+1),f.type==="redirect")return o(f);if(f.type==="data")(h=f.nodes)==null||h.forEach(m=>{(m==null?void 0:m.type)==="data"&&(m.uses=ie(m.uses),m.data=l(m.data))}),o(f);else if(f.type==="chunk"){const{id:m,data:b,error:A}=f,w=i.get(m);i.delete(m),A?w.reject(l(A)):w.fulfil(l(b))}}}})}function ie(t){return{dependencies:new Set((t==null?void 0:t.dependencies)??[]),params:new Set((t==null?void 0:t.params)??[]),parent:!!(t!=null&&t.parent),route:!!(t!=null&&t.route),url:!!(t!=null&&t.url),search_params:new Set((t==null?void 0:t.search_params)??[])}}function en(){const t=document.querySelector("[autofocus]");if(t)t.focus();else{const n=document.body,e=n.getAttribute("tabindex");n.tabIndex=-1,n.focus({preventScroll:!0,focusVisible:!1}),e!==null?n.setAttribute("tabindex",e):n.removeAttribute("tabindex");const r=getSelection();if(r&&r.type!=="None"){const a=[];for(let o=0;o<r.rangeCount;o+=1)a.push(r.getRangeAt(o));setTimeout(()=>{if(r.rangeCount===a.length){for(let o=0;o<r.rangeCount;o+=1){const i=a[o],s=r.getRangeAt(o);if(i.commonAncestorContainer!==s.commonAncestorContainer||i.startContainer!==s.startContainer||i.endContainer!==s.endContainer||i.startOffset!==s.startOffset||i.endOffset!==s.endOffset)return}r.removeAllRanges()}})}}}function ce(t,n,e,r){var c,l;let a,o;const i=new Promise((u,h)=>{a=u,o=h});return i.catch(()=>{}),{navigation:{from:{params:t.params,route:{id:((c=t.route)==null?void 0:c.id)??null},url:t.url},to:e&&{params:(n==null?void 0:n.params)??null,route:{id:((l=n==null?void 0:n.route)==null?void 0:l.id)??null},url:e},willUnload:!n,type:r,complete:i},fulfil:a,reject:o}}export{rn as a,U as s};
 
 
 
 
_app/immutable/chunks/index.C4D7lu78.js DELETED
@@ -1 +0,0 @@
1
- var E=Object.defineProperty;var j=(e,t,n)=>t in e?E(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n;var p=(e,t,n)=>j(e,typeof t!="symbol"?t+"":t,n);import{r as h,n as y,f as w,h as C,i as S,j as B,k as b,l as I,m as P,p as N,q as T,v as q,w as H}from"./scheduler.CtbWrGNo.js";let $=!1;function M(){$=!0}function O(){$=!1}function D(e,t,n,a){for(;e<t;){const s=e+(t-e>>1);n(s)<=a?e=s+1:t=s}return e}function L(e){if(e.hydrate_init)return;e.hydrate_init=!0;let t=e.childNodes;if(e.nodeName==="HEAD"){const i=[];for(let r=0;r<t.length;r++){const o=t[r];o.claim_order!==void 0&&i.push(o)}t=i}const n=new Int32Array(t.length+1),a=new Int32Array(t.length);n[0]=-1;let s=0;for(let i=0;i<t.length;i++){const r=t[i].claim_order,o=(s>0&&t[n[s]].claim_order<=r?s+1:D(1,s,_=>t[n[_]].claim_order,r))-1;a[i]=n[o]+1;const u=o+1;n[u]=i,s=Math.max(u,s)}const c=[],l=[];let f=t.length-1;for(let i=n[s]+1;i!=0;i=a[i-1]){for(c.push(t[i-1]);f>=i;f--)l.push(t[f]);f--}for(;f>=0;f--)l.push(t[f]);c.reverse(),l.sort((i,r)=>i.claim_order-r.claim_order);for(let i=0,r=0;i<l.length;i++){for(;r<c.length&&l[i].claim_order>=c[r].claim_order;)r++;const o=r<c.length?c[r]:null;e.insertBefore(l[i],o)}}function R(e,t){if($){for(L(e),(e.actual_end_child===void 0||e.actual_end_child!==null&&e.actual_end_child.parentNode!==e)&&(e.actual_end_child=e.firstChild);e.actual_end_child!==null&&e.actual_end_child.claim_order===void 0;)e.actual_end_child=e.actual_end_child.nextSibling;t!==e.actual_end_child?(t.claim_order!==void 0||t.parentNode!==e)&&e.insertBefore(t,e.actual_end_child):e.actual_end_child=t.nextSibling}else(t.parentNode!==e||t.nextSibling!==null)&&e.appendChild(t)}function ee(e,t,n){$&&!n?R(e,t):(t.parentNode!==e||t.nextSibling!=n)&&e.insertBefore(t,n||null)}function U(e){e.parentNode&&e.parentNode.removeChild(e)}function V(e){return document.createElement(e)}function x(e){return document.createTextNode(e)}function te(){return x(" ")}function ne(){return x("")}function ie(e,t,n){n==null?e.removeAttribute(t):e.getAttribute(t)!==n&&e.setAttribute(t,n)}function re(e){return e.dataset.svelteH}function W(e){return Array.from(e.childNodes)}function z(e){e.claim_info===void 0&&(e.claim_info={last_index:0,total_claimed:0})}function A(e,t,n,a,s=!1){z(e);const c=(()=>{for(let l=e.claim_info.last_index;l<e.length;l++){const f=e[l];if(t(f)){const i=n(f);return i===void 0?e.splice(l,1):e[l]=i,s||(e.claim_info.last_index=l),f}}for(let l=e.claim_info.last_index-1;l>=0;l--){const f=e[l];if(t(f)){const i=n(f);return i===void 0?e.splice(l,1):e[l]=i,s?i===void 0&&e.claim_info.last_index--:e.claim_info.last_index=l,f}}return a()})();return c.claim_order=e.claim_info.total_claimed,e.claim_info.total_claimed+=1,c}function F(e,t,n,a){return A(e,s=>s.nodeName===t,s=>{const c=[];for(let l=0;l<s.attributes.length;l++){const f=s.attributes[l];n[f.name]||c.push(f.name)}c.forEach(l=>s.removeAttribute(l))},()=>a(t))}function ae(e,t,n){return F(e,t,n,V)}function G(e,t){return A(e,n=>n.nodeType===3,n=>{const a=""+t;if(n.data.startsWith(a)){if(n.data.length!==a.length)return n.splitText(a.length)}else n.data=a},()=>x(t),!0)}function le(e){return G(e," ")}function se(e,t){t=""+t,e.data!==t&&(e.data=t)}function fe(e,t,n,a){n==null?e.style.removeProperty(t):e.style.setProperty(t,n,"")}function ce(e,t){return new e(t)}const m=new Set;let d;function ue(){d={r:0,c:[],p:d}}function oe(){d.r||h(d.c),d=d.p}function J(e,t){e&&e.i&&(m.delete(e),e.i(t))}function 
de(e,t,n,a){if(e&&e.o){if(m.has(e))return;m.add(e),d.c.push(()=>{m.delete(e),a&&(n&&e.d(1),a())}),e.o(t)}else a&&a()}function _e(e){e&&e.c()}function me(e,t){e&&e.l(t)}function K(e,t,n){const{fragment:a,after_update:s}=e.$$;a&&a.m(t,n),b(()=>{const c=e.$$.on_mount.map(T).filter(S);e.$$.on_destroy?e.$$.on_destroy.push(...c):h(c),e.$$.on_mount=[]}),s.forEach(b)}function Q(e,t){const n=e.$$;n.fragment!==null&&(I(n.after_update),h(n.on_destroy),n.fragment&&n.fragment.d(t),n.on_destroy=n.fragment=null,n.ctx=[])}function X(e,t){e.$$.dirty[0]===-1&&(q.push(e),H(),e.$$.dirty.fill(0)),e.$$.dirty[t/31|0]|=1<<t%31}function he(e,t,n,a,s,c,l=null,f=[-1]){const i=P;N(e);const r=e.$$={fragment:null,ctx:[],props:c,update:y,not_equal:s,bound:w(),on_mount:[],on_destroy:[],on_disconnect:[],before_update:[],after_update:[],context:new Map(t.context||(i?i.$$.context:[])),callbacks:w(),dirty:f,skip_bound:!1,root:t.target||i.$$.root};l&&l(r.root);let o=!1;if(r.ctx=n?n(e,t.props||{},(u,_,...g)=>{const v=g.length?g[0]:_;return r.ctx&&s(r.ctx[u],r.ctx[u]=v)&&(!r.skip_bound&&r.bound[u]&&r.bound[u](v),o&&X(e,u)),_}):[],r.update(),o=!0,h(r.before_update),r.fragment=a?a(r.ctx):!1,t.target){if(t.hydrate){M();const u=W(t.target);r.fragment&&r.fragment.l(u),u.forEach(U)}else r.fragment&&r.fragment.c();t.intro&&J(e.$$.fragment),K(e,t.target,t.anchor),O(),C()}N(i)}class $e{constructor(){p(this,"$$");p(this,"$$set")}$destroy(){Q(this,1),this.$destroy=y}$on(t,n){if(!S(n))return y;const a=this.$$.callbacks[t]||(this.$$.callbacks[t]=[]);return a.push(n),()=>{const s=a.indexOf(n);s!==-1&&a.splice(s,1)}}$set(t){this.$$set&&!B(t)&&(this.$$.skip_bound=!0,this.$$set(t),this.$$.skip_bound=!1)}}const Y="4";typeof window<"u"&&(window.__svelte||(window.__svelte={v:new Set})).v.add(Y);export{$e as S,W as a,G as b,ae as c,U as d,V as e,le as f,ee as g,R as h,he as i,se as j,re as k,ie as l,J as m,de as n,ne as o,oe as p,fe as q,ue as r,te as s,x as t,ce as u,_e as v,me as w,K as x,Q as y};
 
 
_app/immutable/chunks/scheduler.CtbWrGNo.js DELETED
@@ -1 +0,0 @@
1
- function k(){}function x(t,n){for(const e in n)t[e]=n[e];return t}function w(t){return t()}function z(){return Object.create(null)}function j(t){t.forEach(w)}function F(t){return typeof t=="function"}function P(t,n){return t!=t?n==n:t!==n||t&&typeof t=="object"||typeof t=="function"}function S(t){return Object.keys(t).length===0}function E(t,...n){if(t==null){for(const o of n)o(void 0);return k}const e=t.subscribe(...n);return e.unsubscribe?()=>e.unsubscribe():e}function U(t,n,e){t.$$.on_destroy.push(E(n,e))}function A(t,n,e,o){if(t){const r=g(t,n,e,o);return t[0](r)}}function g(t,n,e,o){return t[1]&&o?x(e.ctx.slice(),t[1](o(n))):e.ctx}function B(t,n,e,o){if(t[2]&&o){const r=t[2](o(e));if(n.dirty===void 0)return r;if(typeof r=="object"){const a=[],f=Math.max(n.dirty.length,r.length);for(let s=0;s<f;s+=1)a[s]=n.dirty[s]|r[s];return a}return n.dirty|r}return n.dirty}function C(t,n,e,o,r,a){if(r){const f=g(n,e,o,a);t.p(f,r)}}function D(t){if(t.ctx.length>32){const n=[],e=t.ctx.length/32;for(let o=0;o<e;o++)n[o]=-1;return n}return-1}let i;function d(t){i=t}function m(){if(!i)throw new Error("Function called outside component initialization");return i}function G(t){m().$$.on_mount.push(t)}function H(t){m().$$.after_update.push(t)}const l=[],p=[];let u=[];const b=[],y=Promise.resolve();let h=!1;function v(){h||(h=!0,y.then(q))}function I(){return v(),y}function O(t){u.push(t)}const _=new Set;let c=0;function q(){if(c!==0)return;const t=i;do{try{for(;c<l.length;){const n=l[c];c++,d(n),M(n.$$)}}catch(n){throw l.length=0,c=0,n}for(d(null),l.length=0,c=0;p.length;)p.pop()();for(let n=0;n<u.length;n+=1){const e=u[n];_.has(e)||(_.add(e),e())}u.length=0}while(l.length);for(;b.length;)b.pop()();h=!1,_.clear(),d(t)}function M(t){if(t.fragment!==null){t.update(),j(t.before_update);const n=t.dirty;t.dirty=[-1],t.fragment&&t.fragment.p(t.ctx,n),t.after_update.forEach(O)}}function J(t){const n=[],e=[];u.forEach(o=>t.indexOf(o)===-1?n.push(o):e.push(o)),e.forEach(o=>o()),u=n}export{A as a,B as b,U as c,H as d,p as e,z as f,D as g,q as h,F as i,S as j,O as k,J as l,i as m,k as n,G as o,d as p,w as q,j as r,P as s,I as t,C as u,l as v,v as w};
 
 
_app/immutable/entry/app.Bje1ZUR5.js DELETED
@@ -1,2 +0,0 @@
1
- const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["../nodes/0.dPy0WIMN.js","../chunks/scheduler.CtbWrGNo.js","../chunks/index.C4D7lu78.js","../assets/0.DViICDYp.css","../nodes/1.BADQ-P6Z.js","../chunks/entry.CsquK5o6.js","../nodes/2.C-zSEB19.js","../assets/2.Dl1cvM0g.css"])))=>i.map(i=>d[i]);
2
- import{s as V,d as B,o as U,e as A,t as j}from"../chunks/scheduler.CtbWrGNo.js";import{S as W,i as z,s as F,o as h,f as G,g as k,n as p,p as L,m as g,d as w,e as H,c as J,a as K,l as q,q as d,t as Q,b as X,j as Y,r as S,u as E,v as y,w as D,x as R,y as P}from"../chunks/index.C4D7lu78.js";const Z="modulepreload",M=function(a,e){return new URL(a,e).href},I={},C=function(e,n,i){let s=Promise.resolve();if(n&&n.length>0){const u=document.getElementsByTagName("link"),t=document.querySelector("meta[property=csp-nonce]"),r=(t==null?void 0:t.nonce)||(t==null?void 0:t.getAttribute("nonce"));s=Promise.all(n.map(o=>{if(o=M(o,i),o in I)return;I[o]=!0;const f=o.endsWith(".css"),l=f?'[rel="stylesheet"]':"";if(!!i)for(let b=u.length-1;b>=0;b--){const v=u[b];if(v.href===o&&(!f||v.rel==="stylesheet"))return}else if(document.querySelector(`link[href="${o}"]${l}`))return;const _=document.createElement("link");if(_.rel=f?"stylesheet":Z,f||(_.as="script",_.crossOrigin=""),_.href=o,r&&_.setAttribute("nonce",r),document.head.appendChild(_),f)return new Promise((b,v)=>{_.addEventListener("load",b),_.addEventListener("error",()=>v(new Error(`Unable to preload CSS for ${o}`)))})}))}return s.then(()=>e()).catch(u=>{const t=new Event("vite:preloadError",{cancelable:!0});if(t.payload=u,window.dispatchEvent(t),!t.defaultPrevented)throw u})},re={};function $(a){let e,n,i;var s=a[1][0];function u(t,r){return{props:{data:t[3],form:t[2]}}}return s&&(e=E(s,u(a)),a[12](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&D(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][0])){if(e){S();const o=e;p(o.$$.fragment,1,0,()=>{P(o,1)}),L()}s?(e=E(s,u(t)),t[12](e),y(e.$$.fragment),g(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&8&&(o.data=t[3]),r&4&&(o.form=t[2]),e.$set(o)}},i(t){i||(e&&g(e.$$.fragment,t),i=!0)},o(t){e&&p(e.$$.fragment,t),i=!1},d(t){t&&w(n),a[12](null),e&&P(e,t)}}}function x(a){let e,n,i;var s=a[1][0];function u(t,r){return{props:{data:t[3],$$slots:{default:[ee]},$$scope:{ctx:t}}}}return s&&(e=E(s,u(a)),a[11](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&D(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][0])){if(e){S();const o=e;p(o.$$.fragment,1,0,()=>{P(o,1)}),L()}s?(e=E(s,u(t)),t[11](e),y(e.$$.fragment),g(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&8&&(o.data=t[3]),r&8215&&(o.$$scope={dirty:r,ctx:t}),e.$set(o)}},i(t){i||(e&&g(e.$$.fragment,t),i=!0)},o(t){e&&p(e.$$.fragment,t),i=!1},d(t){t&&w(n),a[11](null),e&&P(e,t)}}}function ee(a){let e,n,i;var s=a[1][1];function u(t,r){return{props:{data:t[4],form:t[2]}}}return s&&(e=E(s,u(a)),a[10](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&D(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][1])){if(e){S();const o=e;p(o.$$.fragment,1,0,()=>{P(o,1)}),L()}s?(e=E(s,u(t)),t[10](e),y(e.$$.fragment),g(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&16&&(o.data=t[4]),r&4&&(o.form=t[2]),e.$set(o)}},i(t){i||(e&&g(e.$$.fragment,t),i=!0)},o(t){e&&p(e.$$.fragment,t),i=!1},d(t){t&&w(n),a[10](null),e&&P(e,t)}}}function N(a){let e,n=a[6]&&O(a);return{c(){e=H("div"),n&&n.c(),this.h()},l(i){e=J(i,"DIV",{id:!0,"aria-live":!0,"aria-atomic":!0,style:!0});var s=K(e);n&&n.l(s),s.forEach(w),this.h()},h(){q(e,"id","svelte-announcer"),q(e,"aria-live","assertive"),q(e,"aria-atomic","true"),d(e,"position","absolute"),d(e,"left","0"),d(e,"top","0"),d(e,"clip","rect(0 0 0 
0)"),d(e,"clip-path","inset(50%)"),d(e,"overflow","hidden"),d(e,"white-space","nowrap"),d(e,"width","1px"),d(e,"height","1px")},m(i,s){k(i,e,s),n&&n.m(e,null)},p(i,s){i[6]?n?n.p(i,s):(n=O(i),n.c(),n.m(e,null)):n&&(n.d(1),n=null)},d(i){i&&w(e),n&&n.d()}}}function O(a){let e;return{c(){e=Q(a[7])},l(n){e=X(n,a[7])},m(n,i){k(n,e,i)},p(n,i){i&128&&Y(e,n[7])},d(n){n&&w(e)}}}function te(a){let e,n,i,s,u;const t=[x,$],r=[];function o(l,m){return l[1][1]?0:1}e=o(a),n=r[e]=t[e](a);let f=a[5]&&N(a);return{c(){n.c(),i=F(),f&&f.c(),s=h()},l(l){n.l(l),i=G(l),f&&f.l(l),s=h()},m(l,m){r[e].m(l,m),k(l,i,m),f&&f.m(l,m),k(l,s,m),u=!0},p(l,[m]){let _=e;e=o(l),e===_?r[e].p(l,m):(S(),p(r[_],1,1,()=>{r[_]=null}),L(),n=r[e],n?n.p(l,m):(n=r[e]=t[e](l),n.c()),g(n,1),n.m(i.parentNode,i)),l[5]?f?f.p(l,m):(f=N(l),f.c(),f.m(s.parentNode,s)):f&&(f.d(1),f=null)},i(l){u||(g(n),u=!0)},o(l){p(n),u=!1},d(l){l&&(w(i),w(s)),r[e].d(l),f&&f.d(l)}}}function ne(a,e,n){let{stores:i}=e,{page:s}=e,{constructors:u}=e,{components:t=[]}=e,{form:r}=e,{data_0:o=null}=e,{data_1:f=null}=e;B(i.page.notify);let l=!1,m=!1,_=null;U(()=>{const c=i.page.subscribe(()=>{l&&(n(6,m=!0),j().then(()=>{n(7,_=document.title||"untitled page")}))});return n(5,l=!0),c});function b(c){A[c?"unshift":"push"](()=>{t[1]=c,n(0,t)})}function v(c){A[c?"unshift":"push"](()=>{t[0]=c,n(0,t)})}function T(c){A[c?"unshift":"push"](()=>{t[0]=c,n(0,t)})}return a.$$set=c=>{"stores"in c&&n(8,i=c.stores),"page"in c&&n(9,s=c.page),"constructors"in c&&n(1,u=c.constructors),"components"in c&&n(0,t=c.components),"form"in c&&n(2,r=c.form),"data_0"in c&&n(3,o=c.data_0),"data_1"in c&&n(4,f=c.data_1)},a.$$.update=()=>{a.$$.dirty&768&&i.page.set(s)},[t,u,r,o,f,l,m,_,i,s,b,v,T]}class oe extends W{constructor(e){super(),z(this,e,ne,te,V,{stores:8,page:9,constructors:1,components:0,form:2,data_0:3,data_1:4})}}const ae=[()=>C(()=>import("../nodes/0.dPy0WIMN.js"),__vite__mapDeps([0,1,2,3]),import.meta.url),()=>C(()=>import("../nodes/1.BADQ-P6Z.js"),__vite__mapDeps([4,1,2,5]),import.meta.url),()=>C(()=>import("../nodes/2.C-zSEB19.js"),__vite__mapDeps([6,1,2,7]),import.meta.url)],le=[],fe={"/":[2]},ce={handleError:({error:a})=>{console.error(a)},reroute:()=>{}};export{fe as dictionary,ce as hooks,re as matchers,ae as nodes,oe as root,le as server_loads};
 
 
 
_app/immutable/entry/start.BZni4wHA.js DELETED
@@ -1 +0,0 @@
1
- import{a as t}from"../chunks/entry.CsquK5o6.js";export{t as start};
 
 
_app/immutable/nodes/0.dPy0WIMN.js DELETED
@@ -1 +0,0 @@
1
- import{s as l,a as r,u as i,g as u,b as _}from"../chunks/scheduler.CtbWrGNo.js";import{S as f,i as c,m as p,n as m}from"../chunks/index.C4D7lu78.js";const d=!0,S=Object.freeze(Object.defineProperty({__proto__:null,prerender:d},Symbol.toStringTag,{value:"Module"}));function $(n){let s;const a=n[1].default,t=r(a,n,n[0],null);return{c(){t&&t.c()},l(e){t&&t.l(e)},m(e,o){t&&t.m(e,o),s=!0},p(e,[o]){t&&t.p&&(!s||o&1)&&i(t,a,e,e[0],s?_(a,e[0],o,null):u(e[0]),null)},i(e){s||(p(t,e),s=!0)},o(e){m(t,e),s=!1},d(e){t&&t.d(e)}}}function g(n,s,a){let{$$slots:t={},$$scope:e}=s;return n.$$set=o=>{"$$scope"in o&&a(0,e=o.$$scope)},[e,t]}class v extends f{constructor(s){super(),c(this,s,g,$,l,{})}}export{v as component,S as universal};
 
 
_app/immutable/nodes/1.BADQ-P6Z.js DELETED
@@ -1 +0,0 @@
1
- import{s as S,n as _,c as x}from"../chunks/scheduler.CtbWrGNo.js";import{S as j,i as q,e as f,t as d,s as y,c as g,a as h,b as v,d as u,f as C,g as m,h as $,j as E}from"../chunks/index.C4D7lu78.js";import{s as H}from"../chunks/entry.CsquK5o6.js";const P=()=>{const s=H;return{page:{subscribe:s.page.subscribe},navigating:{subscribe:s.navigating.subscribe},updated:s.updated}},k={subscribe(s){return P().page.subscribe(s)}};function w(s){var b;let t,r=s[0].status+"",o,n,i,c=((b=s[0].error)==null?void 0:b.message)+"",l;return{c(){t=f("h1"),o=d(r),n=y(),i=f("p"),l=d(c)},l(e){t=g(e,"H1",{});var a=h(t);o=v(a,r),a.forEach(u),n=C(e),i=g(e,"P",{});var p=h(i);l=v(p,c),p.forEach(u)},m(e,a){m(e,t,a),$(t,o),m(e,n,a),m(e,i,a),$(i,l)},p(e,[a]){var p;a&1&&r!==(r=e[0].status+"")&&E(o,r),a&1&&c!==(c=((p=e[0].error)==null?void 0:p.message)+"")&&E(l,c)},i:_,o:_,d(e){e&&(u(t),u(n),u(i))}}}function z(s,t,r){let o;return x(s,k,n=>r(0,o=n)),[o]}let F=class extends j{constructor(t){super(),q(this,t,z,w,S,{})}};export{F as component};
 
 
_app/immutable/nodes/2.C-zSEB19.js DELETED
@@ -1,3 +0,0 @@
1
- import{s as l,n as a}from"../chunks/scheduler.CtbWrGNo.js";import{S as i,i as c,e as o,c as h,k as g,l as m,g as u,d as f}from"../chunks/index.C4D7lu78.js";function d(r){let e,s=`<article class="mx-4 max-w-4xl text-gray-300"><a href="https://huggingface.co/chat" target="_blank" rel="noreferrer" class="mx-auto mb-12 flex w-fit items-center justify-center gap-2 rounded-lg border border-gray-200 p-4 text-xl font-bold !text-white transition-all hover:-translate-y-0.5 hover:border-gray-300 hover:bg-gray-800">Try
2
- <h3 class="svelte-1hc9mw2"><img src="/logo.svg" alt="HuggingChat" class="inline w-10"/>
3
- HuggingChat</h3></a> <p class="svelte-1hc9mw2">HuggingChat is now available at <a class="link svelte-1hc9mw2" href="https://huggingface.co/chat" target="_blank" rel="noreferrer">hf.co/chat</a> and is no longer hosted as a space.</p> <p class="svelte-1hc9mw2">You can still interact with the community and give feedback in the <a class="link svelte-1hc9mw2" href="https://huggingface.co/spaces/huggingchat/chat-ui/discussions" target="_blank" rel="noreferrer">space&#39;s discussions</a>.</p></article>`;return{c(){e=o("div"),e.innerHTML=s,this.h()},l(t){e=h(t,"DIV",{class:!0,"data-svelte-h":!0}),g(e)!=="svelte-hljhww"&&(e.innerHTML=s),this.h()},h(){m(e,"class","mx-auto w-fit pt-10")},m(t,n){u(t,e,n)},p:a,i:a,o:a,d(t){t&&f(e)}}}class w extends i{constructor(e){super(),c(this,e,null,d,l,{})}}export{w as component};
 
 
 
 
_app/version.json DELETED
@@ -1 +0,0 @@
1
- {"version":"1722243467595"}
 
 
entrypoint.sh ADDED
@@ -0,0 +1,19 @@
1
+ ENV_LOCAL_PATH=/app/.env.local
2
+
3
+ if test -z "${DOTENV_LOCAL}" ; then
4
+ if ! test -f "${ENV_LOCAL_PATH}" ; then
5
+ echo "DOTENV_LOCAL was not found in the ENV variables and no .env.local file was provided via a bind mount. Make sure to set the environment variables properly."
6
+ fi;
7
+ else
8
+ echo "DOTENV_LOCAL was found in the ENV variables. Creating .env.local file."
9
+ cat <<< "$DOTENV_LOCAL" > ${ENV_LOCAL_PATH}
10
+ fi;
11
+
12
+ if [ "$INCLUDE_DB" = "true" ] ; then
13
+ echo "Starting local MongoDB instance"
14
+ nohup mongod &
15
+ fi;
16
+
17
+ export PUBLIC_VERSION=$(node -p "require('./package.json').version")
18
+
19
+ dotenv -e /app/.env -c -- node /app/build/index.js -- --host 0.0.0.0 --port 3000
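
A note on how this entrypoint is driven at runtime: it writes the contents of a DOTENV_LOCAL environment variable to /app/.env.local (or falls back to a bind-mounted .env.local), optionally starts a bundled MongoDB when INCLUDE_DB=true, exports PUBLIC_VERSION from package.json, and then serves the built app on port 3000. A minimal sketch of launching the container under those assumptions; the image name chat-ui is a placeholder, not something defined in this diff:

# Pass the whole .env.local through the DOTENV_LOCAL variable (placeholder image name):
docker run -p 3000:3000 -e DOTENV_LOCAL="$(cat .env.local)" chat-ui

# Or bind-mount the file so the script finds it at /app/.env.local, and ask the
# entrypoint to start the MongoDB bundled in the image (assumes mongod is installed
# there and that MONGODB_URL in your .env.local points at it):
docker run -p 3000:3000 -v "$(pwd)/.env.local:/app/.env.local" -e INCLUDE_DB=true chat-ui
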
index.html DELETED
@@ -1,48 +0,0 @@
1
- <!DOCTYPE html>
2
- <html lang="en" class="bg-gray-900">
3
- <head>
4
- <meta charset="utf-8" />
5
- <link rel="icon" href="./favicon.svg" />
6
- <meta name="viewport" content="width=device-width" />
7
-
8
- <link href="./_app/immutable/assets/0.DViICDYp.css" rel="stylesheet">
9
- <link href="./_app/immutable/assets/2.Dl1cvM0g.css" rel="stylesheet">
10
- <link rel="modulepreload" href="./_app/immutable/entry/start.BZni4wHA.js">
11
- <link rel="modulepreload" href="./_app/immutable/chunks/entry.CsquK5o6.js">
12
- <link rel="modulepreload" href="./_app/immutable/chunks/scheduler.CtbWrGNo.js">
13
- <link rel="modulepreload" href="./_app/immutable/entry/app.Bje1ZUR5.js">
14
- <link rel="modulepreload" href="./_app/immutable/chunks/index.C4D7lu78.js">
15
- <link rel="modulepreload" href="./_app/immutable/nodes/0.dPy0WIMN.js">
16
- <link rel="modulepreload" href="./_app/immutable/nodes/2.C-zSEB19.js">
17
- </head>
18
- <body data-sveltekit-preload-data="hover">
19
- <div style="display: contents"> <div class="mx-auto w-fit pt-10" data-svelte-h="svelte-hljhww"><article class="mx-4 max-w-4xl text-gray-300"><a href="https://huggingface.co/chat" target="_blank" rel="noreferrer" class="mx-auto mb-12 flex w-fit items-center justify-center gap-2 rounded-lg border border-gray-200 p-4 text-xl font-bold !text-white transition-all hover:-translate-y-0.5 hover:border-gray-300 hover:bg-gray-800">Try
20
- <h3 class="svelte-1hc9mw2"><img src="/logo.svg" alt="HuggingChat" class="inline w-10">
21
- HuggingChat</h3></a> <p class="svelte-1hc9mw2">HuggingChat is now available at <a class="link svelte-1hc9mw2" href="https://huggingface.co/chat" target="_blank" rel="noreferrer">hf.co/chat</a> and is no longer hosted as a space.</p> <p class="svelte-1hc9mw2">You can still interact with the community and give feedback in the <a class="link svelte-1hc9mw2" href="https://huggingface.co/spaces/huggingchat/chat-ui/discussions" target="_blank" rel="noreferrer">space&#39;s discussions</a>.</p></article> </div>
22
-
23
- <script>
24
- {
25
- __sveltekit_1klgtbu = {
26
- base: new URL(".", location).pathname.slice(0, -1)
27
- };
28
-
29
- const element = document.currentScript.parentElement;
30
-
31
- const data = [null,null];
32
-
33
- Promise.all([
34
- import("./_app/immutable/entry/start.BZni4wHA.js"),
35
- import("./_app/immutable/entry/app.Bje1ZUR5.js")
36
- ]).then(([kit, app]) => {
37
- kit.start(app, element, {
38
- node_ids: [0, 2],
39
- data,
40
- form: null,
41
- error: null
42
- });
43
- });
44
- }
45
- </script>
46
- </div>
47
- </body>
48
- </html>
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "name": "chat-ui",
3
+ "version": "0.8.3",
4
+ "private": true,
5
+ "packageManager": "[email protected]",
6
+ "scripts": {
7
+ "dev": "vite dev",
8
+ "build": "vite build",
9
+ "preview": "vite preview",
10
+ "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
11
+ "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
12
+ "lint": "prettier --plugin-search-dir . --check . && eslint .",
13
+ "format": "prettier --plugin-search-dir . --write .",
14
+ "test": "MONGODB_URL=mongodb://127.0.0.1:27017/ vitest",
15
+ "updateLocalEnv": "node --loader ts-node/esm scripts/updateLocalEnv.ts",
16
+ "updateProdEnv": "node --loader ts-node/esm scripts/updateProdEnv.ts",
17
+ "populate": "vite-node --options.transformMode.ssr='/.*/' scripts/populate.ts"
18
+ },
19
+ "devDependencies": {
20
+ "@faker-js/faker": "^8.4.1",
21
+ "@iconify-json/carbon": "^1.1.16",
22
+ "@iconify-json/eos-icons": "^1.1.6",
23
+ "@sveltejs/adapter-node": "^1.3.1",
24
+ "@sveltejs/kit": "^1.30.4",
25
+ "@tailwindcss/typography": "^0.5.9",
26
+ "@types/express": "^4.17.21",
27
+ "@types/jsdom": "^21.1.1",
28
+ "@types/minimist": "^1.2.5",
29
+ "@types/parquetjs": "^0.10.3",
30
+ "@types/uuid": "^9.0.8",
31
+ "@typescript-eslint/eslint-plugin": "^6.x",
32
+ "@typescript-eslint/parser": "^6.x",
33
+ "eslint": "^8.28.0",
34
+ "eslint-config-prettier": "^8.5.0",
35
+ "eslint-plugin-svelte": "^2.30.0",
36
+ "minimist": "^1.2.8",
37
+ "prettier": "^2.8.0",
38
+ "prettier-plugin-svelte": "^2.10.1",
39
+ "prettier-plugin-tailwindcss": "^0.2.7",
40
+ "prom-client": "^15.1.2",
41
+ "svelte": "^4.2.8",
42
+ "svelte-check": "^3.6.2",
43
+ "ts-node": "^10.9.1",
44
+ "tslib": "^2.4.1",
45
+ "typescript": "^5.0.0",
46
+ "unplugin-icons": "^0.16.1",
47
+ "vite": "^4.5.3",
48
+ "vite-node": "^1.3.1",
49
+ "vitest": "^0.31.0"
50
+ },
51
+ "type": "module",
52
+ "dependencies": {
53
+ "@huggingface/hub": "^0.5.1",
54
+ "@huggingface/inference": "^2.6.3",
55
+ "@iconify-json/bi": "^1.1.21",
56
+ "@resvg/resvg-js": "^2.6.0",
57
+ "@xenova/transformers": "^2.16.1",
58
+ "autoprefixer": "^10.4.14",
59
+ "browser-image-resizer": "^2.4.1",
60
+ "date-fns": "^2.29.3",
61
+ "dotenv": "^16.0.3",
62
+ "express": "^4.19.2",
63
+ "handlebars": "^4.7.8",
64
+ "highlight.js": "^11.7.0",
65
+ "image-size": "^1.0.2",
66
+ "ip-address": "^9.0.5",
67
+ "jsdom": "^22.0.0",
68
+ "json5": "^2.2.3",
69
+ "marked": "^12.0.1",
70
+ "marked-katex-extension": "^5.0.1",
71
+ "mongodb": "^5.8.0",
72
+ "nanoid": "^4.0.2",
73
+ "openid-client": "^5.4.2",
74
+ "parquetjs": "^0.11.2",
75
+ "pino": "^9.0.0",
76
+ "pino-pretty": "^11.0.0",
77
+ "postcss": "^8.4.31",
78
+ "saslprep": "^1.0.3",
79
+ "satori": "^0.10.11",
80
+ "satori-html": "^0.3.2",
81
+ "serpapi": "^1.1.1",
82
+ "sharp": "^0.33.2",
83
+ "tailwind-scrollbar": "^3.0.0",
84
+ "tailwindcss": "^3.4.0",
85
+ "uuid": "^9.0.1",
86
+ "zod": "^3.22.3"
87
+ },
88
+ "optionalDependencies": {
89
+ "@anthropic-ai/sdk": "^0.17.1",
90
+ "@google-cloud/vertexai": "^1.1.0",
91
+ "aws4fetch": "^1.0.17",
92
+ "cohere-ai": "^7.9.0",
93
+ "openai": "^4.14.2"
94
+ }
95
+ }
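
The scripts above cover the usual SvelteKit workflow (dev/build/preview), type checking with svelte-check, linting with Prettier and ESLint, Vitest tests that hardcode a local MongoDB URL, and a vite-node powered populate script whose flags are read from the command line. A rough sketch of how they would typically be invoked after installing dependencies; the populate flag names are taken from scripts/populate.ts below:

npm ci                            # install from the committed package-lock.json
npm run dev                       # start the Vite dev server
npm run lint && npm run check     # Prettier/ESLint plus svelte-check
npm run test                      # Vitest against mongodb://127.0.0.1:27017/ as set in the script
npm run populate -- reset all     # seed the DB; accepted flags: reset, all, users, settings, assistants, conversations
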
postcss.config.js ADDED
@@ -0,0 +1,6 @@
1
+ export default {
2
+ plugins: {
3
+ tailwindcss: {},
4
+ autoprefixer: {},
5
+ },
6
+ };
scripts/populate.ts ADDED
@@ -0,0 +1,269 @@
+ import readline from "readline";
+ import minimist from "minimist";
+
+ // @ts-expect-error: vite-node makes the var available but the typescript compiler doesn't see them
+ import { env } from "$env/dynamic/private";
+
+ import { faker } from "@faker-js/faker";
+ import { ObjectId } from "mongodb";
+
+ // @ts-expect-error: vite-node makes the var available but the typescript compiler doesn't see them
+ import { collections } from "$lib/server/database";
+ import { models } from "../src/lib/server/models.ts";
+ import type { User } from "../src/lib/types/User";
+ import type { Assistant } from "../src/lib/types/Assistant";
+ import type { Conversation } from "../src/lib/types/Conversation";
+ import type { Settings } from "../src/lib/types/Settings";
+ import { defaultEmbeddingModel } from "../src/lib/server/embeddingModels.ts";
+ import { Message } from "../src/lib/types/Message.ts";
+
+ import { addChildren } from "../src/lib/utils/tree/addChildren.ts";
+ import { generateSearchTokens } from "../src/lib/utils/searchTokens.ts";
+
+ const rl = readline.createInterface({
+   input: process.stdin,
+   output: process.stdout,
+ });
+
+ rl.on("close", function () {
+   process.exit(0);
+ });
+
+ const possibleFlags = ["reset", "all", "users", "settings", "assistants", "conversations"];
+ const argv = minimist(process.argv.slice(2));
+ const flags = argv["_"].filter((flag) => possibleFlags.includes(flag));
+
+ async function generateMessages(preprompt?: string): Promise<Message[]> {
+   const isLinear = faker.datatype.boolean(0.5);
+   const isInterrupted = faker.datatype.boolean(0.05);
+
+   const messages: Message[] = [];
+
+   messages.push({
+     id: crypto.randomUUID(),
+     from: "system",
+     content: preprompt ?? "",
+     createdAt: faker.date.recent({ days: 30 }),
+     updatedAt: faker.date.recent({ days: 30 }),
+   });
+
+   let isUser = true;
+   let lastId = messages[0].id;
+   if (isLinear) {
+     const convLength = faker.number.int({ min: 1, max: 25 }) * 2; // must always be even
+
+     for (let i = 0; i < convLength; i++) {
+       lastId = addChildren(
+         {
+           messages,
+           rootMessageId: messages[0].id,
+         },
+         {
+           from: isUser ? "user" : "assistant",
+           content: faker.lorem.sentence({
+             min: 10,
+             max: isUser ? 50 : 200,
+           }),
+           createdAt: faker.date.recent({ days: 30 }),
+           updatedAt: faker.date.recent({ days: 30 }),
+           interrupted: i === convLength - 1 && isInterrupted,
+         },
+         lastId
+       );
+       isUser = !isUser;
+     }
+   } else {
+     const convLength = faker.number.int({ min: 2, max: 200 });
+
+     for (let i = 0; i < convLength; i++) {
+       addChildren(
+         {
+           messages,
+           rootMessageId: messages[0].id,
+         },
+         {
+           from: isUser ? "user" : "assistant",
+           content: faker.lorem.sentence({
+             min: 10,
+             max: isUser ? 50 : 200,
+           }),
+           createdAt: faker.date.recent({ days: 30 }),
+           updatedAt: faker.date.recent({ days: 30 }),
+           interrupted: i === convLength - 1 && isInterrupted,
+         },
+         faker.helpers.arrayElement([
+           messages[0].id,
+           ...messages.filter((m) => m.from === (isUser ? "assistant" : "user")).map((m) => m.id),
+         ])
+       );
+
+       isUser = !isUser;
+     }
+   }
+   return messages;
+ }
+
+ async function seed() {
+   console.log("Seeding...");
+   const modelIds = models.map((model) => model.id);
+
+   if (flags.includes("reset")) {
+     console.log("Starting reset of DB");
+     await collections.users.deleteMany({});
+     await collections.settings.deleteMany({});
+     await collections.assistants.deleteMany({});
+     await collections.conversations.deleteMany({});
+     console.log("Reset done");
+   }
+
+   if (flags.includes("users") || flags.includes("all")) {
+     console.log("Creating 100 new users");
+     const newUsers: User[] = Array.from({ length: 100 }, () => ({
+       _id: new ObjectId(),
+       createdAt: faker.date.recent({ days: 30 }),
+       updatedAt: faker.date.recent({ days: 30 }),
+       username: faker.internet.userName(),
+       name: faker.person.fullName(),
+       hfUserId: faker.string.alphanumeric(24),
+       avatarUrl: faker.image.avatar(),
+     }));
+
+     await collections.users.insertMany(newUsers);
+     console.log("Done creating users.");
+   }
+
+   const users = await collections.users.find().toArray();
+   if (flags.includes("settings") || flags.includes("all")) {
+     console.log("Updating settings for all users");
+     users.forEach(async (user) => {
+       const settings: Settings = {
+         userId: user._id,
+         shareConversationsWithModelAuthors: faker.datatype.boolean(0.25),
+         hideEmojiOnSidebar: faker.datatype.boolean(0.25),
+         ethicsModalAcceptedAt: faker.date.recent({ days: 30 }),
+         activeModel: faker.helpers.arrayElement(modelIds),
+         createdAt: faker.date.recent({ days: 30 }),
+         updatedAt: faker.date.recent({ days: 30 }),
+         customPrompts: {},
+         assistants: [],
+       };
+       await collections.settings.updateOne(
+         { userId: user._id },
+         { $set: { ...settings } },
+         { upsert: true }
+       );
+     });
+     console.log("Done updating settings.");
+   }
+
+   if (flags.includes("assistants") || flags.includes("all")) {
+     console.log("Creating assistants for all users");
+     await Promise.all(
+       users.map(async (user) => {
+         const name = faker.animal.insect();
+         const assistants = faker.helpers.multiple<Assistant>(
+           () => ({
+             _id: new ObjectId(),
+             name,
+             createdById: user._id,
+             createdByName: user.username,
+             createdAt: faker.date.recent({ days: 30 }),
+             updatedAt: faker.date.recent({ days: 30 }),
+             userCount: faker.number.int({ min: 1, max: 100000 }),
+             featured: faker.datatype.boolean(0.25),
+             modelId: faker.helpers.arrayElement(modelIds),
+             description: faker.lorem.sentence(),
+             preprompt: faker.hacker.phrase(),
+             exampleInputs: faker.helpers.multiple(() => faker.lorem.sentence(), {
+               count: faker.number.int({ min: 0, max: 4 }),
+             }),
+             searchTokens: generateSearchTokens(name),
+             last24HoursCount: faker.number.int({ min: 0, max: 1000 }),
+           }),
+           { count: faker.number.int({ min: 3, max: 10 }) }
+         );
+         await collections.assistants.insertMany(assistants);
+         await collections.settings.updateOne(
+           { userId: user._id },
+           { $set: { assistants: assistants.map((a) => a._id.toString()) } },
+           { upsert: true }
+         );
+       })
+     );
+     console.log("Done creating assistants.");
+   }
+
+   if (flags.includes("conversations") || flags.includes("all")) {
+     console.log("Creating conversations for all users");
+     await Promise.all(
+       users.map(async (user) => {
+         const conversations = faker.helpers.multiple(
+           async () => {
+             const settings = await collections.settings.findOne<Settings>({ userId: user._id });
+
+             const assistantId =
+               settings?.assistants && settings.assistants.length > 0 && faker.datatype.boolean(0.1)
+                 ? faker.helpers.arrayElement<ObjectId>(settings.assistants)
+                 : undefined;
+
+             const preprompt =
+               (assistantId
+                 ? await collections.assistants
+                     .findOne({ _id: assistantId })
+                     .then((assistant: Assistant) => assistant?.preprompt ?? "")
+                 : faker.helpers.maybe(() => faker.hacker.phrase(), { probability: 0.5 })) ?? "";
+
+             const messages = await generateMessages(preprompt);
+
+             const conv = {
+               _id: new ObjectId(),
+               userId: user._id,
+               assistantId,
+               preprompt,
+               createdAt: faker.date.recent({ days: 145 }),
+               updatedAt: faker.date.recent({ days: 145 }),
+               model: faker.helpers.arrayElement(modelIds),
+               title: faker.internet.emoji() + " " + faker.hacker.phrase(),
+               embeddingModel: defaultEmbeddingModel.id,
+               messages,
+               rootMessageId: messages[0].id,
+             } satisfies Conversation;
+
+             return conv;
+           },
+           { count: faker.number.int({ min: 10, max: 200 }) }
+         );
+
+         await collections.conversations.insertMany(await Promise.all(conversations));
+       })
+     );
+     console.log("Done creating conversations.");
+   }
+ }
+
+ // run seed
+ (async () => {
+   try {
+     rl.question(
+       "You're about to run a seeding script on the following MONGODB_URL: \x1b[31m" +
+         env.MONGODB_URL +
+         "\x1b[0m\n\n With the following flags: \x1b[31m" +
+         flags.join("\x1b[0m , \x1b[31m") +
+         "\x1b[0m\n \n\n Are you sure you want to continue? (yes/no): ",
+       async (confirm) => {
+         if (confirm !== "yes") {
+           console.log("Not 'yes', exiting.");
+           rl.close();
+           process.exit(0);
+         }
+         console.log("Starting seeding...");
+         await seed();
+         console.log("Seeding done.");
+         rl.close();
+       }
+     );
+   } catch (e) {
+     console.error(e);
+     process.exit(1);
+   }
+ })();
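
Note: the seeding script above relies on addChildren from src/lib/utils/tree/addChildren.ts, which is not included in this excerpt. The following is only a minimal sketch of the contract the script appears to assume (append one message under a given parent, link it into the tree, and return the new message id); the field names beyond those used in populate.ts are assumptions, not the actual implementation.

type TreeMessage = {
  id: string;
  from: "system" | "user" | "assistant";
  content: string;
  ancestors?: string[];
  children?: string[];
  createdAt: Date;
  updatedAt: Date;
  interrupted?: boolean;
};

// Sketch only: append `message` as a child of `parentId` and return the new message's id.
function addChildrenSketch(
  conv: { messages: TreeMessage[]; rootMessageId?: string },
  message: Omit<TreeMessage, "id" | "ancestors" | "children">,
  parentId: string
): string {
  const parent = conv.messages.find((m) => m.id === parentId);
  if (!parent) throw new Error(`Parent message ${parentId} not found`);

  const child: TreeMessage = {
    ...message,
    id: crypto.randomUUID(),
    ancestors: [...(parent.ancestors ?? []), parent.id],
    children: [],
  };
  parent.children = [...(parent.children ?? []), child.id];
  conv.messages.push(child);
  return child.id;
}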
scripts/updateLocalEnv.ts ADDED
@@ -0,0 +1,20 @@
+ import fs from "fs";
+
+ const SECRET_CONFIG = fs.existsSync(".env.SECRET_CONFIG")
+   ? fs.readFileSync(".env.SECRET_CONFIG", "utf8")
+   : process.env.SECRET_CONFIG;
+
+ if (!SECRET_CONFIG) {
+   throw new Error(
+     "SECRET_CONFIG is not defined. Please provide it either in a file or as an environment variable."
+   );
+ }
+
+ // Read the content of the file .env.template
+ const PUBLIC_CONFIG = fs.readFileSync(".env.template", "utf8");
+
+ // Append the content of the env variable SECRET_CONFIG after the public config
+ const full_config = `${PUBLIC_CONFIG}\n${SECRET_CONFIG}`;
+
+ // Write full_config to .env.local
+ fs.writeFileSync(".env.local", full_config);
scripts/updateProdEnv.ts ADDED
@@ -0,0 +1,41 @@
+ import fs from "fs";
+
+ const HF_DEPLOYMENT_TOKEN = process.env.HF_DEPLOYMENT_TOKEN; // token used for pushing to hub
+
+ const SERPER_API_KEY = process.env.SERPER_API_KEY;
+ const OPENID_CONFIG = process.env.OPENID_CONFIG;
+ const MONGODB_URL = process.env.MONGODB_URL;
+ const HF_TOKEN = process.env.HF_TOKEN ?? process.env.HF_ACCESS_TOKEN; // token used for API requests in prod
+ const WEBHOOK_URL_REPORT_ASSISTANT = process.env.WEBHOOK_URL_REPORT_ASSISTANT; // slack webhook url used to get "report assistant" events
+ const ADMIN_API_SECRET = process.env.ADMIN_API_SECRET;
+ const USAGE_LIMITS = process.env.USAGE_LIMITS;
+ const MESSAGES_BEFORE_LOGIN = process.env.MESSAGES_BEFORE_LOGIN;
+
+ // Read the content of the file .env.template
+ const PUBLIC_CONFIG = fs.readFileSync(".env.template", "utf8");
+
+ // Append the secret values after the public config
+ const full_config = `${PUBLIC_CONFIG}
+ MONGODB_URL=${MONGODB_URL}
+ OPENID_CONFIG=${OPENID_CONFIG}
+ SERPER_API_KEY=${SERPER_API_KEY}
+ HF_TOKEN=${HF_TOKEN}
+ WEBHOOK_URL_REPORT_ASSISTANT=${WEBHOOK_URL_REPORT_ASSISTANT}
+ ADMIN_API_SECRET=${ADMIN_API_SECRET}
+ USAGE_LIMITS=${USAGE_LIMITS}
+ MESSAGES_BEFORE_LOGIN=${MESSAGES_BEFORE_LOGIN}
+ `;
+
+ // Make an HTTP POST request to add the space secrets
+ fetch(`https://huggingface.co/api/spaces/huggingchat/chat-ui/secrets`, {
+   method: "POST",
+   body: JSON.stringify({
+     key: "DOTENV_LOCAL",
+     value: full_config,
+     description: `Env variable for HuggingChat. Last updated ${new Date().toISOString()}`,
+   }),
+   headers: {
+     Authorization: `Bearer ${HF_DEPLOYMENT_TOKEN}`,
+     "Content-Type": "application/json",
+   },
+ });
src/ambient.d.ts ADDED
@@ -0,0 +1,4 @@
+ declare module "*.ttf" {
+   const value: ArrayBuffer;
+   export default value;
+ }
src/app.d.ts ADDED
@@ -0,0 +1,25 @@
+ /// <reference types="@sveltejs/kit" />
+ /// <reference types="unplugin-icons/types/svelte" />
+
+ import type { User } from "$lib/types/User";
+
+ // See https://kit.svelte.dev/docs/types#app
+ // for information about these interfaces
+ declare global {
+   namespace App {
+     // interface Error {}
+     interface Locals {
+       sessionId: string;
+       user?: User;
+     }
+
+     interface Error {
+       message: string;
+       errorId?: ReturnType<typeof crypto.randomUUID>;
+     }
+     // interface PageData {}
+     // interface Platform {}
+   }
+ }
+
+ export {};
src/app.html ADDED
@@ -0,0 +1,47 @@
+ <!DOCTYPE html>
+ <html lang="en" class="h-full">
+   <head>
+     <meta charset="utf-8" />
+     <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no" />
+     <meta name="theme-color" content="rgb(249, 250, 251)" />
+     <script>
+       if (
+         localStorage.theme === "dark" ||
+         (!("theme" in localStorage) && window.matchMedia("(prefers-color-scheme: dark)").matches)
+       ) {
+         document.documentElement.classList.add("dark");
+         document
+           .querySelector('meta[name="theme-color"]')
+           .setAttribute("content", "rgb(26, 36, 50)");
+       }
+
+       // For some reason, Sveltekit doesn't let us load env variables from .env here, so we load it from hooks.server.ts
+       window.gaId = "%gaId%";
+     </script>
+     %sveltekit.head%
+   </head>
+   <body data-sveltekit-preload-data="hover" class="h-full dark:bg-gray-900">
+     <div id="app" class="contents h-full">%sveltekit.body%</div>
+
+     <!-- Google Tag Manager -->
+     <script>
+       if (window.gaId) {
+         const script = document.createElement("script");
+         script.src = "https://www.googletagmanager.com/gtag/js?id=" + window.gaId;
+         script.async = true;
+         document.head.appendChild(script);
+
+         window.dataLayer = window.dataLayer || [];
+         function gtag() {
+           dataLayer.push(arguments);
+         }
+         gtag("js", new Date());
+         /// ^ See https://developers.google.com/tag-platform/gtagjs/install
+         gtag("config", window.gaId);
+         gtag("consent", "default", { ad_storage: "denied", analytics_storage: "denied" });
+         /// ^ See https://developers.google.com/tag-platform/gtagjs/reference#consent
+         /// TODO: ask the user for their consent and update this with gtag('consent', 'update')
+       }
+     </script>
+   </body>
+ </html>
src/hooks.server.ts ADDED
@@ -0,0 +1,210 @@
+ import { env } from "$env/dynamic/private";
+ import { env as envPublic } from "$env/dynamic/public";
+ import type { Handle, HandleServerError } from "@sveltejs/kit";
+ import { collections } from "$lib/server/database";
+ import { base } from "$app/paths";
+ import { findUser, refreshSessionCookie, requiresUser } from "$lib/server/auth";
+ import { ERROR_MESSAGES } from "$lib/stores/errors";
+ import { sha256 } from "$lib/utils/sha256";
+ import { addWeeks } from "date-fns";
+ import { checkAndRunMigrations } from "$lib/migrations/migrations";
+ import { building } from "$app/environment";
+ import { refreshAssistantsCounts } from "$lib/assistantStats/refresh-assistants-counts";
+ import { logger } from "$lib/server/logger";
+ import { AbortedGenerations } from "$lib/server/abortedGenerations";
+ import { MetricsServer } from "$lib/server/metrics";
+
+ // TODO: move this code on a started server hook, instead of using a "building" flag
+ if (!building) {
+   await checkAndRunMigrations();
+   if (env.ENABLE_ASSISTANTS) {
+     refreshAssistantsCounts();
+   }
+
+   // Init metrics server
+   MetricsServer.getInstance();
+
+   // Init AbortedGenerations refresh process
+   AbortedGenerations.getInstance();
+ }
+
+ export const handleError: HandleServerError = async ({ error, event }) => {
+   // handle 404
+
+   if (building) {
+     throw error;
+   }
+
+   if (event.route.id === null) {
+     return {
+       message: `Page ${event.url.pathname} not found`,
+     };
+   }
+
+   const errorId = crypto.randomUUID();
+
+   logger.error({
+     locals: event.locals,
+     url: event.request.url,
+     params: event.params,
+     request: event.request,
+     error,
+     errorId,
+   });
+
+   return {
+     message: "An error occurred",
+     errorId,
+   };
+ };
+
+ export const handle: Handle = async ({ event, resolve }) => {
+   logger.debug({
+     locals: event.locals,
+     url: event.url.pathname,
+     params: event.params,
+     request: event.request,
+   });
+
+   if (event.url.pathname.startsWith(`${base}/api/`) && env.EXPOSE_API !== "true") {
+     return new Response("API is disabled", { status: 403 });
+   }
+
+   function errorResponse(status: number, message: string) {
+     const sendJson =
+       event.request.headers.get("accept")?.includes("application/json") ||
+       event.request.headers.get("content-type")?.includes("application/json");
+     return new Response(sendJson ? JSON.stringify({ error: message }) : message, {
+       status,
+       headers: {
+         "content-type": sendJson ? "application/json" : "text/plain",
+       },
+     });
+   }
+
+   if (event.url.pathname.startsWith(`${base}/admin/`) || event.url.pathname === `${base}/admin`) {
+     const ADMIN_SECRET = env.ADMIN_API_SECRET || env.PARQUET_EXPORT_SECRET;
+
+     if (!ADMIN_SECRET) {
+       return errorResponse(500, "Admin API is not configured");
+     }
+
+     if (event.request.headers.get("Authorization") !== `Bearer ${ADMIN_SECRET}`) {
+       return errorResponse(401, "Unauthorized");
+     }
+   }
+
+   const token = event.cookies.get(env.COOKIE_NAME);
+
+   let secretSessionId: string;
+   let sessionId: string;
+
+   if (token) {
+     secretSessionId = token;
+     sessionId = await sha256(token);
+
+     const user = await findUser(sessionId);
+
+     if (user) {
+       event.locals.user = user;
+     }
+   } else {
+     // if the user doesn't have any cookie, we generate one for them
+     secretSessionId = crypto.randomUUID();
+     sessionId = await sha256(secretSessionId);
+
+     if (await collections.sessions.findOne({ sessionId })) {
+       return errorResponse(500, "Session ID collision");
+     }
+   }
+
+   event.locals.sessionId = sessionId;
+
+   // CSRF protection
+   const requestContentType = event.request.headers.get("content-type")?.split(";")[0] ?? "";
+   /** https://developer.mozilla.org/en-US/docs/Web/HTML/Element/form#attr-enctype */
+   const nativeFormContentTypes = [
+     "multipart/form-data",
+     "application/x-www-form-urlencoded",
+     "text/plain",
+   ];
+
+   if (event.request.method === "POST") {
+     refreshSessionCookie(event.cookies, event.locals.sessionId);
+
+     if (nativeFormContentTypes.includes(requestContentType)) {
+       const origin = event.request.headers.get("origin");
+
+       if (!origin) {
+         return errorResponse(403, "Non-JSON form requests need to have an origin");
+       }
+
+       const validOrigins = [
+         new URL(event.request.url).host,
+         ...(envPublic.PUBLIC_ORIGIN ? [new URL(envPublic.PUBLIC_ORIGIN).host] : []),
+       ];
+
+       if (!validOrigins.includes(new URL(origin).host)) {
+         return errorResponse(403, "Invalid referer for POST request");
+       }
+     }
+   }
+
+   if (event.request.method === "POST") {
+     // if the request is a POST request we refresh the cookie
+     refreshSessionCookie(event.cookies, secretSessionId);
+
+     await collections.sessions.updateOne(
+       { sessionId },
+       { $set: { updatedAt: new Date(), expiresAt: addWeeks(new Date(), 2) } }
+     );
+   }
+
+   if (
+     !event.url.pathname.startsWith(`${base}/login`) &&
+     !event.url.pathname.startsWith(`${base}/admin`) &&
+     !["GET", "OPTIONS", "HEAD"].includes(event.request.method)
+   ) {
+     if (
+       !event.locals.user &&
+       requiresUser &&
+       !((env.MESSAGES_BEFORE_LOGIN ? parseInt(env.MESSAGES_BEFORE_LOGIN) : 0) > 0)
+     ) {
+       return errorResponse(401, ERROR_MESSAGES.authOnly);
+     }
+
+     // if login is not required, the call is not from /settings, and the ethics modal is enabled via PUBLIC_APP_DISCLAIMER,
+     // we check if the user has accepted the ethics modal first.
+     // If login is required, `ethicsModalAcceptedAt` is already true at this point, so do not pass this condition. This saves a DB call.
+     if (
+       !requiresUser &&
+       !event.url.pathname.startsWith(`${base}/settings`) &&
+       !!envPublic.PUBLIC_APP_DISCLAIMER
+     ) {
+       const hasAcceptedEthicsModal = await collections.settings.countDocuments({
+         sessionId: event.locals.sessionId,
+         ethicsModalAcceptedAt: { $exists: true },
+       });
+
+       if (!hasAcceptedEthicsModal) {
+         return errorResponse(405, "You need to accept the welcome modal first");
+       }
+     }
+   }
+
+   let replaced = false;
+
+   const response = await resolve(event, {
+     transformPageChunk: (chunk) => {
+       // For some reason, Sveltekit doesn't let us load env variables from .env in the app.html template
+       if (replaced || !chunk.html.includes("%gaId%")) {
+         return chunk.html;
+       }
+       replaced = true;
+
+       return chunk.html.replace("%gaId%", envPublic.PUBLIC_GOOGLE_ANALYTICS_ID);
+     },
+   });
+
+   return response;
+ };
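
Note on the admin guard above: any request under /admin must carry a Bearer token matching ADMIN_API_SECRET (or PARQUET_EXPORT_SECRET). A hypothetical caller might look like the sketch below; the "/admin/stats" path and dev-server URL are illustrations only, the auth scheme is what the hook enforces.

// Hypothetical admin call; only the Authorization header format comes from hooks.server.ts.
const res = await fetch("http://localhost:5173/admin/stats", {
  method: "GET",
  headers: { Authorization: `Bearer ${process.env.ADMIN_API_SECRET}` },
});
if (res.status === 401) {
  console.error("Rejected: token does not match ADMIN_API_SECRET");
}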
src/lib/actions/clickOutside.ts ADDED
@@ -0,0 +1,18 @@
+ export function clickOutside(element: HTMLDialogElement, callbackFunction: () => void) {
+   function onClick(event: MouseEvent) {
+     if (!element.contains(event.target as Node)) {
+       callbackFunction();
+     }
+   }
+
+   document.body.addEventListener("click", onClick);
+
+   return {
+     update(newCallbackFunction: () => void) {
+       callbackFunction = newCallbackFunction;
+     },
+     destroy() {
+       document.body.removeEventListener("click", onClick);
+     },
+   };
+ }
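
A typical way to attach this action from a component (hypothetical usage, not part of the changeset):

<script lang="ts">
  import { clickOutside } from "$lib/actions/clickOutside";
  let open = true;
</script>

{#if open}
  <!-- the callback runs whenever a click lands outside the dialog -->
  <dialog open use:clickOutside={() => (open = false)}>Dialog content</dialog>
{/if}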
src/lib/actions/snapScrollToBottom.ts ADDED
@@ -0,0 +1,54 @@
+ import { navigating } from "$app/stores";
+ import { tick } from "svelte";
+ import { get } from "svelte/store";
+
+ const detachedOffset = 10;
+
+ /**
+  * @param node element to snap scroll to bottom
+  * @param dependency pass in a dependency to update scroll on changes.
+  */
+ export const snapScrollToBottom = (node: HTMLElement, dependency: unknown) => {
+   let prevScrollValue = node.scrollTop;
+   let isDetached = false;
+
+   const handleScroll = () => {
+     // if user scrolled up, we detach
+     if (node.scrollTop < prevScrollValue) {
+       isDetached = true;
+     }
+
+     // if user scrolled back to within 10px of bottom, we reattach
+     if (node.scrollTop - (node.scrollHeight - node.clientHeight) >= -detachedOffset) {
+       isDetached = false;
+     }
+
+     prevScrollValue = node.scrollTop;
+   };
+
+   const updateScroll = async (_options: { force?: boolean } = {}) => {
+     const defaultOptions = { force: false };
+     const options = { ...defaultOptions, ..._options };
+     const { force } = options;
+
+     if (!force && isDetached && !get(navigating)) return;
+
+     // wait for next tick to ensure that the DOM is updated
+     await tick();
+
+     node.scrollTo({ top: node.scrollHeight });
+   };
+
+   node.addEventListener("scroll", handleScroll);
+
+   if (dependency) {
+     updateScroll({ force: true });
+   }
+
+   return {
+     update: updateScroll,
+     destroy: () => {
+       node.removeEventListener("scroll", handleScroll);
+     },
+   };
+ };
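
Hypothetical usage of the action above: passing the messages array as the dependency makes Svelte call the action's update (and thus re-snap to the bottom) whenever it changes, unless the user has scrolled away from the bottom.

<script lang="ts">
  import { snapScrollToBottom } from "$lib/actions/snapScrollToBottom";
  export let messages: unknown[] = [];
</script>

<!-- scroll container stays pinned to the latest message while new ones stream in -->
<div class="overflow-y-auto" use:snapScrollToBottom={messages}>
  {#each messages as message}
    <p>{JSON.stringify(message)}</p>
  {/each}
</div>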
src/lib/assistantStats/refresh-assistants-counts.ts ADDED
@@ -0,0 +1,90 @@
+ import { Database } from "$lib/server/database";
+ import { acquireLock, refreshLock } from "$lib/migrations/lock";
+ import type { ObjectId } from "mongodb";
+ import { subDays } from "date-fns";
+ import { logger } from "$lib/server/logger";
+
+ const LOCK_KEY = "assistants.count";
+
+ let hasLock = false;
+ let lockId: ObjectId | null = null;
+
+ async function refreshAssistantsCountsHelper() {
+   if (!hasLock) {
+     return;
+   }
+
+   try {
+     await Database.getInstance()
+       .getClient()
+       .withSession((session) =>
+         session.withTransaction(async () => {
+           await Database.getInstance()
+             .getCollections()
+             .assistants.aggregate([
+               { $project: { _id: 1 } },
+               { $set: { last24HoursCount: 0 } },
+               {
+                 $unionWith: {
+                   coll: "assistants.stats",
+                   pipeline: [
+                     {
+                       $match: { "date.at": { $gte: subDays(new Date(), 1) }, "date.span": "hour" },
+                     },
+                     {
+                       $group: {
+                         _id: "$assistantId",
+                         last24HoursCount: { $sum: "$count" },
+                       },
+                     },
+                   ],
+                 },
+               },
+               {
+                 $group: {
+                   _id: "$_id",
+                   last24HoursCount: { $sum: "$last24HoursCount" },
+                 },
+               },
+               {
+                 $merge: {
+                   into: "assistants",
+                   on: "_id",
+                   whenMatched: "merge",
+                   whenNotMatched: "discard",
+                 },
+               },
+             ])
+             .next();
+         })
+       );
+   } catch (e) {
+     logger.error("Refresh assistants counter failed!");
+     logger.error(e);
+   }
+ }
+
+ async function maintainLock() {
+   if (hasLock && lockId) {
+     hasLock = await refreshLock(LOCK_KEY, lockId);
+
+     if (!hasLock) {
+       lockId = null;
+     }
+   } else if (!hasLock) {
+     lockId = (await acquireLock(LOCK_KEY)) || null;
+     hasLock = !!lockId;
+   }
+
+   setTimeout(maintainLock, 10_000);
+ }
+
+ export function refreshAssistantsCounts() {
+   const ONE_HOUR_MS = 3_600_000;
+
+   maintainLock().then(() => {
+     refreshAssistantsCountsHelper();
+
+     setInterval(refreshAssistantsCountsHelper, ONE_HOUR_MS);
+   });
+ }
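
The lock helpers imported from $lib/migrations/lock are not part of this excerpt; judging only from how they are called above, their signatures are roughly the following. This is an assumption for orientation, not the actual declarations.

import type { ObjectId } from "mongodb";

// Assumed contracts: acquireLock returns a lock id when the key is free (falsy otherwise),
// refreshLock extends an existing hold and reports whether the lock is still owned.
declare function acquireLock(key: string): Promise<ObjectId | false>;
declare function refreshLock(key: string, lockId: ObjectId): Promise<boolean>;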