PeterPinetree committed
Commit 93d579e · Parent(s): 8e610fc

Deploy to HF Spaces

.gitignore CHANGED
@@ -32,6 +32,7 @@ yarn-error.log*
 
 # env files (can opt-in for committing if needed)
 .env*
+.env.example
 
 # vercel
 .vercel
.vscode/tasks.json ADDED
@@ -0,0 +1,16 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "Install and Build",
+      "type": "shell",
+      "command": "npm ci && npm run build",
+      "args": [],
+      "isBackground": false,
+      "problemMatcher": [
+        "$tsc"
+      ],
+      "group": "build"
+    }
+  ]
+}
Dockerfile CHANGED
@@ -1,19 +1,22 @@
 FROM node:20-alpine
-USER root
 
+# Ensure non-root user for build/run
 USER 1000
 WORKDIR /usr/src/app
-# Copy package.json and package-lock.json to the container
+
+# Install deps with ci for reproducibility and layer caching
 COPY --chown=1000 package.json package-lock.json ./
+RUN npm ci
 
-# Copy the rest of the application files to the container
+# Copy the rest of the application
 COPY --chown=1000 . .
 
-RUN npm install
+# Build the app
 RUN npm run build
 
-# Expose the application port (assuming your app runs on port 3000)
+# Expose default Next.js port (informational)
 EXPOSE 3000
 
-# Start the application
-CMD ["npm", "start"]
+# HF Spaces provides $PORT. Start Next.js on that port and bind to all interfaces.
+ENV HOST=0.0.0.0
+CMD ["sh", "-c", "npm start -- -p ${PORT:-3000}"]
app/(public)/page.tsx CHANGED
@@ -1,4 +1,3 @@
-import { AskAi } from "@/components/space/ask-ai";
 import { redirect } from "next/navigation";
 export default function Home() {
   redirect("/projects/new");
@@ -14,9 +13,6 @@ export default function Home() {
       <p className="text-2xl text-neutral-300/80 mt-4 text-center max-w-2xl">
         Vibe Coding has never been so easy.
       </p>
-      <div className="mt-14 max-w-2xl w-full mx-auto">
-        <AskAi />
-      </div>
       <div className="absolute inset-0 pointer-events-none -z-[1]">
         <div className="w-full h-full bg-gradient-to-r from-purple-500 to-pink-500 opacity-10 blur-3xl rounded-full" />
         <div className="w-2/3 h-3/4 bg-gradient-to-r from-blue-500 to-teal-500 opacity-24 blur-3xl absolute -top-20 right-10 transform rotate-12" />
app/api/ask-ai/route.ts CHANGED
@@ -1,10 +1,12 @@
-/* eslint-disable @typescript-eslint/no-explicit-any */
+// Avoid depending on @types/node in this project: declare minimal process type
+declare const process: { env: Record<string, string | undefined> };
 import type { NextRequest } from "next/server";
 import { NextResponse } from "next/server";
 import { headers } from "next/headers";
 import { InferenceClient } from "@huggingface/inference";
 
 import { MODELS, PROVIDERS } from "@/lib/providers";
+import { DEFAULT_MODEL as DEFAULT_MODEL_FLAG, DEFAULT_PROVIDER as DEFAULT_PROVIDER_FLAG, EMBED_MODE, MAX_REQUESTS_PER_IP_ENV, MAX_OUTPUT_TOKENS } from "@/lib/flags";
 import {
   DIVIDER,
   FOLLOW_UP_SYSTEM_PROMPT,
@@ -27,7 +29,14 @@ export async function POST(request: NextRequest) {
   const userToken = request.cookies.get(MY_TOKEN_KEY())?.value;
 
   const body = await request.json();
-  const { prompt, provider, model, redesignMarkdown, previousPrompts, pages } = body;
+  const { prompt, redesignMarkdown, previousPrompts, pages } = body;
+  let { provider, model } = body;
+
+  // In embed mode, lock down model/provider to configured defaults (e.g., Kimi K2 on Groq)
+  if (EMBED_MODE) {
+    provider = DEFAULT_PROVIDER_FLAG as string;
+    model = (DEFAULT_MODEL_FLAG as string) ?? model;
+  }
 
   if (!model || (!prompt && !redesignMarkdown)) {
     return NextResponse.json(
@@ -76,7 +85,10 @@ export async function POST(request: NextRequest) {
 
   if (!token) {
     ipAddresses.set(ip, (ipAddresses.get(ip) || 0) + 1);
-    if (ipAddresses.get(ip) > MAX_REQUESTS_PER_IP) {
+    const RATE_LIMIT = Number.isFinite(MAX_REQUESTS_PER_IP_ENV)
+      ? MAX_REQUESTS_PER_IP_ENV
+      : MAX_REQUESTS_PER_IP;
+    if (ipAddresses.get(ip) > RATE_LIMIT) {
       return NextResponse.json(
         {
           ok: false,
@@ -121,14 +133,17 @@ export async function POST(request: NextRequest) {
   // let completeResponse = "";
   try {
     const client = new InferenceClient(token);
+    const SYSTEM_PROMPT = EMBED_MODE
+      ? `${INITIAL_SYSTEM_PROMPT}\n\nConstraints for demo mode: Keep outputs concise and efficient. Prefer tiny, single-purpose components. Avoid heavy libraries; use minimal Tailwind. Keep HTML/CSS small. Limit total output to roughly ${Math.min(MAX_OUTPUT_TOKENS, 1200)} tokens.`
+      : INITIAL_SYSTEM_PROMPT;
     const chatCompletion = client.chatCompletionStream(
       {
         model: selectedModel.value,
-        provider: selectedProvider.id as any,
+        provider: selectedProvider.id as import("@/lib/providers").ProviderId,
        messages: [
          {
            role: "system",
-            content: INITIAL_SYSTEM_PROMPT,
+            content: SYSTEM_PROMPT,
          },
          ...(pages?.length > 1 ? [{
            role: "assistant",
@@ -141,7 +156,7 @@ export async function POST(request: NextRequest) {
              : rewrittenPrompt,
          },
        ],
-        max_tokens: selectedProvider.max_tokens,
+        max_tokens: Math.min(selectedProvider.max_tokens, MAX_OUTPUT_TOKENS),
      },
      billTo ? { billTo } : {}
    );
@@ -157,14 +172,16 @@ export async function POST(request: NextRequest) {
          await writer.write(encoder.encode(chunk));
        }
      }
-    } catch (error: any) {
-      if (error.message?.includes("exceeded your monthly included credits")) {
+    } catch (error: unknown) {
+      const message =
+        (error as Error)?.message ?? (typeof error === "string" ? error : "");
+      if (message?.includes("exceeded your monthly included credits")) {
        await writer.write(
          encoder.encode(
            JSON.stringify({
              ok: false,
              openProModal: true,
-              message: error.message,
+              message,
            })
          )
        );
@@ -174,7 +191,7 @@ export async function POST(request: NextRequest) {
          JSON.stringify({
            ok: false,
            message:
-              error.message ||
+              message ||
              "An error occurred while processing your request.",
          })
        )
@@ -186,13 +203,14 @@ export async function POST(request: NextRequest) {
    })();
 
    return response;
-  } catch (error: any) {
+  } catch (error: unknown) {
    return NextResponse.json(
      {
        ok: false,
        openSelectProvider: true,
        message:
-          error?.message || "An error occurred while processing your request.",
+          (error as Error)?.message ||
+          (typeof error === "string" ? error : "An error occurred while processing your request."),
      },
      { status: 500 }
    );
@@ -204,8 +222,14 @@ export async function PUT(request: NextRequest) {
   const userToken = request.cookies.get(MY_TOKEN_KEY())?.value;
 
   const body = await request.json();
-  const { prompt, previousPrompts, provider, selectedElementHtml, model, pages, files, } =
-    body;
+  const { prompt, previousPrompts, selectedElementHtml, pages, files } = body;
+  let { provider, model } = body;
+
+  // In embed mode, lock down model/provider to configured defaults
+  if (EMBED_MODE) {
+    provider = DEFAULT_PROVIDER_FLAG as string;
+    model = (DEFAULT_MODEL_FLAG as string) ?? model;
+  }
 
   if (!prompt || pages.length === 0) {
     return NextResponse.json(
@@ -242,7 +266,10 @@ export async function PUT(request: NextRequest) {
 
   if (!token) {
     ipAddresses.set(ip, (ipAddresses.get(ip) || 0) + 1);
-    if (ipAddresses.get(ip) > MAX_REQUESTS_PER_IP) {
+    const RATE_LIMIT = Number.isFinite(MAX_REQUESTS_PER_IP_ENV)
+      ? MAX_REQUESTS_PER_IP_ENV
+      : MAX_REQUESTS_PER_IP;
+    if (ipAddresses.get(ip) > RATE_LIMIT) {
       return NextResponse.json(
         {
           ok: false,
@@ -269,11 +296,13 @@ export async function PUT(request: NextRequest) {
     const response = await client.chatCompletion(
       {
         model: selectedModel.value,
-        provider: selectedProvider.id as any,
+        provider: selectedProvider.id as import("@/lib/providers").ProviderId,
        messages: [
          {
            role: "system",
-            content: FOLLOW_UP_SYSTEM_PROMPT,
+            content: EMBED_MODE
+              ? `${FOLLOW_UP_SYSTEM_PROMPT}\n\nConstraints for demo mode: Keep edits minimal and concise. Prefer small diffs and tiny components; avoid large rewrites. Limit output to roughly ${Math.min(MAX_OUTPUT_TOKENS, 1200)} tokens.`
+              : FOLLOW_UP_SYSTEM_PROMPT,
          },
          {
            role: "user",
@@ -297,7 +326,7 @@ export async function PUT(request: NextRequest) {
        ],
        ...(selectedProvider.id !== "sambanova"
          ? {
-              max_tokens: selectedProvider.max_tokens,
+              max_tokens: Math.min(selectedProvider.max_tokens, MAX_OUTPUT_TOKENS),
            }
          : {}),
      },
@@ -486,13 +515,15 @@ export async function PUT(request: NextRequest) {
        { status: 400 }
      );
    }
-  } catch (error: any) {
-    if (error.message?.includes("exceeded your monthly included credits")) {
+  } catch (error: unknown) {
+    const message =
+      (error as Error)?.message ?? (typeof error === "string" ? error : "");
+    if (message?.includes("exceeded your monthly included credits")) {
      return NextResponse.json(
        {
          ok: false,
          openProModal: true,
-          message: error.message,
+          message,
        },
        { status: 402 }
      );
@@ -502,7 +533,7 @@ export async function PUT(request: NextRequest) {
          ok: false,
          openSelectProvider: true,
          message:
-            error.message || "An error occurred while processing your request.",
+            message || "An error occurred while processing your request.",
        },
        { status: 500 }
      );
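
Note on the route changes above: two patterns repeat across POST and PUT, the env-aware rate-limit fallback and the `unknown`-typed catch that narrows to a plain message string. A minimal standalone sketch of both (the helper names and the fallback value here are illustrative, not part of the commit):

```ts
// Sketch only: mirrors the logic added in app/api/ask-ai/route.ts.
// MAX_REQUESTS_PER_IP stands in for the route's existing constant (placeholder value here);
// effectiveRateLimit/errorMessage are hypothetical helper names for illustration.
const MAX_REQUESTS_PER_IP = 2;

export function effectiveRateLimit(envLimit: number): number {
  // Prefer the env-configured limit only when it parsed to a finite number
  // (e.g. Number("abc") is NaN, which fails this check).
  return Number.isFinite(envLimit) ? envLimit : MAX_REQUESTS_PER_IP;
}

export function errorMessage(error: unknown): string {
  // Same narrowing the new catch blocks use: Error.message, then a raw string, then "".
  return (error as Error)?.message ?? (typeof error === "string" ? error : "");
}
```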
app/layout.tsx CHANGED
@@ -10,7 +10,6 @@ import MY_TOKEN_KEY from "@/lib/get-cookie-name";
 import { apiServer } from "@/lib/api";
 import AppContext from "@/components/contexts/app-context";
 import Script from "next/script";
-import IframeDetector from "@/components/iframe-detector";
 
 const inter = Inter({
   variable: "--font-inter-sans",
@@ -101,7 +100,7 @@ export default async function RootLayout({
       <body
         className={`${inter.variable} ${ptSans.variable} antialiased bg-black dark h-[100dvh] overflow-hidden`}
       >
-        <IframeDetector />
+        {/* Iframe embed warning removed; EMBED_MODE controls behavior */}
         <Toaster richColors position="bottom-center" />
         <TanstackProvider>
           <AppContext me={data}>{children}</AppContext>
components/editor/ask-ai/index.tsx CHANGED
@@ -1,5 +1,4 @@
 "use client";
-/* eslint-disable @typescript-eslint/no-explicit-any */
 import { useState, useMemo, useRef } from "react";
 import classNames from "classnames";
 import { toast } from "sonner";
@@ -10,6 +9,7 @@ import { FaStopCircle } from "react-icons/fa";
 import ProModal from "@/components/pro-modal";
 import { Button } from "@/components/ui/button";
 import { MODELS } from "@/lib/providers";
+import { DEFAULT_MODEL, DEFAULT_PROVIDER, EMBED_MODE, HIDE_LOGIN } from "@/lib/flags";
 import { HtmlHistory, Page, Project } from "@/types";
 // import { InviteFriends } from "@/components/invite-friends";
 import { Settings } from "@/components/editor/ask-ai/settings";
@@ -73,8 +73,15 @@ export function AskAI({
 
   const [open, setOpen] = useState(false);
   const [prompt, setPrompt] = useState("");
-  const [provider, setProvider] = useLocalStorage("provider", "auto");
-  const [model, setModel] = useLocalStorage("model", MODELS[0].value);
+  // Defaults: pin to flags so we can run a simple demo (e.g., Kimi K2 on Groq)
+  const [provider, setProvider] = useLocalStorage(
+    "provider",
+    DEFAULT_PROVIDER || "auto"
+  );
+  const [model, setModel] = useLocalStorage(
+    "model",
+    (DEFAULT_MODEL as string) || MODELS[0].value
+  );
   const [openProvider, setOpenProvider] = useState(false);
   const [providerError, setProviderError] = useState("");
   const [openProModal, setOpenProModal] = useState(false);
@@ -373,8 +380,8 @@
               selectedElement
                 ? `Ask DeepSite about ${selectedElement.tagName.toLowerCase()}...`
                 : isFollowUp && (!isSameHtml || pages?.length > 1)
-                ? "Ask DeepSite for edits"
-                : "Ask DeepSite anything..."
+                ? "Ask DeepSite for edits (keep it short)"
+                : "Describe a tiny component or change (1–2 sentences)"
             }
             value={prompt}
             onChange={(e) => setPrompt(e.target.value)}
@@ -384,6 +391,9 @@
              }
            }}
          />
+          <div className="px-4 pb-1 -mt-2 text-[11px] text-neutral-400">
+            Tip: Small prompts → small, fast apps. We cap output length in demo mode.
+          </div>
        </div>
        <div className="flex items-center justify-between gap-2 px-4 pb-3 mt-2">
          <div className="flex-1 flex items-center justify-start gap-1.5">
@@ -434,16 +444,18 @@
            {/* <InviteFriends /> */}
          </div>
          <div className="flex items-center justify-end gap-2">
-            <Settings
-              provider={provider as string}
-              model={model as string}
-              onChange={setProvider}
-              onModelChange={setModel}
-              open={openProvider}
-              error={providerError}
-              isFollowUp={!isSameHtml && isFollowUp}
-              onClose={setOpenProvider}
-            />
+            {!EMBED_MODE && (
+              <Settings
+                provider={provider as string}
+                model={model as string}
+                onChange={setProvider}
+                onModelChange={setModel}
+                open={openProvider}
+                error={providerError}
+                isFollowUp={!isSameHtml && isFollowUp}
+                onClose={setOpenProvider}
+              />
+            )}
            <Button
              size="iconXs"
              disabled={isAiWorking || !prompt.trim()}
@@ -453,7 +465,9 @@
            </Button>
          </div>
        </div>
-        <LoginModal open={open} onClose={() => setOpen(false)} pages={pages} />
+        {!HIDE_LOGIN && (
+          <LoginModal open={open} onClose={() => setOpen(false)} pages={pages} />
+        )}
        <ProModal
          pages={pages}
          open={openProModal}
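
One behavior worth flagging when testing this: `useLocalStorage` from `react-use` only applies the supplied default when the key is not yet in localStorage, so a browser that stored a provider/model before this commit keeps its old choice. A minimal sketch under that assumption (the hook name is hypothetical):

```ts
"use client";
// Sketch, assuming react-use semantics: flag defaults only affect first-time visitors.
import { useLocalStorage } from "react-use";
import { MODELS } from "@/lib/providers";
import { DEFAULT_MODEL, DEFAULT_PROVIDER } from "@/lib/flags";

export function useDemoModelDefaults() {
  // Stored "provider"/"model" values win; otherwise the demo is pinned to the
  // flag defaults (e.g. Kimi K2 on Groq when the env vars are left unset).
  const [provider, setProvider] = useLocalStorage("provider", DEFAULT_PROVIDER || "auto");
  const [model, setModel] = useLocalStorage("model", (DEFAULT_MODEL as string) || MODELS[0].value);
  return { provider, model, setProvider, setModel };
}
```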
components/editor/deploy-button/index.tsx CHANGED
@@ -1,4 +1,3 @@
-/* eslint-disable @typescript-eslint/no-explicit-any */
 import { useState } from "react";
 import { MdSave } from "react-icons/md";
 
components/editor/footer/index.tsx CHANGED
@@ -7,6 +7,7 @@ import { Button } from "@/components/ui/button";
 import { MdAdd } from "react-icons/md";
 import { History } from "@/components/editor/history";
 import { UserMenu } from "@/components/user-menu";
+import { HIDE_DEPLOY, HIDE_LOGIN } from "@/lib/flags";
 import { useUser } from "@/hooks/useUser";
 import Link from "next/link";
 import { useLocalStorage } from "react-use";
@@ -64,7 +65,7 @@ export function Footer({
   return (
     <footer className="border-t bg-slate-200 border-slate-300 dark:bg-neutral-950 dark:border-neutral-800 px-3 py-2 flex items-center justify-between sticky bottom-0 z-20">
       <div className="flex items-center gap-2">
-        {user ? (
+        {!HIDE_LOGIN && user ? (
          user?.isLocalUse ? (
            <>
              <div className="max-w-max bg-amber-500/10 rounded-full px-3 py-1 text-amber-500 border border-amber-500/20 text-sm font-semibold">
@@ -74,14 +75,14 @@
          ) : (
            <UserMenu className="!p-1 !pr-3 !h-auto" />
          )
-        ) : (
+        ) : !HIDE_LOGIN ? (
          <Button size="sm" variant="default" onClick={handleClick}>
            <LogIn className="text-sm" />
            Log In
          </Button>
-        )}
-        {user && !isNew && <p className="text-neutral-700">|</p>}
-        {!isNew && (
+        ) : null}
+        {!HIDE_LOGIN && user && !isNew && <p className="text-neutral-700">|</p>}
+        {!HIDE_DEPLOY && !isNew && (
          <Link href="/projects/new">
            <Button size="sm" variant="secondary">
              <MdAdd className="text-sm" />
@@ -100,6 +101,7 @@
            <a
              href="https://huggingface.co/spaces/victor/deepsite-gallery"
              target="_blank"
+              rel="noopener noreferrer"
            >
              <Button size="sm" variant="ghost">
                <SparkleIcon className="size-3.5" />
@@ -109,6 +111,7 @@
            <a
              target="_blank"
              href="https://huggingface.co/spaces/enzostvs/deepsite/discussions/157"
+              rel="noopener noreferrer"
            >
              <Button size="sm" variant="outline">
                <HelpCircle className="size-3.5" />
@@ -138,6 +141,7 @@
                    "hover:bg-neutral-800": device !== deviceItem.name,
                  }
                )}
+                aria-label={`Switch to ${deviceItem.name} preview`}
                onClick={() => setDevice(deviceItem.name as "desktop" | "mobile")}
              >
                <deviceItem.icon className="text-sm" />
components/editor/index.tsx CHANGED
@@ -27,6 +27,7 @@ import { SaveButton } from "./save-button";
 import { LoadProject } from "../my-projects/load-project";
 import { isTheSameHtml } from "@/lib/compare-html-diff";
 import { ListPages } from "./pages";
+import { HIDE_DEPLOY, HIDE_LOGIN } from "@/lib/flags";
 
 export const AppEditor = ({
   project,
@@ -195,16 +196,22 @@
   return (
     <section className="h-[100dvh] bg-neutral-950 flex flex-col">
       <Header tab={currentTab} onNewTab={setCurrentTab}>
-        <LoadProject
-          onSuccess={(project: Project) => {
-            router.push(`/projects/${project.space_id}`);
-          }}
-        />
+        {!HIDE_LOGIN && (
+          <LoadProject
+            onSuccess={(project: Project) => {
+              router.push(`/projects/${project.space_id}`);
+            }}
+          />
+        )}
        {/* for these buttons pass the whole pages */}
-        {project?._id ? (
-          <SaveButton pages={pages} prompts={prompts} />
-        ) : (
-          <DeployButton pages={pages} prompts={prompts} />
+        {!HIDE_DEPLOY && (
+          <>
+            {project?._id ? (
+              <SaveButton pages={pages} prompts={prompts} />
+            ) : (
+              <DeployButton pages={pages} prompts={prompts} />
+            )}
+          </>
        )}
      </Header>
      <main className="bg-neutral-950 flex-1 max-lg:flex-col flex w-full max-lg:h-[calc(100%-82px)] relative">
lib/flags.ts ADDED
@@ -0,0 +1,36 @@
+// Centralized feature flags and defaults for embed/lite mode
+// Use NEXT_PUBLIC_* for values read in client components
+// Avoid Node type dependency
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+declare const process: any;
+
+export const EMBED_MODE = (process.env.NEXT_PUBLIC_EMBED_MODE ?? "false") === "true";
+export const DISABLE_AUTH = EMBED_MODE || (process.env.NEXT_PUBLIC_DISABLE_AUTH ?? process.env.DISABLE_AUTH ?? "false") === "true";
+
+// Default model/provider for simplified UI
+// Choose a responsive general-purpose coding model from the existing list
+export const DEFAULT_MODEL = process.env.NEXT_PUBLIC_DEFAULT_MODEL
+  || process.env.DEFAULT_MODEL
+  || "moonshotai/Kimi-K2-Instruct-0905"; // fast on Groq if configured
+
+export const DEFAULT_PROVIDER = process.env.NEXT_PUBLIC_DEFAULT_PROVIDER
+  || process.env.DEFAULT_PROVIDER
+  || "groq"; // fallback; will be validated against model/providers list
+
+// Anonymous/IP rate limit for no-login usage
+export const MAX_REQUESTS_PER_IP_ENV = Number(
+  process.env.NEXT_PUBLIC_MAX_REQUESTS_PER_IP
+  || process.env.MAX_REQUESTS_PER_IP
+  || 2
+);
+
+// Hard cap on model output tokens to control cost in demo/embed mode
+export const MAX_OUTPUT_TOKENS = Number(
+  process.env.NEXT_PUBLIC_MAX_OUTPUT_TOKENS
+  || process.env.MAX_OUTPUT_TOKENS
+  || (EMBED_MODE ? 1000 : 2000)
+);
+
+// UI toggles derived from flags
+export const HIDE_LOGIN = DISABLE_AUTH || EMBED_MODE;
+export const HIDE_DEPLOY = EMBED_MODE; // hide deploy/save in embedded demo
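
As a sanity check on the parsing above, this is how the exports resolve for one assumed environment, NEXT_PUBLIC_EMBED_MODE=true with nothing else set (values re-derived inline for illustration; this is not code from the commit):

```ts
// Illustration: lib/flags.ts resolution under an assumed env of { NEXT_PUBLIC_EMBED_MODE: "true" }.
const env: Record<string, string | undefined> = { NEXT_PUBLIC_EMBED_MODE: "true" };

const EMBED_MODE = (env.NEXT_PUBLIC_EMBED_MODE ?? "false") === "true";                        // true
const DISABLE_AUTH =
  EMBED_MODE || (env.NEXT_PUBLIC_DISABLE_AUTH ?? env.DISABLE_AUTH ?? "false") === "true";     // true
const DEFAULT_MODEL =
  env.NEXT_PUBLIC_DEFAULT_MODEL || env.DEFAULT_MODEL || "moonshotai/Kimi-K2-Instruct-0905";   // Kimi K2
const DEFAULT_PROVIDER = env.NEXT_PUBLIC_DEFAULT_PROVIDER || env.DEFAULT_PROVIDER || "groq";  // "groq"
const MAX_REQUESTS_PER_IP_ENV =
  Number(env.NEXT_PUBLIC_MAX_REQUESTS_PER_IP || env.MAX_REQUESTS_PER_IP || 2);                // 2
const MAX_OUTPUT_TOKENS = Number(
  env.NEXT_PUBLIC_MAX_OUTPUT_TOKENS || env.MAX_OUTPUT_TOKENS || (EMBED_MODE ? 1000 : 2000)
);                                                                                            // 1000
const HIDE_LOGIN = DISABLE_AUTH || EMBED_MODE;                                                // true
const HIDE_DEPLOY = EMBED_MODE;                                                               // true

console.log({
  EMBED_MODE, DISABLE_AUTH, DEFAULT_MODEL, DEFAULT_PROVIDER,
  MAX_REQUESTS_PER_IP_ENV, MAX_OUTPUT_TOKENS, HIDE_LOGIN, HIDE_DEPLOY,
});
```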
lib/providers.ts CHANGED
@@ -34,7 +34,9 @@ export const PROVIDERS = {
     max_tokens: 16_384,
     id: "groq",
   },
-};
+} as const;
+
+export type ProviderId = keyof typeof PROVIDERS;
 
 export const MODELS = [
   {
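
The `as const` plus `keyof typeof` change is what lets the API route drop its `as any` casts: provider ids become a closed string-literal union. A reduced sketch of the pattern (the `groq` entry comes from the file above; `another-provider` is a placeholder standing in for the remaining real entries):

```ts
// Reduced sketch of the lib/providers.ts pattern.
const PROVIDERS = {
  groq: { max_tokens: 16_384, id: "groq" },
  "another-provider": { max_tokens: 8_192, id: "another-provider" }, // placeholder entry
} as const;

// `as const` keeps keys and fields as readonly literal types, so this union is exact:
type ProviderId = keyof typeof PROVIDERS; // "groq" | "another-provider"

// route.ts now casts selectedProvider.id to ProviderId instead of `any`;
// an unknown id becomes a compile-time error rather than a silent pass-through.
function maxTokensFor(id: ProviderId): number {
  return PROVIDERS[id].max_tokens;
}
```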
next.config.ts CHANGED
@@ -25,7 +25,11 @@ const nextConfig: NextConfig = {
     return config;
   },
   images: {
-    remotePatterns: [new URL('https://huggingface.co/**')],
+    remotePatterns: [
+      { protocol: 'https', hostname: 'huggingface.co', pathname: '/**' },
+      { protocol: 'https', hostname: 'cdn-lfs.huggingface.co', pathname: '/**' },
+      { protocol: 'https', hostname: 'hf.co', pathname: '/**' },
+    ],
   },
 };
 