Skip to content

Commit 0a0107f

Browse files
Merge pull request #4 from shadowdevcode/codex/final-ui-release-candidate
feat(ui): finalize responsive workspace refresh
2 parents e78df5c + d5e38de commit 0a0107f

File tree

15 files changed

+1020
-530
lines changed

15 files changed

+1020
-530
lines changed

.github/workflows/ci.yml

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
name: CI
2+
3+
on:
4+
pull_request:
5+
push:
6+
branches:
7+
- main
8+
9+
concurrency:
10+
group: ci-${{ github.workflow }}-${{ github.ref }}
11+
cancel-in-progress: true
12+
13+
jobs:
14+
verify-local:
15+
name: verify-local
16+
runs-on: ubuntu-latest
17+
timeout-minutes: 30
18+
19+
steps:
20+
- name: Checkout
21+
uses: actions/checkout@v4
22+
23+
- name: Setup Node.js
24+
uses: actions/setup-node@v4
25+
with:
26+
node-version: 20
27+
cache: npm
28+
29+
- name: Install dependencies
30+
run: npm ci
31+
32+
- name: Run full verification
33+
run: npm run verify:local

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@ coverage/
66
*.log
77
.env*
88
!.env.example
9+
.vercel

.husky/pre-push

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
#!/usr/bin/env sh
2+
3+
npm run verify:local

README.md

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,51 @@ These constraints are enforced in `firestore.rules` and validated by `test/rules
208208
- Local emulator config: `firebase.json`
209209
- Confirm production Firebase Auth domain setup before release (Google provider and authorized domains)
210210

211+
## GitHub-Vercel Sync Workflow
212+
213+
This project uses GitHub as the deployment source of truth.
214+
215+
### Daily flow
216+
1. Create a feature branch from `main`.
217+
2. Commit and push branch changes.
218+
3. Open a pull request.
219+
4. Wait for CI check `verify-local` to pass.
220+
5. Merge PR into `main`.
221+
6. Vercel auto-deploys merged `main` commit to production.
222+
223+
### CI contract
224+
- Workflow file: `.github/workflows/ci.yml`
225+
- Triggers:
226+
- every pull request
227+
- every push to `main`
228+
- Required check name for branch protection: `verify-local`
229+
- CI command chain:
230+
- `npm ci`
231+
- `npm run verify:local`
232+
233+
### Local push gate (Husky)
234+
- Husky install hook is configured via `npm run prepare`.
235+
- Pre-push hook path: `.husky/pre-push`
236+
- Pre-push command: `npm run verify:local`
237+
- If checks fail, push is blocked.
238+
239+
### Required GitHub settings (`main` branch protection)
240+
- Require pull request before merging.
241+
- Require status checks to pass before merging.
242+
- Add required status check: `verify-local`.
243+
- Require branches to be up to date before merging.
244+
245+
### Required Vercel settings
246+
- Git repository connected to this GitHub repo.
247+
- Production branch set to `main`.
248+
- Preview deployments enabled for pull requests.
249+
- `GEMINI_API_KEY` configured for Preview and Production environments.
250+
251+
### Emergency rollback
252+
- Open Vercel dashboard.
253+
- Find the last known-good production deployment.
254+
- Redeploy that deployment to production.
255+
211256
## Troubleshooting
212257

213258
### `GEMINI_API_KEY is not configured for the AI parse endpoint`

api/ai/parse.ts

Lines changed: 128 additions & 76 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,26 @@
1-
import { GoogleGenAI, Type } from '@google/genai';
2-
import { validateAiParseResult } from '../../src/services/aiValidation';
3-
import { AiParseResult, InventoryItem, Language } from '../../src/types';
1+
import { AiParseResult, InventoryPromptItem, Language, validateAiParseResult } from './validation.js';
42

53
type ParseCookVoiceInputRequest = {
64
input: string;
75
inventory: InventoryPromptItem[];
86
lang: Language;
97
};
108

11-
type InventoryPromptItem = Pick<InventoryItem, 'id' | 'name' | 'nameHi'>;
9+
type NodeApiRequest = {
10+
method?: string;
11+
body?: unknown;
12+
};
13+
14+
type NodeApiResponse = {
15+
status: (statusCode: number) => NodeApiResponse;
16+
json: (body: unknown) => void;
17+
};
1218

13-
const AI_MODEL = 'gemini-3-flash-preview';
1419
const AI_ENDPOINT_NAME = 'ai_parse';
1520
const MAX_AI_ATTEMPTS = 3;
1621
const BASE_RETRY_DELAY_MS = 250;
22+
const AI_REQUEST_TIMEOUT_MS = 12000;
23+
const GEMINI_API_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/models';
1724
const EMPTY_AI_RESPONSE_MESSAGE = 'Empty response';
1825

1926
class AiParseRequestError extends Error {
@@ -37,8 +44,8 @@ class AiParseExecutionError extends Error {
3744
}
3845
}
3946

40-
function createJsonResponse(body: unknown, status: number): Response {
41-
return Response.json(body, { status });
47+
function sendJsonResponse(response: NodeApiResponse, body: unknown, status: number): void {
48+
response.status(status).json(body);
4249
}
4350

4451
function getEnvApiKey(): string {
@@ -49,8 +56,8 @@ function getEnvApiKey(): string {
4956
return apiKey;
5057
}
5158

52-
function getAiClient(): GoogleGenAI {
53-
return new GoogleGenAI({ apiKey: getEnvApiKey() });
59+
function getAiModel(): string {
60+
return process.env.GEMINI_MODEL ?? 'gemini-2.5-flash';
5461
}
5562

5663
function isLanguage(value: unknown): value is Language {
@@ -71,11 +78,25 @@ function isInventoryPromptItem(value: unknown): value is InventoryPromptItem {
7178
}
7279

7380
function parseRequestBody(raw: unknown): ParseCookVoiceInputRequest {
74-
if (!raw || typeof raw !== 'object') {
81+
const parsedRaw = (() => {
82+
if (typeof raw !== 'string') {
83+
return raw;
84+
}
85+
86+
try {
87+
return JSON.parse(raw) as unknown;
88+
} catch (error) {
89+
throw new AiParseRequestError('AI parse request body must be valid JSON.', {
90+
cause: error instanceof Error ? error : undefined,
91+
});
92+
}
93+
})();
94+
95+
if (!parsedRaw || typeof parsedRaw !== 'object') {
7596
throw new AiParseRequestError('AI parse request body must be an object.');
7697
}
7798

78-
const candidate = raw as Record<string, unknown>;
99+
const candidate = parsedRaw as Record<string, unknown>;
79100
if (typeof candidate.input !== 'string' || candidate.input.trim().length === 0) {
80101
throw new AiParseRequestError('AI parse request input must be a non-empty string.');
81102
}
@@ -114,42 +135,6 @@ function buildPrompt(input: string, inventory: InventoryPromptItem[], lang: Lang
114135
Return a JSON object matching this schema.`;
115136
}
116137

117-
function createResponseSchema() {
118-
return {
119-
type: Type.OBJECT,
120-
properties: {
121-
understood: { type: Type.BOOLEAN },
122-
message: { type: Type.STRING },
123-
updates: {
124-
type: Type.ARRAY,
125-
items: {
126-
type: Type.OBJECT,
127-
properties: {
128-
itemId: { type: Type.STRING },
129-
newStatus: { type: Type.STRING },
130-
requestedQuantity: { type: Type.STRING },
131-
},
132-
required: ['itemId', 'newStatus'],
133-
},
134-
},
135-
unlistedItems: {
136-
type: Type.ARRAY,
137-
items: {
138-
type: Type.OBJECT,
139-
properties: {
140-
name: { type: Type.STRING },
141-
status: { type: Type.STRING },
142-
category: { type: Type.STRING },
143-
requestedQuantity: { type: Type.STRING },
144-
},
145-
required: ['name', 'status', 'category'],
146-
},
147-
},
148-
},
149-
required: ['understood', 'updates', 'unlistedItems'],
150-
};
151-
}
152-
153138
function getErrorMessage(error: unknown): string {
154139
if (error instanceof Error) {
155140
return error.message;
@@ -179,26 +164,96 @@ async function waitForRetry(delayMs: number): Promise<void> {
179164
});
180165
}
181166

167+
function createTimeoutError(timeoutMs: number): Error {
168+
return new Error(`AI request timed out after ${timeoutMs}ms.`);
169+
}
170+
171+
function buildGeminiEndpoint(model: string, apiKey: string): string {
172+
return `${GEMINI_API_BASE_URL}/${encodeURIComponent(model)}:generateContent?key=${encodeURIComponent(apiKey)}`;
173+
}
174+
175+
function createGeminiRequestBody(prompt: string): Record<string, unknown> {
176+
return {
177+
contents: [
178+
{
179+
role: 'user',
180+
parts: [{ text: prompt }],
181+
},
182+
],
183+
generationConfig: {
184+
responseMimeType: 'application/json',
185+
},
186+
};
187+
}
188+
189+
function parseGeminiText(raw: unknown): string {
190+
if (!raw || typeof raw !== 'object') {
191+
throw new Error('Gemini response body is not an object.');
192+
}
193+
194+
const parsed = raw as {
195+
candidates?: Array<{
196+
content?: {
197+
parts?: Array<{ text?: string }>;
198+
};
199+
}>;
200+
};
201+
202+
const text = parsed.candidates?.[0]?.content?.parts?.[0]?.text;
203+
if (typeof text !== 'string' || text.trim().length === 0) {
204+
throw new Error(EMPTY_AI_RESPONSE_MESSAGE);
205+
}
206+
207+
return text;
208+
}
209+
210+
async function requestGeminiJson(prompt: string, apiKey: string, model: string, timeoutMs: number): Promise<unknown> {
211+
const abortController = new AbortController();
212+
const timeoutId = setTimeout(() => {
213+
abortController.abort();
214+
}, timeoutMs);
215+
216+
try {
217+
const response = await fetch(buildGeminiEndpoint(model, apiKey), {
218+
method: 'POST',
219+
headers: {
220+
'Content-Type': 'application/json',
221+
},
222+
body: JSON.stringify(createGeminiRequestBody(prompt)),
223+
signal: abortController.signal,
224+
});
225+
226+
const responseBody = await response.text();
227+
228+
if (!response.ok) {
229+
throw new Error(
230+
`Gemini request failed. status=${response.status} body=${responseBody.slice(0, 1000)}`
231+
);
232+
}
233+
234+
const parsed = JSON.parse(responseBody) as unknown;
235+
const text = parseGeminiText(parsed);
236+
return JSON.parse(text) as unknown;
237+
} catch (error) {
238+
const candidate = error as { name?: string };
239+
if (candidate?.name === 'AbortError') {
240+
throw createTimeoutError(timeoutMs);
241+
}
242+
throw error;
243+
} finally {
244+
clearTimeout(timeoutId);
245+
}
246+
}
247+
182248
async function generateAiParseResult(input: string, inventory: InventoryPromptItem[], lang: Language): Promise<AiParseResult> {
183-
const aiClient = getAiClient();
249+
const apiKey = getEnvApiKey();
250+
const aiModel = getAiModel();
251+
const prompt = buildPrompt(input, inventory, lang);
184252
let lastError: unknown = null;
185253

186254
for (let attempt = 1; attempt <= MAX_AI_ATTEMPTS; attempt += 1) {
187255
try {
188-
const response = await aiClient.models.generateContent({
189-
model: AI_MODEL,
190-
contents: buildPrompt(input, inventory, lang),
191-
config: {
192-
responseMimeType: 'application/json',
193-
responseSchema: createResponseSchema(),
194-
},
195-
});
196-
197-
if (!response.text) {
198-
throw new Error(EMPTY_AI_RESPONSE_MESSAGE);
199-
}
200-
201-
const parsed = JSON.parse(response.text) as unknown;
256+
const parsed = await requestGeminiJson(prompt, apiKey, aiModel, AI_REQUEST_TIMEOUT_MS);
202257
return validateAiParseResult(parsed);
203258
} catch (error) {
204259
lastError = error;
@@ -222,25 +277,18 @@ export const config = {
222277
runtime: 'nodejs',
223278
};
224279

225-
export default async function handler(request: Request): Promise<Response> {
280+
export default async function handler(request: NodeApiRequest, response: NodeApiResponse): Promise<void> {
226281
if (request.method !== 'POST') {
227-
return createJsonResponse({ message: 'Method not allowed.' }, 405);
282+
sendJsonResponse(response, { message: 'Method not allowed.' }, 405);
283+
return;
228284
}
229285

230286
try {
231-
let body: unknown;
232-
233-
try {
234-
body = (await request.json()) as unknown;
235-
} catch (error) {
236-
throw new AiParseRequestError('AI parse request body must be valid JSON.', {
237-
cause: error instanceof Error ? error : undefined,
238-
});
239-
}
240-
287+
const body = request.body;
241288
const { input, inventory, lang } = parseRequestBody(body);
242289
const result = await generateAiParseResult(input, inventory, lang);
243-
return createJsonResponse(result, 200);
290+
sendJsonResponse(response, result, 200);
291+
return;
244292
} catch (error) {
245293
const errorMessage = getErrorMessage(error);
246294
const status =
@@ -254,6 +302,10 @@ export default async function handler(request: Request): Promise<Response> {
254302
errorMessage,
255303
});
256304

257-
return createJsonResponse({ message: status === 400 || status === 503 ? errorMessage : 'Could not process AI response safely. Please retry with clearer input.' }, status);
305+
sendJsonResponse(
306+
response,
307+
{ message: status === 400 || status === 503 ? errorMessage : 'Could not process AI response safely. Please retry with clearer input.' },
308+
status
309+
);
258310
}
259311
}

0 commit comments

Comments (0)