Skip to content

Commit 2b8ca34

Browse files
fix(admin): use cursor-based pagination for API request log download (#2259)
* fix(admin): use cursor-based pagination for API request log download

  The download endpoint loaded all rows into memory at once, causing Vercel OOM kills for users with many API requests. Switch to cursor-based pagination (batches of 100) with backpressure-aware stream draining so only one batch is in memory at a time.

* fix: propagate pagination errors to the response stream

  Address review comment: if a batch query or archive.finalize() fails, destroy the passthrough stream so the error surfaces to the client instead of hanging until timeout.

* feat(admin): add optional model filter to API request log download

  Adds an optional 'model' query parameter to filter the download by a specific model. When not set, all models are included (existing behavior).

* fix(admin): sanitize model name in download filename

  Replace / and : in the model name so the ZIP filename is safe for all filesystems.

---------

Co-authored-by: kiloconnect[bot] <240665456+kiloconnect[bot]@users.noreply.github.com>
Co-authored-by: Christiaan Arnoldus <christiaan@kilocode.ai>
1 parent 50a44b9 commit 2b8ca34

2 files changed

Lines changed: 83 additions & 31 deletions

File tree

  • apps/web/src/app/admin

apps/web/src/app/admin/api-request-log/page.tsx

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ export default function ApiRequestLogPage() {
1717
const [userId, setUserId] = useState('');
1818
const [startDate, setStartDate] = useState(weekAgo);
1919
const [endDate, setEndDate] = useState(today);
20+
const [model, setModel] = useState('');
2021
const [error, setError] = useState<string | null>(null);
2122

2223
function handleDownload() {
@@ -36,6 +37,9 @@ export default function ApiRequestLogPage() {
3637
startDate,
3738
endDate,
3839
});
40+
if (model.trim()) {
41+
params.set('model', model.trim());
42+
}
3943

4044
// Navigate directly to preserve server-side streaming
4145
window.location.href = `/admin/api/api-request-log/download?${params}`;
@@ -61,6 +65,16 @@ export default function ApiRequestLogPage() {
6165
/>
6266
</div>
6367

68+
<div className="space-y-2">
69+
<Label htmlFor="model">Model (optional)</Label>
70+
<Input
71+
id="model"
72+
placeholder="e.g. claude-sonnet-4-20250514"
73+
value={model}
74+
onChange={e => setModel(e.target.value)}
75+
/>
76+
</div>
77+
6478
<div className="grid grid-cols-2 gap-4">
6579
<div className="space-y-2">
6680
<Label htmlFor="startDate">Start Date</Label>

apps/web/src/app/admin/api/api-request-log/download/route.ts

Lines changed: 69 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,12 @@ import { connection, type NextRequest } from 'next/server';
22
import { getUserFromAuth } from '@/lib/user.server';
33
import { db } from '@/lib/drizzle';
44
import { api_request_log } from '@kilocode/db/schema';
5-
import { and, gte, lte, eq, asc } from 'drizzle-orm';
5+
import { and, gte, lte, eq, asc, gt, count } from 'drizzle-orm';
66
import archiver from 'archiver';
77
import { PassThrough } from 'node:stream';
88

9+
const BATCH_SIZE = 100;
10+
911
function formatTimestamp(isoString: string): string {
1012
return isoString.replaceAll(':', '-').replaceAll(' ', '_');
1113
}
@@ -54,6 +56,18 @@ function jsonError(message: string, status: number) {
5456
});
5557
}
5658

59+
function buildFilter(userId: string, parsedStart: Date, parsedEnd: Date, model: string | null) {
60+
const conditions = [
61+
eq(api_request_log.kilo_user_id, userId),
62+
gte(api_request_log.created_at, parsedStart.toISOString()),
63+
lte(api_request_log.created_at, parsedEnd.toISOString()),
64+
];
65+
if (model) {
66+
conditions.push(eq(api_request_log.model, model));
67+
}
68+
return and(...conditions);
69+
}
70+
5771
export async function GET(request: NextRequest) {
5872
await connection();
5973

@@ -66,6 +80,7 @@ export async function GET(request: NextRequest) {
6680
const userId = searchParams.get('userId');
6781
const startDate = searchParams.get('startDate');
6882
const endDate = searchParams.get('endDate');
83+
const model = searchParams.get('model');
6984

7085
if (!userId || !startDate || !endDate) {
7186
return jsonError('userId, startDate, and endDate are required', 400);
@@ -77,19 +92,10 @@ export async function GET(request: NextRequest) {
7792
return jsonError('Invalid date format. Use YYYY-MM-DD.', 400);
7893
}
7994

80-
const rows = await db
81-
.select()
82-
.from(api_request_log)
83-
.where(
84-
and(
85-
eq(api_request_log.kilo_user_id, userId),
86-
gte(api_request_log.created_at, parsedStart.toISOString()),
87-
lte(api_request_log.created_at, parsedEnd.toISOString())
88-
)
89-
)
90-
.orderBy(asc(api_request_log.created_at));
91-
92-
if (rows.length === 0) {
95+
const filter = buildFilter(userId, parsedStart, parsedEnd, model);
96+
97+
const [result] = await db.select({ total: count() }).from(api_request_log).where(filter);
98+
if (result.total === 0) {
9399
return jsonError('No records found for the given criteria', 404);
94100
}
95101

@@ -98,24 +104,54 @@ export async function GET(request: NextRequest) {
98104

99105
archive.pipe(passthrough);
100106

101-
for (const row of rows) {
102-
const ts = formatTimestamp(row.created_at);
103-
const id = String(row.id);
104-
105-
const requestExt = isJson(row.request) ? 'json' : 'txt';
106-
const requestContent = tryFormatJson(row.request);
107-
if (requestContent) {
108-
archive.append(requestContent, { name: `${ts}_${id}_request.${requestExt}` });
107+
// Fetch and archive rows in batches using cursor-based pagination to
108+
// avoid loading the entire result set into memory at once.
109+
const appendRows = async () => {
110+
let cursor: bigint | null = null;
111+
for (;;) {
112+
const rows = await db
113+
.select()
114+
.from(api_request_log)
115+
.where(cursor ? and(filter, gt(api_request_log.id, cursor)) : filter)
116+
.orderBy(asc(api_request_log.id))
117+
.limit(BATCH_SIZE);
118+
119+
if (rows.length === 0) break;
120+
121+
for (const row of rows) {
122+
const ts = formatTimestamp(row.created_at);
123+
const id = String(row.id);
124+
125+
const requestExt = isJson(row.request) ? 'json' : 'txt';
126+
const requestContent = tryFormatJson(row.request);
127+
if (requestContent) {
128+
archive.append(requestContent, { name: `${ts}_${id}_request.${requestExt}` });
129+
}
130+
131+
const responseExt = isJson(row.response) ? 'json' : 'txt';
132+
const responseContent = tryFormatJson(row.response);
133+
if (responseContent) {
134+
archive.append(responseContent, { name: `${ts}_${id}_response.${responseExt}` });
135+
}
136+
}
137+
138+
cursor = rows[rows.length - 1].id;
139+
140+
// Wait for the passthrough stream to drain before fetching the next
141+
// batch so we don't buffer unbounded data in memory.
142+
await new Promise<void>(resolve => {
143+
if (passthrough.writableNeedDrain) {
144+
passthrough.once('drain', resolve);
145+
} else {
146+
resolve();
147+
}
148+
});
109149
}
110150

111-
const responseExt = isJson(row.response) ? 'json' : 'txt';
112-
const responseContent = tryFormatJson(row.response);
113-
if (responseContent) {
114-
archive.append(responseContent, { name: `${ts}_${id}_response.${responseExt}` });
115-
}
116-
}
151+
await archive.finalize();
152+
};
117153

118-
void archive.finalize();
154+
void appendRows().catch(error => passthrough.destroy(error));
119155

120156
const webStream = new ReadableStream({
121157
start(controller) {
@@ -125,8 +161,10 @@ export async function GET(request: NextRequest) {
125161
},
126162
});
127163

128-
const safeUserId = userId.replaceAll('/', '-').replaceAll(':', '-');
129-
const filename = `api-request-log_${safeUserId}_${startDate}_${endDate}.zip`;
164+
const sanitize = (s: string) => s.replaceAll('/', '-').replaceAll(':', '-');
165+
const safeUserId = sanitize(userId);
166+
const safeModel = model ? `_${sanitize(model)}` : '';
167+
const filename = `api-request-log_${safeUserId}_${startDate}_${endDate}${safeModel}.zip`;
130168

131169
return new Response(webStream, {
132170
headers: {

0 commit comments

Comments (0)