/**
* Query history — fetch and analyze recent query execution from warehouse system tables.
*
* SQL templates ported verbatim from Python altimate_engine.finops.query_history.
*/

import * as Registry from "../connections/registry"
import type { QueryHistoryParams, QueryHistoryResult } from "../types"

// ---------------------------------------------------------------------------
// SQL templates
// ---------------------------------------------------------------------------
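
// Placeholder conventions used by the templates below:
//   ?          driver-level bind parameters
//   {name}     tokens string-substituted by buildHistoryQuery (Snowflake
//              filters, Postgres LIMIT)
//   __NAME__   tokens string-substituted with clamped integers (ClickHouse)
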
const SNOWFLAKE_HISTORY_SQL = `
SELECT
  query_id,
  query_text,
  query_type,
  user_name,
  warehouse_name,
  warehouse_size,
  execution_status,
  error_code,
  error_message,
  start_time,
  end_time,
  total_elapsed_time / 1000.0 as execution_time_sec,
  bytes_scanned,
  rows_produced,
  credits_used_cloud_services
FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY
WHERE start_time >= DATEADD('day', ?, CURRENT_TIMESTAMP())
  {user_filter}
  {warehouse_filter}
ORDER BY start_time DESC
LIMIT ?
`
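
// pg_stat_statements stores per-statement aggregates rather than individual
// executions, so this template has no real timestamps (start/end are now())
// and execution_time_sec is the mean across all recorded calls.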
const POSTGRES_HISTORY_SQL = `
SELECT
  queryid::text as query_id,
  query as query_text,
  'SELECT' as query_type,
  '' as user_name,
  '' as warehouse_name,
  '' as warehouse_size,
  'SUCCESS' as execution_status,
  NULL as error_code,
  NULL as error_message,
  now() as start_time,
  now() as end_time,
  mean_exec_time / 1000.0 as execution_time_sec,
  shared_blks_read * 8192 as bytes_scanned,
  rows as rows_produced,
  0 as credits_used_cloud_services,
  calls as execution_count
FROM pg_stat_statements
ORDER BY total_exec_time DESC
LIMIT {limit}
`
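
// INFORMATION_SCHEMA.JOBS is region-qualified; this template hardcodes the
// US multi-region, so jobs in other regions will not appear.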
const BIGQUERY_HISTORY_SQL = `
SELECT
  job_id as query_id,
  query as query_text,
  job_type as query_type,
  user_email as user_name,
  '' as warehouse_name,
  reservation_id as warehouse_size,
  state as execution_status,
  NULL as error_code,
  error_message,
  start_time,
  end_time,
  TIMESTAMP_DIFF(end_time, start_time, SECOND) as execution_time_sec,
  total_bytes_billed as bytes_scanned,
  total_rows as rows_produced,
  0 as credits_used_cloud_services
FROM \`region-US.INFORMATION_SCHEMA.JOBS\`
WHERE creation_time >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL ? DAY)
ORDER BY creation_time DESC
LIMIT ?
`

const DATABRICKS_HISTORY_SQL = `
SELECT
  query_id,
  query_text,
  statement_type as query_type,
  user_name,
  warehouse_id as warehouse_name,
  '' as warehouse_size,
  status as execution_status,
  NULL as error_code,
  error_message,
  start_time,
  end_time,
  execution_time_ms / 1000.0 as execution_time_sec,
  bytes_read as bytes_scanned,
  rows_produced,
  0 as credits_used_cloud_services
FROM system.query.history
WHERE start_time >= DATE_SUB(CURRENT_DATE(), ?)
ORDER BY start_time DESC
LIMIT ?
`
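
// system.query_log stores one row per event; type = 'QueryFinish' keeps only
// completed queries (skipping QueryStart rows), and is_initial_query = 1
// drops secondary queries fanned out during distributed execution.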
const CLICKHOUSE_HISTORY_SQL = `
SELECT
  query_id,
  query as query_text,
  query_kind as query_type,
  user as user_name,
  '' as warehouse_name,
  '' as warehouse_size,
  multiIf(exception_code = 0, 'SUCCESS', 'FAILED') as execution_status,
  toString(exception_code) as error_code,
  exception as error_message,
  event_time as start_time,
  event_time + query_duration_ms / 1000 as end_time,
  query_duration_ms / 1000.0 as execution_time_sec,
  read_bytes as bytes_scanned,
  result_rows as rows_produced,
  0 as credits_used_cloud_services
FROM system.query_log
WHERE type = 'QueryFinish'
  AND event_date >= today() - __DAYS__
  AND is_initial_query = 1
ORDER BY event_time DESC
LIMIT __LIMIT__
`

// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

function getWhType(warehouse: string): string {
  const warehouses = Registry.list().warehouses
  const wh = warehouses.find((w) => w.name === warehouse)
  return wh?.type || "unknown"
}
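
// Render the engine-specific history SQL for whType, returning the SQL text
// plus its positional binds, or null when the engine exposes no query
// history (e.g. DuckDB).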
function buildHistoryQuery(
  whType: string,
  days: number,
  limit: number,
  user?: string,
  warehouseFilter?: string,
): { sql: string; binds: any[] } | null {
  if (whType === "snowflake") {
    const binds: any[] = [-days]
    const userF = user ? (binds.push(user), "AND user_name = ?") : ""
    const whF = warehouseFilter ? (binds.push(warehouseFilter), "AND warehouse_name = ?") : ""
    binds.push(limit)
    return {
      sql: SNOWFLAKE_HISTORY_SQL.replace("{user_filter}", userF).replace("{warehouse_filter}", whF),
      binds,
    }
  }
  if (whType === "postgres" || whType === "postgresql") {
    // The Postgres connector (packages/drivers/src/postgres.ts) does not yet
    // pass binds through to pg — its execute() ignores the _binds parameter.
    // Render LIMIT inline with Math.floor to prevent injection.
    return {
      sql: POSTGRES_HISTORY_SQL.replace("{limit}", String(Math.floor(Number(limit)))),
      binds: [],
    }
  }
  if (whType === "bigquery") {
    return { sql: BIGQUERY_HISTORY_SQL, binds: [days, limit] }
  }
  if (whType === "databricks") {
    return { sql: DATABRICKS_HISTORY_SQL, binds: [days, limit] }
  }
  if (whType === "clickhouse") {
    const clampedDays = Math.max(1, Math.min(Math.floor(Number(days)) || 30, 365))
    const clampedLimit = Math.max(1, Math.min(Math.floor(Number(limit)) || 100, 10000))
    // Placeholders use __NAME__ format (not ClickHouse's {name:Type} syntax) to
    // make clear these are string-substituted with clamped integer values, not
    // ClickHouse query parameters.
    const sql = CLICKHOUSE_HISTORY_SQL.replace("__DAYS__", String(clampedDays)).replace(
      "__LIMIT__",
      String(clampedLimit),
    )
    return { sql, binds: [] }
  }
  if (whType === "duckdb") {
    return null // DuckDB has no native query history
  }
  return null
}
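
// Illustrative bind assembly (example values are hypothetical):
//   buildHistoryQuery("snowflake", 7, 50, "ALICE")
// returns binds = [-7, "ALICE", 50], matching the template's placeholders in
// order: DATEADD('day', ?, ...), then AND user_name = ?, then LIMIT ?. The
// day count is negated so DATEADD steps backwards from CURRENT_TIMESTAMP().
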
function rowsToRecords(result: { columns: string[]; rows: any[][] }): Record<string, unknown>[] {
  return result.rows.map((row) => {
    const obj: Record<string, unknown> = {}
    result.columns.forEach((col, i) => {
      obj[col] = row[i]
    })
    return obj
  })
}
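
// Example shape (hypothetical values):
//   rowsToRecords({ columns: ["query_id", "user_name"], rows: [["q1", "ALICE"]] })
//   // => [{ query_id: "q1", user_name: "ALICE" }]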

// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------

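/**
 * Fetch recent query history for a registered warehouse and compute summary
 * stats over the returned rows (bytes scanned, execution time, failures).
 *
 * Illustrative usage; the warehouse name "analytics" is hypothetical:
 *
 *   const history = await getQueryHistory({ warehouse: "analytics", days: 3, limit: 25 })
 *   if (history.success) {
 *     console.log(`${history.summary.query_count} queries, ${history.summary.error_count} failed`)
 *   }
 */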
export async function getQueryHistory(params: QueryHistoryParams): Promise<QueryHistoryResult> {
  const whType = getWhType(params.warehouse)
  const days = params.days ?? 7
  const limit = params.limit ?? 100
  const built = buildHistoryQuery(whType, days, limit, params.user, params.warehouse_filter)
  if (!built) {
    return {
      success: false,
      queries: [],
      summary: {},
      error: `Query history is not available for ${whType} warehouses.`,
    }
  }

  try {
    const connector = await Registry.get(params.warehouse)
    const result = await connector.execute(built.sql, limit, built.binds)
    const queries = rowsToRecords(result)

    let totalBytes = 0
    let totalTime = 0
    let errorCount = 0
    for (const q of queries) {
      totalBytes += Number(q.bytes_scanned || 0)
      totalTime += Number(q.execution_time_sec || 0)
      // Engines report success differently: Snowflake and ClickHouse use
      // SUCCESS, Databricks reports FINISHED, and completed BigQuery jobs are
      // in state DONE. Anything outside that set counts as a failure.
      const status = String(q.execution_status || "").toUpperCase()
      if (!["SUCCESS", "FINISHED", "DONE"].includes(status)) {
        errorCount++
      }
    }

    const summary = {
      query_count: queries.length,
      total_bytes_scanned: totalBytes,
      total_execution_time_sec: Math.round(totalTime * 100) / 100,
      error_count: errorCount,
      avg_execution_time_sec: queries.length > 0 ? Math.round((totalTime / queries.length) * 100) / 100 : 0,
    }

    return {
      success: true,
      queries,
      summary,
      warehouse_type: whType,
    }
  } catch (e) {
    return {
      success: false,
      queries: [],
      summary: {},
      error: String(e),
    }
  }
}

// Exported for SQL template testing
export const SQL_TEMPLATES = {
  SNOWFLAKE_HISTORY_SQL,
  POSTGRES_HISTORY_SQL,
  BIGQUERY_HISTORY_SQL,
  DATABRICKS_HISTORY_SQL,
  CLICKHOUSE_HISTORY_SQL,
  buildHistoryQuery,
}