Skip to content

Commit 3837fae

Browse files
vid277cdxker
authored and committed
feature: show queue length in dashboard
1 parent 2a278c4 commit 3837fae

File tree

10 files changed

+240
-16
lines changed

10 files changed

+240
-16
lines changed

clients/ts-sdk/openapi.json

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4239,6 +4239,57 @@
42394239
]
42404240
}
42414241
},
4242+
"/api/dataset/get_dataset_queue_lengths": {
4243+
"get": {
4244+
"tags": [
4245+
"Dataset"
4246+
],
4247+
"summary": "Get file and chunk creation queue lengths",
4248+
"description": "Get the queue lengths for file and chunk creation.",
4249+
"operationId": "get_dataset_queue_lengths",
4250+
"parameters": [
4251+
{
4252+
"name": "TR-Dataset",
4253+
"in": "header",
4254+
"description": "The dataset id or tracking_id to use for the request. We assume you intend to use an id if the value is a valid uuid.",
4255+
"required": true,
4256+
"schema": {
4257+
"type": "string",
4258+
"format": "uuid"
4259+
}
4260+
}
4261+
],
4262+
"responses": {
4263+
"200": {
4264+
"description": "Queue lengths for file and chunk creation",
4265+
"content": {
4266+
"application/json": {
4267+
"schema": {
4268+
"$ref": "#/components/schemas/DatasetQueueLengthsResponse"
4269+
}
4270+
}
4271+
}
4272+
},
4273+
"400": {
4274+
"description": "Service error relating to getting the queue lengths",
4275+
"content": {
4276+
"application/json": {
4277+
"schema": {
4278+
"$ref": "#/components/schemas/ErrorResponseBody"
4279+
}
4280+
}
4281+
}
4282+
}
4283+
},
4284+
"security": [
4285+
{
4286+
"ApiKey": [
4287+
"readonly"
4288+
]
4289+
}
4290+
]
4291+
}
4292+
},
42424293
"/api/dataset/groups/{dataset_id}/{page}": {
42434294
"get": {
42444295
"tags": [
@@ -12391,6 +12442,25 @@
1239112442
}
1239212443
}
1239312444
},
12445+
"DatasetQueueLengthsResponse": {
12446+
"type": "object",
12447+
"required": [
12448+
"file_queue_length",
12449+
"chunk_queue_length"
12450+
],
12451+
"properties": {
12452+
"chunk_queue_length": {
12453+
"type": "integer",
12454+
"format": "int64",
12455+
"description": "Number of chunks in the queue for the dataset"
12456+
},
12457+
"file_queue_length": {
12458+
"type": "integer",
12459+
"format": "int64",
12460+
"description": "Number of files in the queue for the dataset"
12461+
}
12462+
}
12463+
},
1239412464
"DatasetUsageCount": {
1239512465
"type": "object",
1239612466
"required": [

clients/ts-sdk/src/functions/datasets/index.ts

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import {
1010
CreateDatasetReqPayload,
1111
Dataset,
1212
DatasetAndUsage,
13+
DatasetQueueLengthsResponse,
1314
DatasetUsageCount,
1415
EventReturn,
1516
FileData,
@@ -300,3 +301,19 @@ export async function getPagefindUrl(
300301
signal
301302
) as Promise<GetPagefindIndexResponse>;
302303
}
304+
305+
export async function getDatasetQueueLengths(
306+
/** @hidden */
307+
this: TrieveSDK,
308+
datasetId: string,
309+
signal?: AbortSignal
310+
): Promise<DatasetQueueLengthsResponse> {
311+
return this.trieve.fetch(
312+
"/api/dataset/get_dataset_queue_lengths",
313+
"get",
314+
{
315+
datasetId,
316+
},
317+
signal
318+
) as Promise<DatasetQueueLengthsResponse>;
319+
}

clients/ts-sdk/src/types.gen.ts

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1470,6 +1470,17 @@ export type DatasetFilePathParams = {
14701470
page: number;
14711471
};
14721472

1473+
export type DatasetQueueLengthsResponse = {
1474+
/**
1475+
* Number of chunks in the queue for the dataset
1476+
*/
1477+
chunk_queue_length: number;
1478+
/**
1479+
* Number of files in the queue for the dataset
1480+
*/
1481+
file_queue_length: number;
1482+
};
1483+
14731484
export type DatasetUsageCount = {
14741485
chunk_count: number;
14751486
dataset_id: string;
@@ -6012,6 +6023,15 @@ export type GetAllTagsData = {
60126023

60136024
export type GetAllTagsResponse2 = (GetAllTagsResponse);
60146025

6026+
export type GetDatasetQueueLengthsData = {
6027+
/**
6028+
* The dataset id or tracking_id to use for the request. We assume you intend to use an id if the value is a valid uuid.
6029+
*/
6030+
trDataset: string;
6031+
};
6032+
6033+
export type GetDatasetQueueLengthsResponse = (DatasetQueueLengthsResponse);
6034+
60156035
export type GetGroupsForDatasetData = {
60166036
/**
60176037
* The cursor offset for. Requires `use_cursor` = True. Defaults to `00000000-00000000-00000000-00000000`. Group ids are compared to the cursor using a greater than or equal to.
@@ -7811,6 +7831,21 @@ export type $OpenApiTs = {
78117831
};
78127832
};
78137833
};
7834+
'/api/dataset/get_dataset_queue_lengths': {
7835+
get: {
7836+
req: GetDatasetQueueLengthsData;
7837+
res: {
7838+
/**
7839+
* Queue lengths for file and chunk creation
7840+
*/
7841+
200: DatasetQueueLengthsResponse;
7842+
/**
7843+
* Service error relating to getting the queue lengths
7844+
*/
7845+
400: ErrorResponseBody;
7846+
};
7847+
};
7848+
};
78147849
'/api/dataset/groups/{dataset_id}/{page}': {
78157850
get: {
78167851
req: GetGroupsForDatasetData;

frontends/dashboard/src/components/Sidebar.tsx

Lines changed: 97 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ import {
1111
AiOutlineLeft,
1212
AiOutlineMessage,
1313
AiOutlineSearch,
14+
AiOutlineReload,
1415
} from "solid-icons/ai";
1516
import { Spacer } from "./Spacer";
1617
import { Portal } from "solid-js/web";
@@ -24,14 +25,19 @@ import { TbSparkles, TbTimelineEventText, TbTransform } from "solid-icons/tb";
2425
import { createSignal } from "solid-js";
2526
import NewDatasetModal from "../components/NewDatasetModal";
2627
import { ImNewspaper } from "solid-icons/im";
28+
import { useTrieve } from "../hooks/useTrieve";
29+
import { createQuery } from "@tanstack/solid-query";
2730

28-
const searchUiURL = import.meta.env.VITE_SEARCH_UI_URL as string;
29-
const chatUiURL = import.meta.env.VITE_CHAT_UI_URL as string;
31+
// Base URLs for the companion Search and Chat UIs. Fall back to "" when the
// env vars are unset so later string interpolation never yields "undefined".
// NOTE: keep the full `import.meta.env.VITE_*` property access inline — Vite
// statically replaces these expressions at build time.
const searchUiURL =
  (import.meta.env.VITE_SEARCH_UI_URL as string | undefined) ?? "";
const chatUiURL =
  (import.meta.env.VITE_CHAT_UI_URL as string | undefined) ?? "";
3035

3136
export const DashboardSidebar = () => {
3237
const { datasetId } = useContext(DatasetContext);
3338
const userContext = useContext(UserContext);
3439
const pathname = useLocation();
40+
const trieve = useTrieve();
3541

3642
const [newDatasetModalOpen, setNewDatasetModalOpen] =
3743
createSignal<boolean>(false);
@@ -63,6 +69,91 @@ export const DashboardSidebar = () => {
6369
);
6470
});
6571

72+
const uploadStatusQuery = createQuery(() => ({
73+
queryKey: ["upload-status", datasetId()],
74+
queryFn: async () => {
75+
return await trieve.fetch(
76+
"/api/dataset/get_dataset_queue_lengths",
77+
"get",
78+
{
79+
datasetId: datasetId(),
80+
},
81+
);
82+
},
83+
refetchInterval: 5000,
84+
refetchOnMount: true,
85+
refetchOnWindowFocus: true,
86+
enabled: !!datasetId(),
87+
}));
88+
89+
const UploadStatusBar = () => {
90+
return (
91+
<div class="space-y-2 p-2">
92+
<div class="flex flex-col justify-between">
93+
<div class="text-sm font-medium text-neutral-700">
94+
Processing Queue
95+
</div>
96+
<div class="flex items-center justify-between text-xs">
97+
<span class="text-neutral-500">Updates every 5 seconds</span>
98+
<button
99+
onClick={() => {
100+
void uploadStatusQuery.refetch();
101+
}}
102+
class="flex items-center gap-1 text-neutral-500 hover:text-fuchsia-500"
103+
title="Reload status"
104+
>
105+
<AiOutlineReload
106+
size={14}
107+
classList={{
108+
"animate-spin": uploadStatusQuery.isFetching,
109+
}}
110+
/>
111+
</button>
112+
</div>
113+
</div>
114+
115+
<div class="space-y-1">
116+
<div class="flex justify-between text-xs text-neutral-600">
117+
<span>Files</span>
118+
<span>{uploadStatusQuery.data?.file_queue_length ?? 0} files</span>
119+
</div>
120+
<div class="h-2 w-full rounded-full bg-neutral-200">
121+
<div
122+
class="h-2 rounded-full bg-blue-500 transition-all duration-300"
123+
style={{
124+
width: `${Math.min(
125+
((uploadStatusQuery.data?.file_queue_length ?? 0) / 20) * 100,
126+
100,
127+
)}%`,
128+
}}
129+
/>
130+
</div>
131+
</div>
132+
133+
<div class="space-y-1">
134+
<div class="flex justify-between text-xs text-neutral-600">
135+
<span>Chunk Batches</span>
136+
<span>
137+
{uploadStatusQuery.data?.chunk_queue_length ?? 0} batches
138+
</span>
139+
</div>
140+
<div class="h-2 w-full rounded-full bg-neutral-200">
141+
<div
142+
class="h-2 rounded-full bg-orange-500 transition-all duration-300"
143+
style={{
144+
width: `${Math.min(
145+
((uploadStatusQuery.data?.chunk_queue_length ?? 0) / 1000) *
146+
100,
147+
100,
148+
)}%`,
149+
}}
150+
/>
151+
</div>
152+
</div>
153+
</div>
154+
);
155+
};
156+
66157
const Link = (props: {
67158
href: string;
68159
label: JSX.Element;
@@ -239,6 +330,10 @@ export const DashboardSidebar = () => {
239330
/>
240331
</Show>
241332
</div>
333+
<div class="gap flex flex-col pt-4">
334+
<SectionLabel>Upload Status</SectionLabel>
335+
<UploadStatusBar />
336+
</div>
242337
</div>
243338
</div>
244339
</>

server/src/bin/file-worker.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ use std::{
1111
sync::{atomic::AtomicBool, Arc},
1212
};
1313
use trieve_server::{
14-
data::models::{self, ChunkGroup, FileWorkerMessage, Pool},
14+
data::models::{self, ChunkGroup, FileWorkerMessage},
1515
establish_connection, get_env,
1616
handlers::chunk_handler::ChunkReqPayload,
1717
operators::{
@@ -796,4 +796,4 @@ async fn upload_file(
796796
.await?;
797797

798798
Ok(None)
799-
}
799+
}

server/src/handlers/dataset_handler.rs

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ use crate::{
88
get_env,
99
middleware::auth_middleware::{verify_admin, verify_owner},
1010
operators::{
11+
chunk_operator::get_chunk_queue_length,
1112
dataset_operator::{
1213
clear_dataset_by_dataset_id_query, create_dataset_query, create_datasets_query,
1314
get_dataset_by_id_query, get_dataset_by_tracking_id_query, get_dataset_usage_query,
@@ -17,7 +18,6 @@ use crate::{
1718
dittofeed_operator::{
1819
send_ditto_event, DittoDatasetCreated, DittoTrackProperties, DittoTrackRequest,
1920
},
20-
chunk_operator::get_chunk_queue_length,
2121
file_operator::get_file_queue_length,
2222
organization_operator::{get_org_dataset_count, get_org_from_id_query},
2323
},
@@ -1028,14 +1028,15 @@ pub async fn get_dataset_queue_lengths(
10281028
broccoli_queue: web::Data<BroccoliQueue>,
10291029
dataset_org_plan_sub: DatasetAndOrgWithSubAndPlan,
10301030
) -> Result<HttpResponse, ServiceError> {
1031-
log::info!("Getting file queue length for dataset: {}", dataset_org_plan_sub.dataset.id);
1032-
let file_queue_length = get_file_queue_length(dataset_org_plan_sub.dataset.id, &broccoli_queue).await
1033-
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
1034-
let chunk_queue_length = get_chunk_queue_length(dataset_org_plan_sub.dataset.id, &broccoli_queue).await
1031+
let file_queue_length = get_file_queue_length(dataset_org_plan_sub.dataset.id, &broccoli_queue)
1032+
.await
10351033
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
1036-
log::info!("file_queue_length: {}", file_queue_length);
1034+
let chunk_queue_length =
1035+
get_chunk_queue_length(dataset_org_plan_sub.dataset.id, &broccoli_queue)
1036+
.await
1037+
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
10371038
Ok(HttpResponse::Ok().json(DatasetQueueLengthsResponse {
10381039
file_queue_length,
10391040
chunk_queue_length,
10401041
}))
1041-
}
1042+
}

server/src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -261,6 +261,7 @@ impl Modify for SecurityAddon {
261261
handlers::dataset_handler::get_pagefind_index_for_dataset,
262262
handlers::dataset_handler::clear_dataset,
263263
handlers::dataset_handler::clone_dataset,
264+
handlers::dataset_handler::get_dataset_queue_lengths,
264265
handlers::payment_handler::direct_to_payment_link,
265266
handlers::payment_handler::cancel_subscription,
266267
handlers::payment_handler::update_subscription_plan,

server/src/operators/chunk_operator.rs

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,12 +10,12 @@ use crate::operators::parse_operator::convert_html_to_text;
1010
use crate::operators::qdrant_operator::{
1111
delete_points_from_qdrant, get_qdrant_collection_from_dataset_config, scroll_dataset_points,
1212
};
13-
use broccoli_queue::queue::BroccoliQueue;
1413
use crate::{
1514
data::models::{ChunkMetadata, Pool},
1615
errors::ServiceError,
1716
};
1817
use actix_web::web;
18+
use broccoli_queue::queue::BroccoliQueue;
1919
use chrono::NaiveDateTime;
2020
use clickhouse::Row;
2121
use dateparser::DateTimeUtc;
@@ -2972,9 +2972,14 @@ pub async fn get_chunk_queue_length(
29722972
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
29732973

29742974
let premium_ingestion_queue_status = broccoli_queue
2975-
.queue_status("premium_ingestion".to_string(), Some(dataset_id.to_string()))
2975+
.queue_status(
2976+
"premium_ingestion".to_string(),
2977+
Some(dataset_id.to_string()),
2978+
)
29762979
.await
29772980
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
29782981

2979-
Ok(openai_ingestion_queue_status.size as i64 + regular_ingestion_queue_status.size as i64 + premium_ingestion_queue_status.size as i64)
2982+
Ok(openai_ingestion_queue_status.size as i64
2983+
+ regular_ingestion_queue_status.size as i64
2984+
+ premium_ingestion_queue_status.size as i64)
29802985
}

0 commit comments

Comments
 (0)