Skip to content

Commit fbddbaf

Browse files
chore(closes OPEN-8647): add endpoint to retrieve aggregated user data for inference pipelines
1 parent 67ea296 commit fbddbaf

File tree

7 files changed

+293
-5
lines changed

7 files changed

+293
-5
lines changed

.stats.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
configured_endpoints: 26
2-
openapi_spec_hash: 6f6cb98b7755d18274dd51e857508336
3-
config_hash: cc9a32249c08143687799eb8de187d6a
1+
configured_endpoints: 27
2+
openapi_spec_hash: c70c3eccfe803e99c14e97e650b1e314
3+
config_hash: 1f7626e569e1a74574a58d7883170a0e

api.md

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,14 +119,19 @@ Methods:
119119
Types:
120120

121121
```python
122-
from openlayer.types import InferencePipelineRetrieveResponse, InferencePipelineUpdateResponse
122+
from openlayer.types import (
123+
InferencePipelineRetrieveResponse,
124+
InferencePipelineUpdateResponse,
125+
InferencePipelineRetrieveUsersResponse,
126+
)
123127
```
124128

125129
Methods:
126130

127131
- <code title="get /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">retrieve</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipeline_retrieve_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipeline_retrieve_response.py">InferencePipelineRetrieveResponse</a></code>
128132
- <code title="put /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">update</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipeline_update_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipeline_update_response.py">InferencePipelineUpdateResponse</a></code>
129133
- <code title="delete /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">delete</a>(inference_pipeline_id) -> None</code>
134+
- <code title="get /inference-pipelines/{inferencePipelineId}/users">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">retrieve_users</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipeline_retrieve_users_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipeline_retrieve_users_response.py">InferencePipelineRetrieveUsersResponse</a></code>
130135

131136
## Data
132137

src/openlayer/resources/inference_pipelines/inference_pipelines.py

Lines changed: 128 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,11 @@
2323
RowsResourceWithStreamingResponse,
2424
AsyncRowsResourceWithStreamingResponse,
2525
)
26-
from ...types import inference_pipeline_update_params, inference_pipeline_retrieve_params
26+
from ...types import (
27+
inference_pipeline_update_params,
28+
inference_pipeline_retrieve_params,
29+
inference_pipeline_retrieve_users_params,
30+
)
2731
from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
2832
from ..._utils import maybe_transform, async_maybe_transform
2933
from ..._compat import cached_property
@@ -45,6 +49,7 @@
4549
from ..._base_client import make_request_options
4650
from ...types.inference_pipeline_update_response import InferencePipelineUpdateResponse
4751
from ...types.inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse
52+
from ...types.inference_pipeline_retrieve_users_response import InferencePipelineRetrieveUsersResponse
4853

4954
__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]
5055

@@ -214,6 +219,61 @@ def delete(
214219
cast_to=NoneType,
215220
)
216221

222+
def retrieve_users(
    self,
    inference_pipeline_id: str,
    *,
    page: int | Omit = omit,
    per_page: int | Omit = omit,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InferencePipelineRetrieveUsersResponse:
    """
    Get aggregated user data for an inference pipeline with pagination and metadata.

    Returns a list of users who have interacted with the inference pipeline,
    including their activity statistics such as session counts, record counts, token
    usage, and costs.

    Args:
      page: The page to return in a paginated query.

      per_page: Maximum number of items to return per page.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # A blank path parameter would produce a malformed URL, so reject it early.
    if not inference_pipeline_id:
        raise ValueError(
            f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
        )
    # Build the pagination query; maybe_transform drops omitted values and
    # applies the wire-format aliases declared on the params TypedDict.
    query_params = maybe_transform(
        {"page": page, "per_page": per_page},
        inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
    )
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
        query=query_params,
    )
    return self._get(
        f"/inference-pipelines/{inference_pipeline_id}/users",
        options=request_options,
        cast_to=InferencePipelineRetrieveUsersResponse,
    )
276+
217277

218278
class AsyncInferencePipelinesResource(AsyncAPIResource):
219279
@cached_property
@@ -380,6 +440,61 @@ async def delete(
380440
cast_to=NoneType,
381441
)
382442

443+
async def retrieve_users(
    self,
    inference_pipeline_id: str,
    *,
    page: int | Omit = omit,
    per_page: int | Omit = omit,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InferencePipelineRetrieveUsersResponse:
    """
    Get aggregated user data for an inference pipeline with pagination and metadata.

    Returns a list of users who have interacted with the inference pipeline,
    including their activity statistics such as session counts, record counts, token
    usage, and costs.

    Args:
      page: The page to return in a paginated query.

      per_page: Maximum number of items to return per page.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # A blank path parameter would produce a malformed URL, so reject it early.
    if not inference_pipeline_id:
        raise ValueError(
            f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
        )
    # Build the pagination query; async_maybe_transform drops omitted values
    # and applies the wire-format aliases declared on the params TypedDict.
    query_params = await async_maybe_transform(
        {"page": page, "per_page": per_page},
        inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
    )
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
        query=query_params,
    )
    return await self._get(
        f"/inference-pipelines/{inference_pipeline_id}/users",
        options=request_options,
        cast_to=InferencePipelineRetrieveUsersResponse,
    )
497+
383498

384499
class InferencePipelinesResourceWithRawResponse:
385500
def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
@@ -394,6 +509,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
394509
self.delete = to_raw_response_wrapper(
395510
inference_pipelines.delete,
396511
)
512+
self.retrieve_users = to_raw_response_wrapper(
513+
inference_pipelines.retrieve_users,
514+
)
397515

398516
@cached_property
399517
def data(self) -> DataResourceWithRawResponse:
@@ -421,6 +539,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
421539
self.delete = async_to_raw_response_wrapper(
422540
inference_pipelines.delete,
423541
)
542+
self.retrieve_users = async_to_raw_response_wrapper(
543+
inference_pipelines.retrieve_users,
544+
)
424545

425546
@cached_property
426547
def data(self) -> AsyncDataResourceWithRawResponse:
@@ -448,6 +569,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
448569
self.delete = to_streamed_response_wrapper(
449570
inference_pipelines.delete,
450571
)
572+
self.retrieve_users = to_streamed_response_wrapper(
573+
inference_pipelines.retrieve_users,
574+
)
451575

452576
@cached_property
453577
def data(self) -> DataResourceWithStreamingResponse:
@@ -475,6 +599,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
475599
self.delete = async_to_streamed_response_wrapper(
476600
inference_pipelines.delete,
477601
)
602+
self.retrieve_users = async_to_streamed_response_wrapper(
603+
inference_pipelines.retrieve_users,
604+
)
478605

479606
@cached_property
480607
def data(self) -> AsyncDataResourceWithStreamingResponse:

src/openlayer/types/__init__.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,3 +18,9 @@
1818
from .inference_pipeline_retrieve_params import InferencePipelineRetrieveParams as InferencePipelineRetrieveParams
1919
from .inference_pipeline_update_response import InferencePipelineUpdateResponse as InferencePipelineUpdateResponse
2020
from .inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse as InferencePipelineRetrieveResponse
21+
from .inference_pipeline_retrieve_users_params import (
22+
InferencePipelineRetrieveUsersParams as InferencePipelineRetrieveUsersParams,
23+
)
24+
from .inference_pipeline_retrieve_users_response import (
25+
InferencePipelineRetrieveUsersResponse as InferencePipelineRetrieveUsersResponse,
26+
)
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2+
3+
from __future__ import annotations
4+
5+
from typing_extensions import Annotated, TypedDict
6+
7+
from .._utils import PropertyInfo
8+
9+
__all__ = ["InferencePipelineRetrieveUsersParams"]
10+
11+
12+
class InferencePipelineRetrieveUsersParams(TypedDict, total=False):
    """Query parameters accepted by the retrieve-users endpoint.

    `total=False` makes every key optional; omitted keys are simply not sent.
    """

    page: int
    """The page to return in a paginated query."""

    per_page: Annotated[int, PropertyInfo(alias="perPage")]
    """Maximum number of items to return per page."""
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2+
3+
from typing import List
4+
from datetime import datetime
5+
6+
from pydantic import Field as FieldInfo
7+
8+
from .._models import BaseModel
9+
10+
__all__ = ["InferencePipelineRetrieveUsersResponse", "Item"]
11+
12+
13+
class Item(BaseModel):
    """Aggregated activity statistics for a single user of an inference pipeline."""

    id: str
    """The unique user identifier"""

    cost: float
    """Total cost for this user"""

    date_of_first_record: datetime = FieldInfo(alias="dateOfFirstRecord")
    """Timestamp of the user's first event/trace"""

    date_of_last_record: datetime = FieldInfo(alias="dateOfLastRecord")
    """Timestamp of the user's last event/trace"""

    records: int
    """Total number of traces/rows for this user"""

    sessions: int
    """Count of unique sessions for this user"""

    tokens: float
    """Total token count for this user"""
35+
36+
class InferencePipelineRetrieveUsersResponse(BaseModel):
    """Response envelope for the retrieve-users endpoint."""

    items: List[Item]
    """Array of user aggregation data"""

tests/api_resources/test_inference_pipelines.py

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from openlayer.types import (
1313
InferencePipelineUpdateResponse,
1414
InferencePipelineRetrieveResponse,
15+
InferencePipelineRetrieveUsersResponse,
1516
)
1617

1718
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -152,6 +153,53 @@ def test_path_params_delete(self, client: Openlayer) -> None:
152153
"",
153154
)
154155

156+
@parametrize
def test_method_retrieve_users(self, client: Openlayer) -> None:
    # Happy path: only the required path parameter.
    inference_pipeline = client.inference_pipelines.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    )
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
162+
163+
@parametrize
def test_method_retrieve_users_with_all_params(self, client: Openlayer) -> None:
    # Exercise every optional query parameter alongside the required path param.
    inference_pipeline = client.inference_pipelines.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
        page=1,
        per_page=1,
    )
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
171+
172+
@parametrize
def test_raw_response_retrieve_users(self, client: Openlayer) -> None:
    # The raw-response wrapper exposes the underlying HTTP response before parsing.
    response = client.inference_pipelines.with_raw_response.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    inference_pipeline = response.parse()
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
182+
183+
@parametrize
def test_streaming_response_retrieve_users(self, client: Openlayer) -> None:
    # The streaming wrapper yields a context manager; the body stays open inside it.
    with client.inference_pipelines.with_streaming_response.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        inference_pipeline = response.parse()
        assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])

    assert cast(Any, response.is_closed) is True
195+
196+
@parametrize
def test_path_params_retrieve_users(self, client: Openlayer) -> None:
    # An empty pipeline id must be rejected client-side before any request is made.
    with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
        client.inference_pipelines.with_raw_response.retrieve_users(
            inference_pipeline_id="",
        )
202+
155203

156204
class TestAsyncInferencePipelines:
157205
parametrize = pytest.mark.parametrize(
@@ -289,3 +337,50 @@ async def test_path_params_delete(self, async_client: AsyncOpenlayer) -> None:
289337
await async_client.inference_pipelines.with_raw_response.delete(
290338
"",
291339
)
340+
341+
@parametrize
async def test_method_retrieve_users(self, async_client: AsyncOpenlayer) -> None:
    # Happy path: only the required path parameter.
    inference_pipeline = await async_client.inference_pipelines.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    )
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
347+
348+
@parametrize
async def test_method_retrieve_users_with_all_params(self, async_client: AsyncOpenlayer) -> None:
    # Exercise every optional query parameter alongside the required path param.
    inference_pipeline = await async_client.inference_pipelines.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
        page=1,
        per_page=1,
    )
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
356+
357+
@parametrize
async def test_raw_response_retrieve_users(self, async_client: AsyncOpenlayer) -> None:
    # The raw-response wrapper exposes the underlying HTTP response before parsing.
    response = await async_client.inference_pipelines.with_raw_response.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    inference_pipeline = await response.parse()
    assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
367+
368+
@parametrize
async def test_streaming_response_retrieve_users(self, async_client: AsyncOpenlayer) -> None:
    # The streaming wrapper yields an async context manager; the body stays open inside it.
    async with async_client.inference_pipelines.with_streaming_response.retrieve_users(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        inference_pipeline = await response.parse()
        assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])

    assert cast(Any, response.is_closed) is True
380+
381+
@parametrize
async def test_path_params_retrieve_users(self, async_client: AsyncOpenlayer) -> None:
    # An empty pipeline id must be rejected client-side before any request is made.
    with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
        await async_client.inference_pipelines.with_raw_response.retrieve_users(
            inference_pipeline_id="",
        )

0 commit comments

Comments
 (0)