diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6c8933ae..fd07c85c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] package: ["functions", "realtime", "storage", "auth", "postgrest", "supabase"] runs-on: ${{ matrix.os }} steps: diff --git a/Makefile b/Makefile index b90dc35f..4fd2a5fd 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ .PHONY: ci, default, clean, start-infra, stop-infra -PACKAGES := functions realtime storage auth postgrest supabase +PACKAGES := utils functions realtime storage auth postgrest supabase FORALL_PKGS = $(foreach pkg, $(PACKAGES), $(pkg).$(1)) help:: @@ -58,6 +58,9 @@ help:: realtime.%: @$(MAKE) -C src/realtime $* +utils.%: + @$(MAKE) -C src/utils $* + functions.%: @$(MAKE) -C src/functions $* diff --git a/flake.lock b/flake.lock index 44d697a9..91c90689 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "nixpkgs": { "locked": { - "lastModified": 1762363567, - "narHash": "sha256-YRqMDEtSMbitIMj+JLpheSz0pwEr0Rmy5mC7myl17xs=", + "lastModified": 1774106199, + "narHash": "sha256-US5Tda2sKmjrg2lNHQL3jRQ6p96cgfWh3J1QBliQ8Ws=", "owner": "nixos", "repo": "nixpkgs", - "rev": "ae814fd3904b621d8ab97418f1d0f2eb0d3716f4", + "rev": "6c9a78c09ff4d6c21d0319114873508a6ec01655", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index dda68f9f..a24fda76 100644 --- a/flake.nix +++ b/flake.nix @@ -1,5 +1,5 @@ { - description = "realtime-py: a Realtime python client."; + description = "Supabase-py development flake"; inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; @@ -45,7 +45,7 @@ root = "$REPO_ROOT"; }; - pyproject-overlay = final: prev: { + pyproject-overlay = pkgs: final: prev: { ruamel-yaml-clib = prev.ruamel-yaml-clib.overrideAttrs (old: { nativeBuildInputs = 
old.nativeBuildInputs ++ [ (final.resolveBuildSystem { @@ -53,7 +53,21 @@ }) ]; }); - + pyiceberg = prev.pyiceberg.overrideAttrs (old: { + buildInputs = (old.buildInputs or []) ++ [ final.poetry-core ]; + }); + pyroaring = prev.pyroaring.overrideAttrs (old: { + postPatch = (old.postPatch or "") + '' + sed -i '1i from Cython.Build import cythonize' setup.py + sed -i 's/ext_modules=[pyroaring_module]/ext_modules=[cythonize(pyroaring_module)]/' setup.py + ''; + nativeBuildInputs = old.nativeBuildInputs ++ [ + (final.resolveBuildSystem { + setuptools = []; + }) + final.cython + ]; + }); }; python-for = pkgs: let @@ -61,10 +75,10 @@ pyproject-build-systems.overlays.default workspace-overlay editable-overlay - pyproject-overlay + (pyproject-overlay pkgs) ]; base-python = pkgs.callPackage pyproject-nix.build.packages { - python = pkgs.python311; + python = pkgs.python314; }; in base-python.overrideScope extensions; in { @@ -79,7 +93,7 @@ # Force uv to use nixpkgs Python interpreter UV_PROJECT_ENVIRONMENT = python-env; - UV_PYTHON = pkgs.python311.interpreter; + UV_PYTHON = pkgs.python314.interpreter; # Prevent uv from downloading managed Python's UV_PYTHON_DOWNLOADS = "never"; @@ -92,5 +106,8 @@ packages = [ python-env ] ++ (dev-tools pkgs); }; }); + lib = for-all-systems (pkgs: { + python = python-for pkgs; + }); }; } diff --git a/pyproject.toml b/pyproject.toml index ed64b96f..f6f1115c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,13 +5,15 @@ members = [ "src/supabase", "src/storage", "src/postgrest", - "src/auth" + "src/auth", + "src/utils", ] [tool.uv.sources] realtime = { workspace = true } supabase_functions = { workspace = true } supabase_auth = { workspace = true } +supabase_utils = { workspace = true } storage3 = { workspace = true } postgrest = { workspace = true } supabase = { workspace = true } @@ -20,7 +22,7 @@ supabase = { workspace = true } asyncio_mode = "auto" [tool.ruff] -target-version = "py39" +target-version = "py310" [tool.ruff.lint] select 
= [ diff --git a/src/auth/pyproject.toml b/src/auth/pyproject.toml index c57680ba..49a5b03a 100644 --- a/src/auth/pyproject.toml +++ b/src/auth/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "supabase_auth" -version = "2.28.3" # {x-release-please-version} +version = "3.0.0a1" # {x-release-please-version} description = "Python Client Library for Supabase Auth" authors = [ {name = "Joel Lee", email = "joel@joellee.org" } @@ -15,11 +15,11 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "httpx[http2] >=0.26,<0.29", - "pydantic >=1.10,<3", + "pydantic >=2,<3", "pyjwt[crypto] >=2.10.1", + "supabase_utils == 3.0.0a1", # x-release-please-version ] [project.urls] @@ -40,6 +40,7 @@ tests = [ "pytest-cov >= 6.2.1", "pytest-depends >= 1.0.1", "pytest-asyncio >= 1.0.0", + "supabase_utils[all]", "pyotp>=2.9.0", ] lints = [ @@ -53,13 +54,31 @@ dev = [{ include-group = "lints" }, {include-group = "tests" }] [tool.pytest.ini_options] asyncio_mode = "auto" +addopts = "tests" + +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + # "B", + # flake8-simplify + # "SIM", + # isort + "I", +] +ignore = ["E712", "E501", "E402", "UP006", "UP035"] [build-system] requires = ["uv_build>=0.8.3,<0.9.0"] build-backend = "uv_build" [tool.mypy] -python_version = "3.9" +python_version = "3.10" check_untyped_defs = true allow_redefinition = true follow_untyped_imports = true # for deprecation module that does not have stubs diff --git a/src/auth/scripts/gh-download.py b/src/auth/scripts/gh-download.py index b2f4c123..8e8f974b 100644 --- a/src/auth/scripts/gh-download.py +++ b/src/auth/scripts/gh-download.py @@ -20,7 +20,6 @@ import os import shutil import sys -from typing import Optional from github import Github, GithubException from github.ContentFile import ContentFile @@ -90,9 +89,9 @@ def main(argv) -> None: 
print(err) usage() sys.exit(2) - repo: Optional[str] = None - branch: Optional[str] = None - folder: Optional[str] = None + repo: str | None = None + branch: str | None = None + folder: str | None = None for opt, arg in opts: if opt in ("-r", "--repo"): repo = arg diff --git a/src/auth/src/supabase_auth/__init__.py b/src/auth/src/supabase_auth/__init__.py index 35352171..1dea0dab 100644 --- a/src/auth/src/supabase_auth/__init__.py +++ b/src/auth/src/supabase_auth/__init__.py @@ -1,16 +1,12 @@ from __future__ import annotations -from ._async.gotrue_admin_api import AsyncGoTrueAdminAPI # noqa -from ._async.gotrue_client import AsyncGoTrueClient # noqa -from ._async.storage import ( +from .admin_api import AsyncSupabaseAuthAdmin, SyncSupabaseAuthAdmin # noqa +from .client import AsyncSupabaseAuthClient, SyncSupabaseAuthClient # noqa +from .session import ( AsyncMemoryStorage, # noqa AsyncSupportedStorage, # noqa -) -from ._sync.gotrue_admin_api import SyncGoTrueAdminAPI # noqa -from ._sync.gotrue_client import SyncGoTrueClient # noqa -from ._sync.storage import ( SyncMemoryStorage, # noqa SyncSupportedStorage, # noqa ) -from .types import * +from .types import * # noqa from .version import __version__ # noqa diff --git a/src/auth/src/supabase_auth/_async/__init__.py b/src/auth/src/supabase_auth/_async/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/auth/src/supabase_auth/_async/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/auth/src/supabase_auth/_async/gotrue_admin_api.py b/src/auth/src/supabase_auth/_async/gotrue_admin_api.py deleted file mode 100644 index 4f6afa11..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_admin_api.py +++ /dev/null @@ -1,353 +0,0 @@ -from __future__ import annotations - -from typing import Dict, List, Optional - -from httpx import AsyncClient, QueryParams - -from ..helpers import ( - model_validate, - parse_link_response, - parse_user_response, - validate_uuid, -) 
-from ..types import ( - AdminUserAttributes, - AuthMFAAdminDeleteFactorParams, - AuthMFAAdminDeleteFactorResponse, - AuthMFAAdminListFactorsParams, - AuthMFAAdminListFactorsResponse, - AuthMFAAdminListFactorsResponseParser, - CreateOAuthClientParams, - GenerateLinkParams, - GenerateLinkResponse, - InviteUserByEmailOptions, - OAuthClient, - OAuthClientListResponse, - OAuthClientResponse, - PageParams, - SignOutScope, - UpdateOAuthClientParams, - User, - UserList, - UserResponse, -) -from .gotrue_admin_mfa_api import AsyncGoTrueAdminMFAAPI -from .gotrue_admin_oauth_api import AsyncGoTrueAdminOAuthAPI -from .gotrue_base_api import AsyncGoTrueBaseAPI - - -class AsyncGoTrueAdminAPI(AsyncGoTrueBaseAPI): - def __init__( - self, - *, - url: str = "", - headers: Optional[Dict[str, str]] = None, - http_client: Optional[AsyncClient] = None, - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - http_headers = headers or {} - AsyncGoTrueBaseAPI.__init__( - self, - url=url, - headers=http_headers, - http_client=http_client, - verify=verify, - proxy=proxy, - ) - # TODO(@o-santi): why is is this done this way? - self.mfa = AsyncGoTrueAdminMFAAPI() - self.mfa.list_factors = self._list_factors # type: ignore - self.mfa.delete_factor = self._delete_factor # type: ignore - self.oauth = AsyncGoTrueAdminOAuthAPI() - self.oauth.list_clients = self._list_oauth_clients # type: ignore - self.oauth.create_client = self._create_oauth_client # type: ignore - self.oauth.get_client = self._get_oauth_client # type: ignore - self.oauth.update_client = self._update_oauth_client # type: ignore - self.oauth.delete_client = self._delete_oauth_client # type: ignore - self.oauth.regenerate_client_secret = self._regenerate_oauth_client_secret # type: ignore - - async def sign_out(self, jwt: str, scope: SignOutScope = "global") -> None: - """ - Removes a logged-in session. 
- """ - await self._request( - "POST", - "logout", - query=QueryParams(scope=scope), - jwt=jwt, - no_resolve_json=True, - ) - - async def invite_user_by_email( - self, - email: str, - options: Optional[InviteUserByEmailOptions] = None, - ) -> UserResponse: - """ - Sends an invite link to an email address. - """ - email_options = options or {} - response = await self._request( - "POST", - "invite", - body={"email": email, "data": email_options.get("data")}, - redirect_to=email_options.get("redirect_to"), - ) - return parse_user_response(response) - - async def generate_link(self, params: GenerateLinkParams) -> GenerateLinkResponse: - """ - Generates email links and OTPs to be sent via a custom email provider. - """ - response = await self._request( - "POST", - "admin/generate_link", - body={ - "type": params.get("type"), - "email": params.get("email"), - "password": params.get("password"), - "new_email": params.get("new_email"), - "data": params.get("options", {}).get("data"), - }, - redirect_to=params.get("options", {}).get("redirect_to"), - ) - - return parse_link_response(response) - - # User Admin API - - async def create_user(self, attributes: AdminUserAttributes) -> UserResponse: - """ - Creates a new user. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - response = await self._request( - "POST", - "admin/users", - body=attributes, - ) - return parse_user_response(response) - - async def list_users( - self, page: Optional[int] = None, per_page: Optional[int] = None - ) -> List[User]: - """ - Get a list of users. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - response = await self._request( - "GET", - "admin/users", - query=QueryParams(page=page, per_page=per_page), - ) - return model_validate(UserList, response.content).users - - async def get_user_by_id(self, uid: str) -> UserResponse: - """ - Get user by id. 
- - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(uid) - - response = await self._request( - "GET", - f"admin/users/{uid}", - ) - return parse_user_response(response) - - async def update_user_by_id( - self, - uid: str, - attributes: AdminUserAttributes, - ) -> UserResponse: - """ - Updates the user data. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(uid) - response = await self._request( - "PUT", - f"admin/users/{uid}", - body=attributes, - ) - return parse_user_response(response) - - async def delete_user(self, id: str, should_soft_delete: bool = False) -> None: - """ - Delete a user. Requires a `service_role` key. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(id) - body = {"should_soft_delete": should_soft_delete} - await self._request("DELETE", f"admin/users/{id}", body=body) - - async def _list_factors( - self, - params: AuthMFAAdminListFactorsParams, - ) -> AuthMFAAdminListFactorsResponse: - validate_uuid(params.get("user_id")) - response = await self._request( - "GET", - f"admin/users/{params.get('user_id')}/factors", - ) - return AuthMFAAdminListFactorsResponseParser.validate_json(response.content) - - async def _delete_factor( - self, - params: AuthMFAAdminDeleteFactorParams, - ) -> AuthMFAAdminDeleteFactorResponse: - validate_uuid(params.get("user_id")) - validate_uuid(params.get("id")) - response = await self._request( - "DELETE", - f"admin/users/{params.get('user_id')}/factors/{params.get('id')}", - ) - return model_validate(AuthMFAAdminDeleteFactorResponse, response.content) - - async def _list_oauth_clients( - self, - params: PageParams | None = None, - ) -> OAuthClientListResponse: - """ - Lists all OAuth clients with optional pagination. 
- Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - if params: - query = QueryParams(page=params.page, per_page=params.per_page) - else: - query = None - response = await self._request( - "GET", - "admin/oauth/clients", - query=query, - no_resolve_json=True, - ) - - result = model_validate(OAuthClientListResponse, response.content) - - # Parse pagination headers - total = response.headers.get("x-total-count") - if total: - result.total = int(total) - - links = response.headers.get("link") - if links: - for link in links.split(","): - parts = link.split(";") - if len(parts) >= 2: - page_match = parts[0].split("page=") - if len(page_match) >= 2: - page_num = int(page_match[1].split("&")[0].rstrip(">")) - rel = parts[1].split("=")[1].strip('"') - if rel == "next": - result.next_page = page_num - elif rel == "last": - result.last_page = page_num - - return result - - async def _create_oauth_client( - self, - params: CreateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Creates a new OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - response = await self._request( - "POST", - "admin/oauth/clients", - body=params, - ) - - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - async def _get_oauth_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Gets details of a specific OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - validate_uuid(client_id) - response = await self._request( - "GET", - f"admin/oauth/clients/{client_id}", - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - async def _update_oauth_client( - self, - client_id: str, - params: UpdateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Updates an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - response = await self._request( - "PUT", - f"admin/oauth/clients/{client_id}", - body=params, - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - async def _delete_oauth_client( - self, - client_id: str, - ) -> None: - """ - Deletes an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - await self._request( - "DELETE", - f"admin/oauth/clients/{client_id}", - ) - - async def _regenerate_oauth_client_secret( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Regenerates the secret for an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - validate_uuid(client_id) - response = await self._request( - "POST", - f"admin/oauth/clients/{client_id}/regenerate_secret", - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) diff --git a/src/auth/src/supabase_auth/_async/gotrue_admin_mfa_api.py b/src/auth/src/supabase_auth/_async/gotrue_admin_mfa_api.py deleted file mode 100644 index ca812fcd..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_admin_mfa_api.py +++ /dev/null @@ -1,32 +0,0 @@ -from ..types import ( - AuthMFAAdminDeleteFactorParams, - AuthMFAAdminDeleteFactorResponse, - AuthMFAAdminListFactorsParams, - AuthMFAAdminListFactorsResponse, -) - - -class AsyncGoTrueAdminMFAAPI: - """ - Contains the full multi-factor authentication administration API. - """ - - async def list_factors( - self, - params: AuthMFAAdminListFactorsParams, - ) -> AuthMFAAdminListFactorsResponse: - """ - Lists all factors attached to a user. - """ - raise NotImplementedError() # pragma: no cover - - async def delete_factor( - self, - params: AuthMFAAdminDeleteFactorParams, - ) -> AuthMFAAdminDeleteFactorResponse: - """ - Deletes a factor on a user. This will log the user out of all active - sessions (if the deleted factor was verified). There's no need to delete - unverified factors. - """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_async/gotrue_admin_oauth_api.py b/src/auth/src/supabase_auth/_async/gotrue_admin_oauth_api.py deleted file mode 100644 index d0fffe9b..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_admin_oauth_api.py +++ /dev/null @@ -1,95 +0,0 @@ -from typing import Optional - -from ..types import ( - CreateOAuthClientParams, - OAuthClientListResponse, - OAuthClientResponse, - PageParams, - UpdateOAuthClientParams, -) - - -class AsyncGoTrueAdminOAuthAPI: - """ - Contains all OAuth client administration methods. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. 
- """ - - async def list_clients( - self, - params: Optional[PageParams] = None, - ) -> OAuthClientListResponse: - """ - Lists all OAuth clients with optional pagination. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - async def create_client( - self, - params: CreateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Creates a new OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - async def get_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Gets details of a specific OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - async def update_client( - self, - client_id: str, - params: UpdateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Updates an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - async def delete_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Deletes an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - raise NotImplementedError() # pragma: no cover - - async def regenerate_client_secret( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Regenerates the secret for an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_async/gotrue_base_api.py b/src/auth/src/supabase_auth/_async/gotrue_base_api.py deleted file mode 100644 index ae0bcaa9..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_base_api.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import annotations - -from typing import Any, Dict, Optional - -from httpx import AsyncClient, HTTPStatusError, QueryParams, Response -from pydantic import BaseModel -from typing_extensions import Literal, Self - -from ..constants import API_VERSION_HEADER_NAME, API_VERSIONS_2024_01_01_NAME -from ..helpers import handle_exception, model_dump - - -class AsyncGoTrueBaseAPI: - def __init__( - self, - *, - url: str, - headers: Dict[str, str], - http_client: Optional[AsyncClient], - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - self._url = url - self._headers = headers - self._http_client = http_client or AsyncClient( - verify=bool(verify), - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - async def __aenter__(self) -> Self: - return self - - async def __aexit__(self, exc_t, exc_v, exc_tb) -> None: - await self.close() - - async def close(self) -> None: - await self._http_client.aclose() - - async def _request( - self, - method: Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"], - path: str, - *, - jwt: Optional[str] = None, - redirect_to: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - query: Optional[QueryParams] = None, - body: Optional[Any] = None, - no_resolve_json: bool = False, - ) -> Response: 
- url = f"{self._url}/{path}" - headers = {**self._headers, **(headers or {})} - if API_VERSION_HEADER_NAME not in headers: - headers[API_VERSION_HEADER_NAME] = API_VERSIONS_2024_01_01_NAME - if "Content-Type" not in headers: - headers["Content-Type"] = "application/json;charset=UTF-8" - if jwt: - headers["Authorization"] = f"Bearer {jwt}" - query = query or QueryParams() - if redirect_to: - query = query.set("redirect_to", redirect_to) - try: - response = await self._http_client.request( - method, - url, - headers=headers, - params=query, - json=model_dump(body) if isinstance(body, BaseModel) else body, - ) - - response.raise_for_status() - return response - except (HTTPStatusError, RuntimeError) as e: - raise handle_exception(e) # noqa diff --git a/src/auth/src/supabase_auth/_async/gotrue_client.py b/src/auth/src/supabase_auth/_async/gotrue_client.py deleted file mode 100644 index 4048f93a..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_client.py +++ /dev/null @@ -1,1306 +0,0 @@ -from __future__ import annotations - -import platform -import sys -import time -from contextlib import suppress -from typing import Callable, Dict, List, Optional, Tuple -from urllib.parse import parse_qs, urlparse -from uuid import uuid4 -from warnings import warn - -from httpx import AsyncClient, QueryParams, Response -from jwt import get_algorithm_by_name -from typing_extensions import cast - -from ..constants import ( - EXPIRY_MARGIN, - GOTRUE_URL, - MAX_RETRIES, - STORAGE_KEY, -) -from ..errors import ( - AuthApiError, - AuthImplicitGrantRedirectError, - AuthInvalidCredentialsError, - AuthInvalidJwtError, - AuthRetryableError, - AuthSessionMissingError, - UserDoesntExist, -) -from ..helpers import ( - decode_jwt, - generate_pkce_challenge, - generate_pkce_verifier, - model_dump_json, - model_validate, - parse_auth_otp_response, - parse_auth_response, - parse_jwks, - parse_link_identity_response, - parse_sso_response, - parse_user_response, - validate_exp, -) -from ..timer 
import Timer -from ..types import ( - JWK, - AMREntry, - AuthChangeEvent, - AuthFlowType, - AuthMFAChallengeResponse, - AuthMFAEnrollResponse, - AuthMFAGetAuthenticatorAssuranceLevelResponse, - AuthMFAListFactorsResponse, - AuthMFAUnenrollResponse, - AuthMFAVerifyResponse, - AuthOtpResponse, - AuthResponse, - ClaimsResponse, - CodeExchangeParams, - IdentitiesResponse, - JWKSet, - MFAChallengeAndVerifyParams, - MFAChallengeParams, - MFAEnrollParams, - MFAUnenrollParams, - MFAVerifyParams, - OAuthResponse, - Options, - Provider, - ResendCredentials, - Session, - SignInAnonymouslyCredentials, - SignInWithEmailAndPasswordlessCredentialsOptions, - SignInWithIdTokenCredentials, - SignInWithOAuthCredentials, - SignInWithPasswordCredentials, - SignInWithPasswordlessCredentials, - SignInWithPhoneAndPasswordlessCredentialsOptions, - SignInWithSSOCredentials, - SignOutOptions, - SignUpWithEmailAndPasswordCredentialsOptions, - SignUpWithPasswordCredentials, - SignUpWithPhoneAndPasswordCredentialsOptions, - SSOResponse, - Subscription, - UpdateUserOptions, - UserAttributes, - UserIdentity, - UserResponse, - VerifyOtpParams, -) -from ..version import __version__ -from .gotrue_admin_api import AsyncGoTrueAdminAPI -from .gotrue_base_api import AsyncGoTrueBaseAPI -from .gotrue_mfa_api import AsyncGoTrueMFAAPI -from .storage import AsyncMemoryStorage, AsyncSupportedStorage - - -class AsyncGoTrueClient(AsyncGoTrueBaseAPI): - def __init__( - self, - *, - url: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - storage_key: Optional[str] = None, - auto_refresh_token: bool = True, - persist_session: bool = True, - storage: Optional[AsyncSupportedStorage] = None, - http_client: Optional[AsyncClient] = None, - flow_type: AuthFlowType = "implicit", - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - extra_headers = { - "X-Client-Info": f"supabase-py/supabase_auth v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - 
"X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - } - if headers: - extra_headers.update(headers) - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - AsyncGoTrueBaseAPI.__init__( - self, - url=url or GOTRUE_URL, - headers=extra_headers, - http_client=http_client, - verify=verify, - proxy=proxy, - ) - - self._jwks: JWKSet = {"keys": []} - self._jwks_ttl: float = 600 # 10 minutes - self._jwks_cached_at: Optional[float] = None - - self._storage_key = storage_key or STORAGE_KEY - self._auto_refresh_token = auto_refresh_token - self._persist_session = persist_session - self._storage = storage or AsyncMemoryStorage() - self._in_memory_session: Optional[Session] = None - self._refresh_token_timer: Optional[Timer] = None - self._network_retries = 0 - self._state_change_emitters: Dict[str, Subscription] = {} - self._flow_type = flow_type - - self.admin = AsyncGoTrueAdminAPI( - url=self._url, - headers=self._headers, - http_client=self._http_client, - ) - # TODO(@o-santi): why is it like this? 
- self.mfa = AsyncGoTrueMFAAPI() - self.mfa.challenge = self._challenge # type: ignore - self.mfa.challenge_and_verify = self._challenge_and_verify # type: ignore - self.mfa.enroll = self._enroll # type: ignore - self.mfa.get_authenticator_assurance_level = ( # type: ignore - self._get_authenticator_assurance_level - ) - self.mfa.list_factors = self._list_factors # type: ignore - self.mfa.unenroll = self._unenroll # type: ignore - self.mfa.verify = self._verify # type: ignore - - # Initializations - - async def initialize(self, *, url: Optional[str] = None) -> None: - if url and self._is_implicit_grant_flow(url): - await self.initialize_from_url(url) - else: - await self.initialize_from_storage() - - async def initialize_from_storage(self) -> None: - return await self._recover_and_refresh() - - async def initialize_from_url(self, url: str) -> None: - try: - if self._is_implicit_grant_flow(url): - session, redirect_type = await self._get_session_from_url(url) - await self._save_session(session) - self._notify_all_subscribers("SIGNED_IN", session) - if redirect_type == "recovery": - self._notify_all_subscribers("PASSWORD_RECOVERY", session) - except Exception as e: - await self._remove_session() - raise e - - # Public methods - - async def sign_in_anonymously( - self, credentials: Optional[SignInAnonymouslyCredentials] = None - ) -> AuthResponse: - """ - Creates a new anonymous user. 
- """ - await self._remove_session() - if credentials is None: - credentials = {"options": {}} - options = credentials.get("options", {}) - data = options.get("data") or {} - captcha_token = options.get("captcha_token") - response = await self._request( - "POST", - "signup", - body={ - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - auth_response = parse_auth_response(response) - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def sign_up( - self, - credentials: SignUpWithPasswordCredentials, - ) -> AuthResponse: - """ - Creates a new user. - """ - await self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - password = credentials.get("password") - # TODO(@o-santi): this is horrible, but it is the easiest way to satisfy mypy - # it should have been a builder pattern instead, and with proper classes - if email and password: - email_options = cast( - SignUpWithEmailAndPasswordCredentialsOptions, - credentials.get("options", {}), - ) - data = email_options.get("data") or {} - channel = email_options.get("channel", "sms") - captcha_token = email_options.get("captcha_token") - redirect_to = email_options.get("email_redirect_to") - response = await self._request( - "POST", - "signup", - body={ - "email": email, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - redirect_to=redirect_to, - ) - elif phone and password: - phone_options = cast( - SignUpWithPhoneAndPasswordCredentialsOptions, - credentials.get("options", {}), - ) - data = phone_options.get("data") or {} - channel = phone_options.get("channel", "sms") - captcha_token = phone_options.get("captcha_token") - response = await self._request( - "POST", - "signup", - body={ - "phone": phone, - "password": password, - "data": data, - "channel": 
channel, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - else: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number and a password" - ) - - auth_response = parse_auth_response(response) - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def sign_in_with_password( - self, - credentials: SignInWithPasswordCredentials, - ) -> AuthResponse: - """ - Log in an existing user with an email or phone and password. - """ - await self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - password = credentials.get("password") - options = credentials.get("options", {}) - data = options.get("data") or {} - captcha_token = options.get("captcha_token") - if email and password: - response = await self._request( - "POST", - "token", - body={ - "email": email, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="password"), - ) - elif phone and password: - response = await self._request( - "POST", - "token", - body={ - "phone": phone, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="password"), - ) - else: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number and a password" - ) - auth_response = parse_auth_response(response) - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def sign_in_with_id_token( - self, - credentials: SignInWithIdTokenCredentials, - ) -> AuthResponse: - """ - Allows signing in with an OIDC ID token. The authentication provider used should be enabled and configured. 
- """ - await self._remove_session() - provider = credentials["provider"] - token = credentials["token"] - access_token = credentials.get("access_token") - nonce = credentials.get("nonce") - options = credentials.get("options", {}) - captcha_token = options.get("captcha_token") - - response = await self._request( - "POST", - "token", - body={ - "provider": provider, - "id_token": token, - "access_token": access_token, - "nonce": nonce, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="id_token"), - ) - auth_response = parse_auth_response(response) - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def sign_in_with_sso( - self, credentials: SignInWithSSOCredentials - ) -> SSOResponse: - """ - Attempts a single-sign on using an enterprise Identity Provider. A - successful SSO attempt will redirect the current page to the identity - provider authorization page. The redirect URL is implementation and SSO - protocol specific. - - You can use it by providing a SSO domain. Typically you can extract this - domain by asking users for their email address. If this domain is - registered on the Auth instance the redirect will use that organization's - currently active SSO Identity Provider for the login. - If you have built an organization-specific login page, you can use the - organization's SSO Identity Provider UUID directly instead. 
- """ - await self._remove_session() - provider_id = credentials.get("provider_id") - domain = credentials.get("domain") - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - captcha_token = options.get("captcha_token") - # HTTPX currently does not follow redirects: https://www.python-httpx.org/compatibility/ - # Additionally, unlike the JS client, Python is a server side language and it's not possible - # to automatically redirect in browser for the user - skip_http_redirect = options.get("skip_http_redirect", True) - - if domain: - response = await self._request( - "POST", - "sso", - body={ - "domain": domain, - "skip_http_redirect": skip_http_redirect, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - "redirect_to": redirect_to, - }, - ) - return parse_sso_response(response) - if provider_id: - response = await self._request( - "POST", - "sso", - body={ - "provider_id": provider_id, - "skip_http_redirect": skip_http_redirect, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - "redirect_to": redirect_to, - }, - ) - return parse_sso_response(response) - raise AuthInvalidCredentialsError( - "You must provide either a domain or provider_id" - ) - - async def sign_in_with_oauth( - self, - credentials: SignInWithOAuthCredentials, - ) -> OAuthResponse: - """ - Log in an existing user via a third-party provider. 
- """ - await self._remove_session() - - provider = credentials["provider"] - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - scopes = options.get("scopes") - params = options.get("query_params", {}) - if redirect_to: - params["redirect_to"] = redirect_to - if scopes: - params["scopes"] = scopes - url_with_qs, _ = await self._get_url_for_provider( - f"{self._url}/authorize", provider, params - ) - return OAuthResponse(provider=provider, url=url_with_qs) - - async def link_identity( - self, credentials: SignInWithOAuthCredentials - ) -> OAuthResponse: - provider = credentials["provider"] - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - scopes = options.get("scopes") - params = options.get("query_params", {}) - if redirect_to: - params["redirect_to"] = redirect_to - if scopes: - params["scopes"] = scopes - params["skip_http_redirect"] = "true" - url = "user/identities/authorize" - _, query = await self._get_url_for_provider(url, provider, params) - - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - - response = await self._request( - method="GET", - path=url, - query=query, - jwt=session.access_token, - ) - link_identity = parse_link_identity_response(response) - return OAuthResponse(provider=provider, url=link_identity.url) - - async def get_user_identities(self) -> IdentitiesResponse: - response = await self.get_user() - if response: - return IdentitiesResponse(identities=response.user.identities or []) - raise AuthSessionMissingError() - - async def unlink_identity(self, identity: UserIdentity) -> Response: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - - return await self._request( - "DELETE", - f"user/identities/{identity.identity_id}", - jwt=session.access_token, - ) - - async def sign_in_with_otp( - self, - credentials: SignInWithPasswordlessCredentials, - ) -> AuthOtpResponse: - """ - Log in a user 
using magiclink or a one-time password (OTP). - - If the `{{ .ConfirmationURL }}` variable is specified in - the email template, a magiclink will be sent. - - If the `{{ .Token }}` variable is specified in the email - template, an OTP will be sent. - - If you're using phone sign-ins, only an OTP will be sent. - You won't be able to send a magiclink for phone sign-ins. - """ - await self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - # TODO(@o-santi): this is horrible, but it is the easiest way to satisfy mypy - # it should have been a builder pattern instead, and with proper classes - if email: - email_options = cast( - SignInWithEmailAndPasswordlessCredentialsOptions, - credentials.get("options", {}), - ) - email_redirect_to = email_options.get("email_redirect_to") - should_create_user = email_options.get("should_create_user", True) - data = email_options.get("data") - channel = email_options.get("channel", "sms") - captcha_token = email_options.get("captcha_token") - response = await self._request( - "POST", - "otp", - body={ - "email": email, - "data": data, - "create_user": should_create_user, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - redirect_to=email_redirect_to, - ) - return parse_auth_otp_response(response) - if phone: - phone_options = cast( - SignInWithPhoneAndPasswordlessCredentialsOptions, - credentials.get("options", {}), - ) - should_create_user = phone_options.get("should_create_user", True) - data = phone_options.get("data") - channel = phone_options.get("channel", "sms") - captcha_token = phone_options.get("captcha_token") - response = await self._request( - "POST", - "otp", - body={ - "phone": phone, - "data": data, - "create_user": should_create_user, - "channel": channel, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - return parse_auth_otp_response(response) - raise AuthInvalidCredentialsError( - "You must provide either an email or phone 
number" - ) - - async def resend( - self, - credentials: ResendCredentials, - ) -> AuthOtpResponse: - """ - Resends an existing signup confirmation email, email change email, SMS OTP or phone change OTP. - """ - email = credentials.get("email") - phone = credentials.get("phone") - type = credentials.get("type") - options = credentials.get("options", {}) - email_redirect_to: Optional[str] = options.get("email_redirect_to") # type: ignore - captcha_token = options.get("captcha_token") - body: Dict[str, object] = { # improve later - "type": type, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - } - - if email is None and phone is None: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number" - ) - - body.update({"email": email} if email else {"phone": phone}) - - response = await self._request( - "POST", - "resend", - body=body, - redirect_to=email_redirect_to if email else None, - ) - return parse_auth_otp_response(response) - - async def verify_otp(self, params: VerifyOtpParams) -> AuthResponse: - """ - Log in a user given a User supplied OTP received via mobile. 
- """ - await self._remove_session() - response = await self._request( - "POST", - "verify", - body={ - "gotrue_meta_security": { - "captcha_token": params.get("options", {}).get("captcha_token"), - }, - **params, - }, - redirect_to=params.get("options", {}).get("redirect_to"), - ) - auth_response = parse_auth_response(response) - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def reauthenticate(self) -> AuthResponse: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - - await self._request( - "GET", - "reauthenticate", - jwt=session.access_token, - ) - return AuthResponse(user=None, session=None) - - async def get_session(self) -> Optional[Session]: - """ - Returns the session, refreshing it if necessary. - - The session returned can be null if the session is not detected which - can happen in the event a user is not signed-in or has logged out. - """ - current_session: Optional[Session] = None - if self._persist_session: - maybe_session = await self._storage.get_item(self._storage_key) - current_session = self._get_valid_session(maybe_session) - if not current_session: - await self._remove_session() - else: - current_session = self._in_memory_session - - if not current_session: - return None - time_now = round(time.time()) - has_expired = ( - current_session.expires_at <= time_now + EXPIRY_MARGIN - if current_session.expires_at - else False - ) - return ( - await self._call_refresh_token(current_session.refresh_token) - if has_expired - else current_session - ) - - async def get_user(self, jwt: Optional[str] = None) -> Optional[UserResponse]: - """ - Gets the current user details if there is an existing session. - - Takes in an optional access token `jwt`. If no `jwt` is provided, - `get_user()` will attempt to get the `jwt` from the current session. 
- """ - if not jwt: - session = await self.get_session() - if session: - jwt = session.access_token - else: - return None - return parse_user_response(await self._request("GET", "user", jwt=jwt)) - - async def update_user( - self, attributes: UserAttributes, options: Optional[UpdateUserOptions] = None - ) -> UserResponse: - """ - Updates user data, if there is a logged in user. - """ - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - update_options = options or {} - response = await self._request( - "PUT", - "user", - body=attributes, - redirect_to=update_options.get("email_redirect_to"), - jwt=session.access_token, - ) - user_response = parse_user_response(response) - session.user = user_response.user - await self._save_session(session) - self._notify_all_subscribers("USER_UPDATED", session) - return user_response - - async def set_session(self, access_token: str, refresh_token: str) -> AuthResponse: - """ - Sets the session data from the current session. If the current session - is expired, `set_session` will take care of refreshing it to obtain a - new session. - - If the refresh token in the current session is invalid and the current - session has expired, an error will be thrown. - - If the current session does not contain at `expires_at` field, - `set_session` will use the exp claim defined in the access token. - - The current session that minimally contains an access token, - refresh token and a user. 
- """ - time_now = round(time.time()) - expires_at = time_now - has_expired = True - session: Optional[Session] = None - if access_token and access_token.split(".")[1]: - payload = decode_jwt(access_token)["payload"] - exp = payload.get("exp") - if exp: - expires_at = int(exp) - has_expired = expires_at <= time_now - if has_expired: - if not refresh_token: - raise AuthSessionMissingError() - response = await self._refresh_access_token(refresh_token) - if not response.session: - return AuthResponse() - session = response.session - else: - user_response = await self.get_user(access_token) - if user_response is None: - raise UserDoesntExist(access_token) - session = Session( - access_token=access_token, - refresh_token=refresh_token, - user=user_response.user, - token_type="bearer", - expires_in=expires_at - time_now, - expires_at=expires_at, - ) - await self._save_session(session) - self._notify_all_subscribers("TOKEN_REFRESHED", session) - return AuthResponse(session=session, user=session.user) - - async def refresh_session( - self, refresh_token: Optional[str] = None - ) -> AuthResponse: - """ - Returns a new session, regardless of expiry status. - - Takes in an optional current session. If not passed in, then refreshSession() - will attempt to retrieve it from getSession(). If the current session's - refresh token is invalid, an error will be thrown. - """ - if not refresh_token: - session = await self.get_session() - if session: - refresh_token = session.refresh_token - if not refresh_token: - raise AuthSessionMissingError() - session = await self._call_refresh_token(refresh_token) - return AuthResponse(session=session, user=session.user) - - async def sign_out(self, options: Optional[SignOutOptions] = None) -> None: - """ - `sign_out` will remove the logged in user from the - current session and log them out - removing all items from storage and then trigger a `"SIGNED_OUT"` event. 
- - For advanced use cases, you can revoke all refresh tokens for a user by passing a user's JWT through to `admin.sign_out`. - - There is no way to revoke a user's access token jwt until it expires. - It is recommended to set a shorter expiry on the jwt for this reason. - """ - signout_options = options or {"scope": "global"} - with suppress(AuthApiError): - session = await self.get_session() - access_token = session.access_token if session else None - if access_token: - await self.admin.sign_out(access_token, signout_options["scope"]) - - if signout_options["scope"] != "others": - await self._remove_session() - self._notify_all_subscribers("SIGNED_OUT", None) - - def on_auth_state_change( - self, - callback: Callable[[AuthChangeEvent, Optional[Session]], None], - ) -> Subscription: - """ - Receive a notification every time an auth event happens. - """ - unique_id = str(uuid4()) - - def _unsubscribe() -> None: - self._state_change_emitters.pop(unique_id) - - subscription = Subscription( - id=unique_id, - callback=callback, - unsubscribe=_unsubscribe, - ) - self._state_change_emitters[unique_id] = subscription - return subscription - - async def reset_password_for_email( - self, email: str, options: Optional[Options] = None - ) -> None: - """ - Sends a password reset request to an email address. - """ - reset_options = options or {} - await self._request( - "POST", - "recover", - body={ - "email": email, - "gotrue_meta_security": { - "captcha_token": reset_options.get("captcha_token"), - }, - }, - redirect_to=reset_options.get("redirect_to"), - ) - - async def reset_password_email( - self, - email: str, - options: Optional[Options] = None, - ) -> None: - """ - Sends a password reset request to an email address. 
- """ - - await self.reset_password_for_email(email, options or {}) - - # MFA methods - - async def _enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - - body = { - "friendly_name": params.get("friendly_name"), - "factor_type": params.get("factor_type"), - } - - if params["factor_type"] == "phone": - body["phone"] = params.get("phone") - else: - body["issuer"] = params.get("issuer") - - response = await self._request( - "POST", - "factors", - body=body, - jwt=session.access_token, - ) - auth_response = model_validate(AuthMFAEnrollResponse, response.content) - if params["factor_type"] == "totp" and auth_response.totp: - auth_response.totp.qr_code = ( - f"data:image/svg+xml;utf-8,{auth_response.totp.qr_code}" - ) - return auth_response - - async def _challenge(self, params: MFAChallengeParams) -> AuthMFAChallengeResponse: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - response = await self._request( - "POST", - f"factors/{params.get('factor_id')}/challenge", - body={"channel": params.get("channel")}, - jwt=session.access_token, - ) - return model_validate(AuthMFAChallengeResponse, response.content) - - async def _challenge_and_verify( - self, - params: MFAChallengeAndVerifyParams, - ) -> AuthMFAVerifyResponse: - response = await self._challenge( - { - "factor_id": params["factor_id"], - } - ) - return await self._verify( - { - "factor_id": params["factor_id"], - "challenge_id": response.id, - "code": params["code"], - } - ) - - async def _verify(self, params: MFAVerifyParams) -> AuthMFAVerifyResponse: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - response = await self._request( - "POST", - f"factors/{params.get('factor_id')}/verify", - body=params, - jwt=session.access_token, - ) - auth_response = model_validate(AuthMFAVerifyResponse, response.content) - session = 
model_validate(Session, response.content) - await self._save_session(session) - self._notify_all_subscribers("MFA_CHALLENGE_VERIFIED", session) - return auth_response - - async def _unenroll(self, params: MFAUnenrollParams) -> AuthMFAUnenrollResponse: - session = await self.get_session() - if not session: - raise AuthSessionMissingError() - response = await self._request( - "DELETE", - f"factors/{params.get('factor_id')}", - jwt=session.access_token, - ) - return model_validate(AuthMFAUnenrollResponse, response.content) - - async def _list_factors(self) -> AuthMFAListFactorsResponse: - response = await self.get_user() - factors = response.user.factors or [] if response else [] - totp = [ - f for f in factors if f.factor_type == "totp" and f.status == "verified" - ] - phone = [ - f for f in factors if f.factor_type == "phone" and f.status == "verified" - ] - return AuthMFAListFactorsResponse(all=factors, totp=totp, phone=phone) - - async def _get_authenticator_assurance_level( - self, - ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: - session = await self.get_session() - if not session: - return AuthMFAGetAuthenticatorAssuranceLevelResponse( - current_level=None, - next_level=None, - current_authentication_methods=[], - ) - payload = decode_jwt(session.access_token)["payload"] - current_level = payload.get("aal") - verified_factors = [ - f for f in session.user.factors or [] if f.status == "verified" - ] - next_level = "aal2" if verified_factors else current_level - amr_dict_list = payload.get("amr") or [] - current_authentication_methods = [ - AMREntry.model_validate(amr) for amr in amr_dict_list - ] - return AuthMFAGetAuthenticatorAssuranceLevelResponse( - current_level=current_level, - next_level=next_level, - current_authentication_methods=current_authentication_methods, - ) - - # Private methods - - async def _remove_session(self) -> None: - if self._persist_session: - await self._storage.remove_item(self._storage_key) - else: - self._in_memory_session = 
None - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = None - - async def _get_session_from_url( - self, - url: str, - ) -> Tuple[Session, Optional[str]]: - if not self._is_implicit_grant_flow(url): - raise AuthImplicitGrantRedirectError("Not a valid implicit grant flow url.") - result = urlparse(url) - params = parse_qs(result.query) - error_description = self._get_param(params, "error_description") - if error_description: - error_code = self._get_param(params, "error_code") - error = self._get_param(params, "error") - if not error_code: - raise AuthImplicitGrantRedirectError("No error_code detected.") - if not error: - raise AuthImplicitGrantRedirectError("No error detected.") - raise AuthImplicitGrantRedirectError( - error_description, - {"code": error_code, "error": error}, - ) - provider_token = self._get_param(params, "provider_token") - provider_refresh_token = self._get_param(params, "provider_refresh_token") - access_token = self._get_param(params, "access_token") - if not access_token: - raise AuthImplicitGrantRedirectError("No access_token detected.") - expires_in = self._get_param(params, "expires_in") - if not expires_in: - raise AuthImplicitGrantRedirectError("No expires_in detected.") - refresh_token = self._get_param(params, "refresh_token") - if not refresh_token: - raise AuthImplicitGrantRedirectError("No refresh_token detected.") - token_type = self._get_param(params, "token_type") - if not token_type: - raise AuthImplicitGrantRedirectError("No token_type detected.") - time_now = round(time.time()) - expires_at = time_now + int(expires_in) - user = await self.get_user(access_token) - if user is None: - raise UserDoesntExist(access_token) - session = Session( - provider_token=provider_token, - provider_refresh_token=provider_refresh_token, - access_token=access_token, - expires_in=int(expires_in), - expires_at=expires_at, - refresh_token=refresh_token, - token_type=token_type, - user=user.user, - ) 
- redirect_type = self._get_param(params, "type") - return session, redirect_type - - async def _recover_and_refresh(self) -> None: - raw_session = await self._storage.get_item(self._storage_key) - current_session = self._get_valid_session(raw_session) - if not current_session: - if raw_session: - await self._remove_session() - return - time_now = round(time.time()) - expires_at = current_session.expires_at - if expires_at and expires_at < time_now + EXPIRY_MARGIN: - refresh_token = current_session.refresh_token - if self._auto_refresh_token and refresh_token: - self._network_retries += 1 - try: - await self._call_refresh_token(refresh_token) - self._network_retries = 0 - except Exception as e: - if ( - isinstance(e, AuthRetryableError) - and self._network_retries < MAX_RETRIES - ): - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = Timer( - (200 * (2 ** (self._network_retries - 1))), - self._recover_and_refresh, - ) - self._refresh_token_timer.start() - return - await self._remove_session() - return - if self._persist_session: - await self._save_session(current_session) - self._notify_all_subscribers("SIGNED_IN", current_session) - - async def _call_refresh_token(self, refresh_token: str) -> Session: - if not refresh_token: - raise AuthSessionMissingError() - response = await self._refresh_access_token(refresh_token) - if not response.session: - raise AuthSessionMissingError() - await self._save_session(response.session) - self._notify_all_subscribers("TOKEN_REFRESHED", response.session) - return response.session - - async def _refresh_access_token(self, refresh_token: str) -> AuthResponse: - response = await self._request( - "POST", - "token", - query=QueryParams(grant_type="refresh_token"), - body={"refresh_token": refresh_token}, - ) - return parse_auth_response(response) - - async def _save_session(self, session: Session) -> None: - if not self._persist_session: - self._in_memory_session = session - expire_at = 
session.expires_at - if expire_at: - time_now = round(time.time()) - expire_in = expire_at - time_now - refresh_duration_before_expires = ( - EXPIRY_MARGIN if expire_in > EXPIRY_MARGIN else 0.5 - ) - value = (expire_in - refresh_duration_before_expires) * 1000 - await self._start_auto_refresh_token(value) - if self._persist_session and session.expires_at: - await self._storage.set_item(self._storage_key, model_dump_json(session)) - - async def _start_auto_refresh_token(self, value: float) -> None: - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = None - if value <= 0 or not self._auto_refresh_token: - return - - async def refresh_token_function() -> None: - self._network_retries += 1 - try: - session = await self.get_session() - if session: - await self._call_refresh_token(session.refresh_token) - self._network_retries = 0 - except Exception as e: - if ( - isinstance(e, AuthRetryableError) - and self._network_retries < MAX_RETRIES - ): - await self._start_auto_refresh_token( - 200 * (2 ** (self._network_retries - 1)) - ) - - self._refresh_token_timer = Timer(value, refresh_token_function) - self._refresh_token_timer.start() - - def _notify_all_subscribers( - self, - event: AuthChangeEvent, - session: Optional[Session], - ) -> None: - for subscription in self._state_change_emitters.values(): - subscription.callback(event, session) - - def _get_valid_session( - self, - raw_session: Optional[str], - ) -> Optional[Session]: - if not raw_session: - return None - try: - session = model_validate(Session, raw_session) - if session.expires_at is None: - return None - return session - except Exception: - return None - - def _get_param( - self, - query_params: Dict[str, List[str]], - name: str, - ) -> Optional[str]: - return query_params[name][0] if name in query_params else None - - def _is_implicit_grant_flow(self, url: str) -> bool: - result = urlparse(url) - params = parse_qs(result.query) - return "access_token" in params 
or "error_description" in params - - async def _get_url_for_provider( - self, - url: str, - provider: Provider, - params: Dict[str, str], - ) -> Tuple[str, QueryParams]: - query = QueryParams(params) - if self._flow_type == "pkce": - code_verifier = generate_pkce_verifier() - code_challenge = generate_pkce_challenge(code_verifier) - await self._storage.set_item( - f"{self._storage_key}-code-verifier", code_verifier - ) - code_challenge_method = ( - "plain" if code_verifier == code_challenge else "s256" - ) - query = query.set("code_challenge", code_challenge).set( - "code_challenge_method", code_challenge_method - ) - query = query.set("provider", provider) - return f"{url}?{query}", query - - async def exchange_code_for_session( - self, params: CodeExchangeParams - ) -> AuthResponse: - code_verifier = params.get("code_verifier") or await self._storage.get_item( - f"{self._storage_key}-code-verifier" - ) - response = await self._request( - "POST", - "token", - query=QueryParams(grant_type="pkce"), - body={ - "auth_code": params.get("auth_code"), - "code_verifier": code_verifier, - }, - redirect_to=params.get("redirect_to"), - ) - auth_response = parse_auth_response(response) - await self._storage.remove_item(f"{self._storage_key}-code-verifier") - if auth_response.session: - await self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - async def _fetch_jwks(self, kid: str, jwks: JWKSet) -> JWK: - jwk: Optional[JWK] = None - - # try fetching from the suplied keys. - jwk = next((jwk for jwk in jwks["keys"] if jwk["kid"] == kid), None) - - if jwk: - return jwk - - if self._jwks and ( - self._jwks_cached_at and time.time() - self._jwks_cached_at < self._jwks_ttl - ): - # try fetching from the cache. 
- jwk = next( - (jwk for jwk in self._jwks["keys"] if jwk["kid"] == kid), - None, - ) - if jwk: - return jwk - - # jwk isn't cached in memory so we need to fetch it from the well-known endpoint - response = await self._request("GET", ".well-known/jwks.json") - jwks = parse_jwks(response) - if response: - self._jwks = jwks - self._jwks_cached_at = time.time() - - # find the signing key - jwk = next((jwk for jwk in jwks["keys"] if jwk["kid"] == kid), None) - if not jwk: - raise AuthInvalidJwtError("No matching signing key found in JWKS") - - return jwk - - raise AuthInvalidJwtError("JWT has no valid kid") - - async def get_claims( - self, jwt: Optional[str] = None, jwks: Optional[JWKSet] = None - ) -> Optional[ClaimsResponse]: - token = jwt - if not token: - session = await self.get_session() - if not session: - return None - - token = session.access_token - - decoded_jwt = decode_jwt(token) - - payload, header, signature = ( - decoded_jwt["payload"], - decoded_jwt["header"], - decoded_jwt["signature"], - ) - raw_header, raw_payload = ( - decoded_jwt["raw"]["header"], - decoded_jwt["raw"]["payload"], - ) - - validate_exp(payload["exp"]) - - # if symmetric algorithm, fallback to get_user - if "kid" not in header or header["alg"] == "HS256": - await self.get_user(token) - return ClaimsResponse(claims=payload, headers=header, signature=signature) - - algorithm = get_algorithm_by_name(header["alg"]) - jwk_set = await self._fetch_jwks(header["kid"], jwks or {"keys": []}) - signing_key = algorithm.from_jwk(cast(Dict[str, str], jwk_set)) - - # verify the signature - is_valid = algorithm.verify( - msg=f"{raw_header}.{raw_payload}".encode(), key=signing_key, sig=signature - ) - - if not is_valid: - raise AuthInvalidJwtError("Invalid JWT signature") - - # If verification succeeds, decode and return claims - return ClaimsResponse(claims=payload, headers=header, signature=signature) - - def __del__(self) -> None: - """Clean up resources when the client is destroyed.""" - if 
self._refresh_token_timer: - try: - # Try to cancel the timer - self._refresh_token_timer.cancel() - except Exception: - # Ignore errors if event loop is closed or selector is not registered - pass - finally: - # Always set to None to prevent further attempts - self._refresh_token_timer = None diff --git a/src/auth/src/supabase_auth/_async/gotrue_mfa_api.py b/src/auth/src/supabase_auth/_async/gotrue_mfa_api.py deleted file mode 100644 index a30c4c73..00000000 --- a/src/auth/src/supabase_auth/_async/gotrue_mfa_api.py +++ /dev/null @@ -1,94 +0,0 @@ -from ..types import ( - AuthMFAChallengeResponse, - AuthMFAEnrollResponse, - AuthMFAGetAuthenticatorAssuranceLevelResponse, - AuthMFAListFactorsResponse, - AuthMFAUnenrollResponse, - AuthMFAVerifyResponse, - MFAChallengeAndVerifyParams, - MFAChallengeParams, - MFAEnrollParams, - MFAUnenrollParams, - MFAVerifyParams, -) - - -class AsyncGoTrueMFAAPI: - """ - Contains the full multi-factor authentication API. - """ - - async def enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: - """ - Starts the enrollment process for a new Multi-Factor Authentication - factor. This method creates a new factor in the 'unverified' state. - Present the QR code or secret to the user and ask them to add it to their - authenticator app. Ask the user to provide you with an authenticator code - from their app and verify it by calling challenge and then verify. - - The first successful verification of an unverified factor activates the - factor. All other sessions are logged out and the current one gets an - `aal2` authenticator level. - """ - raise NotImplementedError() # pragma: no cover - - async def challenge(self, params: MFAChallengeParams) -> AuthMFAChallengeResponse: - """ - Prepares a challenge used to verify that a user has access to a MFA - factor. Provide the challenge ID and verification code by calling `verify`. 
- """ - raise NotImplementedError() # pragma: no cover - - async def challenge_and_verify( - self, - params: MFAChallengeAndVerifyParams, - ) -> AuthMFAVerifyResponse: - """ - Helper method which creates a challenge and immediately uses the given code - to verify against it thereafter. The verification code is provided by the - user by entering a code seen in their authenticator app. - """ - raise NotImplementedError() # pragma: no cover - - async def verify(self, params: MFAVerifyParams) -> AuthMFAVerifyResponse: - """ - Verifies a verification code against a challenge. The verification code is - provided by the user by entering a code seen in their authenticator app. - """ - raise NotImplementedError() # pragma: no cover - - async def unenroll(self, params: MFAUnenrollParams) -> AuthMFAUnenrollResponse: - """ - Unenroll removes a MFA factor. Unverified factors can safely be ignored - and it's not necessary to unenroll them. Unenrolling a verified MFA factor - cannot be done from a session with an `aal1` authenticator level. - """ - raise NotImplementedError() # pragma: no cover - - async def list_factors(self) -> AuthMFAListFactorsResponse: - """ - Returns the list of MFA factors enabled for this user. For most use cases - you should consider using `get_authenticator_assurance_level`. - - This uses a cached version of the factors and avoids incurring a network call. - If you need to update this list, call `get_user` first. - """ - raise NotImplementedError() # pragma: no cover - - async def get_authenticator_assurance_level( - self, - ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: - """ - Returns the Authenticator Assurance Level (AAL) for the active session. - - - `aal1` (or `null`) means that the user's identity has been verified only - with a conventional login (email+password, OTP, magic link, social login, - etc.). - - `aal2` means that the user's identity has been verified both with a - conventional login and at least one MFA factor. 
- - Although this method returns a promise, it's fairly quick (microseconds) - and rarely uses the network. You can use this to check whether the current - user needs to be shown a screen to verify their MFA factors. - """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_async/storage.py b/src/auth/src/supabase_auth/_async/storage.py deleted file mode 100644 index db520b11..00000000 --- a/src/auth/src/supabase_auth/_async/storage.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from typing import Dict, Optional - - -class AsyncSupportedStorage(ABC): - @abstractmethod - async def get_item(self, key: str) -> Optional[str]: ... # pragma: no cover - - @abstractmethod - async def set_item(self, key: str, value: str) -> None: ... # pragma: no cover - - @abstractmethod - async def remove_item(self, key: str) -> None: ... # pragma: no cover - - -class AsyncMemoryStorage(AsyncSupportedStorage): - def __init__(self) -> None: - self.storage: Dict[str, str] = {} - - async def get_item(self, key: str) -> Optional[str]: - if key in self.storage: - return self.storage[key] - return None - - async def set_item(self, key: str, value: str) -> None: - self.storage[key] = value - - async def remove_item(self, key: str) -> None: - if key in self.storage: - del self.storage[key] diff --git a/src/auth/src/supabase_auth/_sync/__init__.py b/src/auth/src/supabase_auth/_sync/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/auth/src/supabase_auth/_sync/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/auth/src/supabase_auth/_sync/gotrue_admin_api.py b/src/auth/src/supabase_auth/_sync/gotrue_admin_api.py deleted file mode 100644 index b239b014..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_admin_api.py +++ /dev/null @@ -1,353 +0,0 @@ -from __future__ import annotations - -from typing import Dict, List, Optional - -from 
httpx import Client, QueryParams - -from ..helpers import ( - model_validate, - parse_link_response, - parse_user_response, - validate_uuid, -) -from ..types import ( - AdminUserAttributes, - AuthMFAAdminDeleteFactorParams, - AuthMFAAdminDeleteFactorResponse, - AuthMFAAdminListFactorsParams, - AuthMFAAdminListFactorsResponse, - AuthMFAAdminListFactorsResponseParser, - CreateOAuthClientParams, - GenerateLinkParams, - GenerateLinkResponse, - InviteUserByEmailOptions, - OAuthClient, - OAuthClientListResponse, - OAuthClientResponse, - PageParams, - SignOutScope, - UpdateOAuthClientParams, - User, - UserList, - UserResponse, -) -from .gotrue_admin_mfa_api import SyncGoTrueAdminMFAAPI -from .gotrue_admin_oauth_api import SyncGoTrueAdminOAuthAPI -from .gotrue_base_api import SyncGoTrueBaseAPI - - -class SyncGoTrueAdminAPI(SyncGoTrueBaseAPI): - def __init__( - self, - *, - url: str = "", - headers: Optional[Dict[str, str]] = None, - http_client: Optional[Client] = None, - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - http_headers = headers or {} - SyncGoTrueBaseAPI.__init__( - self, - url=url, - headers=http_headers, - http_client=http_client, - verify=verify, - proxy=proxy, - ) - # TODO(@o-santi): why is is this done this way? 
- self.mfa = SyncGoTrueAdminMFAAPI() - self.mfa.list_factors = self._list_factors # type: ignore - self.mfa.delete_factor = self._delete_factor # type: ignore - self.oauth = SyncGoTrueAdminOAuthAPI() - self.oauth.list_clients = self._list_oauth_clients # type: ignore - self.oauth.create_client = self._create_oauth_client # type: ignore - self.oauth.get_client = self._get_oauth_client # type: ignore - self.oauth.update_client = self._update_oauth_client # type: ignore - self.oauth.delete_client = self._delete_oauth_client # type: ignore - self.oauth.regenerate_client_secret = self._regenerate_oauth_client_secret # type: ignore - - def sign_out(self, jwt: str, scope: SignOutScope = "global") -> None: - """ - Removes a logged-in session. - """ - self._request( - "POST", - "logout", - query=QueryParams(scope=scope), - jwt=jwt, - no_resolve_json=True, - ) - - def invite_user_by_email( - self, - email: str, - options: Optional[InviteUserByEmailOptions] = None, - ) -> UserResponse: - """ - Sends an invite link to an email address. - """ - email_options = options or {} - response = self._request( - "POST", - "invite", - body={"email": email, "data": email_options.get("data")}, - redirect_to=email_options.get("redirect_to"), - ) - return parse_user_response(response) - - def generate_link(self, params: GenerateLinkParams) -> GenerateLinkResponse: - """ - Generates email links and OTPs to be sent via a custom email provider. - """ - response = self._request( - "POST", - "admin/generate_link", - body={ - "type": params.get("type"), - "email": params.get("email"), - "password": params.get("password"), - "new_email": params.get("new_email"), - "data": params.get("options", {}).get("data"), - }, - redirect_to=params.get("options", {}).get("redirect_to"), - ) - - return parse_link_response(response) - - # User Admin API - - def create_user(self, attributes: AdminUserAttributes) -> UserResponse: - """ - Creates a new user. - - This function should only be called on a server. 
- Never expose your `service_role` key in the browser. - """ - response = self._request( - "POST", - "admin/users", - body=attributes, - ) - return parse_user_response(response) - - def list_users( - self, page: Optional[int] = None, per_page: Optional[int] = None - ) -> List[User]: - """ - Get a list of users. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - response = self._request( - "GET", - "admin/users", - query=QueryParams(page=page, per_page=per_page), - ) - return model_validate(UserList, response.content).users - - def get_user_by_id(self, uid: str) -> UserResponse: - """ - Get user by id. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(uid) - - response = self._request( - "GET", - f"admin/users/{uid}", - ) - return parse_user_response(response) - - def update_user_by_id( - self, - uid: str, - attributes: AdminUserAttributes, - ) -> UserResponse: - """ - Updates the user data. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(uid) - response = self._request( - "PUT", - f"admin/users/{uid}", - body=attributes, - ) - return parse_user_response(response) - - def delete_user(self, id: str, should_soft_delete: bool = False) -> None: - """ - Delete a user. Requires a `service_role` key. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - validate_uuid(id) - body = {"should_soft_delete": should_soft_delete} - self._request("DELETE", f"admin/users/{id}", body=body) - - def _list_factors( - self, - params: AuthMFAAdminListFactorsParams, - ) -> AuthMFAAdminListFactorsResponse: - validate_uuid(params.get("user_id")) - response = self._request( - "GET", - f"admin/users/{params.get('user_id')}/factors", - ) - return AuthMFAAdminListFactorsResponseParser.validate_json(response.content) - - def _delete_factor( - self, - params: AuthMFAAdminDeleteFactorParams, - ) -> AuthMFAAdminDeleteFactorResponse: - validate_uuid(params.get("user_id")) - validate_uuid(params.get("id")) - response = self._request( - "DELETE", - f"admin/users/{params.get('user_id')}/factors/{params.get('id')}", - ) - return model_validate(AuthMFAAdminDeleteFactorResponse, response.content) - - def _list_oauth_clients( - self, - params: PageParams | None = None, - ) -> OAuthClientListResponse: - """ - Lists all OAuth clients with optional pagination. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - if params: - query = QueryParams(page=params.page, per_page=params.per_page) - else: - query = None - response = self._request( - "GET", - "admin/oauth/clients", - query=query, - no_resolve_json=True, - ) - - result = model_validate(OAuthClientListResponse, response.content) - - # Parse pagination headers - total = response.headers.get("x-total-count") - if total: - result.total = int(total) - - links = response.headers.get("link") - if links: - for link in links.split(","): - parts = link.split(";") - if len(parts) >= 2: - page_match = parts[0].split("page=") - if len(page_match) >= 2: - page_num = int(page_match[1].split("&")[0].rstrip(">")) - rel = parts[1].split("=")[1].strip('"') - if rel == "next": - result.next_page = page_num - elif rel == "last": - result.last_page = page_num - - return result - - def _create_oauth_client( - self, - params: CreateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Creates a new OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - response = self._request( - "POST", - "admin/oauth/clients", - body=params, - ) - - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - def _get_oauth_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Gets details of a specific OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - response = self._request( - "GET", - f"admin/oauth/clients/{client_id}", - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - def _update_oauth_client( - self, - client_id: str, - params: UpdateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Updates an OAuth client. 
- Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - response = self._request( - "PUT", - f"admin/oauth/clients/{client_id}", - body=params, - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) - - def _delete_oauth_client( - self, - client_id: str, - ) -> None: - """ - Deletes an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - self._request( - "DELETE", - f"admin/oauth/clients/{client_id}", - ) - - def _regenerate_oauth_client_secret( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Regenerates the secret for an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - validate_uuid(client_id) - response = self._request( - "POST", - f"admin/oauth/clients/{client_id}/regenerate_secret", - ) - return OAuthClientResponse(client=model_validate(OAuthClient, response.content)) diff --git a/src/auth/src/supabase_auth/_sync/gotrue_admin_mfa_api.py b/src/auth/src/supabase_auth/_sync/gotrue_admin_mfa_api.py deleted file mode 100644 index c3fcfc8e..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_admin_mfa_api.py +++ /dev/null @@ -1,32 +0,0 @@ -from ..types import ( - AuthMFAAdminDeleteFactorParams, - AuthMFAAdminDeleteFactorResponse, - AuthMFAAdminListFactorsParams, - AuthMFAAdminListFactorsResponse, -) - - -class SyncGoTrueAdminMFAAPI: - """ - Contains the full multi-factor authentication administration API. 
- """ - - def list_factors( - self, - params: AuthMFAAdminListFactorsParams, - ) -> AuthMFAAdminListFactorsResponse: - """ - Lists all factors attached to a user. - """ - raise NotImplementedError() # pragma: no cover - - def delete_factor( - self, - params: AuthMFAAdminDeleteFactorParams, - ) -> AuthMFAAdminDeleteFactorResponse: - """ - Deletes a factor on a user. This will log the user out of all active - sessions (if the deleted factor was verified). There's no need to delete - unverified factors. - """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_sync/gotrue_admin_oauth_api.py b/src/auth/src/supabase_auth/_sync/gotrue_admin_oauth_api.py deleted file mode 100644 index a050da4f..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_admin_oauth_api.py +++ /dev/null @@ -1,95 +0,0 @@ -from typing import Optional - -from ..types import ( - CreateOAuthClientParams, - OAuthClientListResponse, - OAuthClientResponse, - PageParams, - UpdateOAuthClientParams, -) - - -class SyncGoTrueAdminOAuthAPI: - """ - Contains all OAuth client administration methods. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - """ - - def list_clients( - self, - params: Optional[PageParams] = None, - ) -> OAuthClientListResponse: - """ - Lists all OAuth clients with optional pagination. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - def create_client( - self, - params: CreateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Creates a new OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - raise NotImplementedError() # pragma: no cover - - def get_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Gets details of a specific OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - def update_client( - self, - client_id: str, - params: UpdateOAuthClientParams, - ) -> OAuthClientResponse: - """ - Updates an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - def delete_client( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Deletes an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. - """ - raise NotImplementedError() # pragma: no cover - - def regenerate_client_secret( - self, - client_id: str, - ) -> OAuthClientResponse: - """ - Regenerates the secret for an OAuth client. - Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. - - This function should only be called on a server. - Never expose your `service_role` key in the browser. 
- """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_sync/gotrue_base_api.py b/src/auth/src/supabase_auth/_sync/gotrue_base_api.py deleted file mode 100644 index 727478eb..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_base_api.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import annotations - -from typing import Any, Dict, Optional - -from httpx import Client, HTTPStatusError, QueryParams, Response -from pydantic import BaseModel -from typing_extensions import Literal, Self - -from ..constants import API_VERSION_HEADER_NAME, API_VERSIONS_2024_01_01_NAME -from ..helpers import handle_exception, model_dump - - -class SyncGoTrueBaseAPI: - def __init__( - self, - *, - url: str, - headers: Dict[str, str], - http_client: Optional[Client], - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - self._url = url - self._headers = headers - self._http_client = http_client or Client( - verify=bool(verify), - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - def __enter__(self) -> Self: - return self - - def __exit__(self, exc_t, exc_v, exc_tb) -> None: - self.close() - - def close(self) -> None: - self._http_client.close() - - def _request( - self, - method: Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"], - path: str, - *, - jwt: Optional[str] = None, - redirect_to: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - query: Optional[QueryParams] = None, - body: Optional[Any] = None, - no_resolve_json: bool = False, - ) -> Response: - url = f"{self._url}/{path}" - headers = {**self._headers, **(headers or {})} - if API_VERSION_HEADER_NAME not in headers: - headers[API_VERSION_HEADER_NAME] = API_VERSIONS_2024_01_01_NAME - if "Content-Type" not in headers: - headers["Content-Type"] = "application/json;charset=UTF-8" - if jwt: - headers["Authorization"] = f"Bearer {jwt}" - query = query or QueryParams() - if redirect_to: - query = query.set("redirect_to", 
redirect_to) - try: - response = self._http_client.request( - method, - url, - headers=headers, - params=query, - json=model_dump(body) if isinstance(body, BaseModel) else body, - ) - - response.raise_for_status() - return response - except (HTTPStatusError, RuntimeError) as e: - raise handle_exception(e) # noqa diff --git a/src/auth/src/supabase_auth/_sync/gotrue_client.py b/src/auth/src/supabase_auth/_sync/gotrue_client.py deleted file mode 100644 index 882950e7..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_client.py +++ /dev/null @@ -1,1296 +0,0 @@ -from __future__ import annotations - -import platform -import sys -import time -from contextlib import suppress -from typing import Callable, Dict, List, Optional, Tuple -from urllib.parse import parse_qs, urlparse -from uuid import uuid4 -from warnings import warn - -from httpx import Client, QueryParams, Response -from jwt import get_algorithm_by_name -from typing_extensions import cast - -from ..constants import ( - EXPIRY_MARGIN, - GOTRUE_URL, - MAX_RETRIES, - STORAGE_KEY, -) -from ..errors import ( - AuthApiError, - AuthImplicitGrantRedirectError, - AuthInvalidCredentialsError, - AuthInvalidJwtError, - AuthRetryableError, - AuthSessionMissingError, - UserDoesntExist, -) -from ..helpers import ( - decode_jwt, - generate_pkce_challenge, - generate_pkce_verifier, - model_dump_json, - model_validate, - parse_auth_otp_response, - parse_auth_response, - parse_jwks, - parse_link_identity_response, - parse_sso_response, - parse_user_response, - validate_exp, -) -from ..timer import Timer -from ..types import ( - JWK, - AMREntry, - AuthChangeEvent, - AuthFlowType, - AuthMFAChallengeResponse, - AuthMFAEnrollResponse, - AuthMFAGetAuthenticatorAssuranceLevelResponse, - AuthMFAListFactorsResponse, - AuthMFAUnenrollResponse, - AuthMFAVerifyResponse, - AuthOtpResponse, - AuthResponse, - ClaimsResponse, - CodeExchangeParams, - IdentitiesResponse, - JWKSet, - MFAChallengeAndVerifyParams, - MFAChallengeParams, - 
MFAEnrollParams, - MFAUnenrollParams, - MFAVerifyParams, - OAuthResponse, - Options, - Provider, - ResendCredentials, - Session, - SignInAnonymouslyCredentials, - SignInWithEmailAndPasswordlessCredentialsOptions, - SignInWithIdTokenCredentials, - SignInWithOAuthCredentials, - SignInWithPasswordCredentials, - SignInWithPasswordlessCredentials, - SignInWithPhoneAndPasswordlessCredentialsOptions, - SignInWithSSOCredentials, - SignOutOptions, - SignUpWithEmailAndPasswordCredentialsOptions, - SignUpWithPasswordCredentials, - SignUpWithPhoneAndPasswordCredentialsOptions, - SSOResponse, - Subscription, - UpdateUserOptions, - UserAttributes, - UserIdentity, - UserResponse, - VerifyOtpParams, -) -from ..version import __version__ -from .gotrue_admin_api import SyncGoTrueAdminAPI -from .gotrue_base_api import SyncGoTrueBaseAPI -from .gotrue_mfa_api import SyncGoTrueMFAAPI -from .storage import SyncMemoryStorage, SyncSupportedStorage - - -class SyncGoTrueClient(SyncGoTrueBaseAPI): - def __init__( - self, - *, - url: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - storage_key: Optional[str] = None, - auto_refresh_token: bool = True, - persist_session: bool = True, - storage: Optional[SyncSupportedStorage] = None, - http_client: Optional[Client] = None, - flow_type: AuthFlowType = "implicit", - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - extra_headers = { - "X-Client-Info": f"supabase-py/supabase_auth v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - } - if headers: - extra_headers.update(headers) - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. 
Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - SyncGoTrueBaseAPI.__init__( - self, - url=url or GOTRUE_URL, - headers=extra_headers, - http_client=http_client, - verify=verify, - proxy=proxy, - ) - - self._jwks: JWKSet = {"keys": []} - self._jwks_ttl: float = 600 # 10 minutes - self._jwks_cached_at: Optional[float] = None - - self._storage_key = storage_key or STORAGE_KEY - self._auto_refresh_token = auto_refresh_token - self._persist_session = persist_session - self._storage = storage or SyncMemoryStorage() - self._in_memory_session: Optional[Session] = None - self._refresh_token_timer: Optional[Timer] = None - self._network_retries = 0 - self._state_change_emitters: Dict[str, Subscription] = {} - self._flow_type = flow_type - - self.admin = SyncGoTrueAdminAPI( - url=self._url, - headers=self._headers, - http_client=self._http_client, - ) - # TODO(@o-santi): why is it like this? - self.mfa = SyncGoTrueMFAAPI() - self.mfa.challenge = self._challenge # type: ignore - self.mfa.challenge_and_verify = self._challenge_and_verify # type: ignore - self.mfa.enroll = self._enroll # type: ignore - self.mfa.get_authenticator_assurance_level = ( # type: ignore - self._get_authenticator_assurance_level - ) - self.mfa.list_factors = self._list_factors # type: ignore - self.mfa.unenroll = self._unenroll # type: ignore - self.mfa.verify = self._verify # type: ignore - - # Initializations - - def initialize(self, *, url: Optional[str] = None) -> None: - if url and self._is_implicit_grant_flow(url): - self.initialize_from_url(url) - else: - self.initialize_from_storage() - - def initialize_from_storage(self) -> None: - return self._recover_and_refresh() - - def initialize_from_url(self, url: str) -> None: - try: - if self._is_implicit_grant_flow(url): - session, redirect_type = self._get_session_from_url(url) - self._save_session(session) - self._notify_all_subscribers("SIGNED_IN", session) - if redirect_type == "recovery": - 
self._notify_all_subscribers("PASSWORD_RECOVERY", session) - except Exception as e: - self._remove_session() - raise e - - # Public methods - - def sign_in_anonymously( - self, credentials: Optional[SignInAnonymouslyCredentials] = None - ) -> AuthResponse: - """ - Creates a new anonymous user. - """ - self._remove_session() - if credentials is None: - credentials = {"options": {}} - options = credentials.get("options", {}) - data = options.get("data") or {} - captcha_token = options.get("captcha_token") - response = self._request( - "POST", - "signup", - body={ - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - auth_response = parse_auth_response(response) - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def sign_up( - self, - credentials: SignUpWithPasswordCredentials, - ) -> AuthResponse: - """ - Creates a new user. - """ - self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - password = credentials.get("password") - # TODO(@o-santi): this is horrible, but it is the easiest way to satisfy mypy - # it should have been a builder pattern instead, and with proper classes - if email and password: - email_options = cast( - SignUpWithEmailAndPasswordCredentialsOptions, - credentials.get("options", {}), - ) - data = email_options.get("data") or {} - channel = email_options.get("channel", "sms") - captcha_token = email_options.get("captcha_token") - redirect_to = email_options.get("email_redirect_to") - response = self._request( - "POST", - "signup", - body={ - "email": email, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - redirect_to=redirect_to, - ) - elif phone and password: - phone_options = cast( - SignUpWithPhoneAndPasswordCredentialsOptions, - credentials.get("options", {}), - ) - data = 
phone_options.get("data") or {} - channel = phone_options.get("channel", "sms") - captcha_token = phone_options.get("captcha_token") - response = self._request( - "POST", - "signup", - body={ - "phone": phone, - "password": password, - "data": data, - "channel": channel, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - else: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number and a password" - ) - - auth_response = parse_auth_response(response) - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def sign_in_with_password( - self, - credentials: SignInWithPasswordCredentials, - ) -> AuthResponse: - """ - Log in an existing user with an email or phone and password. - """ - self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - password = credentials.get("password") - options = credentials.get("options", {}) - data = options.get("data") or {} - captcha_token = options.get("captcha_token") - if email and password: - response = self._request( - "POST", - "token", - body={ - "email": email, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="password"), - ) - elif phone and password: - response = self._request( - "POST", - "token", - body={ - "phone": phone, - "password": password, - "data": data, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="password"), - ) - else: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number and a password" - ) - auth_response = parse_auth_response(response) - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def 
sign_in_with_id_token( - self, - credentials: SignInWithIdTokenCredentials, - ) -> AuthResponse: - """ - Allows signing in with an OIDC ID token. The authentication provider used should be enabled and configured. - """ - self._remove_session() - provider = credentials["provider"] - token = credentials["token"] - access_token = credentials.get("access_token") - nonce = credentials.get("nonce") - options = credentials.get("options", {}) - captcha_token = options.get("captcha_token") - - response = self._request( - "POST", - "token", - body={ - "provider": provider, - "id_token": token, - "access_token": access_token, - "nonce": nonce, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - query=QueryParams(grant_type="id_token"), - ) - auth_response = parse_auth_response(response) - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def sign_in_with_sso(self, credentials: SignInWithSSOCredentials) -> SSOResponse: - """ - Attempts a single-sign on using an enterprise Identity Provider. A - successful SSO attempt will redirect the current page to the identity - provider authorization page. The redirect URL is implementation and SSO - protocol specific. - - You can use it by providing a SSO domain. Typically you can extract this - domain by asking users for their email address. If this domain is - registered on the Auth instance the redirect will use that organization's - currently active SSO Identity Provider for the login. - If you have built an organization-specific login page, you can use the - organization's SSO Identity Provider UUID directly instead. 
- """ - self._remove_session() - provider_id = credentials.get("provider_id") - domain = credentials.get("domain") - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - captcha_token = options.get("captcha_token") - # HTTPX currently does not follow redirects: https://www.python-httpx.org/compatibility/ - # Additionally, unlike the JS client, Python is a server side language and it's not possible - # to automatically redirect in browser for the user - skip_http_redirect = options.get("skip_http_redirect", True) - - if domain: - response = self._request( - "POST", - "sso", - body={ - "domain": domain, - "skip_http_redirect": skip_http_redirect, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - "redirect_to": redirect_to, - }, - ) - return parse_sso_response(response) - if provider_id: - response = self._request( - "POST", - "sso", - body={ - "provider_id": provider_id, - "skip_http_redirect": skip_http_redirect, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - "redirect_to": redirect_to, - }, - ) - return parse_sso_response(response) - raise AuthInvalidCredentialsError( - "You must provide either a domain or provider_id" - ) - - def sign_in_with_oauth( - self, - credentials: SignInWithOAuthCredentials, - ) -> OAuthResponse: - """ - Log in an existing user via a third-party provider. 
- """ - self._remove_session() - - provider = credentials["provider"] - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - scopes = options.get("scopes") - params = options.get("query_params", {}) - if redirect_to: - params["redirect_to"] = redirect_to - if scopes: - params["scopes"] = scopes - url_with_qs, _ = self._get_url_for_provider( - f"{self._url}/authorize", provider, params - ) - return OAuthResponse(provider=provider, url=url_with_qs) - - def link_identity(self, credentials: SignInWithOAuthCredentials) -> OAuthResponse: - provider = credentials["provider"] - options = credentials.get("options", {}) - redirect_to = options.get("redirect_to") - scopes = options.get("scopes") - params = options.get("query_params", {}) - if redirect_to: - params["redirect_to"] = redirect_to - if scopes: - params["scopes"] = scopes - params["skip_http_redirect"] = "true" - url = "user/identities/authorize" - _, query = self._get_url_for_provider(url, provider, params) - - session = self.get_session() - if not session: - raise AuthSessionMissingError() - - response = self._request( - method="GET", - path=url, - query=query, - jwt=session.access_token, - ) - link_identity = parse_link_identity_response(response) - return OAuthResponse(provider=provider, url=link_identity.url) - - def get_user_identities(self) -> IdentitiesResponse: - response = self.get_user() - if response: - return IdentitiesResponse(identities=response.user.identities or []) - raise AuthSessionMissingError() - - def unlink_identity(self, identity: UserIdentity) -> Response: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - - return self._request( - "DELETE", - f"user/identities/{identity.identity_id}", - jwt=session.access_token, - ) - - def sign_in_with_otp( - self, - credentials: SignInWithPasswordlessCredentials, - ) -> AuthOtpResponse: - """ - Log in a user using magiclink or a one-time password (OTP). 
- - If the `{{ .ConfirmationURL }}` variable is specified in - the email template, a magiclink will be sent. - - If the `{{ .Token }}` variable is specified in the email - template, an OTP will be sent. - - If you're using phone sign-ins, only an OTP will be sent. - You won't be able to send a magiclink for phone sign-ins. - """ - self._remove_session() - email = credentials.get("email") - phone = credentials.get("phone") - # TODO(@o-santi): this is horrible, but it is the easiest way to satisfy mypy - # it should have been a builder pattern instead, and with proper classes - if email: - email_options = cast( - SignInWithEmailAndPasswordlessCredentialsOptions, - credentials.get("options", {}), - ) - email_redirect_to = email_options.get("email_redirect_to") - should_create_user = email_options.get("should_create_user", True) - data = email_options.get("data") - channel = email_options.get("channel", "sms") - captcha_token = email_options.get("captcha_token") - response = self._request( - "POST", - "otp", - body={ - "email": email, - "data": data, - "create_user": should_create_user, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - redirect_to=email_redirect_to, - ) - return parse_auth_otp_response(response) - if phone: - phone_options = cast( - SignInWithPhoneAndPasswordlessCredentialsOptions, - credentials.get("options", {}), - ) - should_create_user = phone_options.get("should_create_user", True) - data = phone_options.get("data") - channel = phone_options.get("channel", "sms") - captcha_token = phone_options.get("captcha_token") - response = self._request( - "POST", - "otp", - body={ - "phone": phone, - "data": data, - "create_user": should_create_user, - "channel": channel, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - }, - ) - return parse_auth_otp_response(response) - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number" - ) - - def resend( - self, - credentials: 
ResendCredentials, - ) -> AuthOtpResponse: - """ - Resends an existing signup confirmation email, email change email, SMS OTP or phone change OTP. - """ - email = credentials.get("email") - phone = credentials.get("phone") - type = credentials.get("type") - options = credentials.get("options", {}) - email_redirect_to: Optional[str] = options.get("email_redirect_to") # type: ignore - captcha_token = options.get("captcha_token") - body: Dict[str, object] = { # improve later - "type": type, - "gotrue_meta_security": { - "captcha_token": captcha_token, - }, - } - - if email is None and phone is None: - raise AuthInvalidCredentialsError( - "You must provide either an email or phone number" - ) - - body.update({"email": email} if email else {"phone": phone}) - - response = self._request( - "POST", - "resend", - body=body, - redirect_to=email_redirect_to if email else None, - ) - return parse_auth_otp_response(response) - - def verify_otp(self, params: VerifyOtpParams) -> AuthResponse: - """ - Log in a user given a User supplied OTP received via mobile. - """ - self._remove_session() - response = self._request( - "POST", - "verify", - body={ - "gotrue_meta_security": { - "captcha_token": params.get("options", {}).get("captcha_token"), - }, - **params, - }, - redirect_to=params.get("options", {}).get("redirect_to"), - ) - auth_response = parse_auth_response(response) - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def reauthenticate(self) -> AuthResponse: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - - self._request( - "GET", - "reauthenticate", - jwt=session.access_token, - ) - return AuthResponse(user=None, session=None) - - def get_session(self) -> Optional[Session]: - """ - Returns the session, refreshing it if necessary. 
- - The session returned can be null if the session is not detected which - can happen in the event a user is not signed-in or has logged out. - """ - current_session: Optional[Session] = None - if self._persist_session: - maybe_session = self._storage.get_item(self._storage_key) - current_session = self._get_valid_session(maybe_session) - if not current_session: - self._remove_session() - else: - current_session = self._in_memory_session - - if not current_session: - return None - time_now = round(time.time()) - has_expired = ( - current_session.expires_at <= time_now + EXPIRY_MARGIN - if current_session.expires_at - else False - ) - return ( - self._call_refresh_token(current_session.refresh_token) - if has_expired - else current_session - ) - - def get_user(self, jwt: Optional[str] = None) -> Optional[UserResponse]: - """ - Gets the current user details if there is an existing session. - - Takes in an optional access token `jwt`. If no `jwt` is provided, - `get_user()` will attempt to get the `jwt` from the current session. - """ - if not jwt: - session = self.get_session() - if session: - jwt = session.access_token - else: - return None - return parse_user_response(self._request("GET", "user", jwt=jwt)) - - def update_user( - self, attributes: UserAttributes, options: Optional[UpdateUserOptions] = None - ) -> UserResponse: - """ - Updates user data, if there is a logged in user. 
- """ - session = self.get_session() - if not session: - raise AuthSessionMissingError() - update_options = options or {} - response = self._request( - "PUT", - "user", - body=attributes, - redirect_to=update_options.get("email_redirect_to"), - jwt=session.access_token, - ) - user_response = parse_user_response(response) - session.user = user_response.user - self._save_session(session) - self._notify_all_subscribers("USER_UPDATED", session) - return user_response - - def set_session(self, access_token: str, refresh_token: str) -> AuthResponse: - """ - Sets the session data from the current session. If the current session - is expired, `set_session` will take care of refreshing it to obtain a - new session. - - If the refresh token in the current session is invalid and the current - session has expired, an error will be thrown. - - If the current session does not contain at `expires_at` field, - `set_session` will use the exp claim defined in the access token. - - The current session that minimally contains an access token, - refresh token and a user. 
- """ - time_now = round(time.time()) - expires_at = time_now - has_expired = True - session: Optional[Session] = None - if access_token and access_token.split(".")[1]: - payload = decode_jwt(access_token)["payload"] - exp = payload.get("exp") - if exp: - expires_at = int(exp) - has_expired = expires_at <= time_now - if has_expired: - if not refresh_token: - raise AuthSessionMissingError() - response = self._refresh_access_token(refresh_token) - if not response.session: - return AuthResponse() - session = response.session - else: - user_response = self.get_user(access_token) - if user_response is None: - raise UserDoesntExist(access_token) - session = Session( - access_token=access_token, - refresh_token=refresh_token, - user=user_response.user, - token_type="bearer", - expires_in=expires_at - time_now, - expires_at=expires_at, - ) - self._save_session(session) - self._notify_all_subscribers("TOKEN_REFRESHED", session) - return AuthResponse(session=session, user=session.user) - - def refresh_session(self, refresh_token: Optional[str] = None) -> AuthResponse: - """ - Returns a new session, regardless of expiry status. - - Takes in an optional current session. If not passed in, then refreshSession() - will attempt to retrieve it from getSession(). If the current session's - refresh token is invalid, an error will be thrown. - """ - if not refresh_token: - session = self.get_session() - if session: - refresh_token = session.refresh_token - if not refresh_token: - raise AuthSessionMissingError() - session = self._call_refresh_token(refresh_token) - return AuthResponse(session=session, user=session.user) - - def sign_out(self, options: Optional[SignOutOptions] = None) -> None: - """ - `sign_out` will remove the logged in user from the - current session and log them out - removing all items from storage and then trigger a `"SIGNED_OUT"` event. - - For advanced use cases, you can revoke all refresh tokens for a user by passing a user's JWT through to `admin.sign_out`. 
- - There is no way to revoke a user's access token jwt until it expires. - It is recommended to set a shorter expiry on the jwt for this reason. - """ - signout_options = options or {"scope": "global"} - with suppress(AuthApiError): - session = self.get_session() - access_token = session.access_token if session else None - if access_token: - self.admin.sign_out(access_token, signout_options["scope"]) - - if signout_options["scope"] != "others": - self._remove_session() - self._notify_all_subscribers("SIGNED_OUT", None) - - def on_auth_state_change( - self, - callback: Callable[[AuthChangeEvent, Optional[Session]], None], - ) -> Subscription: - """ - Receive a notification every time an auth event happens. - """ - unique_id = str(uuid4()) - - def _unsubscribe() -> None: - self._state_change_emitters.pop(unique_id) - - subscription = Subscription( - id=unique_id, - callback=callback, - unsubscribe=_unsubscribe, - ) - self._state_change_emitters[unique_id] = subscription - return subscription - - def reset_password_for_email( - self, email: str, options: Optional[Options] = None - ) -> None: - """ - Sends a password reset request to an email address. - """ - reset_options = options or {} - self._request( - "POST", - "recover", - body={ - "email": email, - "gotrue_meta_security": { - "captcha_token": reset_options.get("captcha_token"), - }, - }, - redirect_to=reset_options.get("redirect_to"), - ) - - def reset_password_email( - self, - email: str, - options: Optional[Options] = None, - ) -> None: - """ - Sends a password reset request to an email address. 
- """ - - self.reset_password_for_email(email, options or {}) - - # MFA methods - - def _enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - - body = { - "friendly_name": params.get("friendly_name"), - "factor_type": params.get("factor_type"), - } - - if params["factor_type"] == "phone": - body["phone"] = params.get("phone") - else: - body["issuer"] = params.get("issuer") - - response = self._request( - "POST", - "factors", - body=body, - jwt=session.access_token, - ) - auth_response = model_validate(AuthMFAEnrollResponse, response.content) - if params["factor_type"] == "totp" and auth_response.totp: - auth_response.totp.qr_code = ( - f"data:image/svg+xml;utf-8,{auth_response.totp.qr_code}" - ) - return auth_response - - def _challenge(self, params: MFAChallengeParams) -> AuthMFAChallengeResponse: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - response = self._request( - "POST", - f"factors/{params.get('factor_id')}/challenge", - body={"channel": params.get("channel")}, - jwt=session.access_token, - ) - return model_validate(AuthMFAChallengeResponse, response.content) - - def _challenge_and_verify( - self, - params: MFAChallengeAndVerifyParams, - ) -> AuthMFAVerifyResponse: - response = self._challenge( - { - "factor_id": params["factor_id"], - } - ) - return self._verify( - { - "factor_id": params["factor_id"], - "challenge_id": response.id, - "code": params["code"], - } - ) - - def _verify(self, params: MFAVerifyParams) -> AuthMFAVerifyResponse: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - response = self._request( - "POST", - f"factors/{params.get('factor_id')}/verify", - body=params, - jwt=session.access_token, - ) - auth_response = model_validate(AuthMFAVerifyResponse, response.content) - session = model_validate(Session, response.content) - self._save_session(session) - 
self._notify_all_subscribers("MFA_CHALLENGE_VERIFIED", session) - return auth_response - - def _unenroll(self, params: MFAUnenrollParams) -> AuthMFAUnenrollResponse: - session = self.get_session() - if not session: - raise AuthSessionMissingError() - response = self._request( - "DELETE", - f"factors/{params.get('factor_id')}", - jwt=session.access_token, - ) - return model_validate(AuthMFAUnenrollResponse, response.content) - - def _list_factors(self) -> AuthMFAListFactorsResponse: - response = self.get_user() - factors = response.user.factors or [] if response else [] - totp = [ - f for f in factors if f.factor_type == "totp" and f.status == "verified" - ] - phone = [ - f for f in factors if f.factor_type == "phone" and f.status == "verified" - ] - return AuthMFAListFactorsResponse(all=factors, totp=totp, phone=phone) - - def _get_authenticator_assurance_level( - self, - ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: - session = self.get_session() - if not session: - return AuthMFAGetAuthenticatorAssuranceLevelResponse( - current_level=None, - next_level=None, - current_authentication_methods=[], - ) - payload = decode_jwt(session.access_token)["payload"] - current_level = payload.get("aal") - verified_factors = [ - f for f in session.user.factors or [] if f.status == "verified" - ] - next_level = "aal2" if verified_factors else current_level - amr_dict_list = payload.get("amr") or [] - current_authentication_methods = [ - AMREntry.model_validate(amr) for amr in amr_dict_list - ] - return AuthMFAGetAuthenticatorAssuranceLevelResponse( - current_level=current_level, - next_level=next_level, - current_authentication_methods=current_authentication_methods, - ) - - # Private methods - - def _remove_session(self) -> None: - if self._persist_session: - self._storage.remove_item(self._storage_key) - else: - self._in_memory_session = None - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = None - - def 
_get_session_from_url( - self, - url: str, - ) -> Tuple[Session, Optional[str]]: - if not self._is_implicit_grant_flow(url): - raise AuthImplicitGrantRedirectError("Not a valid implicit grant flow url.") - result = urlparse(url) - params = parse_qs(result.query) - error_description = self._get_param(params, "error_description") - if error_description: - error_code = self._get_param(params, "error_code") - error = self._get_param(params, "error") - if not error_code: - raise AuthImplicitGrantRedirectError("No error_code detected.") - if not error: - raise AuthImplicitGrantRedirectError("No error detected.") - raise AuthImplicitGrantRedirectError( - error_description, - {"code": error_code, "error": error}, - ) - provider_token = self._get_param(params, "provider_token") - provider_refresh_token = self._get_param(params, "provider_refresh_token") - access_token = self._get_param(params, "access_token") - if not access_token: - raise AuthImplicitGrantRedirectError("No access_token detected.") - expires_in = self._get_param(params, "expires_in") - if not expires_in: - raise AuthImplicitGrantRedirectError("No expires_in detected.") - refresh_token = self._get_param(params, "refresh_token") - if not refresh_token: - raise AuthImplicitGrantRedirectError("No refresh_token detected.") - token_type = self._get_param(params, "token_type") - if not token_type: - raise AuthImplicitGrantRedirectError("No token_type detected.") - time_now = round(time.time()) - expires_at = time_now + int(expires_in) - user = self.get_user(access_token) - if user is None: - raise UserDoesntExist(access_token) - session = Session( - provider_token=provider_token, - provider_refresh_token=provider_refresh_token, - access_token=access_token, - expires_in=int(expires_in), - expires_at=expires_at, - refresh_token=refresh_token, - token_type=token_type, - user=user.user, - ) - redirect_type = self._get_param(params, "type") - return session, redirect_type - - def _recover_and_refresh(self) -> None: - 
raw_session = self._storage.get_item(self._storage_key) - current_session = self._get_valid_session(raw_session) - if not current_session: - if raw_session: - self._remove_session() - return - time_now = round(time.time()) - expires_at = current_session.expires_at - if expires_at and expires_at < time_now + EXPIRY_MARGIN: - refresh_token = current_session.refresh_token - if self._auto_refresh_token and refresh_token: - self._network_retries += 1 - try: - self._call_refresh_token(refresh_token) - self._network_retries = 0 - except Exception as e: - if ( - isinstance(e, AuthRetryableError) - and self._network_retries < MAX_RETRIES - ): - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = Timer( - (200 * (2 ** (self._network_retries - 1))), - self._recover_and_refresh, - ) - self._refresh_token_timer.start() - return - self._remove_session() - return - if self._persist_session: - self._save_session(current_session) - self._notify_all_subscribers("SIGNED_IN", current_session) - - def _call_refresh_token(self, refresh_token: str) -> Session: - if not refresh_token: - raise AuthSessionMissingError() - response = self._refresh_access_token(refresh_token) - if not response.session: - raise AuthSessionMissingError() - self._save_session(response.session) - self._notify_all_subscribers("TOKEN_REFRESHED", response.session) - return response.session - - def _refresh_access_token(self, refresh_token: str) -> AuthResponse: - response = self._request( - "POST", - "token", - query=QueryParams(grant_type="refresh_token"), - body={"refresh_token": refresh_token}, - ) - return parse_auth_response(response) - - def _save_session(self, session: Session) -> None: - if not self._persist_session: - self._in_memory_session = session - expire_at = session.expires_at - if expire_at: - time_now = round(time.time()) - expire_in = expire_at - time_now - refresh_duration_before_expires = ( - EXPIRY_MARGIN if expire_in > EXPIRY_MARGIN else 0.5 - ) - 
value = (expire_in - refresh_duration_before_expires) * 1000 - self._start_auto_refresh_token(value) - if self._persist_session and session.expires_at: - self._storage.set_item(self._storage_key, model_dump_json(session)) - - def _start_auto_refresh_token(self, value: float) -> None: - if self._refresh_token_timer: - self._refresh_token_timer.cancel() - self._refresh_token_timer = None - if value <= 0 or not self._auto_refresh_token: - return - - def refresh_token_function() -> None: - self._network_retries += 1 - try: - session = self.get_session() - if session: - self._call_refresh_token(session.refresh_token) - self._network_retries = 0 - except Exception as e: - if ( - isinstance(e, AuthRetryableError) - and self._network_retries < MAX_RETRIES - ): - self._start_auto_refresh_token( - 200 * (2 ** (self._network_retries - 1)) - ) - - self._refresh_token_timer = Timer(value, refresh_token_function) - self._refresh_token_timer.start() - - def _notify_all_subscribers( - self, - event: AuthChangeEvent, - session: Optional[Session], - ) -> None: - for subscription in self._state_change_emitters.values(): - subscription.callback(event, session) - - def _get_valid_session( - self, - raw_session: Optional[str], - ) -> Optional[Session]: - if not raw_session: - return None - try: - session = model_validate(Session, raw_session) - if session.expires_at is None: - return None - return session - except Exception: - return None - - def _get_param( - self, - query_params: Dict[str, List[str]], - name: str, - ) -> Optional[str]: - return query_params[name][0] if name in query_params else None - - def _is_implicit_grant_flow(self, url: str) -> bool: - result = urlparse(url) - params = parse_qs(result.query) - return "access_token" in params or "error_description" in params - - def _get_url_for_provider( - self, - url: str, - provider: Provider, - params: Dict[str, str], - ) -> Tuple[str, QueryParams]: - query = QueryParams(params) - if self._flow_type == "pkce": - code_verifier 
= generate_pkce_verifier() - code_challenge = generate_pkce_challenge(code_verifier) - self._storage.set_item(f"{self._storage_key}-code-verifier", code_verifier) - code_challenge_method = ( - "plain" if code_verifier == code_challenge else "s256" - ) - query = query.set("code_challenge", code_challenge).set( - "code_challenge_method", code_challenge_method - ) - query = query.set("provider", provider) - return f"{url}?{query}", query - - def exchange_code_for_session(self, params: CodeExchangeParams) -> AuthResponse: - code_verifier = params.get("code_verifier") or self._storage.get_item( - f"{self._storage_key}-code-verifier" - ) - response = self._request( - "POST", - "token", - query=QueryParams(grant_type="pkce"), - body={ - "auth_code": params.get("auth_code"), - "code_verifier": code_verifier, - }, - redirect_to=params.get("redirect_to"), - ) - auth_response = parse_auth_response(response) - self._storage.remove_item(f"{self._storage_key}-code-verifier") - if auth_response.session: - self._save_session(auth_response.session) - self._notify_all_subscribers("SIGNED_IN", auth_response.session) - return auth_response - - def _fetch_jwks(self, kid: str, jwks: JWKSet) -> JWK: - jwk: Optional[JWK] = None - - # try fetching from the suplied keys. - jwk = next((jwk for jwk in jwks["keys"] if jwk["kid"] == kid), None) - - if jwk: - return jwk - - if self._jwks and ( - self._jwks_cached_at and time.time() - self._jwks_cached_at < self._jwks_ttl - ): - # try fetching from the cache. 
- jwk = next( - (jwk for jwk in self._jwks["keys"] if jwk["kid"] == kid), - None, - ) - if jwk: - return jwk - - # jwk isn't cached in memory so we need to fetch it from the well-known endpoint - response = self._request("GET", ".well-known/jwks.json") - jwks = parse_jwks(response) - if response: - self._jwks = jwks - self._jwks_cached_at = time.time() - - # find the signing key - jwk = next((jwk for jwk in jwks["keys"] if jwk["kid"] == kid), None) - if not jwk: - raise AuthInvalidJwtError("No matching signing key found in JWKS") - - return jwk - - raise AuthInvalidJwtError("JWT has no valid kid") - - def get_claims( - self, jwt: Optional[str] = None, jwks: Optional[JWKSet] = None - ) -> Optional[ClaimsResponse]: - token = jwt - if not token: - session = self.get_session() - if not session: - return None - - token = session.access_token - - decoded_jwt = decode_jwt(token) - - payload, header, signature = ( - decoded_jwt["payload"], - decoded_jwt["header"], - decoded_jwt["signature"], - ) - raw_header, raw_payload = ( - decoded_jwt["raw"]["header"], - decoded_jwt["raw"]["payload"], - ) - - validate_exp(payload["exp"]) - - # if symmetric algorithm, fallback to get_user - if "kid" not in header or header["alg"] == "HS256": - self.get_user(token) - return ClaimsResponse(claims=payload, headers=header, signature=signature) - - algorithm = get_algorithm_by_name(header["alg"]) - jwk_set = self._fetch_jwks(header["kid"], jwks or {"keys": []}) - signing_key = algorithm.from_jwk(cast(Dict[str, str], jwk_set)) - - # verify the signature - is_valid = algorithm.verify( - msg=f"{raw_header}.{raw_payload}".encode(), key=signing_key, sig=signature - ) - - if not is_valid: - raise AuthInvalidJwtError("Invalid JWT signature") - - # If verification succeeds, decode and return claims - return ClaimsResponse(claims=payload, headers=header, signature=signature) - - def __del__(self) -> None: - """Clean up resources when the client is destroyed.""" - if self._refresh_token_timer: - try: 
- # Try to cancel the timer - self._refresh_token_timer.cancel() - except Exception: - # Ignore errors if event loop is closed or selector is not registered - pass - finally: - # Always set to None to prevent further attempts - self._refresh_token_timer = None diff --git a/src/auth/src/supabase_auth/_sync/gotrue_mfa_api.py b/src/auth/src/supabase_auth/_sync/gotrue_mfa_api.py deleted file mode 100644 index 16bec8d5..00000000 --- a/src/auth/src/supabase_auth/_sync/gotrue_mfa_api.py +++ /dev/null @@ -1,94 +0,0 @@ -from ..types import ( - AuthMFAChallengeResponse, - AuthMFAEnrollResponse, - AuthMFAGetAuthenticatorAssuranceLevelResponse, - AuthMFAListFactorsResponse, - AuthMFAUnenrollResponse, - AuthMFAVerifyResponse, - MFAChallengeAndVerifyParams, - MFAChallengeParams, - MFAEnrollParams, - MFAUnenrollParams, - MFAVerifyParams, -) - - -class SyncGoTrueMFAAPI: - """ - Contains the full multi-factor authentication API. - """ - - def enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: - """ - Starts the enrollment process for a new Multi-Factor Authentication - factor. This method creates a new factor in the 'unverified' state. - Present the QR code or secret to the user and ask them to add it to their - authenticator app. Ask the user to provide you with an authenticator code - from their app and verify it by calling challenge and then verify. - - The first successful verification of an unverified factor activates the - factor. All other sessions are logged out and the current one gets an - `aal2` authenticator level. - """ - raise NotImplementedError() # pragma: no cover - - def challenge(self, params: MFAChallengeParams) -> AuthMFAChallengeResponse: - """ - Prepares a challenge used to verify that a user has access to a MFA - factor. Provide the challenge ID and verification code by calling `verify`. 
- """ - raise NotImplementedError() # pragma: no cover - - def challenge_and_verify( - self, - params: MFAChallengeAndVerifyParams, - ) -> AuthMFAVerifyResponse: - """ - Helper method which creates a challenge and immediately uses the given code - to verify against it thereafter. The verification code is provided by the - user by entering a code seen in their authenticator app. - """ - raise NotImplementedError() # pragma: no cover - - def verify(self, params: MFAVerifyParams) -> AuthMFAVerifyResponse: - """ - Verifies a verification code against a challenge. The verification code is - provided by the user by entering a code seen in their authenticator app. - """ - raise NotImplementedError() # pragma: no cover - - def unenroll(self, params: MFAUnenrollParams) -> AuthMFAUnenrollResponse: - """ - Unenroll removes a MFA factor. Unverified factors can safely be ignored - and it's not necessary to unenroll them. Unenrolling a verified MFA factor - cannot be done from a session with an `aal1` authenticator level. - """ - raise NotImplementedError() # pragma: no cover - - def list_factors(self) -> AuthMFAListFactorsResponse: - """ - Returns the list of MFA factors enabled for this user. For most use cases - you should consider using `get_authenticator_assurance_level`. - - This uses a cached version of the factors and avoids incurring a network call. - If you need to update this list, call `get_user` first. - """ - raise NotImplementedError() # pragma: no cover - - def get_authenticator_assurance_level( - self, - ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: - """ - Returns the Authenticator Assurance Level (AAL) for the active session. - - - `aal1` (or `null`) means that the user's identity has been verified only - with a conventional login (email+password, OTP, magic link, social login, - etc.). - - `aal2` means that the user's identity has been verified both with a - conventional login and at least one MFA factor. 
- - Although this method returns a promise, it's fairly quick (microseconds) - and rarely uses the network. You can use this to check whether the current - user needs to be shown a screen to verify their MFA factors. - """ - raise NotImplementedError() # pragma: no cover diff --git a/src/auth/src/supabase_auth/_sync/storage.py b/src/auth/src/supabase_auth/_sync/storage.py deleted file mode 100644 index 2557d5db..00000000 --- a/src/auth/src/supabase_auth/_sync/storage.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from typing import Dict, Optional - - -class SyncSupportedStorage(ABC): - @abstractmethod - def get_item(self, key: str) -> Optional[str]: ... # pragma: no cover - - @abstractmethod - def set_item(self, key: str, value: str) -> None: ... # pragma: no cover - - @abstractmethod - def remove_item(self, key: str) -> None: ... # pragma: no cover - - -class SyncMemoryStorage(SyncSupportedStorage): - def __init__(self) -> None: - self.storage: Dict[str, str] = {} - - def get_item(self, key: str) -> Optional[str]: - if key in self.storage: - return self.storage[key] - return None - - def set_item(self, key: str, value: str) -> None: - self.storage[key] = value - - def remove_item(self, key: str) -> None: - if key in self.storage: - del self.storage[key] diff --git a/src/auth/src/supabase_auth/admin_api.py b/src/auth/src/supabase_auth/admin_api.py new file mode 100644 index 00000000..bc06a925 --- /dev/null +++ b/src/auth/src/supabase_auth/admin_api.py @@ -0,0 +1,447 @@ +from __future__ import annotations + +from dataclasses import dataclass +from types import TracebackType +from typing import Generic, List + +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + AsyncHttpSession, + HttpIO, + HttpMethod, + HttpSession, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import EmptyRequest, 
JSONRequest +from supabase_utils.types import JSON +from yarl import URL + +from .helpers import ( + handle_error_response, + parse_link_response, + parse_user_response, + redirect_to_as_query, + validate_adapter, + validate_model, + validate_uuid, +) +from .types import ( + AdminUserAttributes, + AuthMFAAdminDeleteFactorResponse, + AuthMFAAdminListFactorsResponse, + AuthMFAAdminListFactorsResponseParser, + CreateOAuthClientParams, + GenerateLinkParams, + GenerateLinkResponse, + OAuthClient, + OAuthClientListResponse, + OAuthClientResponse, + UpdateOAuthClientParams, + User, + UserList, + UserResponse, +) + + +@dataclass +class SupabaseAuthAdminMFA(Generic[HttpIO]): + """ + Contains the full multi-factor authentication administration API. + """ + + executor: HttpIO + base_url: URL + default_headers: Headers + + @handle_http_io + def delete_factor( + self, + factor_id: str, + user_id: str, + ) -> HttpMethod[AuthMFAAdminDeleteFactorResponse]: + """ + Deletes a factor on a user. This will log the user out of all active + sessions (if the deleted factor was verified). There's no need to delete + unverified factors. + """ + validate_uuid(user_id) + validate_uuid(factor_id) + response = yield EmptyRequest( + method="DELETE", + path=["admin", "users", user_id, "factors", factor_id], + ) + return validate_model(response, AuthMFAAdminDeleteFactorResponse) + + @handle_http_io + def list_factors(self, user_id: str) -> HttpMethod[AuthMFAAdminListFactorsResponse]: + """ + Lists all factors attached to a user. + """ + validate_uuid(user_id) + response = yield EmptyRequest( + method="GET", + path=["admin", "users", user_id, "factors"], + ) + return validate_adapter(response, AuthMFAAdminListFactorsResponseParser) + + +@dataclass +class SupabaseAuthAdminOAuth(Generic[HttpIO]): + """ + Contains all OAuth client administration methods. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. 
+ """ + + executor: HttpIO + base_url: URL + default_headers: Headers + + @handle_http_io + def list_clients( + self, + page: int | None = None, + per_page: int | None = None, + ) -> HttpMethod[OAuthClientListResponse]: + """ + Lists all OAuth clients with optional pagination. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + query = URLQuery.from_mapping( + { + "page": page if page is not None else "", + "per_page": per_page if per_page is not None else "", + } + ) + response = yield EmptyRequest( + method="GET", + path=["admin", "oauth", "clients"], + query=query, + ) + + result = validate_model(response, OAuthClientListResponse) + + # Parse pagination headers + total = response.headers.get("x-total-count") + if total: + result.total = int(total) + + links = response.headers.get("link") + if links: + for link in links.split(","): + parts = link.split(";") + if len(parts) >= 2: + page_match = parts[0].split("page=") + if len(page_match) >= 2: + page_num = int(page_match[1].split("&")[0].rstrip(">")) + rel = parts[1].split("=")[1].strip('"') + if rel == "next": + result.next_page = page_num + elif rel == "last": + result.last_page = page_num + + return result + + @handle_http_io + def create_client( + self, + params: CreateOAuthClientParams, + ) -> HttpMethod[OAuthClientResponse]: + """ + Creates a new OAuth client. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + response = yield JSONRequest( + method="POST", + path=["admin", "oauth", "clients"], + body=params, + ) + + return OAuthClientResponse(client=validate_model(response, OAuthClient)) + + @handle_http_io + def get_client( + self, + client_id: str, + ) -> HttpMethod[OAuthClientResponse]: + """ + Gets details of a specific OAuth client. 
+ Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + validate_uuid(client_id) + response = yield EmptyRequest( + method="GET", + path=["admin", "oauth", "clients", client_id], + ) + return OAuthClientResponse(client=validate_model(response, OAuthClient)) + + @handle_http_io + def update_client( + self, + client_id: str, + params: UpdateOAuthClientParams, + ) -> HttpMethod[OAuthClientResponse]: + """ + Updates an OAuth client. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + validate_uuid(client_id) + response = yield JSONRequest( + method="PUT", + path=["admin", "oauth", "clients", client_id], + body=params, + ) + return OAuthClientResponse(client=validate_model(response, OAuthClient)) + + @handle_http_io + def delete_client( + self, + client_id: str, + ) -> HttpMethod[None]: + """ + Deletes an OAuth client. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + validate_uuid(client_id) + response = yield EmptyRequest( + method="DELETE", + path=["admin", "oauth", "clients", client_id], + ) + if not response.is_success: + raise handle_error_response(response) + + @handle_http_io + def regenerate_client_secret( + self, + client_id: str, + ) -> HttpMethod[OAuthClientResponse]: + """ + Regenerates the secret for an OAuth client. + Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. 
+ """ + validate_uuid(client_id) + response = yield EmptyRequest( + method="POST", + path=["admin", "oauth", "clients", client_id, "regenerate_secret"], + ) + return OAuthClientResponse(client=validate_model(response, OAuthClient)) + + +class SupabaseAuthAdmin(Generic[HttpIO]): + def __init__( + self, executor: HttpIO, base_url: URL, default_headers: Headers + ) -> None: + self.executor: HttpIO = executor + self.base_url: URL = base_url + self.default_headers: Headers = default_headers + + self.mfa: SupabaseAuthAdminMFA[HttpIO] = SupabaseAuthAdminMFA( + self.executor, self.base_url, default_headers + ) + self.oauth: SupabaseAuthAdminOAuth[HttpIO] = SupabaseAuthAdminOAuth( + self.executor, self.base_url, default_headers + ) + + @handle_http_io + def invite_user_by_email( + self, + email: str, + redirect_to: str | None = None, + data: JSON | None = None, + ) -> HttpMethod[UserResponse]: + """ + Sends an invite link to an email address. + """ + response = yield JSONRequest( + method="POST", + path=["invite"], + body={"email": email, "data": data}, + query=redirect_to_as_query(redirect_to), + ) + return parse_user_response(response) + + @handle_http_io + def generate_link( + self, params: GenerateLinkParams + ) -> HttpMethod[GenerateLinkResponse]: + """ + Generates email links and OTPs to be sent via a custom email provider. + """ + response = yield JSONRequest( + method="POST", + path=["admin", "generate_link"], + body=params.body, + query=redirect_to_as_query(params.redirect_to), + ) + + return parse_link_response(response) + + # User Admin API + + @handle_http_io + def create_user(self, attributes: AdminUserAttributes) -> HttpMethod[UserResponse]: + """ + Creates a new user. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. 
+ """ + response = yield JSONRequest( + method="POST", path=["admin", "users"], body=attributes, exclude_none=True + ) + return parse_user_response(response) + + @handle_http_io + def list_users( + self, page: int | None = None, per_page: int | None = None + ) -> HttpMethod[List[User]]: + """ + Get a list of users. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + query = URLQuery.from_mapping( + { + "page": page if page is not None else "", + "per_page": per_page if per_page is not None else "", + } + ) + response = yield EmptyRequest( + method="GET", + path=["admin", "users"], + query=query, + ) + return validate_model(response, UserList).users + + @handle_http_io + def get_user_by_id(self, uid: str) -> HttpMethod[UserResponse]: + """ + Get user by id. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + validate_uuid(uid) + + response = yield EmptyRequest( + method="GET", + path=["admin", "users", uid], + ) + return parse_user_response(response) + + @handle_http_io + def update_user_by_id( + self, + uid: str, + attributes: AdminUserAttributes, + ) -> HttpMethod[UserResponse]: + """ + Updates the user data. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. + """ + validate_uuid(uid) + response = yield JSONRequest( + method="PUT", + path=["admin", "users", uid], + body=attributes, + ) + return parse_user_response(response) + + @handle_http_io + def delete_user( + self, id: str, should_soft_delete: bool = False + ) -> HttpMethod[None]: + """ + Delete a user. Requires a `service_role` key. + + This function should only be called on a server. + Never expose your `service_role` key in the browser. 
+ """ + validate_uuid(id) + body = {"should_soft_delete": should_soft_delete} + response = yield JSONRequest( + method="DELETE", path=["admin", "users", id], body=body + ) + if not response.is_success: + raise handle_error_response(response) + + +class SyncSupabaseAuthAdmin(SupabaseAuthAdmin[SyncHttpIO]): + def __init__( + self, + url: str, + http_session: HttpSession, + default_headers: dict[str, str] | None = None, + ) -> None: + SupabaseAuthAdmin.__init__( + self, + executor=SyncHttpIO(session=http_session), + base_url=URL(url), + default_headers=Headers.from_mapping(default_headers) + if default_headers + else Headers.empty(), + ) + + def __enter__(self) -> SyncSupabaseAuthAdmin: + self.executor.session.__enter__() + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + self.executor.session.__exit__(exc_type, exc, tb) + + +class AsyncSupabaseAuthAdmin(SupabaseAuthAdmin[AsyncHttpIO]): + def __init__( + self, + url: str, + http_session: AsyncHttpSession, + default_headers: dict[str, str] | None = None, + ) -> None: + SupabaseAuthAdmin.__init__( + self, + executor=AsyncHttpIO(session=http_session), + base_url=URL(url), + default_headers=Headers.from_mapping(default_headers) + if default_headers + else Headers.empty(), + ) + + async def __aenter__(self) -> AsyncSupabaseAuthAdmin: + await self.executor.session.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.executor.session.__aexit__(exc_type, exc, tb) diff --git a/src/auth/src/supabase_auth/client.py b/src/auth/src/supabase_auth/client.py new file mode 100644 index 00000000..8a8a4335 --- /dev/null +++ b/src/auth/src/supabase_auth/client.py @@ -0,0 +1,1262 @@ +from __future__ import annotations + +import time +from dataclasses import dataclass +from types import TracebackType +from typing import 
Callable, Generic, Literal +from uuid import uuid4 + +from jwt import get_algorithm_by_name +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + AsyncHttpSession, + HttpIO, + HttpMethod, + HttpSession, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import EmptyRequest, JSONRequest, Response +from supabase_utils.types import JSON +from yarl import URL + +from .errors import ( + AuthImplicitGrantRedirectError, + AuthInvalidJwtError, + AuthSessionMissingError, + UserDoesntExist, +) +from .helpers import ( + decode_jwt, + generate_pkce_challenge, + generate_pkce_verifier, + handle_error_response, + parse_auth_otp_response, + parse_auth_response, + parse_jwks, + parse_link_identity_response, + parse_sso_response, + parse_user_response, + redirect_to_as_query, + validate_exp, +) +from .mfa import AsyncSupabaseAuthMFAClient, SyncSupabaseAuthMFAClient +from .session import ( + AsyncMemoryStorage, + AsyncSessionManager, + AsyncSupportedStorage, + SessionManagerCommon, + SyncMemoryStorage, + SyncSessionManager, + SyncSupportedStorage, +) +from .types import ( + JWK, + AuthChangeEvent, + AuthFlowType, + AuthOtpResponse, + AuthResponse, + ClaimsResponse, + IdentitiesResponse, + JWKSet, + OAuthResponse, + Provider, + ResendCredentials, + ResendEmailCredentials, + Session, + SignInWithEmailAndPasswordlessCredentials, + SignInWithPasswordCredentials, + SignInWithPasswordlessCredentials, + SignInWithSSOCredentials, + SignOutScope, + SignUpWithEmailAndPasswordCredentials, + SignUpWithPasswordCredentials, + SSOResponse, + Subscription, + UserAttributes, + UserIdentity, + UserResponse, + VerifyOtpParams, + VerifyTokenHashParams, +) + + +def is_implicit_grant_flow(url: URL) -> bool: + params = url.query + return "access_token" in params or "error_description" in params + + +@dataclass +class SupabaseAuthHttpClient(Generic[HttpIO]): + executor: HttpIO + base_url: URL 
+ default_headers: Headers + session_manager: SessionManagerCommon[HttpIO] + _jwks: JWKSet + flow_type: AuthFlowType = "implicit" + _jwks_ttl: float = 600 # 10 minutes + _jwks_cached_at: float | None = None + + @handle_http_io + def _sign_in_anonymously( + self, data: JSON = None, captcha_token: str | None = None + ) -> HttpMethod[AuthResponse]: + """ + Creates a new anonymous user. + """ + response = yield JSONRequest( + method="POST", + path=["signup"], + body={ + "data": data, + "gotrue_meta_security": { + "captcha_token": captcha_token, + }, + }, + ) + return parse_auth_response(response) + + @handle_http_io + def _sign_up( + self, + credentials: SignUpWithPasswordCredentials, + ) -> HttpMethod[AuthResponse]: + """ + Creates a new user. + """ + if isinstance(credentials, SignUpWithEmailAndPasswordCredentials): + query = redirect_to_as_query(credentials.redirect_to) + else: + query = URLQuery.empty() + response = yield JSONRequest( + method="POST", path=["signup"], body=credentials.body, query=query + ) + auth_response = parse_auth_response(response) + return auth_response + + @handle_http_io + def _sign_in_with_password( + self, + credentials: SignInWithPasswordCredentials, + ) -> HttpMethod[AuthResponse]: + """ + Log in an existing user with an email or phone and password. + """ + response = yield JSONRequest( + method="POST", + path=["token"], + body=credentials, + query=URLQuery.from_mapping({"grant_type": "password"}), + ) + auth_response = parse_auth_response(response) + return auth_response + + @handle_http_io + def _sign_in_with_id_token( + self, + provider: Literal["google", "apple", "azure", "facebook", "kakao"], + token: str, + access_token: str | None = None, + nonce: str | None = None, + captcha_token: str | None = None, + ) -> HttpMethod[AuthResponse]: + """ + Allows signing in with an OIDC ID token. The authentication provider used should be enabled and configured. 
+ """ + response = yield JSONRequest( + method="POST", + path=["token"], + body={ + "provider": provider, + "id_token": token, + "access_token": access_token, + "nonce": nonce, + "gotrue_meta_security": { + "captcha_token": captcha_token, + }, + }, + query=URLQuery.from_mapping({"grant_type": "id_token"}), + ) + auth_response = parse_auth_response(response) + return auth_response + + @handle_http_io + def sign_in_with_sso( + self, credentials: SignInWithSSOCredentials + ) -> HttpMethod[SSOResponse]: + response = yield JSONRequest( + method="POST", + path=["sso"], + body=credentials, + ) + return parse_sso_response(response) + + @handle_http_io + def _sign_out(self, session: Session, scope: SignOutScope) -> HttpMethod[None]: + response = yield EmptyRequest( + method="POST", + path=["logout"], + query=URLQuery.from_mapping({"scope": scope}), + headers=session.encode_access_token(), + ) + if not response.is_success: + raise handle_error_response(response) + + def _sign_in_with_oauth( + self, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> tuple[OAuthResponse, str | None]: + """ + Log in an existing user via a third-party provider. 
+ """ + query = ( + URLQuery.from_mapping(query_params) if query_params else URLQuery.empty() + ) + if redirect_to: + query = query.set("redirect_to", redirect_to) + if scopes: + query = query.set("scopes", scopes) + code_verifier, query = self._get_url_for_provider(provider, query) + new_url = self.base_url.joinpath("authorize").with_query(query.as_query()) + return OAuthResponse(provider=provider, url=str(new_url)), code_verifier + + @handle_http_io + def _link_identity( + self, + session: Session, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> HttpMethod[tuple[OAuthResponse, str | None]]: + query = ( + URLQuery.from_mapping(query_params) if query_params else URLQuery.empty() + ) + if redirect_to: + query = query.set("redirect_to", redirect_to) + if scopes: + query = query.set("scopes", scopes) + query = query.set("skip_http_redirect", "true") + + code_verifier, query = self._get_url_for_provider(provider, query) + + response = yield EmptyRequest( + method="GET", + path=["user", "identities", "authorize"], + query=query, + headers=session.encode_access_token(), + ) + link_identity = parse_link_identity_response(response) + return OAuthResponse(provider=provider, url=link_identity.url), code_verifier + + @handle_http_io + def _unlink_identity( + self, session: Session, identity: UserIdentity + ) -> HttpMethod[Response]: + response = yield EmptyRequest( + method="DELETE", + path=["user", "identities", identity.identity_id], + headers=session.encode_access_token(), + ) + if not response.is_success: + raise handle_error_response(response) + return response + + @handle_http_io + def _sign_in_with_otp( + self, + credentials: SignInWithPasswordlessCredentials, + ) -> HttpMethod[AuthOtpResponse]: + if isinstance(credentials, SignInWithEmailAndPasswordlessCredentials): + query = redirect_to_as_query(credentials.email_redirect_to) + else: + query = URLQuery.empty() + response = yield 
JSONRequest( + method="POST", + path=["otp"], + body=credentials.body, + query=query, + ) + return parse_auth_otp_response(response) + + @handle_http_io + def resend( + self, + credentials: ResendCredentials, + ) -> HttpMethod[AuthOtpResponse]: + if isinstance(credentials, ResendEmailCredentials): + query = redirect_to_as_query(credentials.email_redirect_to) + else: + query = URLQuery.empty() + response = yield JSONRequest( + method="POST", + path=["resend"], + body=credentials.body, + query=query, + ) + return parse_auth_otp_response(response) + + @handle_http_io + def _verify_otp(self, params: VerifyOtpParams) -> HttpMethod[AuthResponse]: + if isinstance(params, VerifyTokenHashParams): + query = URLQuery.empty() + else: + query = redirect_to_as_query(params.redirect_to) + response = yield JSONRequest( + method="POST", + path=["verify"], + body=params.body, + query=query, + ) + auth_response = parse_auth_response(response) + return auth_response + + @handle_http_io + def _reauthenticate(self, session: Session) -> HttpMethod[AuthResponse]: + response = yield EmptyRequest( + method="GET", + path=["reauthenticate"], + headers=session.encode_access_token(), + ) + if not response.is_success: + raise handle_error_response(response) + return AuthResponse(user=None, session=None) + + @handle_http_io + def _update_user( + self, + session: Session, + attributes: UserAttributes, + email_redirect_to: str | None = None, + ) -> HttpMethod[UserResponse]: + """ + Updates user data, if there is a logged in user. 
+ """ + response = yield JSONRequest( + method="PUT", + path=["user"], + body=attributes, + query=redirect_to_as_query(email_redirect_to), + headers=session.encode_access_token(), + ) + user_response = parse_user_response(response) + session.user = user_response.user + return user_response + + @handle_http_io + def _set_session( + self, access_token: str, refresh_token: str + ) -> HttpMethod[AuthResponse]: + time_now = round(time.time()) + expires_at = time_now + has_expired = True + session: Session | None = None + if access_token and access_token.split(".")[1]: + payload = decode_jwt(access_token).payload + exp = payload.exp + if exp: + has_expired = exp <= time_now + if has_expired: + if not refresh_token: + raise AuthSessionMissingError() + response = yield from self.session_manager._refresh_access_token( + refresh_token + ) + if not response.session: + return AuthResponse() + session = response.session + else: + user_response = yield from self.session_manager._get_user(access_token) + if user_response is None: + raise UserDoesntExist(access_token) + session = Session( + access_token=access_token, + refresh_token=refresh_token, + user=user_response.user, + token_type="bearer", + expires_in=expires_at - time_now, + expires_at=expires_at, + ) + return AuthResponse(session=session, user=session.user) + + def on_auth_state_change( + self, + callback: Callable[[AuthChangeEvent, Session | None], None], + ) -> Subscription: + unique_id = str(uuid4()) + + def _unsubscribe() -> None: + self.session_manager.state_change_emitters.pop(unique_id) + + subscription = Subscription( + id=unique_id, + callback=callback, + unsubscribe=_unsubscribe, + ) + self.session_manager.state_change_emitters[unique_id] = subscription + return subscription + + @handle_http_io + def reset_password_for_email( + self, + email: str, + captcha_token: str | None = None, + redirect_to: str | None = None, + ) -> HttpMethod[None]: + """ + Sends a password reset request to an email address. 
+ """ + response = yield JSONRequest( + method="POST", + path=["recover"], + body={ + "email": email, + "gotrue_meta_security": { + "captcha_token": captcha_token, + }, + }, + query=redirect_to_as_query(redirect_to), + ) + if not response.is_success: + raise handle_error_response(response) + + @handle_http_io + def _get_session_from_url( + self, + url: str, + ) -> HttpMethod[tuple[Session, str | None]]: + result = URL(url) + if not is_implicit_grant_flow(result): + raise AuthImplicitGrantRedirectError("Not a valid implicit grant flow url.") + params = result.query + error_description = params.get("error_description") + if error_description: + error_code = params.get("error_code") + error = params.get("error") + if not error_code: + raise AuthImplicitGrantRedirectError("No error_code detected.") + if not error: + raise AuthImplicitGrantRedirectError("No error detected.") + raise AuthImplicitGrantRedirectError( + error_description, + code=error_code, + error=error, + ) + provider_token = params.get("provider_token") + provider_refresh_token = params.get("provider_refresh_token") + access_token = params.get("access_token") + if not access_token: + raise AuthImplicitGrantRedirectError("No access_token detected.") + expires_in = params.get("expires_in") + if not expires_in: + raise AuthImplicitGrantRedirectError("No expires_in detected.") + refresh_token = params.get("refresh_token") + if not refresh_token: + raise AuthImplicitGrantRedirectError("No refresh_token detected.") + token_type = params.get("token_type") + if not token_type: + raise AuthImplicitGrantRedirectError("No token_type detected.") + time_now = round(time.time()) + expires_at = time_now + int(expires_in) + user = yield from self.session_manager._get_user(access_token) + if user is None: + raise UserDoesntExist(access_token) + session = Session( + provider_token=provider_token, + provider_refresh_token=provider_refresh_token, + access_token=access_token, + expires_in=int(expires_in), + 
expires_at=expires_at,
+            refresh_token=refresh_token,
+            token_type=token_type,
+            user=user.user,
+        )
+        redirect_type = params.get("type")
+        return session, redirect_type
+
+    def _get_url_for_provider(
+        self,
+        provider: Provider,
+        query: URLQuery,
+    ) -> tuple[str | None, URLQuery]:
+        code_verifier = None
+        if self.flow_type == "pkce":
+            code_verifier = generate_pkce_verifier()
+            code_challenge = generate_pkce_challenge(code_verifier)
+            code_challenge_method = (
+                "plain" if code_verifier == code_challenge else "s256"
+            )
+            query = query.set("code_challenge", code_challenge).set(
+                "code_challenge_method", code_challenge_method
+            )
+        query = query.set("provider", provider)
+        return code_verifier, query
+
+    @handle_http_io
+    def exchange_code_for_session(
+        self, code_verifier: str, auth_code: str, redirect_to: str | None = None
+    ) -> HttpMethod[AuthResponse]:
+        query = redirect_to_as_query(redirect_to).set("grant_type", "pkce")
+        response = yield JSONRequest(
+            method="POST",
+            path=["token"],
+            body={
+                "auth_code": auth_code,
+                "code_verifier": code_verifier,
+            },
+            query=query,
+        )
+        auth_response = parse_auth_response(response)
+        return auth_response
+
+    def _fetch_jwks(self, kid: str, jwks: JWKSet) -> HttpMethod[JWK]:
+        jwk: JWK | None = None
+
+        # try fetching from the supplied keys.
+        jwk = next((jwk for jwk in jwks.keys if jwk.kid == kid), None)
+
+        if jwk:
+            return jwk
+
+        if self._jwks and (
+            self._jwks_cached_at and time.time() - self._jwks_cached_at < self._jwks_ttl
+        ):
+            # try fetching from the cache.
+ jwk = next( + (jwk for jwk in self._jwks.keys if jwk.kid == kid), + None, + ) + if jwk: + return jwk + + # jwk isn't cached in memory so we need to fetch it from the well-known endpoint + response = yield EmptyRequest(method="GET", path=[".well-known", "jwks.json"]) + jwks = parse_jwks(response) + if not response: + raise AuthInvalidJwtError("JWT has no valid kid") + + self._jwks = jwks + self._jwks_cached_at = time.time() + + # find the signing key + jwk = next((jwk for jwk in jwks.keys if jwk.kid == kid), None) + if not jwk: + raise AuthInvalidJwtError("No matching signing key found in JWKS") + return jwk + + @handle_http_io + def _get_claims( + self, jwt: str, jwks: JWKSet | None = None + ) -> HttpMethod[ClaimsResponse]: + decoded_jwt = decode_jwt(jwt) + + validate_exp(decoded_jwt.payload.exp) + header = decoded_jwt.header + payload = decoded_jwt.payload + signature = decoded_jwt.signature + # if symmetric algorithm, fallback to get_user + if not header.kid or header.alg == "HS256": + yield from self.session_manager._get_user(jwt) + return ClaimsResponse( + claims=decoded_jwt.payload, + headers=decoded_jwt.header, + signature=decoded_jwt.signature, + ) + + algorithm = get_algorithm_by_name(header.alg) + jwk_set = yield from self._fetch_jwks(header.kid, jwks or JWKSet(keys=[])) + signing_key = algorithm.from_jwk(dict(jwk_set)) + + # verify the signature + is_valid = algorithm.verify( + msg=f"{decoded_jwt.raw_header}.{decoded_jwt.raw_payload}".encode(), + key=signing_key, + sig=signature, + ) + + if not is_valid: + raise AuthInvalidJwtError("Invalid JWT signature") + + # If verification succeeds, decode and return claims + return ClaimsResponse(claims=payload, headers=header, signature=signature) + + +class AsyncSupabaseAuthClient(SupabaseAuthHttpClient[AsyncHttpIO]): + def __init__( + self, + url: str, + http_session: AsyncHttpSession, + *, + headers: dict[str, str] | None = None, + storage_key: str | None = None, + auto_refresh_token: bool = True, + 
persist_session: bool = True, + storage: AsyncSupportedStorage | None = None, + flow_type: AuthFlowType = "implicit", + ) -> None: + self.base_url = URL(url) + default_headers = Headers.from_mapping(headers) if headers else Headers.empty() + executor = AsyncHttpIO(session=http_session) + self.session_manager: AsyncSessionManager = AsyncSessionManager( + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + storage=storage or AsyncMemoryStorage(), + state_change_emitters={}, + auto_refresh_token=auto_refresh_token, + ) + SupabaseAuthHttpClient.__init__( + self, + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + session_manager=self.session_manager, + _jwks=JWKSet(keys=[]), + flow_type=flow_type, + ) + self.mfa = AsyncSupabaseAuthMFAClient( + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + session_manager=self.session_manager, + ) + + async def __aenter__(self) -> AsyncSupabaseAuthClient: + await self.executor.session.__aenter__() + await self.session_manager.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.executor.session.__aexit__(exc_type, exc, tb) + await self.session_manager.__aexit__(exc_type, exc, tb) + + # Initializations + + async def initialize(self, *, url: str | None = None) -> None: + if url and is_implicit_grant_flow(URL(url)): + await self.initialize_from_url(url) + else: + await self.initialize_from_storage() + + async def initialize_from_storage(self) -> None: + return await self.session_manager.recover_and_refresh() + + async def initialize_from_url(self, url: str) -> None: + try: + if is_implicit_grant_flow(URL(url)): + session, redirect_type = await self._get_session_from_url(url) + await self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("SIGNED_IN", session) + if redirect_type == 
"recovery": + self.session_manager.notify_all_subscribers( + "PASSWORD_RECOVERY", session + ) + except Exception as e: + await self.session_manager.remove_session() + raise e + + async def save_session_and_sign_in(self, auth_response: AuthResponse) -> None: + await self.session_manager.remove_session() + if auth_response.session: + await self.session_manager.save_session(auth_response.session) + self.session_manager.notify_all_subscribers( + "SIGNED_IN", auth_response.session + ) + + # Public methods + + async def sign_in_anonymously( + self, data: JSON = None, captcha_token: str | None = None + ) -> AuthResponse: + """ + Creates a new anonymous user. + """ + auth_response = await self._sign_in_anonymously(data, captcha_token) + await self.save_session_and_sign_in(auth_response) + return auth_response + + async def sign_up( + self, + credentials: SignUpWithPasswordCredentials, + ) -> AuthResponse: + """ + Creates a new user. + """ + auth_response = await self._sign_up(credentials) + await self.save_session_and_sign_in(auth_response) + return auth_response + + async def sign_in_with_password( + self, + credentials: SignInWithPasswordCredentials, + ) -> AuthResponse: + """ + Log in an existing user with an email or phone and password. + """ + auth_response = await self._sign_in_with_password(credentials) + await self.save_session_and_sign_in(auth_response) + return auth_response + + async def sign_in_with_id_token( + self, + provider: Literal["google", "apple", "azure", "facebook", "kakao"], + token: str, + access_token: str | None = None, + nonce: str | None = None, + captcha_token: str | None = None, + ) -> AuthResponse: + """ + Allows signing in with an OIDC ID token. The authentication provider used should be enabled and configured. 
+ """ + auth_response = await self._sign_in_with_id_token( + provider, token, access_token, nonce, captcha_token + ) + await self.save_session_and_sign_in(auth_response) + return auth_response + + async def sign_in_with_oauth( + self, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> OAuthResponse: + """ + Log in an existing user via a third-party provider. + """ + await self.session_manager.remove_session() + oauth_response, code_verifier = self._sign_in_with_oauth( + provider, redirect_to, scopes, query_params + ) + if code_verifier: + key = f"{self.session_manager.storage_key}-code-verifier" + await self.session_manager.storage.set_item(key, code_verifier) + return oauth_response + + async def link_identity( + self, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> OAuthResponse: + session = await self.session_manager.get_session_or_raise() + oauth_response, code_verifier = await self._link_identity( + session, provider, redirect_to, scopes, query_params + ) + if code_verifier: + key = f"{self.session_manager.storage_key}-code-verifier" + await self.session_manager.storage.set_item(key, code_verifier) + return oauth_response + + async def get_user_identities(self) -> IdentitiesResponse: + response = await self.get_user() + if not response: + raise AuthSessionMissingError() + return IdentitiesResponse(identities=response.user.identities or []) + + async def unlink_identity(self, identity: UserIdentity) -> Response: + session = await self.session_manager.get_session_or_raise() + return await self._unlink_identity(session, identity) + + async def sign_in_with_otp( + self, + credentials: SignInWithPasswordlessCredentials, + ) -> AuthOtpResponse: + """ + Log in a user using magiclink or a one-time password (OTP). 
+ + If the `{{ .ConfirmationURL }}` variable is specified in + the email template, a magiclink will be sent. + + If the `{{ .Token }}` variable is specified in the email + template, an OTP will be sent. + + If you're using phone sign-ins, only an OTP will be sent. + You won't be able to send a magiclink for phone sign-ins. + """ + await self.session_manager.remove_session() + return await self._sign_in_with_otp(credentials) + + async def verify_otp(self, params: VerifyOtpParams) -> AuthResponse: + """ + Log in a user given a User supplied OTP received via mobile. + """ + auth_response = await self._verify_otp(params) + await self.save_session_and_sign_in(auth_response) + return auth_response + + async def reauthenticate(self) -> AuthResponse: + session = await self.session_manager.get_session_or_raise() + return await self._reauthenticate(session) + + async def get_session(self) -> Session | None: + """ + Returns the session, refreshing it if necessary. + + The session returned can be null if the session is not detected which + can happen in the event a user is not signed-in or has logged out. + """ + return await self.session_manager.get_session() + + async def get_user(self, jwt: str | None = None) -> UserResponse | None: + """ + Gets the current user details if there is an existing session. + + Takes in an optional access token `jwt`. If no `jwt` is provided, + `get_user()` will attempt to get the `jwt` from the current session. + """ + if not jwt: + session = await self.get_session() + if not session: + return None + jwt = session.access_token + return await self.session_manager.get_user(jwt) + + async def update_user( + self, attributes: UserAttributes, email_redirect_to: str | None = None + ) -> UserResponse: + """ + Updates user data, if there is a logged in user. 
+ """ + session = await self.session_manager.get_session_or_raise() + user_response = await self._update_user(session, attributes, email_redirect_to) + await self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("USER_UPDATED", session) + return user_response + + async def set_session(self, access_token: str, refresh_token: str) -> AuthResponse: + """ + Sets the session data from the current session. If the current session + is expired, `set_session` will take care of refreshing it to obtain a + new session. + + If the refresh token in the current session is invalid and the current + session has expired, an error will be thrown. + + If the current session does not contain at `expires_at` field, + `set_session` will use the exp claim defined in the access token. + + The current session that minimally contains an access token, + refresh token and a user. + """ + auth_response = await self._set_session(access_token, refresh_token) + if auth_response.session: + await self.session_manager.save_session(auth_response.session) + self.session_manager.notify_all_subscribers( + "TOKEN_REFRESHED", auth_response.session + ) + return auth_response + + async def refresh_session(self, refresh_token: str | None = None) -> AuthResponse: + """ + Returns a new session, regardless of expiry status. + + Takes in an optional current session. If not passed in, then refreshSession() + will attempt to retrieve it from getSession(). If the current session's + refresh token is invalid, an error will be thrown. 
+ """ + if not refresh_token: + session = await self.get_session() + if session: + refresh_token = session.refresh_token + if not refresh_token: + raise AuthSessionMissingError() + session = await self.session_manager.call_refresh_token(refresh_token) + return AuthResponse(session=session, user=session.user) + + async def sign_out(self, scope: SignOutScope = "global") -> None: + """ + `sign_out` will remove the logged in user from the + current session and log them out - removing all items from storage and then trigger a `"SIGNED_OUT"` event. + + For advanced use cases, you can revoke all refresh tokens for a user by passing a user's JWT through to `admin.sign_out`. + + There is no way to revoke a user's access token jwt until it expires. + It is recommended to set a shorter expiry on the jwt for this reason. + """ + session = await self.get_session() + if session: + await self._sign_out(session, scope) + + if scope != "others": + await self.session_manager.remove_session() + self.session_manager.notify_all_subscribers("SIGNED_OUT", None) + + def on_auth_state_change( + self, + callback: Callable[[AuthChangeEvent, Session | None], None], + ) -> Subscription: + """ + Receive a notification every time an auth event happens. 
+ """ + unique_id = str(uuid4()) + + def _unsubscribe() -> None: + self.session_manager.state_change_emitters.pop(unique_id) + + subscription = Subscription( + id=unique_id, + callback=callback, + unsubscribe=_unsubscribe, + ) + self.session_manager.state_change_emitters[unique_id] = subscription + return subscription + + async def get_claims( + self, jwt: str | None = None, jwks: JWKSet | None = None + ) -> ClaimsResponse | None: + if not jwt: + session = await self.get_session() + if not session: + return None + jwt = session.access_token + return await self._get_claims(jwt, jwks) + + +class SyncSupabaseAuthClient(SupabaseAuthHttpClient[SyncHttpIO]): + def __init__( + self, + url: str, + http_session: HttpSession, + *, + headers: dict[str, str] | None = None, + storage_key: str | None = None, + auto_refresh_token: bool = True, + persist_session: bool = True, + storage: SyncSupportedStorage | None = None, + flow_type: AuthFlowType = "implicit", + ) -> None: + self.base_url = URL(url) + default_headers = Headers.from_mapping(headers) if headers else Headers.empty() + executor = SyncHttpIO(session=http_session) + self.session_manager: SyncSessionManager = SyncSessionManager( + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + storage=storage or SyncMemoryStorage(), + state_change_emitters={}, + auto_refresh_token=auto_refresh_token, + ) + SupabaseAuthHttpClient.__init__( + self, + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + session_manager=self.session_manager, + _jwks=JWKSet(keys=[]), + flow_type=flow_type, + ) + self.mfa = SyncSupabaseAuthMFAClient( + base_url=self.base_url, + executor=executor, + default_headers=default_headers, + session_manager=self.session_manager, + ) + + def __enter__(self) -> SyncSupabaseAuthClient: + self.executor.session.__enter__() + self.session_manager.__enter__() + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + 
tb: TracebackType | None, + ) -> None: + self.executor.session.__exit__(exc_type, exc, tb) + self.session_manager.__exit__(exc_type, exc, tb) + + # Initializations + + def initialize(self, *, url: str | None = None) -> None: + if url and is_implicit_grant_flow(URL(url)): + self.initialize_from_url(url) + else: + self.initialize_from_storage() + + def initialize_from_storage(self) -> None: + return self.session_manager.recover_and_refresh() + + def initialize_from_url(self, url: str) -> None: + try: + if is_implicit_grant_flow(URL(url)): + session, redirect_type = self._get_session_from_url(url) + self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("SIGNED_IN", session) + if redirect_type == "recovery": + self.session_manager.notify_all_subscribers( + "PASSWORD_RECOVERY", session + ) + except Exception as e: + self.session_manager.remove_session() + raise e + + def save_session_and_sign_in(self, auth_response: AuthResponse) -> None: + self.session_manager.remove_session() + if auth_response.session: + self.session_manager.save_session(auth_response.session) + self.session_manager.notify_all_subscribers( + "SIGNED_IN", auth_response.session + ) + + # Public methods + + def sign_in_anonymously( + self, data: JSON = None, captcha_token: str | None = None + ) -> AuthResponse: + """ + Creates a new anonymous user. + """ + auth_response = self._sign_in_anonymously(data, captcha_token) + self.save_session_and_sign_in(auth_response) + return auth_response + + def sign_up( + self, + credentials: SignUpWithPasswordCredentials, + ) -> AuthResponse: + """ + Creates a new user. + """ + auth_response = self._sign_up(credentials) + self.save_session_and_sign_in(auth_response) + return auth_response + + def sign_in_with_password( + self, + credentials: SignInWithPasswordCredentials, + ) -> AuthResponse: + """ + Log in an existing user with an email or phone and password. 
+ """ + auth_response = self._sign_in_with_password(credentials) + self.save_session_and_sign_in(auth_response) + return auth_response + + def sign_in_with_id_token( + self, + provider: Literal["google", "apple", "azure", "facebook", "kakao"], + token: str, + access_token: str | None = None, + nonce: str | None = None, + captcha_token: str | None = None, + ) -> AuthResponse: + """ + Allows signing in with an OIDC ID token. The authentication provider used should be enabled and configured. + """ + auth_response = self._sign_in_with_id_token( + provider, token, access_token, nonce, captcha_token + ) + self.save_session_and_sign_in(auth_response) + return auth_response + + def sign_in_with_oauth( + self, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> OAuthResponse: + """ + Log in an existing user via a third-party provider. + """ + self.session_manager.remove_session() + oauth_response, code_verifier = self._sign_in_with_oauth( + provider, redirect_to, scopes, query_params + ) + if code_verifier: + key = f"{self.session_manager.storage_key}-code-verifier" + self.session_manager.storage.set_item(key, code_verifier) + return oauth_response + + def link_identity( + self, + provider: Provider, + redirect_to: str | None = None, + scopes: str | None = None, + query_params: dict[str, str] | None = None, + ) -> OAuthResponse: + session = self.session_manager.get_session_or_raise() + oauth_response, code_verifier = self._link_identity( + session, provider, redirect_to, scopes, query_params + ) + if code_verifier: + key = f"{self.session_manager.storage_key}-code-verifier" + self.session_manager.storage.set_item(key, code_verifier) + return oauth_response + + def get_user_identities(self) -> IdentitiesResponse: + response = self.get_user() + if not response: + raise AuthSessionMissingError() + return IdentitiesResponse(identities=response.user.identities or []) + + def 
unlink_identity(self, identity: UserIdentity) -> Response: + session = self.session_manager.get_session_or_raise() + return self._unlink_identity(session, identity) + + def sign_in_with_otp( + self, + credentials: SignInWithPasswordlessCredentials, + ) -> AuthOtpResponse: + """ + Log in a user using magiclink or a one-time password (OTP). + + If the `{{ .ConfirmationURL }}` variable is specified in + the email template, a magiclink will be sent. + + If the `{{ .Token }}` variable is specified in the email + template, an OTP will be sent. + + If you're using phone sign-ins, only an OTP will be sent. + You won't be able to send a magiclink for phone sign-ins. + """ + self.session_manager.remove_session() + return self._sign_in_with_otp(credentials) + + def verify_otp(self, params: VerifyOtpParams) -> AuthResponse: + """ + Log in a user given a User supplied OTP received via mobile. + """ + auth_response = self._verify_otp(params) + self.save_session_and_sign_in(auth_response) + return auth_response + + def reauthenticate(self) -> AuthResponse: + session = self.session_manager.get_session_or_raise() + return self._reauthenticate(session) + + def get_session(self) -> Session | None: + """ + Returns the session, refreshing it if necessary. + + The session returned can be null if the session is not detected which + can happen in the event a user is not signed-in or has logged out. + """ + return self.session_manager.get_session() + + def get_user(self, jwt: str | None = None) -> UserResponse | None: + """ + Gets the current user details if there is an existing session. + + Takes in an optional access token `jwt`. If no `jwt` is provided, + `get_user()` will attempt to get the `jwt` from the current session. 
+ """ + if not jwt: + session = self.get_session() + if not session: + return None + jwt = session.access_token + return self.session_manager.get_user(jwt) + + def update_user( + self, attributes: UserAttributes, email_redirect_to: str | None = None + ) -> UserResponse: + """ + Updates user data, if there is a logged in user. + """ + session = self.session_manager.get_session_or_raise() + user_response = self._update_user(session, attributes, email_redirect_to) + self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("USER_UPDATED", session) + return user_response + + def set_session(self, access_token: str, refresh_token: str) -> AuthResponse: + """ + Sets the session data from the current session. If the current session + is expired, `set_session` will take care of refreshing it to obtain a + new session. + + If the refresh token in the current session is invalid and the current + session has expired, an error will be thrown. + + If the current session does not contain at `expires_at` field, + `set_session` will use the exp claim defined in the access token. + + The current session that minimally contains an access token, + refresh token and a user. + """ + auth_response = self._set_session(access_token, refresh_token) + if auth_response.session: + self.session_manager.save_session(auth_response.session) + self.session_manager.notify_all_subscribers( + "TOKEN_REFRESHED", auth_response.session + ) + return auth_response + + def refresh_session(self, refresh_token: str | None = None) -> AuthResponse: + """ + Returns a new session, regardless of expiry status. + + Takes in an optional current session. If not passed in, then refreshSession() + will attempt to retrieve it from getSession(). If the current session's + refresh token is invalid, an error will be thrown. 
+ """ + if not refresh_token: + session = self.get_session() + if session: + refresh_token = session.refresh_token + if not refresh_token: + raise AuthSessionMissingError() + session = self.session_manager.call_refresh_token(refresh_token) + return AuthResponse(session=session, user=session.user) + + def sign_out(self, scope: SignOutScope = "global") -> None: + """ + `sign_out` will remove the logged in user from the + current session and log them out - removing all items from storage and then trigger a `"SIGNED_OUT"` event. + + For advanced use cases, you can revoke all refresh tokens for a user by passing a user's JWT through to `admin.sign_out`. + + There is no way to revoke a user's access token jwt until it expires. + It is recommended to set a shorter expiry on the jwt for this reason. + """ + session = self.get_session() + if session: + self._sign_out(session, scope) + + if scope != "others": + self.session_manager.remove_session() + self.session_manager.notify_all_subscribers("SIGNED_OUT", None) + + def on_auth_state_change( + self, + callback: Callable[[AuthChangeEvent, Session | None], None], + ) -> Subscription: + """ + Receive a notification every time an auth event happens. 
+ """ + unique_id = str(uuid4()) + + def _unsubscribe() -> None: + self.session_manager.state_change_emitters.pop(unique_id) + + subscription = Subscription( + id=unique_id, + callback=callback, + unsubscribe=_unsubscribe, + ) + self.session_manager.state_change_emitters[unique_id] = subscription + return subscription + + def get_claims( + self, jwt: str | None = None, jwks: JWKSet | None = None + ) -> ClaimsResponse | None: + if not jwt: + session = self.get_session() + if not session: + return None + jwt = session.access_token + return self._get_claims(jwt, jwks) diff --git a/src/auth/src/supabase_auth/errors.py b/src/auth/src/supabase_auth/errors.py index e7a10539..eb4baefd 100644 --- a/src/auth/src/supabase_auth/errors.py +++ b/src/auth/src/supabase_auth/errors.py @@ -1,8 +1,9 @@ from __future__ import annotations -from typing import List, Literal, Optional +from dataclasses import dataclass +from typing import List, Literal -from typing_extensions import TypedDict +from pydantic import BaseModel, TypeAdapter ErrorCode = Literal[ "unexpected_failure", @@ -89,75 +90,66 @@ "invalid_jwt", ] +ErrorCodeAdapter: TypeAdapter[ErrorCode] = TypeAdapter(ErrorCode) + + +class WeakPasswordReasons(BaseModel): + reasons: list[str] + + +class RawApiError(BaseModel): + msg: str | None = None + message: str | None = None + error: str | None = None + code: int | None = None + error_description: str | None = None + error_code: ErrorCode | None = None + weak_password: WeakPasswordReasons | None = None + + def get_error_message(self) -> str: + return ( + self.msg + or self.message + or self.error_description + or self.error + or "Unknown" + ) + class UserDoesntExist(Exception): def __init__(self, access_token: str) -> None: self.access_token = access_token +@dataclass class AuthError(Exception): - def __init__(self, message: str, code: ErrorCode | None) -> None: - Exception.__init__(self, message) - self.message = message - self.name = "AuthError" - self.code = code - - -class 
AuthApiErrorDict(TypedDict): - name: str message: str - status: int code: ErrorCode | None +@dataclass class AuthApiError(AuthError): - def __init__(self, message: str, status: int, code: Optional[ErrorCode]) -> None: - AuthError.__init__(self, message, code) - self.name = "AuthApiError" - self.status = status - self.code = code - - def to_dict(self) -> AuthApiErrorDict: - return { - "name": self.name, - "message": self.message, - "status": self.status, - "code": self.code, - } + status: int +@dataclass class AuthUnknownError(AuthError): - def __init__(self, message: str, original_error: Exception) -> None: - AuthError.__init__(self, message, None) - self.name = "AuthUnknownError" - self.original_error = original_error + data: bytes + status: int +@dataclass class CustomAuthError(AuthError): - def __init__( - self, message: str, name: str, status: int, code: Optional[ErrorCode] - ) -> None: - AuthError.__init__(self, message, code) - self.name = name - self.status = status - - def to_dict(self) -> AuthApiErrorDict: - return { - "name": self.name, - "message": self.message, - "status": self.status, - "code": self.code, - } + status: int class AuthSessionMissingError(CustomAuthError): def __init__(self) -> None: CustomAuthError.__init__( self, - "Auth session missing!", - "AuthSessionMissingError", - 400, - None, + message="Auth session missing!", + status=400, + code=None, ) @@ -165,45 +157,27 @@ class AuthInvalidCredentialsError(CustomAuthError): def __init__(self, message: str) -> None: CustomAuthError.__init__( self, - message, - "AuthInvalidCredentialsError", - 400, - None, + message=message, + status=400, + code=None, ) -class AuthImplicitGrantRedirectErrorDetails(TypedDict): - error: str - code: str - - -class AuthImplicitGrantRedirectErrorDict(AuthApiErrorDict): - details: Optional[AuthImplicitGrantRedirectErrorDetails] - - class AuthImplicitGrantRedirectError(CustomAuthError): def __init__( self, message: str, - details: 
Optional[AuthImplicitGrantRedirectErrorDetails] = None, + error: str | None = None, + code: str | None = None, ) -> None: CustomAuthError.__init__( self, - message, - "AuthImplicitGrantRedirectError", - 500, - None, + message=message, + status=500, + code=None, ) - self.details = details - - def to_dict(self) -> AuthImplicitGrantRedirectErrorDict: - return { - "name": self.name, - "message": self.message, - "status": self.status, - "details": self.details, - "code": self.code, - } + self.detail_error = error + self.detail_code = code class AuthRetryableError(CustomAuthError): @@ -211,43 +185,27 @@ def __init__(self, message: str, status: int) -> None: CustomAuthError.__init__( self, message, - "AuthRetryableError", - status, - None, + status=status, + code=None, ) -class AuthApiErrorWithReasonsDict(AuthApiErrorDict): - reasons: List[str] - - class AuthWeakPasswordError(CustomAuthError): def __init__(self, message: str, status: int, reasons: List[str]) -> None: CustomAuthError.__init__( self, - message, - "AuthWeakPasswordError", - status, - "weak_password", + message=message, + status=status, + code="weak_password", ) self.reasons = reasons - def to_dict(self) -> AuthApiErrorWithReasonsDict: - return { - "name": self.name, - "message": self.message, - "status": self.status, - "reasons": self.reasons, - "code": self.code, - } - class AuthInvalidJwtError(CustomAuthError): def __init__(self, message: str) -> None: CustomAuthError.__init__( self, - message, - "AuthInvalidJwtError", - 400, - "invalid_jwt", + message=message, + status=400, + code="invalid_jwt", ) diff --git a/src/auth/src/supabase_auth/helpers.py b/src/auth/src/supabase_auth/helpers.py index 10cfbf34..dbe065ad 100644 --- a/src/auth/src/supabase_auth/helpers.py +++ b/src/auth/src/supabase_auth/helpers.py @@ -8,12 +8,14 @@ import string import uuid from base64 import urlsafe_b64decode +from dataclasses import dataclass from datetime import datetime -from typing import Any, Dict, Optional, Type, TypedDict, 
TypeVar, Union +from typing import Any, Dict, Type, TypeVar from urllib.parse import urlparse -from httpx import HTTPStatusError, Response from pydantic import BaseModel, TypeAdapter, ValidationError +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import Response from .constants import ( API_VERSION_HEADER_NAME, @@ -26,6 +28,8 @@ AuthRetryableError, AuthUnknownError, AuthWeakPasswordError, + ErrorCodeAdapter, + RawApiError, ) from .types import ( AuthOtpResponse, @@ -45,7 +49,7 @@ TBaseModel = TypeVar("TBaseModel", bound=BaseModel) -def model_validate(model: Type[TBaseModel], contents: Union[str, bytes]) -> TBaseModel: +def model_validate(model: Type[TBaseModel], contents: str | bytes) -> TBaseModel: """Compatibility layer between pydantic 1 and 2 for parsing an instance of a BaseModel from varied""" try: @@ -78,34 +82,37 @@ def model_dump_json(model: BaseModel) -> str: def parse_auth_response(response: Response) -> AuthResponse: try: - session = model_validate(Session, response.content) + session = validate_model(response, Session) user = session.user except ValidationError: session = None - user = model_validate(User, response.content) + user = validate_model(response, User) return AuthResponse(user=user, session=session) def parse_auth_otp_response(response: Response) -> AuthOtpResponse: - return model_validate(AuthOtpResponse, response.content) + return validate_model(response, AuthOtpResponse) def parse_link_identity_response(response: Response) -> LinkIdentityResponse: - return model_validate(LinkIdentityResponse, response.content) + return validate_model(response, LinkIdentityResponse) def parse_link_response(response: Response) -> GenerateLinkResponse: - properties = model_validate(GenerateLinkProperties, response.content) - user = model_validate(User, response.content) + properties = validate_model(response, GenerateLinkProperties) + user = validate_model(response, User) return GenerateLinkResponse(properties=properties, 
user=user) -UserParser: TypeAdapter = TypeAdapter(Union[UserResponse, User]) +UserParser: TypeAdapter[UserResponse | User] = TypeAdapter(UserResponse | User) def parse_user_response(response: Response) -> UserResponse: - parsed = UserParser.validate_json(response.content) - return UserResponse(user=parsed) if isinstance(parsed, User) else parsed + if response.is_success: + parsed = UserParser.validate_json(response.content) + return UserResponse(user=parsed) if isinstance(parsed, User) else parsed + else: + raise handle_error_response(response) def parse_sso_response(response: Response) -> SSOResponse: @@ -117,79 +124,76 @@ def parse_sso_response(response: Response) -> SSOResponse: def parse_jwks(response: Response) -> JWKSet: jwk = JWKSetParser.validate_json(response.content) - if len(jwk["keys"]) == 0: + if len(jwk.keys) == 0: raise AuthInvalidJwtError("JWKS is empty") return jwk -def get_error_message(error: Any) -> str: - props = ["msg", "message", "error_description", "error"] +Model = TypeVar("Model", bound=BaseModel) - def filter(prop) -> bool: - return prop in error if isinstance(error, dict) else hasattr(error, prop) - return next((error[prop] for prop in props if filter(prop)), str(error)) +def validate_model(response: Response, model: type[Model]) -> Model: + if response.is_success: + return model.model_validate_json(response.content) + else: + raise handle_error_response(response) -def handle_exception(error: HTTPStatusError | RuntimeError) -> AuthError: - if not isinstance(error, HTTPStatusError): - return AuthRetryableError(get_error_message(error), 0) +Inner = TypeVar("Inner") + + +def validate_adapter(response: Response, adapter: TypeAdapter[Inner]) -> Inner: + if response.is_success: + return adapter.validate_json(response.content) + else: + raise handle_error_response(response) + + +def handle_error_response(response: Response) -> AuthError: try: - network_error_codes = [502, 503, 504] - if error.response.status_code in network_error_codes: - 
return AuthRetryableError( - get_error_message(error), error.response.status_code - ) - data = error.response.json() - - error_code = None - response_api_version = parse_response_api_version(error.response) - - if ( - response_api_version - and ( - datetime.timestamp(response_api_version) - >= API_VERSIONS_2024_01_01_TIMESTAMP - ) - and isinstance(data, dict) - and data - and isinstance(data.get("code"), str) - ): - error_code = data.get("code") - elif ( - isinstance(data, dict) and data and isinstance(data.get("error_code"), str) - ): - error_code = data.get("error_code") - - if error_code is None: - if ( - isinstance(data, dict) - and data - and isinstance(data.get("weak_password"), dict) - and data.get("weak_password") - and isinstance(data.get("weak_password"), list) - and len(data["weak_password"]) - ): - return AuthWeakPasswordError( - get_error_message(data), - error.response.status_code, - data["weak_password"].get("reasons"), - ) - elif error_code == "weak_password": - return AuthWeakPasswordError( - get_error_message(data), - error.response.status_code, - data["weak_password"].get("reasons", {}), - ) - - return AuthApiError( - get_error_message(data), - error.response.status_code or 500, - error_code, + raw_error = RawApiError.model_validate_json(response.content) + except ValidationError: + return AuthUnknownError( + message="Unexpected error: Unable to parse API error", + code="unexpected_failure", + status=response.status, + data=response.content, + ) + if not response.is_error: + return AuthRetryableError(raw_error.get_error_message(), response.status) + if 502 <= response.status <= 504: + return AuthRetryableError(raw_error.get_error_message(), response.status) + error_code = None + response_api_version = parse_response_api_version(response) + + if ( + response_api_version + and datetime.timestamp(response_api_version) + >= API_VERSIONS_2024_01_01_TIMESTAMP + ): + error_code = ErrorCodeAdapter.validate_python(raw_error.error_code) + else: + 
error_code = raw_error.error_code + + if error_code is None and raw_error.weak_password: + return AuthWeakPasswordError( + message=raw_error.get_error_message(), + status=response.status, + reasons=raw_error.weak_password.reasons, + ) + elif error_code == "weak_password": + return AuthWeakPasswordError( + raw_error.get_error_message(), + status=response.status, + reasons=raw_error.weak_password.reasons if raw_error.weak_password else [], ) - except Exception as e: - return AuthUnknownError(get_error_message(error), e) + + return AuthApiError( + raw_error.get_error_message(), + status=response.status or 500, + code=error_code, + ) def str_from_base64url(base64url: str) -> str: @@ -206,15 +210,13 @@ def base64url_to_bytes(base64url: str) -> bytes: return urlsafe_b64decode(base64url_with_padding) -class DecodedJWT(TypedDict): +@dataclass +class DecodedJWT: header: JWTHeader payload: JWTPayload signature: bytes - raw: Dict[str, str] - - -JWTHeaderParser = TypeAdapter(JWTHeader) -JWTPayloadParser = TypeAdapter(JWTPayload) + raw_header: str + raw_payload: str def decode_jwt(token: str) -> DecodedJWT: @@ -230,17 +232,15 @@ def decode_jwt(token: str) -> DecodedJWT: raise AuthInvalidJwtError("Invalid JWT structure") from e return DecodedJWT( - header=JWTHeaderParser.validate_json(header), - payload=JWTPayloadParser.validate_json(payload), + header=JWTHeader.model_validate_json(header), + payload=JWTPayload.model_validate_json(payload), signature=signature, - raw={ - "header": parts[0], - "payload": parts[1], - }, + raw_header=parts[0], + raw_payload=parts[1], ) -def generate_pkce_verifier(length=64) -> str: +def generate_pkce_verifier(length: int = 64) -> str: """Generate a random PKCE verifier of the specified length.""" if length < 43 or length > 128: raise ValueError("PKCE verifier length must be between 43 and 128 characters") @@ -251,7 +251,7 @@ def generate_pkce_verifier(length=64) -> str: return "".join(secrets.choice(charset) for _ in range(length)) -def 
generate_pkce_challenge(code_verifier) -> str: +def generate_pkce_challenge(code_verifier: str) -> str: """Generate a code challenge from a PKCE verifier.""" # Hash the verifier using SHA-256 verifier_bytes = code_verifier.encode("utf-8") @@ -263,7 +263,7 @@ def generate_pkce_challenge(code_verifier) -> str: API_VERSION_REGEX = r"^2[0-9]{3}-(0[1-9]|1[0-2])-(0[1-9]|1[0-9]|2[0-9]|3[0-1])$" -def parse_response_api_version(response: Response) -> Optional[datetime]: +def parse_response_api_version(response: Response) -> datetime | None: api_version = response.headers.get(API_VERSION_HEADER_NAME) if not api_version: @@ -283,7 +283,7 @@ def is_http_url(url: str) -> bool: return urlparse(url).scheme in {"https", "http"} -def validate_exp(exp: int) -> None: +def validate_exp(exp: int | None) -> None: if not exp: raise AuthInvalidJwtError("JWT has no expiration time") @@ -305,3 +305,9 @@ def validate_uuid(id: str | None) -> None: raise ValueError("Invalid id, id is None") if not is_valid_uuid(id): raise ValueError(f"Invalid id, '{id}' is not a valid uuid") + + +def redirect_to_as_query(redirect_to: str | None) -> URLQuery: + if redirect_to: + return URLQuery.from_mapping({"redirect_to": redirect_to}) + return URLQuery.empty() diff --git a/src/auth/src/supabase_auth/mfa.py b/src/auth/src/supabase_auth/mfa.py new file mode 100644 index 00000000..b68ac431 --- /dev/null +++ b/src/auth/src/supabase_auth/mfa.py @@ -0,0 +1,370 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Generic, Literal + +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + HttpIO, + HttpMethod, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.request import EmptyRequest, JSONRequest +from yarl import URL + +from .errors import AuthSessionMissingError +from .helpers import ( + decode_jwt, + validate_model, +) +from .session import AsyncSessionManager, SyncSessionManager +from .types import ( + 
AMREntry, + AuthMFAChallengeResponse, + AuthMFAEnrollResponse, + AuthMFAGetAuthenticatorAssuranceLevelResponse, + AuthMFAListFactorsResponse, + AuthMFAUnenrollResponse, + AuthMFAVerifyResponse, + MFAEnrollParams, + Session, + UserResponse, +) + + +@dataclass +class SupabaseAuthMFAHttpClient(Generic[HttpIO]): + """ + Contains the full multi-factor authentication API http calls + """ + + executor: HttpIO + base_url: URL + default_headers: Headers + + @handle_http_io + def _enroll( + self, session: Session, params: MFAEnrollParams + ) -> HttpMethod[AuthMFAEnrollResponse]: + response = yield JSONRequest( + method="POST", + path=["factors"], + body=params, + headers=session.encode_access_token(), + ) + auth_response = validate_model(response, AuthMFAEnrollResponse) + if params.factor_type == "totp" and auth_response.totp: + auth_response.totp.qr_code = ( + f"data:image/svg+xml;utf-8,{auth_response.totp.qr_code}" + ) + return auth_response + + def _challenge_http( + self, + session: Session, + factor_id: str, + channel: Literal["sms", "whatsapp"] | None = None, + ) -> HttpMethod[AuthMFAChallengeResponse]: + response = yield JSONRequest( + method="POST", + path=["factors", factor_id, "challenge"], + body={"channel": channel}, + headers=session.encode_access_token(), + ) + return validate_model(response, AuthMFAChallengeResponse) + + @handle_http_io + def _challenge( + self, + session: Session, + factor_id: str, + channel: Literal["sms", "whatsapp"] | None = None, + ) -> HttpMethod[AuthMFAChallengeResponse]: + return self._challenge_http(session, factor_id, channel) + + def _verify_http( + self, session: Session, factor_id: str, code: str, challenge_id: str + ) -> HttpMethod[tuple[AuthMFAVerifyResponse, Session]]: + response = yield JSONRequest( + method="POST", + path=["factors", factor_id, "verify"], + body={ + "factor_id": factor_id, + "code": code, + "challenge_id": challenge_id, + }, + headers=session.encode_access_token(), + ) + auth_response = 
validate_model(response, AuthMFAVerifyResponse) + session = validate_model(response, Session) + return auth_response, session + + @handle_http_io + def _verify( + self, session: Session, factor_id: str, code: str, challenge_id: str + ) -> HttpMethod[tuple[AuthMFAVerifyResponse, Session]]: + return self._verify_http(session, factor_id, code, challenge_id) + + @handle_http_io + def _challenge_and_verify( + self, + session: Session, + factor_id: str, + code: str, + ) -> HttpMethod[tuple[AuthMFAVerifyResponse, Session]]: + response = yield from self._challenge_http(session, factor_id) + result = yield from self._verify_http( + session, factor_id, code, challenge_id=response.id + ) + return result + + @handle_http_io + def _unenroll( + self, session: Session, factor_id: str + ) -> HttpMethod[AuthMFAUnenrollResponse]: + response = yield EmptyRequest( + method="DELETE", + path=["factors", factor_id], + headers=session.encode_access_token(), + ) + return validate_model(response, AuthMFAUnenrollResponse) + + def _list_factors( + self, user_response: UserResponse | None + ) -> AuthMFAListFactorsResponse: + factors = user_response.user.factors or [] if user_response else [] + totp = [ + f for f in factors if f.factor_type == "totp" and f.status == "verified" + ] + phone = [ + f for f in factors if f.factor_type == "phone" and f.status == "verified" + ] + return AuthMFAListFactorsResponse(all=factors, totp=totp, phone=phone) + + def _get_authenticator_assurance_level( + self, + session: Session | None = None, + ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: + if not session: + return AuthMFAGetAuthenticatorAssuranceLevelResponse( + current_level=None, + next_level=None, + current_authentication_methods=[], + ) + payload = decode_jwt(session.access_token).payload + current_level = payload.aal + verified_factors = [ + f for f in session.user.factors or [] if f.status == "verified" + ] + next_level = "aal2" if verified_factors else current_level + amr_dict_list = payload.amr 
or [] + current_authentication_methods = [ + AMREntry.model_validate(amr) for amr in amr_dict_list + ] + return AuthMFAGetAuthenticatorAssuranceLevelResponse( + current_level=current_level, + next_level=next_level, + current_authentication_methods=current_authentication_methods, + ) + + +@dataclass +class AsyncSupabaseAuthMFAClient(SupabaseAuthMFAHttpClient[AsyncHttpIO]): + session_manager: AsyncSessionManager + + async def enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: + """ + Starts the enrollment process for a new Multi-Factor Authentication + factor. This method creates a new factor in the 'unverified' state. + Present the QR code or secret to the user and ask them to add it to their + authenticator app. Ask the user to provide you with an authenticator code + from their app and verify it by calling challenge and then verify. + + The first successful verification of an unverified factor activates the + factor. All other sessions are logged out and the current one gets an + `aal2` authenticator level. + """ + session = await self.session_manager.get_session_or_raise() + return await self._enroll(session, params) + + async def challenge( + self, factor_id: str, channel: Literal["sms", "whatsapp"] | None = None + ) -> AuthMFAChallengeResponse: + """ + Prepares a challenge used to verify that a user has access to a MFA + factor. Provide the challenge ID and verification code by calling `verify`. + """ + session = await self.session_manager.get_session_or_raise() + return await self._challenge(session, factor_id, channel) + + async def challenge_and_verify( + self, + factor_id: str, + code: str, + ) -> AuthMFAVerifyResponse: + """ + Helper method which creates a challenge and immediately uses the given code + to verify against it thereafter. The verification code is provided by the + user by entering a code seen in their authenticator app. 
+ """ + session = await self.session_manager.get_session_or_raise() + response, session = await self._challenge_and_verify(session, factor_id, code) + await self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("TOKEN_REFRESHED", session) + return response + + async def verify( + self, factor_id: str, code: str, challenge_id: str + ) -> AuthMFAVerifyResponse: + """ + Verifies a verification code against a challenge. The verification code is + provided by the user by entering a code seen in their authenticator app. + """ + session = await self.session_manager.get_session_or_raise() + response, session = await self._verify(session, factor_id, code, challenge_id) + await self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("MFA_CHALLENGE_VERIFIED", session) + return response + + async def unenroll(self, factor_id: str) -> AuthMFAUnenrollResponse: + """ + Unenroll removes a MFA factor. Unverified factors can safely be ignored + and it's not necessary to unenroll them. Unenrolling a verified MFA factor + cannot be done from a session with an `aal1` authenticator level. + """ + session = await self.session_manager.get_session_or_raise() + return await self._unenroll(session, factor_id) + + async def list_factors(self) -> AuthMFAListFactorsResponse: + """ + Returns the list of MFA factors enabled for this user. For most use cases + you should consider using `get_authenticator_assurance_level`. + + This uses a cached version of the factors and avoids incurring a network call. + If you need to update this list, call `get_user` first. + """ + session = await self.session_manager.get_session_or_raise() + user = await self.session_manager.get_user(session.access_token) + return self._list_factors(user) + + async def get_authenticator_assurance_level( + self, + ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: + """ + Returns the Authenticator Assurance Level (AAL) for the active session. 
+ + - `aal1` (or `null`) means that the user's identity has been verified only + with a conventional login (email+password, OTP, magic link, social login, + etc.). + - `aal2` means that the user's identity has been verified both with a + conventional login and at least one MFA factor. + + Although this method returns a promise, it's fairly quick (microseconds) + and rarely uses the network. You can use this to check whether the current + user needs to be shown a screen to verify their MFA factors. + """ + session = await self.session_manager.get_session() + return self._get_authenticator_assurance_level(session) + + +@dataclass +class SyncSupabaseAuthMFAClient(SupabaseAuthMFAHttpClient[SyncHttpIO]): + session_manager: SyncSessionManager + + def enroll(self, params: MFAEnrollParams) -> AuthMFAEnrollResponse: + """ + Starts the enrollment process for a new Multi-Factor Authentication + factor. This method creates a new factor in the 'unverified' state. + Present the QR code or secret to the user and ask them to add it to their + authenticator app. Ask the user to provide you with an authenticator code + from their app and verify it by calling challenge and then verify. + + The first successful verification of an unverified factor activates the + factor. All other sessions are logged out and the current one gets an + `aal2` authenticator level. + """ + session = self.session_manager.get_session_or_raise() + return self._enroll(session, params) + + def challenge( + self, factor_id: str, channel: Literal["sms", "whatsapp"] | None = None + ) -> AuthMFAChallengeResponse: + """ + Prepares a challenge used to verify that a user has access to a MFA + factor. Provide the challenge ID and verification code by calling `verify`. 
+ """ + session = self.session_manager.get_session_or_raise() + return self._challenge(session, factor_id, channel) + + def challenge_and_verify( + self, + factor_id: str, + code: str, + ) -> AuthMFAVerifyResponse: + """ + Helper method which creates a challenge and immediately uses the given code + to verify against it thereafter. The verification code is provided by the + user by entering a code seen in their authenticator app. + """ + session = self.session_manager.get_session_or_raise() + response, session = self._challenge_and_verify(session, factor_id, code) + self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("TOKEN_REFRESHED", session) + return response + + def verify( + self, factor_id: str, code: str, challenge_id: str + ) -> AuthMFAVerifyResponse: + """ + Verifies a verification code against a challenge. The verification code is + provided by the user by entering a code seen in their authenticator app. + """ + session = self.session_manager.get_session_or_raise() + response, session = self._verify(session, factor_id, code, challenge_id) + self.session_manager.save_session(session) + self.session_manager.notify_all_subscribers("MFA_CHALLENGE_VERIFIED", session) + return response + + def unenroll(self, factor_id: str) -> AuthMFAUnenrollResponse: + """ + Unenroll removes a MFA factor. Unverified factors can safely be ignored + and it's not necessary to unenroll them. Unenrolling a verified MFA factor + cannot be done from a session with an `aal1` authenticator level. + """ + session = self.session_manager.get_session() + if not session: + raise AuthSessionMissingError() + return self._unenroll(session, factor_id) + + def list_factors(self) -> AuthMFAListFactorsResponse: + """ + Returns the list of MFA factors enabled for this user. For most use cases + you should consider using `get_authenticator_assurance_level`. + + This uses a cached version of the factors and avoids incurring a network call. 
+ If you need to update this list, call `get_user` first. + """ + session = self.session_manager.get_session_or_raise() + user = self.session_manager.get_user(session.access_token) + return self._list_factors(user) + + def get_authenticator_assurance_level( + self, + ) -> AuthMFAGetAuthenticatorAssuranceLevelResponse: + """ + Returns the Authenticator Assurance Level (AAL) for the active session. + + - `aal1` (or `null`) means that the user's identity has been verified only + with a conventional login (email+password, OTP, magic link, social login, + etc.). + - `aal2` means that the user's identity has been verified both with a + conventional login and at least one MFA factor. + + Although this method returns a promise, it's fairly quick (microseconds) + and rarely uses the network. You can use this to check whether the current + user needs to be shown a screen to verify their MFA factors. + """ + session = self.session_manager.get_session() + return self._get_authenticator_assurance_level(session) diff --git a/src/auth/src/supabase_auth/session.py b/src/auth/src/supabase_auth/session.py new file mode 100644 index 00000000..76177d79 --- /dev/null +++ b/src/auth/src/supabase_auth/session.py @@ -0,0 +1,440 @@ +from __future__ import annotations + +import time +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from types import TracebackType +from typing import Dict, Generic + +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + HttpIO, + HttpMethod, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import EmptyRequest, JSONRequest +from yarl import URL + +from .constants import EXPIRY_MARGIN, MAX_RETRIES, RETRY_INTERVAL, STORAGE_KEY +from .errors import AuthRetryableError, AuthSessionMissingError +from .helpers import parse_auth_response, parse_user_response +from .timer import AsyncTimer, SyncTimer +from .types import 
AuthChangeEvent, AuthResponse, Session, Subscription, UserResponse + + +class AsyncSupportedStorage(ABC): + @abstractmethod + async def get_item(self, key: str) -> str | None: ... # pragma: no cover + + @abstractmethod + async def set_item(self, key: str, value: str) -> None: ... # pragma: no cover + + @abstractmethod + async def remove_item(self, key: str) -> None: ... # pragma: no cover + + +class AsyncMemoryStorage(AsyncSupportedStorage): + def __init__(self) -> None: + self.storage: Dict[str, str] = {} + + async def get_item(self, key: str) -> str | None: + if key in self.storage: + return self.storage[key] + return None + + async def set_item(self, key: str, value: str) -> None: + self.storage[key] = value + + async def remove_item(self, key: str) -> None: + if key in self.storage: + del self.storage[key] + + +@dataclass +class SessionManagerCommon(Generic[HttpIO]): + """ + Common methods shared between sync and async implementations + of the session manager. + """ + + base_url: URL + executor: HttpIO + default_headers: Headers + state_change_emitters: Dict[str, Subscription] + storage_key: str = field(default=STORAGE_KEY, kw_only=True) + persist_session: bool = field(default=True, kw_only=True) + network_retries: int = field(default=0, kw_only=True) + in_memory_session: Session | None = field(default=None, kw_only=True) + auto_refresh_token: bool = field(default=True, kw_only=True) + + def _refresh_access_token(self, refresh_token: str) -> HttpMethod[AuthResponse]: + response = yield JSONRequest( + method="POST", + path=["token"], + query=URLQuery.from_mapping({"grant_type": "refresh_token"}), + body={"refresh_token": refresh_token}, + ) + return parse_auth_response(response) + + @handle_http_io + def refresh_access_token(self, refresh_token: str) -> HttpMethod[AuthResponse]: + return self._refresh_access_token(refresh_token) + + def _get_user(self, jwt: str) -> HttpMethod[UserResponse]: + """ + Gets the current user details if there is an existing session. 
+ + Takes in an optional access token `jwt`. If no `jwt` is provided, + `get_user()` will attempt to get the `jwt` from the current session. + """ + response = yield EmptyRequest( + method="GET", + path=["user"], + headers=Headers.from_mapping({"authorization": f"Bearer {jwt}"}), + ) + return parse_user_response(response) + + @handle_http_io + def get_user(self, jwt: str) -> HttpMethod[UserResponse]: + return self._get_user(jwt) + + def parse_valid_session( + self, + raw_session: str | None, + ) -> Session | None: + if not raw_session: + return None + try: + session = Session.model_validate_json(raw_session) + if session.expires_at is None: + return None + return session + except Exception: + return None + + def notify_all_subscribers( + self, + event: AuthChangeEvent, + session: Session | None, + ) -> None: + for subscription in self.state_change_emitters.values(): + subscription.callback(event, session) + + +@dataclass +class AsyncSessionManager(SessionManagerCommon[AsyncHttpIO]): + storage: AsyncSupportedStorage + refresh_token_timer: AsyncTimer | None = None + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + + async def __aenter__(self) -> AsyncSessionManager: + return self + + async def remove_session(self) -> None: + if self.persist_session: + await self.storage.remove_item(self.storage_key) + else: + self.in_memory_session = None + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = None + + async def call_refresh_token(self, refresh_token: str) -> Session: + if not refresh_token: + raise AuthSessionMissingError() + response = await self.refresh_access_token(refresh_token) + if not response.session: + raise AuthSessionMissingError() + await self.save_session(response.session) + self.notify_all_subscribers("TOKEN_REFRESHED", response.session) + return response.session 
+ + async def get_session(self) -> Session | None: + """ + Returns the session, refreshing it if necessary. + + The session returned can be null if the session is not detected which + can happen in the event a user is not signed-in or has logged out. + """ + current_session: Session | None = None + if self.persist_session: + maybe_session = await self.storage.get_item(self.storage_key) + current_session = self.parse_valid_session(maybe_session) + if not current_session: + await self.remove_session() + else: + current_session = self.in_memory_session + + if not current_session: + return None + time_now = round(time.time()) + has_expired = ( + current_session.expires_at <= time_now + EXPIRY_MARGIN + if current_session.expires_at + else False + ) + if not has_expired: + return current_session + return await self.call_refresh_token(current_session.refresh_token) + + async def get_session_or_raise(self) -> Session: + session = await self.get_session() + if not session: + raise AuthSessionMissingError() + return session + + async def save_session(self, session: Session) -> None: + if not self.persist_session: + self.in_memory_session = session + expire_at = session.expires_at + if expire_at: + time_now = round(time.time()) + expire_in = expire_at - time_now + refresh_duration_before_expires = ( + EXPIRY_MARGIN if expire_in > EXPIRY_MARGIN else 0.5 + ) + value = (expire_in - refresh_duration_before_expires) * 1000 + self.start_auto_refresh_token(value) + if self.persist_session and session.expires_at: + await self.storage.set_item(self.storage_key, session.model_dump_json()) + + async def recover_and_refresh(self) -> None: + raw_session = await self.storage.get_item(self.storage_key) + current_session = self.parse_valid_session(raw_session) + if not current_session: + if raw_session: + await self.remove_session() + return + time_now = round(time.time()) + expires_at = current_session.expires_at + if expires_at and expires_at < time_now + EXPIRY_MARGIN: + refresh_token = 
current_session.refresh_token + if self.auto_refresh_token and refresh_token: + self.network_retries += 1 + try: + await self.call_refresh_token(refresh_token) + self.network_retries = 0 + except Exception as e: + if ( + isinstance(e, AuthRetryableError) + and self.network_retries < MAX_RETRIES + ): + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = AsyncTimer( + (RETRY_INTERVAL ** (2 * (self.network_retries - 1))), + self.recover_and_refresh, + ) + self.refresh_token_timer.start() + return + await self.remove_session() + return + if self.persist_session: + await self.save_session(current_session) + self.notify_all_subscribers("SIGNED_IN", current_session) + + async def refresh_token_function(self) -> None: + self.network_retries += 1 + try: + session = await self.get_session() + if session: + await self.call_refresh_token(session.refresh_token) + self.network_retries = 0 + except Exception as e: + if isinstance(e, AuthRetryableError) and self.network_retries < MAX_RETRIES: + self.start_auto_refresh_token( + (RETRY_INTERVAL ** (2 * (self.network_retries - 1))), + ) + + def start_auto_refresh_token(self, value: float) -> None: + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = None + if value <= 0 or not self.auto_refresh_token: + return + + self.refresh_token_timer = AsyncTimer(value, self.refresh_token_function) + self.refresh_token_timer.start() + + +class SyncSupportedStorage(ABC): + @abstractmethod + def get_item(self, key: str) -> str | None: ... # pragma: no cover + + @abstractmethod + def set_item(self, key: str, value: str) -> None: ... # pragma: no cover + + @abstractmethod + def remove_item(self, key: str) -> None: ... 
# pragma: no cover + + +class SyncMemoryStorage(SyncSupportedStorage): + def __init__(self) -> None: + self.storage: Dict[str, str] = {} + + def get_item(self, key: str) -> str | None: + if key in self.storage: + return self.storage[key] + return None + + def set_item(self, key: str, value: str) -> None: + self.storage[key] = value + + def remove_item(self, key: str) -> None: + if key in self.storage: + del self.storage[key] + + +@dataclass +class SyncSessionManager(SessionManagerCommon[SyncHttpIO]): + storage: SyncSupportedStorage + refresh_token_timer: SyncTimer | None = None + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + + def __enter__(self) -> SyncSessionManager: + return self + + def remove_session(self) -> None: + if self.persist_session: + self.storage.remove_item(self.storage_key) + else: + self.in_memory_session = None + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = None + + def call_refresh_token(self, refresh_token: str) -> Session: + if not refresh_token: + raise AuthSessionMissingError() + response = self.refresh_access_token(refresh_token) + if not response.session: + raise AuthSessionMissingError() + self.save_session(response.session) + self.notify_all_subscribers("TOKEN_REFRESHED", response.session) + return response.session + + def get_session(self) -> Session | None: + """ + Returns the session, refreshing it if necessary. + + The session returned can be null if the session is not detected which + can happen in the event a user is not signed-in or has logged out. 
+ """ + current_session: Session | None = None + if self.persist_session: + maybe_session = self.storage.get_item(self.storage_key) + current_session = self.parse_valid_session(maybe_session) + if not current_session: + self.remove_session() + else: + current_session = self.in_memory_session + + if not current_session: + return None + time_now = round(time.time()) + has_expired = ( + current_session.expires_at <= time_now + EXPIRY_MARGIN + if current_session.expires_at + else False + ) + if not has_expired: + return current_session + return self.call_refresh_token(current_session.refresh_token) + + def get_session_or_raise(self) -> Session: + session = self.get_session() + if not session: + raise AuthSessionMissingError() + return session + + def save_session(self, session: Session) -> None: + if not self.persist_session: + self.in_memory_session = session + expire_at = session.expires_at + if expire_at: + time_now = round(time.time()) + expire_in = expire_at - time_now + refresh_duration_before_expires = ( + EXPIRY_MARGIN if expire_in > EXPIRY_MARGIN else 0.5 + ) + value = (expire_in - refresh_duration_before_expires) * 1000 + self.start_auto_refresh_token(value) + if self.persist_session and session.expires_at: + self.storage.set_item(self.storage_key, session.model_dump_json()) + + def refresh_token_function(self) -> None: + self.network_retries += 1 + try: + session = self.get_session() + if session: + self.call_refresh_token(session.refresh_token) + self.network_retries = 0 + except Exception as e: + if isinstance(e, AuthRetryableError) and self.network_retries < MAX_RETRIES: + self.start_auto_refresh_token( + RETRY_INTERVAL ** (2 * (self.network_retries - 1)) + ) + + def recover_and_refresh(self) -> None: + raw_session = self.storage.get_item(self.storage_key) + current_session = self.parse_valid_session(raw_session) + if not current_session: + if raw_session: + self.remove_session() + return + time_now = round(time.time()) + expires_at = 
current_session.expires_at + if expires_at and expires_at < time_now + EXPIRY_MARGIN: + refresh_token = current_session.refresh_token + if self.auto_refresh_token and refresh_token: + self.network_retries += 1 + try: + self.call_refresh_token(refresh_token) + self.network_retries = 0 + except Exception as e: + if ( + isinstance(e, AuthRetryableError) + and self.network_retries < MAX_RETRIES + ): + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = SyncTimer( + (RETRY_INTERVAL ** (2 * (self.network_retries - 1))), + self.recover_and_refresh, + ) + self.refresh_token_timer.start() + return + self.remove_session() + return + if self.persist_session: + self.save_session(current_session) + self.notify_all_subscribers("SIGNED_IN", current_session) + + def start_auto_refresh_token(self, value: float) -> None: + if self.refresh_token_timer: + self.refresh_token_timer.cancel() + self.refresh_token_timer = None + if value <= 0 or not self.auto_refresh_token: + return + + self.refresh_token_timer = SyncTimer(value, self.refresh_token_function) + self.refresh_token_timer.start() diff --git a/src/auth/src/supabase_auth/timer.py b/src/auth/src/supabase_auth/timer.py index 15d5fa0b..4d4fd2c6 100644 --- a/src/auth/src/supabase_auth/timer.py +++ b/src/auth/src/supabase_auth/timer.py @@ -1,45 +1,57 @@ import asyncio -from threading import Timer as _Timer -from typing import Any, Callable, Coroutine, Optional, cast +from threading import Timer +from typing import Awaitable, Callable -class Timer: +class AsyncTimer: def __init__( self, seconds: float, - function: Callable[[], Optional[Coroutine[Any, Any, None]]], + function: Callable[[], Awaitable[None]], ) -> None: self._milliseconds = seconds self._function = function - self._task: Optional[asyncio.Task] = None - self._timer: Optional[_Timer] = None + self._task: asyncio.Task[None] | None = None def start(self) -> None: - if asyncio.iscoroutinefunction(self._function): + async def 
schedule() -> None: + await asyncio.sleep(self._milliseconds / 1000) + await self._function() - async def schedule() -> None: - await asyncio.sleep(self._milliseconds / 1000) - await cast(Coroutine[Any, Any, None], self._function()) - - def cleanup(_) -> None: - self._task = None + def cleanup(_task: asyncio.Task[None]) -> None: + self._task = None - self._task = asyncio.create_task(schedule()) - self._task.add_done_callback(cleanup) - else: - self._timer = _Timer(self._milliseconds / 1000, self._function) - self._timer.daemon = True - self._timer.start() + self._task = asyncio.create_task(schedule()) + self._task.add_done_callback(cleanup) def cancel(self) -> None: if self._task is not None: self._task.cancel() self._task = None + + def is_alive(self) -> bool: + return self._task is not None + + +class SyncTimer: + def __init__( + self, + seconds: float, + function: Callable[[], None], + ) -> None: + self._milliseconds = seconds + self._function = function + self._timer: Timer | None = None + + def start(self) -> None: + self._timer = Timer(self._milliseconds / 1000, self._function) + self._timer.daemon = True + self._timer.start() + + def cancel(self) -> None: if self._timer is not None: self._timer.cancel() self._timer = None def is_alive(self) -> bool: - return self._task is not None or ( - self._timer is not None and self._timer.is_alive() - ) + return self._timer is not None and self._timer.is_alive() diff --git a/src/auth/src/supabase_auth/types.py b/src/auth/src/supabase_auth/types.py index 7e1d9c64..9a9d2899 100644 --- a/src/auth/src/supabase_auth/types.py +++ b/src/auth/src/supabase_auth/types.py @@ -2,9 +2,18 @@ from datetime import datetime from time import time -from typing import Any, Callable, Dict, List, Optional, Union - -from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, with_config +from typing import Callable, List, Mapping + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + TypeAdapter, + model_validator, +) +from 
pydantic.dataclasses import dataclass +from supabase_utils.http.headers import Headers +from supabase_utils.types import JSON try: # > 2 @@ -17,7 +26,7 @@ model_validator_v1_v2_compat = root_validator # type: ignore -from typing_extensions import Literal, NotRequired, TypedDict +from typing_extensions import Literal, TypedDict Provider = Literal[ "apple", @@ -72,7 +81,7 @@ class AMREntry(BaseModel): identity and at what time. """ - method: Union[Literal["password", "otp", "oauth", "mfa/totp"], str] + method: Literal["password", "otp", "oauth", "mfa/totp"] | str """ Authentication method name. """ @@ -85,32 +94,18 @@ class AMREntry(BaseModel): class AMREntryDict(TypedDict): timestamp: int - method: Union[Literal["password", "otp", "oauth", "mfa/totp"], str] - - -class Options(TypedDict): - redirect_to: NotRequired[str] - captcha_token: NotRequired[str] - - -class UpdateUserOptions(TypedDict): - email_redirect_to: NotRequired[str] - - -class InviteUserByEmailOptions(TypedDict): - redirect_to: NotRequired[str] - data: NotRequired[Any] + method: Literal["password", "otp", "oauth", "mfa/totp"] | str class AuthResponse(BaseModel): - user: Optional[User] = None - session: Optional[Session] = None + user: User | None = None + session: Session | None = None class AuthOtpResponse(BaseModel): user: None = None session: None = None - message_id: Optional[str] = None + message_id: str | None = None class OAuthResponse(BaseModel): @@ -127,11 +122,11 @@ class LinkIdentityResponse(BaseModel): class IdentitiesResponse(BaseModel): - identities: List[UserIdentity] + identities: list[UserIdentity] class UserList(BaseModel): - users: List[User] + users: list[User] class UserResponse(BaseModel): @@ -139,12 +134,12 @@ class UserResponse(BaseModel): class Session(BaseModel): - provider_token: Optional[str] = None + provider_token: str | None = None """ The oauth provider token. If present, this can be used to make external API requests to the oauth provider used. 
""" - provider_refresh_token: Optional[str] = None + provider_refresh_token: str | None = None """ The oauth provider refresh token. If present, this can be used to refresh the provider_token via the oauth provider's API. @@ -160,30 +155,32 @@ class Session(BaseModel): The number of seconds until the token expires (since it was issued). Returned when a login is confirmed. """ - expires_at: Optional[int] = None + expires_at: int | None = None """ A timestamp of when the token will expire. Returned when a login is confirmed. """ token_type: str user: User - @model_validator_v1_v2_compat - def validator(cls, values: dict) -> dict: - expires_in = values.get("expires_in") - if expires_in and not values.get("expires_at"): - values["expires_at"] = round(time()) + expires_in - return values + @model_validator(mode="after") + def validator(self) -> Session: + if self.expires_in and not self.expires_at: + self.expires_at = round(time()) + self.expires_in + return self + + def encode_access_token(self) -> Headers: + return Headers.from_mapping({"Authorization": f"Bearer {self.access_token}"}) class UserIdentity(BaseModel): id: str identity_id: str user_id: str - identity_data: Dict[str, Any] + identity_data: dict[str, JSON] provider: str created_at: datetime - last_sign_in_at: Optional[datetime] = None - updated_at: Optional[datetime] = None + last_sign_in_at: datetime | None = None + updated_at: datetime | None = None class Factor(BaseModel): @@ -195,11 +192,11 @@ class Factor(BaseModel): """ ID of the factor. """ - friendly_name: Optional[str] = None + friendly_name: str | None = None """ Friendly name of the factor, useful to disambiguate between multiple factors. """ - factor_type: Union[Literal["totp", "phone"], str] + factor_type: Literal["totp", "phone"] | str """ Type of factor. Only `totp` supported with this version but may change in future versions. 
@@ -214,48 +211,48 @@ class Factor(BaseModel): class User(BaseModel): id: str - app_metadata: Dict[str, Any] - user_metadata: Dict[str, Any] + user_metadata: Mapping[str, JSON] + app_metadata: Mapping[str, JSON] aud: str - confirmation_sent_at: Optional[datetime] = None - recovery_sent_at: Optional[datetime] = None - email_change_sent_at: Optional[datetime] = None - new_email: Optional[str] = None - new_phone: Optional[str] = None - invited_at: Optional[datetime] = None - action_link: Optional[str] = None - email: Optional[str] = None - phone: Optional[str] = None + confirmation_sent_at: datetime | None = None + recovery_sent_at: datetime | None = None + email_change_sent_at: datetime | None = None + new_email: str | None = None + new_phone: str | None = None + invited_at: datetime | None = None + action_link: str | None = None + email: str | None = None + phone: str | None = None created_at: datetime - confirmed_at: Optional[datetime] = None - email_confirmed_at: Optional[datetime] = None - phone_confirmed_at: Optional[datetime] = None - last_sign_in_at: Optional[datetime] = None - role: Optional[str] = None - updated_at: Optional[datetime] = None - identities: Optional[List[UserIdentity]] = None + confirmed_at: datetime | None = None + email_confirmed_at: datetime | None = None + phone_confirmed_at: datetime | None = None + last_sign_in_at: datetime | None = None + role: str | None = None + updated_at: datetime | None = None + identities: list[UserIdentity] | None = None is_anonymous: bool = False is_sso_user: bool = False - factors: Optional[List[Factor]] = None - deleted_at: Optional[str] = None - banned_until: Optional[str] = None + factors: list[Factor] | None = None + deleted_at: str | None = None + banned_until: str | None = None -class UserAttributes(TypedDict): - email: NotRequired[str] - phone: NotRequired[str] - password: NotRequired[str] - data: NotRequired[Any] - nonce: NotRequired[str] +class UserAttributes(BaseModel): + email: str | None = None + 
phone: str | None = None + password: str | None = None + data: JSON = None + nonce: str | None = None -class AdminUserAttributes(UserAttributes, TypedDict): - user_metadata: NotRequired[Any] - app_metadata: NotRequired[Any] - email_confirm: NotRequired[bool] - phone_confirm: NotRequired[bool] - ban_duration: NotRequired[Union[str, Literal["none"]]] - role: NotRequired[str] +class AdminUserAttributes(UserAttributes): + user_metadata: Mapping[str, JSON] | None = None + app_metadata: Mapping[str, JSON] | None = None + email_confirm: bool | None = None + phone_confirm: bool | None = None + ban_duration: str | None = None + role: str | None = None """ The `role` claim set in the user's access token JWT. @@ -263,7 +260,7 @@ class AdminUserAttributes(UserAttributes, TypedDict): Setting this role to `service_role` is not recommended as it grants the user admin privileges. """ - password_hash: NotRequired[str] + password_hash: str | None = None """ The `password_hash` for the user's password. @@ -271,7 +268,7 @@ class AdminUserAttributes(UserAttributes, TypedDict): Supports bcrypt and argon2 password hashes. """ - id: NotRequired[str] + id: str | None = None """ The `id` for the user. @@ -284,7 +281,7 @@ class Subscription(BaseModel): """ The subscriber UUID. This will be set by the client. """ - callback: Callable[[AuthChangeEvent, Optional[Session]], None] + callback: Callable[[AuthChangeEvent, Session | None], None] """ The function to call every time there is an event. 
""" @@ -294,239 +291,364 @@ class Subscription(BaseModel): """ -class UpdatableFactorAttributes(TypedDict): - friendly_name: str +class CaptchaToken(BaseModel): + captcha_token: str | None -class SignUpWithEmailAndPasswordCredentialsOptions( - TypedDict, -): - email_redirect_to: NotRequired[str] - data: NotRequired[Any] - captcha_token: NotRequired[str] +class WithCaptchaToken(BaseModel): + gotrue_meta_security: CaptchaToken -class SignUpWithEmailAndPasswordCredentials(TypedDict): +class SignUpWithEmailAndPasswordBody(WithCaptchaToken): email: str password: str - options: NotRequired[SignUpWithEmailAndPasswordCredentialsOptions] + data: JSON | None = None -class SignUpWithPhoneAndPasswordCredentialsOptions(TypedDict): - data: NotRequired[Any] - captcha_token: NotRequired[str] - channel: NotRequired[Literal["sms", "whatsapp"]] +@dataclass +class SignUpWithEmailAndPasswordCredentials: + body: SignUpWithEmailAndPasswordBody + redirect_to: str | None = None -class SignUpWithPhoneAndPasswordCredentials(TypedDict): +class SignUpWithPhoneAndPasswordBody(WithCaptchaToken): phone: str password: str - options: NotRequired[SignUpWithPhoneAndPasswordCredentialsOptions] - - -SignUpWithPasswordCredentials = Union[ - SignUpWithEmailAndPasswordCredentials, - SignUpWithPhoneAndPasswordCredentials, -] - - -class SignInWithPasswordCredentialsOptions(TypedDict): - data: NotRequired[Any] - captcha_token: NotRequired[str] + data: JSON | None = None + channel: Literal["sms", "whatsapp"] = "sms" + + +@dataclass +class SignUpWithPhoneAndPasswordCredentials: + body: SignUpWithPhoneAndPasswordBody + + +class SignUpWithPassword: + @staticmethod + def phone( + phone: str, + password: str, + data: JSON | None = None, + channel: Literal["sms", "whatsapp"] = "sms", + captcha_token: str | None = None, + ) -> SignUpWithPhoneAndPasswordCredentials: + body = SignUpWithPhoneAndPasswordBody( + phone=phone, + password=password, + data=data, + channel=channel, + 
gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + return SignUpWithPhoneAndPasswordCredentials(body=body) + + @staticmethod + def email( + email: str, + password: str, + data: JSON | None = None, + redirect_to: str | None = None, + captcha_token: str | None = None, + ) -> SignUpWithEmailAndPasswordCredentials: + body = SignUpWithEmailAndPasswordBody( + email=email, + password=password, + data=data, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + return SignUpWithEmailAndPasswordCredentials(body=body, redirect_to=redirect_to) + + +SignUpWithPasswordCredentials = ( + SignUpWithEmailAndPasswordCredentials | SignUpWithPhoneAndPasswordCredentials +) -class SignInWithEmailAndPasswordCredentials(TypedDict): +class SignInWithEmailAndPasswordCredentials(WithCaptchaToken): email: str password: str - options: NotRequired[SignInWithPasswordCredentialsOptions] -class SignInWithPhoneAndPasswordCredentials(TypedDict): +class SignInWithPhoneAndPasswordCredentials(WithCaptchaToken): phone: str password: str - options: NotRequired[SignInWithPasswordCredentialsOptions] -SignInWithPasswordCredentials = Union[ - SignInWithEmailAndPasswordCredentials, - SignInWithPhoneAndPasswordCredentials, -] - - -class SignInWithIdTokenCredentials(TypedDict): - """ - Provider name or OIDC `iss` value identifying which provider should be used to verify the provided token. Supported names: `google`, `apple`, `azure`, `facebook`, `kakao`, `keycloak` (deprecated). 
- """ - - provider: Literal["google", "apple", "azure", "facebook", "kakao"] - token: str - access_token: NotRequired[str] - nonce: NotRequired[str] - options: NotRequired[SignInWithIdTokenCredentialsOptions] - - -class SignInWithIdTokenCredentialsOptions(TypedDict): - captcha_token: NotRequired[str] - - -class SignInWithEmailAndPasswordlessCredentialsOptions(TypedDict): - email_redirect_to: NotRequired[str] - should_create_user: NotRequired[bool] - data: NotRequired[Any] - captcha_token: NotRequired[str] +SignInWithPasswordCredentials = ( + SignInWithEmailAndPasswordCredentials | SignInWithPhoneAndPasswordCredentials +) -class SignInWithEmailAndPasswordlessCredentials(TypedDict): +class SignInWithPassword: + @staticmethod + def phone( + phone: str, password: str, captcha_token: str | None = None + ) -> SignInWithPhoneAndPasswordCredentials: + return SignInWithPhoneAndPasswordCredentials( + phone=phone, + password=password, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + + @staticmethod + def email( + email: str, password: str, captcha_token: str | None = None + ) -> SignInWithEmailAndPasswordCredentials: + return SignInWithEmailAndPasswordCredentials( + email=email, + password=password, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + + +class SignInWithEmailAndPasswordlessBody(WithCaptchaToken): email: str - options: NotRequired[SignInWithEmailAndPasswordlessCredentialsOptions] + data: JSON = None + create_user: bool = True -class SignInWithPhoneAndPasswordlessCredentialsOptions(TypedDict): - should_create_user: NotRequired[bool] - data: NotRequired[Any] - captcha_token: NotRequired[str] - channel: NotRequired[Literal["sms", "whatsapp"]] +class SignInWithPhoneAndPasswordlessBody(WithCaptchaToken): + phone: str + data: JSON = None + create_user: bool = True + channel: Literal["sms", "whatsapp"] = "sms" -class SignInWithPhoneAndPasswordlessCredentials(TypedDict): - phone: str - options: 
NotRequired[SignInWithPhoneAndPasswordlessCredentialsOptions] +@dataclass +class SignInWithEmailAndPasswordlessCredentials: + body: SignInWithEmailAndPasswordlessBody + email_redirect_to: str | None = None -SignInWithPasswordlessCredentials = Union[ - SignInWithEmailAndPasswordlessCredentials, - SignInWithPhoneAndPasswordlessCredentials, -] +@dataclass +class SignInWithPhoneAndPasswordlessCredentials: + body: SignInWithPhoneAndPasswordlessBody -class ResendEmailCredentialsOptions(TypedDict): - email_redirect_to: NotRequired[str] - captcha_token: NotRequired[str] +SignInWithPasswordlessCredentials = ( + SignInWithEmailAndPasswordlessCredentials + | SignInWithPhoneAndPasswordlessCredentials +) -class ResendEmailCredentials(TypedDict): +class SignInWithPasswordless: + @staticmethod + def email( + email: str, + data: JSON = None, + should_create_user: bool = True, + email_redirect_to: str | None = None, + captcha_token: str | None = None, + ) -> SignInWithPasswordlessCredentials: + body = SignInWithEmailAndPasswordlessBody( + email=email, + data=data, + create_user=should_create_user, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + return SignInWithEmailAndPasswordlessCredentials( + body=body, email_redirect_to=email_redirect_to + ) + + @staticmethod + def phone( + phone: str, + data: JSON = None, + should_create_user: bool = True, + channel: Literal["sms", "whatsapp"] = "sms", + captcha_token: str | None = None, + ) -> SignInWithPasswordlessCredentials: + body = SignInWithPhoneAndPasswordlessBody( + phone=phone, + data=data, + create_user=should_create_user, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + channel=channel, + ) + return SignInWithPhoneAndPasswordlessCredentials(body=body) + + +class ResendEmailBody(WithCaptchaToken): type: Literal["signup", "email_change"] email: str - options: NotRequired[ResendEmailCredentialsOptions] -class ResendPhoneCredentialsOptions(TypedDict): - captcha_token: NotRequired[str] 
+@dataclass +class ResendEmailCredentials: + body: ResendEmailBody + email_redirect_to: str | None = None -class ResendPhoneCredentials(TypedDict): +class ResendPhoneBody(WithCaptchaToken): type: Literal["sms", "phone_change"] phone: str - options: NotRequired[ResendPhoneCredentialsOptions] -ResendCredentials = Union[ResendEmailCredentials, ResendPhoneCredentials] - - -class SignInWithOAuthCredentialsOptions(TypedDict): - redirect_to: NotRequired[str] - scopes: NotRequired[str] - query_params: NotRequired[Dict[str, str]] - - -class SignInWithOAuthCredentials(TypedDict): - provider: Provider - options: NotRequired[SignInWithOAuthCredentialsOptions] - - -class SignInWithSSOCredentials(TypedDict): - provider_id: NotRequired[str] - domain: NotRequired[str] - options: NotRequired[SignInWithSSOOptions] - - -class SignInWithSSOOptions(TypedDict): - redirect_to: NotRequired[str] - skip_http_redirect: NotRequired[bool] - - -class SignInAnonymouslyCredentials(TypedDict): - options: NotRequired[SignInAnonymouslyCredentialsOptions] - - -class SignInAnonymouslyCredentialsOptions(TypedDict): - data: NotRequired[Any] - captcha_token: NotRequired[str] - - -class VerifyOtpParamsOptions(TypedDict): - redirect_to: NotRequired[str] - captcha_token: NotRequired[str] - - -class VerifyEmailOtpParams(TypedDict): +@dataclass +class ResendPhoneCredentials: + body: ResendPhoneBody + + +ResendCredentials = ResendEmailCredentials | ResendPhoneCredentials + + +class Resend: + @staticmethod + def email( + email: str, + type: Literal["signup", "email_change"], + email_redirect_to: str | None = None, + captcha_token: str | None = None, + ) -> ResendCredentials: + return ResendEmailCredentials( + body=ResendEmailBody( + email=email, + type=type, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ), + email_redirect_to=email_redirect_to, + ) + + @staticmethod + def phone( + phone: str, + type: Literal["sms", "phone_change"], + captcha_token: str | None = None, + ) -> 
ResendCredentials: + return ResendPhoneCredentials( + body=ResendPhoneBody( + phone=phone, + type=type, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ), + ) + + +class SignInWithSSOProvider(WithCaptchaToken): + provider_id: str + redirect_to: str | None = None + skip_http_redirect: bool = True + + +class SignInWithSSODomain(WithCaptchaToken): + domain: str + redirect_to: str | None = None + skip_http_redirect: bool = True + + +SignInWithSSOCredentials = SignInWithSSODomain | SignInWithSSOProvider + + +class SignInWithSSO: + @staticmethod + def domain( + domain: str, + redirect_to: str | None = None, + skip_http_redirect: bool = True, + captcha_token: str | None = None, + ) -> SignInWithSSOCredentials: + return SignInWithSSODomain( + domain=domain, + redirect_to=redirect_to, + skip_http_redirect=skip_http_redirect, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + + @staticmethod + def provider_id( + provider_id: str, + redirect_to: str | None = None, + skip_http_redirect: bool = True, + captcha_token: str | None = None, + ) -> SignInWithSSOCredentials: + return SignInWithSSOProvider( + provider_id=provider_id, + redirect_to=redirect_to, + skip_http_redirect=skip_http_redirect, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + + +class VerifyEmailOtpBody(WithCaptchaToken): email: str token: str type: EmailOtpType - options: NotRequired[VerifyOtpParamsOptions] -class VerifyMobileOtpParams(TypedDict): +@dataclass +class VerifyEmailOtpParams: + body: VerifyEmailOtpBody + redirect_to: str | None = None + + +class VerifyMobileOtpBody(WithCaptchaToken): phone: str token: str type: Literal[ "sms", "phone_change", ] - options: NotRequired[VerifyOtpParamsOptions] - - -class VerifyTokenHashParams(TypedDict): - token_hash: str - type: EmailOtpType - options: NotRequired[VerifyOtpParamsOptions] - - -VerifyOtpParams = Union[ - VerifyEmailOtpParams, VerifyMobileOtpParams, VerifyTokenHashParams -] - - -class 
GenerateLinkParamsOptions(TypedDict): - redirect_to: NotRequired[str] - - -class GenerateLinkParamsWithDataOptions(GenerateLinkParamsOptions, TypedDict): - data: NotRequired[Any] - -class GenerateSignupLinkParams(TypedDict): - type: Literal["signup"] - email: str - password: str - options: NotRequired[GenerateLinkParamsWithDataOptions] - - -class GenerateInviteOrMagiclinkParams(TypedDict): - type: Literal["invite", "magiclink"] - email: str - options: NotRequired[GenerateLinkParamsWithDataOptions] +@dataclass +class VerifyMobileOtpParams: + body: VerifyMobileOtpBody + redirect_to: str | None = None -class GenerateRecoveryLinkParams(TypedDict): - type: Literal["recovery"] - email: str - options: NotRequired[GenerateLinkParamsOptions] +class VerifyTokenHashBody(BaseModel): + token_hash: str + type: EmailOtpType -class GenerateEmailChangeLinkParams(TypedDict): - type: Literal["email_change_current", "email_change_new"] - email: str - new_email: str - options: NotRequired[GenerateLinkParamsOptions] +@dataclass +class VerifyTokenHashParams: + body: VerifyTokenHashBody + + +VerifyOtpParams = VerifyEmailOtpParams | VerifyMobileOtpParams | VerifyTokenHashParams + + +class VerifyOtp: + @staticmethod + def email( + email: str, + token: str, + type: EmailOtpType, + redirect_to: str | None = None, + captcha_token: str | None = None, + ) -> VerifyOtpParams: + body = VerifyEmailOtpBody( + email=email, + token=token, + type=type, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + return VerifyEmailOtpParams(body=body, redirect_to=redirect_to) + + @staticmethod + def mobile( + phone: str, + token: str, + type: Literal["sms", "phone_change"], + redirect_to: str | None = None, + captcha_token: str | None = None, + ) -> VerifyOtpParams: + body = VerifyMobileOtpBody( + phone=phone, + token=token, + type=type, + gotrue_meta_security=CaptchaToken(captcha_token=captcha_token), + ) + return VerifyMobileOtpParams(body=body, redirect_to=redirect_to) + + @staticmethod + 
def token_hash(token_hash: str, type: EmailOtpType) -> VerifyOtpParams: + body = VerifyTokenHashBody( + token_hash=token_hash, + type=type, + ) + return VerifyTokenHashParams(body=body) -GenerateLinkParams = Union[ - GenerateSignupLinkParams, - GenerateInviteOrMagiclinkParams, - GenerateRecoveryLinkParams, - GenerateEmailChangeLinkParams, -] GenerateLinkType = Literal[ "signup", @@ -538,75 +660,128 @@ class GenerateEmailChangeLinkParams(TypedDict): ] -class MFAEnrollTOTPParams(TypedDict): +class GenerateLinkBody(BaseModel): + type: GenerateLinkType + email: str + password: str | None = None + new_email: str | None = None + data: JSON = None + + +class GenerateLinkParams(BaseModel): + body: GenerateLinkBody + redirect_to: str | None = None + + @staticmethod + def sign_up( + email: str, password: str, data: JSON = None, redirect_to: str | None = None + ) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + type="signup", + email=email, + password=password, + data=data, + ), + redirect_to=redirect_to, + ) + + @staticmethod + def invite( + email: str, data: JSON = None, redirect_to: str | None = None + ) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + type="invite", + email=email, + data=data, + ), + redirect_to=redirect_to, + ) + + @staticmethod + def magiclink( + email: str, data: JSON = None, redirect_to: str | None = None + ) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + type="magiclink", + email=email, + data=data, + ), + redirect_to=redirect_to, + ) + + @staticmethod + def recovery(email: str, redirect_to: str | None = None) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + type="recovery", + email=email, + ), + redirect_to=redirect_to, + ) + + @staticmethod + def email_change_current( + email: str, new_email: str, redirect_to: str | None = None + ) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + 
type="email_change_current", + email=email, + new_email=new_email, + ), + redirect_to=redirect_to, + ) + + @staticmethod + def email_change_new( + email: str, new_email: str, redirect_to: str | None = None + ) -> GenerateLinkParams: + return GenerateLinkParams( + body=GenerateLinkBody( + type="email_change_new", + email=email, + new_email=new_email, + ), + redirect_to=redirect_to, + ) + + +class MFAEnrollTOTPParams(BaseModel): factor_type: Literal["totp"] - issuer: NotRequired[str] - friendly_name: NotRequired[str] + issuer: str | None = None + friendly_name: str | None = None -class MFAEnrollPhoneParams(TypedDict): +class MFAEnrollPhoneParams(BaseModel): factor_type: Literal["phone"] - friendly_name: NotRequired[str] phone: str + friendly_name: str | None = None -MFAEnrollParams = Union[MFAEnrollTOTPParams, MFAEnrollPhoneParams] - - -class MFAUnenrollParams(TypedDict): - factor_id: str - """ - ID of the factor being unenrolled. - """ - - -class CodeExchangeParams(TypedDict): - code_verifier: str - """ - Randomly generated string - """ - auth_code: str - """ - Code returned after completing one of the authorization flows - """ - redirect_to: str - """ - The URL to route to after a session is successfully obtained - """ +class MFAEnroll: + @staticmethod + def totp( + issuer: str | None = None, friendly_name: str | None = None + ) -> MFAEnrollParams: + return MFAEnrollTOTPParams( + factor_type="totp", + issuer=issuer, + friendly_name=friendly_name, + ) + @staticmethod + def phone(phone: str, friendly_name: str | None = None) -> MFAEnrollParams: + return MFAEnrollPhoneParams( + factor_type="phone", + phone=phone, + friendly_name=friendly_name, + ) -class MFAVerifyParams(TypedDict): - factor_id: str - """ - ID of the factor being verified. - """ - challenge_id: str - """ - ID of the challenge being verified. - """ - code: str - """ - Verification code provided by the user. 
- """ - -class MFAChallengeParams(TypedDict): - factor_id: str - """ - ID of the factor to be challenged. - """ - channel: NotRequired[Literal["sms", "whatsapp"]] - - -class MFAChallengeAndVerifyParams(TypedDict): - factor_id: str - """ - ID of the factor being verified. - """ - code: str - """ - Verification code provided by the user. - """ +MFAEnrollParams = MFAEnrollPhoneParams | MFAEnrollTOTPParams class AuthMFAVerifyResponse(BaseModel): @@ -661,7 +836,7 @@ class AuthMFAEnrollResponse(BaseModel): """ Type of MFA factor. Only `totp` supported for now. """ - totp: Optional[AuthMFAEnrollResponseTotp] = None + totp: AuthMFAEnrollResponseTotp | None = None """ TOTP enrollment information. """ @@ -670,16 +845,16 @@ class AuthMFAEnrollResponse(BaseModel): """ Friendly name of the factor, useful for distinguishing between factors """ - phone: Optional[str] = None + phone: str | None = None """ Phone number of the MFA factor in E.164 format. Used to send messages """ - @model_validator_v1_v2_compat - def validate_phone_required_for_phone_type(cls, values: dict) -> dict: - if values.get("type") == "phone" and not values.get("phone"): + @model_validator(mode="after") + def validate_phone_required_for_phone_type(self) -> AuthMFAEnrollResponse: + if self.type == "phone" and not self.phone: raise ValueError("phone is required when type is 'phone'") - return values + return self class AuthMFAUnenrollResponse(BaseModel): @@ -698,7 +873,7 @@ class AuthMFAChallengeResponse(BaseModel): """ Timestamp in UNIX seconds when this challenge will no longer be usable. """ - factor_type: Optional[Literal["totp", "phone"]] = Field( + factor_type: Literal["totp", "phone"] | None = Field( validation_alias="type", default=None ) """ @@ -707,15 +882,15 @@ class AuthMFAChallengeResponse(BaseModel): class AuthMFAListFactorsResponse(BaseModel): - all: List[Factor] + all: list[Factor] """ All available factors (verified and unverified). 
""" - totp: List[Factor] + totp: list[Factor] """ Only verified TOTP factors. (A subset of `all`.) """ - phone: List[Factor] + phone: list[Factor] """ Only verified Phone factors. (A subset of `all`.) """ @@ -725,16 +900,16 @@ class AuthMFAListFactorsResponse(BaseModel): class AuthMFAGetAuthenticatorAssuranceLevelResponse(BaseModel): - current_level: Optional[AuthenticatorAssuranceLevels] = None + current_level: AuthenticatorAssuranceLevels | None = None """ Current AAL level of the session. """ - next_level: Optional[AuthenticatorAssuranceLevels] = None + next_level: AuthenticatorAssuranceLevels | None = None """ Next possible AAL level for the session. If the next level is higher than the current one, the user should go through MFA. """ - current_authentication_methods: List[AMREntry] + current_authentication_methods: list[AMREntry] """ A list of all authentication methods attached to this session. Use the information here to detect the last time a user verified a @@ -749,17 +924,6 @@ class AuthMFAAdminDeleteFactorResponse(BaseModel): """ -class AuthMFAAdminDeleteFactorParams(TypedDict): - id: str - """ - ID of the MFA factor to delete. - """ - user_id: str - """ - ID of the user whose factor is being deleted. - """ - - AuthMFAAdminListFactorsResponse = List[Factor] AuthMFAAdminListFactorsResponseParser: TypeAdapter[AuthMFAAdminListFactorsResponse] = ( @@ -767,13 +931,6 @@ class AuthMFAAdminDeleteFactorParams(TypedDict): ) -class AuthMFAAdminListFactorsParams(TypedDict): - user_id: str - """ - ID of the user for which to list all MFA factors. - """ - - class GenerateLinkProperties(BaseModel): """ The properties related to the email link generated. 
@@ -810,73 +967,43 @@ class GenerateLinkResponse(BaseModel): user: User -class DecodedJWTDict(TypedDict): - exp: NotRequired[int] - aal: NotRequired[Optional[AuthenticatorAssuranceLevels]] - amr: NotRequired[Optional[List[AMREntry]]] +SignOutScope = Literal["global", "local", "others"] -SignOutScope = Literal["global", "local", "others"] +class JWTHeader(BaseModel, extra="allow"): + alg: Literal["RS256", "ES256", "HS256"] + typ: str + kid: str | None = None -class SignOutOptions(TypedDict): - scope: NotRequired[SignOutScope] +class JWTPayload(BaseModel, extra="allow"): + iss: str | None = None + sub: str | None = None + auth: str | list[str] | None = None + exp: int | None = None + iat: int | None = None + role: str | None = None + aal: AuthenticatorAssuranceLevels | None = None + session_id: str | None = None + amr: list[AMREntryDict] | None = None -@with_config( - ConfigDict(extra="allow") -) # pydantic <2.7.0 with_config does not accept kwargs -class JWTHeader(TypedDict): - alg: Literal["RS256", "ES256", "HS256"] - typ: str - kid: NotRequired[str] - - -# TODO: useless, only kept for backwards compatibility -class RequiredClaims(TypedDict): - iss: str - sub: str - auth: Union[str, List[str]] - exp: int - iat: int - role: str - aal: AuthenticatorAssuranceLevels - session_id: str - - -@with_config( - ConfigDict(extra="allow") -) # pydantic <2.7.0 with_config does not accept kwargs -class JWTPayload(TypedDict, total=False): - iss: str - sub: str - auth: Union[str, List[str]] - exp: int - iat: int - role: str - aal: AuthenticatorAssuranceLevels - session_id: str - amr: NotRequired[List[AMREntryDict]] - - -class ClaimsResponse(TypedDict): +@dataclass +class ClaimsResponse: claims: JWTPayload headers: JWTHeader signature: bytes -@with_config( - ConfigDict(extra="allow") -) # pydantic <2.7.0 with_config does not accept kwargs -class JWK(TypedDict, total=False): +class JWK(BaseModel, extra="allow"): kty: Literal["RSA", "EC", "oct"] - key_ops: List[str] - alg: 
Optional[str] - kid: Optional[str] + key_ops: list[str] + alg: str | None = None + kid: str | None = None -class JWKSet(TypedDict): - keys: List[JWK] +class JWKSet(BaseModel): + keys: list[JWK] OAuthClientGrantType = Literal["authorization_code", "refresh_token"] @@ -922,7 +1049,7 @@ class OAuthClient(BaseModel): """Unique client identifier""" client_name: str """Human-readable name of the client application""" - client_secret: Optional[str] = None + client_secret: str | None = None """Client secret for confidential clients (only returned on registration/regeneration)""" client_type: OAuthClientType """Type of the client""" @@ -930,17 +1057,17 @@ class OAuthClient(BaseModel): """Authentication method for the token endpoint""" registration_type: OAuthClientRegistrationType """Registration type of the client""" - client_uri: Optional[str] = None + client_uri: str | None = None """URL of the client application's homepage""" - logo_uri: Optional[str] = None + logo_uri: str | None = None """URL of the client application's logo""" - redirect_uris: List[str] + redirect_uris: list[str] """Array of redirect URIs used by the client""" - grant_types: List[OAuthClientGrantType] + grant_types: list[OAuthClientGrantType] """OAuth grant types the client is authorized to use""" - response_types: List[OAuthClientResponseType] + response_types: list[OAuthClientResponseType] """OAuth response types the client can use""" - scope: Optional[str] = None + scope: str | None = None """Space-separated list of scope values""" created_at: str """Timestamp when the client was created""" @@ -956,17 +1083,17 @@ class CreateOAuthClientParams(BaseModel): client_name: str """Human-readable name of the OAuth client""" - client_uri: Optional[str] = None + client_uri: str | None = None """URL of the client application's homepage""" - logo_uri: Optional[str] = None + logo_uri: str | None = None """URL of the client application's logo""" - redirect_uris: List[str] + redirect_uris: list[str] """Array of 
redirect URIs used by the client""" - grant_types: Optional[List[OAuthClientGrantType]] = None + grant_types: list[OAuthClientGrantType] | None = None """OAuth grant types the client is authorized to use (optional, defaults to authorization_code and refresh_token)""" - response_types: Optional[List[OAuthClientResponseType]] = None + response_types: list[OAuthClientResponseType] | None = None """OAuth response types the client can use (optional, defaults to code)""" - scope: Optional[str] = None + scope: str | None = None """Space-separated list of scope values""" @@ -976,15 +1103,15 @@ class UpdateOAuthClientParams(BaseModel): Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. """ - client_name: Optional[str] = None + client_name: str | None = None """Human-readable name of the OAuth client""" - client_uri: Optional[str] = None + client_uri: str | None = None """URI of the OAuth client""" - logo_uri: Optional[str] = None + logo_uri: str | None = None """URI of the OAuth client's logo""" - redirect_uris: Optional[List[str]] = None + redirect_uris: list[str] | None = None """Array of allowed redirect URIs""" - grant_types: Optional[List[OAuthClientGrantType]] = None + grant_types: list[OAuthClientGrantType] | None = None """Array of allowed grant types""" @@ -994,7 +1121,7 @@ class OAuthClientResponse(BaseModel): Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. """ - client: Optional[OAuthClient] = None + client: OAuthClient | None = None class Pagination(BaseModel): @@ -1002,7 +1129,7 @@ class Pagination(BaseModel): Pagination information for list responses. """ - next_page: Optional[int] = None + next_page: int | None = None last_page: int = 0 total: int = 0 @@ -1013,19 +1140,8 @@ class OAuthClientListResponse(BaseModel): Only relevant when the OAuth 2.1 server is enabled in Supabase Auth. 
""" - clients: List[OAuthClient] - aud: Optional[str] = None - next_page: Optional[int] = None + clients: list[OAuthClient] + aud: str | None = None + next_page: int | None = None last_page: int = 0 total: int = 0 - - -class PageParams(BaseModel): - """ - Pagination parameters. - """ - - page: Optional[int] = None - """Page number""" - per_page: Optional[int] = None - """Number of items per page""" diff --git a/src/auth/tests/_async/clients.py b/src/auth/tests/_async/clients.py deleted file mode 100644 index eb2686e5..00000000 --- a/src/auth/tests/_async/clients.py +++ /dev/null @@ -1,232 +0,0 @@ -from dataclasses import dataclass -from random import random -from time import time -from typing import Optional - -from faker import Faker -from jwt import encode -from supabase_auth import AsyncGoTrueAdminAPI, AsyncGoTrueClient -from supabase_auth.types import User -from typing_extensions import NotRequired, TypedDict - - -def mock_access_token() -> str: - return encode( - { - "sub": "1234567890", - "role": "anon_key", - }, - GOTRUE_JWT_SECRET, - ) - - -class OptionalCredentials(TypedDict): - email: NotRequired[Optional[str]] - phone: NotRequired[Optional[str]] - password: NotRequired[Optional[str]] - - -@dataclass -class Credentials: - email: str - phone: str - password: str - - -def mock_user_credentials( - options: Optional[OptionalCredentials] = None, -) -> Credentials: - fake = Faker() - user_options = options or {} - rand_numbers = str(int(time())) - return Credentials( - email=user_options.get("email") or fake.email(), - phone=user_options.get("phone") or f"1{rand_numbers[-11:]}", - password=user_options.get("password") or fake.password(), - ) - - -def mock_verification_otp() -> str: - return str(int(100000 + random() * 900000)) - - -class UserMetadata(TypedDict): - profile_image: str - - -def mock_user_metadata() -> UserMetadata: - fake = Faker() - return { - "profile_image": fake.url(), - } - - -class AppMetadata(TypedDict): - roles: list[str] - - -def 
mock_app_metadata() -> AppMetadata: - return { - "roles": ["editor", "publisher"], - } - - -async def create_new_user_with_email( - *, - email: Optional[str] = None, - password: Optional[str] = None, -) -> User: - credentials = mock_user_credentials( - { - "email": email, - "password": password, - } - ) - response = await service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - } - ) - return response.user - - -SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT = 9999 -SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT = 9998 -SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT = 9997 -SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT = 9996 - -GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF = ( - f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT}" -) -GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON = ( - f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT}" -) -GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON = ( - f"http://localhost:{SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT}" -) -GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF = ( - f"http://localhost:{SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT}" -) - -GOTRUE_JWT_SECRET = "37c304f8-51aa-419a-a1af-06154e63707a" - -AUTH_ADMIN_JWT = encode( - { - "sub": "1234567890", - "role": "supabase_admin", - }, - GOTRUE_JWT_SECRET, -) - - -def auth_client() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -async def auth_client_with_session() -> AsyncGoTrueClient: - client = AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=False, - ) - credentials = mock_user_credentials() - await client.sign_up({"email": credentials.email, "password": credentials.password}) - return client - - -def auth_client_with_asymmetric_session() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON, - 
auto_refresh_token=False, - persist_session=False, - ) - - -def auth_subscription_client() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -def client_api_auto_confirm_enabled_client() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -def client_api_auto_confirm_off_signups_enabled_client() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - auto_refresh_token=False, - persist_session=True, - ) - - -def client_api_auto_confirm_disabled_client() -> AsyncGoTrueClient: - return AsyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, - auto_refresh_token=False, - persist_session=True, - ) - - -def auth_admin_api_auto_confirm_enabled_client() -> AsyncGoTrueAdminAPI: - return AsyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - headers={ - "Authorization": f"Bearer {AUTH_ADMIN_JWT}", - }, - ) - - -def auth_admin_api_auto_confirm_disabled_client() -> AsyncGoTrueAdminAPI: - return AsyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - headers={ - "Authorization": f"Bearer {AUTH_ADMIN_JWT}", - }, - ) - - -SERVICE_ROLE_JWT = encode( - { - "role": "service_role", - }, - GOTRUE_JWT_SECRET, -) - - -def service_role_api_client() -> AsyncGoTrueAdminAPI: - return AsyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) - - -def service_role_api_client_with_sms() -> AsyncGoTrueAdminAPI: - return AsyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) - - -def service_role_api_client_no_sms() -> AsyncGoTrueAdminAPI: - return AsyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, - 
headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) diff --git a/src/auth/tests/_async/conftest.py b/src/auth/tests/_async/conftest.py new file mode 100644 index 00000000..ab213d0d --- /dev/null +++ b/src/auth/tests/_async/conftest.py @@ -0,0 +1,306 @@ +from dataclasses import dataclass +from random import random +from time import time + +import pytest +from aiohttp import ClientSession +from faker import Faker +from httpx import AsyncClient +from jwt import encode +from supabase_utils.http.adapters.aiohttp import AsyncAiohttpSession +from supabase_utils.http.adapters.httpx import AsyncHttpxSession +from typing_extensions import AsyncGenerator, NotRequired, TypedDict + +from supabase_auth import AsyncSupabaseAuthAdmin, AsyncSupabaseAuthClient +from supabase_auth.types import ( + AdminUserAttributes, + SignInWithPassword, + SignUpWithPassword, + User, +) + + +def mock_access_token() -> str: + return encode( + { + "sub": "1234567890", + "role": "anon_key", + }, + GOTRUE_JWT_SECRET, + ) + + +class OptionalCredentials(TypedDict): + email: NotRequired[str | None] + phone: NotRequired[str | None] + password: NotRequired[str | None] + + +@dataclass +class Credentials: + email: str + phone: str + password: str + + +def mock_user_credentials( + options: OptionalCredentials | None = None, +) -> Credentials: + fake = Faker() + user_options = options or {} + rand_numbers = str(int(time())) + return Credentials( + email=user_options.get("email") or fake.email(), + phone=user_options.get("phone") or f"1{rand_numbers[-11:]}", + password=user_options.get("password") or fake.password(), + ) + + +def mock_verification_otp() -> str: + return str(int(100000 + random() * 900000)) + + +def mock_user_metadata() -> dict[str, str]: + fake = Faker() + return { + "profile_image": fake.url(), + } + + +def mock_app_metadata() -> dict[str, list[str]]: + return { + "roles": ["editor", "publisher"], + } + + +SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT = 9999 
+SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT = 9998 +SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT = 9997 +SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT = 9996 + +GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF = ( + f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT}" +) +GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON = ( + f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT}" +) +GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON = ( + f"http://localhost:{SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT}" +) +GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF = ( + f"http://localhost:{SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT}" +) + +GOTRUE_JWT_SECRET = "37c304f8-51aa-419a-a1af-06154e63707a" + +AUTH_ADMIN_JWT = encode( + { + "sub": "1234567890", + "role": "supabase_admin", + }, + GOTRUE_JWT_SECRET, +) + + +def httpx() -> AsyncHttpxSession: + return AsyncHttpxSession(client=AsyncClient(http2=True, verify=True)) + + +def aiohttp() -> AsyncAiohttpSession: + return AsyncAiohttpSession(client=ClientSession()) + + +http_sessions = [httpx, aiohttp] + + +@pytest.fixture +async def admin_client_with_user_and_credentials( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> AsyncGenerator[tuple[AsyncSupabaseAuthAdmin, User, Credentials]]: + credentials = mock_user_credentials() + response = await service_role_api_client.create_user( + AdminUserAttributes(email=credentials.email, password=credentials.password) + ) + yield (service_role_api_client, response.user, credentials) + await service_role_api_client.delete_user(response.user.id) + + +@pytest.fixture(params=http_sessions) +async def auth_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def auth_client_with_session( + request: pytest.FixtureRequest, +) -> 
AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=False, + http_session=request.param(), + ) as client: + credentials = mock_user_credentials() + await client.sign_up( + SignUpWithPassword.email( + email=credentials.email, password=credentials.password + ) + ) + await client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, password=credentials.password + ) + ) + yield client + await client.sign_out() + + +@pytest.fixture(params=http_sessions) +async def auth_client_with_asymmetric_session( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=False, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def auth_subscription_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def client_api_auto_confirm_enabled_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def client_api_auto_confirm_off_signups_enabled_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + auto_refresh_token=False, + 
persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def client_api_auto_confirm_disabled_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthClient]: + async with AsyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def auth_admin_api_auto_confirm_enabled_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthAdmin]: + async with AsyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + default_headers={ + "Authorization": f"Bearer {AUTH_ADMIN_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def auth_admin_api_auto_confirm_disabled_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthAdmin]: + async with AsyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + default_headers={ + "Authorization": f"Bearer {AUTH_ADMIN_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +SERVICE_ROLE_JWT = encode( + { + "role": "service_role", + }, + GOTRUE_JWT_SECRET, +) + + +@pytest.fixture(params=http_sessions) +async def service_role_api_client( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthAdmin]: + async with AsyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def service_role_api_client_with_sms( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthAdmin]: + async with AsyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + 
default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +async def service_role_api_client_no_sms( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncSupabaseAuthAdmin]: + async with AsyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, + default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client diff --git a/src/auth/tests/_async/test_gotrue.py b/src/auth/tests/_async/test_gotrue.py index f20861d1..864007bd 100644 --- a/src/auth/tests/_async/test_gotrue.py +++ b/src/auth/tests/_async/test_gotrue.py @@ -1,96 +1,102 @@ import time -from uuid import uuid4 import pytest from jwt import encode + +from supabase_auth import AsyncSupabaseAuthClient from supabase_auth.errors import ( AuthApiError, AuthInvalidJwtError, AuthSessionMissingError, ) from supabase_auth.helpers import decode_jwt -from supabase_auth.types import SignUpWithEmailAndPasswordCredentials +from supabase_auth.types import ( + AuthChangeEvent, + MFAEnroll, + Session, + SignInWithPassword, + SignUpWithPassword, +) -from .clients import ( +from .conftest import ( GOTRUE_JWT_SECRET, - auth_client, - auth_client_with_asymmetric_session, - auth_client_with_session, mock_user_credentials, ) -async def test_get_claims_returns_none_when_session_is_none() -> None: - claims = await auth_client().get_claims() +async def test_get_claims_returns_none_when_session_is_none( + auth_client: AsyncSupabaseAuthClient, +) -> None: + claims = await auth_client.get_claims() assert claims is None -async def test_get_claims_calls_get_user_if_symmetric_jwt(mocker) -> None: - client = auth_client() - spy = mocker.spy(client, "get_user") +async def test_get_claims_calls_get_user_if_symmetric_jwt( + mocker, auth_client: AsyncSupabaseAuthClient +) -> None: + spy = mocker.spy(auth_client.session_manager, 
"_get_user") credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (await client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (await auth_client.sign_up(options)).user assert user is not None - response = await client.get_claims() + response = await auth_client.get_claims() assert response - claims = response["claims"] + claims = response.claims - assert claims.get("email") == user.email + assert claims.model_extra + assert claims.model_extra["email"] == user.email spy.assert_called_once() -async def test_get_claims_fetches_jwks_to_verify_asymmetric_jwt(mocker) -> None: - client = auth_client_with_asymmetric_session() +async def test_get_claims_fetches_jwks_to_verify_asymmetric_jwt( + mocker, auth_client_with_asymmetric_session: AsyncSupabaseAuthClient +) -> None: credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (await client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (await auth_client_with_asymmetric_session.sign_up(options)).user assert user is not None - spy = mocker.spy(client, "_request") - - response = await client.get_claims() + response = await auth_client_with_asymmetric_session.get_claims() assert response - claims = response["claims"] - assert claims.get("email") == user.email + claims = response.claims - spy.assert_called_once() - spy.assert_called_with("GET", ".well-known/jwks.json") + assert claims.model_extra + assert claims.model_extra["email"] == user.email expected_keyid = "638c54b8-28c2-4b12-9598-ba12ef610a29" - assert len(client._jwks["keys"]) == 1 - assert client._jwks["keys"][0]["kid"] == expected_keyid - + assert 
len(auth_client_with_asymmetric_session._jwks.keys) == 1 + assert auth_client_with_asymmetric_session._jwks.keys[0].kid == expected_keyid -async def test_jwks_ttl_cache_behavior(mocker) -> None: - client = auth_client_with_asymmetric_session() - spy = mocker.spy(client, "_request") +async def test_jwks_ttl_cache_behavior( + mocker, auth_client_with_asymmetric_session: AsyncSupabaseAuthClient +) -> None: + spy = mocker.spy(auth_client_with_asymmetric_session.executor.session, "send") # First call should fetch JWKS from endpoint credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (await client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (await auth_client_with_asymmetric_session.sign_up(options)).user assert user is not None - await client.get_claims() - spy.assert_called_with("GET", ".well-known/jwks.json") + await auth_client_with_asymmetric_session.get_claims() + first_call_count = spy.call_count # Second call within TTL should use cache - await client.get_claims() + await auth_client_with_asymmetric_session.get_claims() assert spy.call_count == first_call_count # No additional JWKS request # Mock time to be after TTL expiry @@ -100,23 +106,21 @@ async def test_jwks_ttl_cache_behavior(mocker) -> None: mock_time.return_value = original_time() + 601 # TTL is 600 seconds # Call after TTL expiry should fetch fresh JWKS - await client.get_claims() + await auth_client_with_asymmetric_session.get_claims() assert spy.call_count == first_call_count + 1 # One more JWKS request finally: # Restore original time function mocker.patch("time.time", original_time) -async def test_set_session_with_valid_tokens() -> None: - client = auth_client() +async def test_set_session_with_valid_tokens( + auth_client: AsyncSupabaseAuthClient, +) -> None: credentials = 
mock_user_credentials() # First sign up to get valid tokens - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -125,10 +129,10 @@ async def test_set_session_with_valid_tokens() -> None: refresh_token = signup_response.session.refresh_token # Clear the session - await client._remove_session() + await auth_client.session_manager.remove_session() # Set the session with the tokens - response = await client.set_session(access_token, refresh_token) + response = await auth_client.set_session(access_token, refresh_token) # Verify the response assert response.session is not None @@ -138,16 +142,14 @@ async def test_set_session_with_valid_tokens() -> None: assert response.user.email == credentials.email -async def test_set_session_with_expired_token() -> None: - client = auth_client() +async def test_set_session_with_expired_token( + auth_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() # First sign up to get valid tokens - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -156,19 +158,19 @@ async def test_set_session_with_expired_token() -> None: refresh_token = signup_response.session.refresh_token # Clear the session - await client._remove_session() + await auth_client.session_manager.remove_session() # Create an expired token by modifying the JWT expired_token = access_token.split(".") - payload = decode_jwt(access_token)["payload"] - payload["exp"] = int(time.time()) - 3600 # Set expiry to 1 hour ago + payload = decode_jwt(access_token).payload + payload.exp = 
int(time.time()) - 3600 # Set expiry to 1 hour ago expired_token[1] = encode( dict(payload), GOTRUE_JWT_SECRET, algorithm="HS256" ).split(".")[1] expired_access_token = ".".join(expired_token) # Set the session with the expired token - response = await client.set_session(expired_access_token, refresh_token) + response = await auth_client.set_session(expired_access_token, refresh_token) # Verify the response has a new access token (refreshed) assert response.session is not None @@ -178,16 +180,14 @@ async def test_set_session_with_expired_token() -> None: assert response.user.email == credentials.email -async def test_set_session_without_refresh_token() -> None: - client = auth_client() +async def test_set_session_without_refresh_token( + auth_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() # First sign up to get valid tokens - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -195,12 +195,12 @@ async def test_set_session_without_refresh_token() -> None: access_token = signup_response.session.access_token # Clear the session - await client._remove_session() + await auth_client.session_manager.remove_session() # Create an expired token expired_token = access_token.split(".") - payload = decode_jwt(access_token)["payload"] - payload["exp"] = int(time.time()) - 3600 # Set expiry to 1 hour ago + payload = decode_jwt(access_token).payload + payload.exp = int(time.time()) - 3600 # Set expiry to 1 hour ago expired_token[1] = encode( dict(payload), GOTRUE_JWT_SECRET, algorithm="HS256" ).split(".")[1] @@ -208,33 +208,27 @@ async def test_set_session_without_refresh_token() -> None: # Try to set the session with an expired token but no refresh token with pytest.raises(AuthSessionMissingError): - await 
client.set_session(expired_access_token, "") + await auth_client.set_session(expired_access_token, "") -async def test_set_session_with_invalid_token() -> None: - client = auth_client() - +async def test_set_session_with_invalid_token( + auth_client: AsyncSupabaseAuthClient, +) -> None: # Try to set the session with invalid tokens with pytest.raises(AuthInvalidJwtError): - await client.set_session("invalid.token.here", "invalid_refresh_token") - + await auth_client.set_session("invalid.token.here", "invalid_refresh_token") -async def test_mfa_enroll() -> None: - client = await auth_client_with_session() +async def test_mfa_enroll(auth_client_with_session: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + _signup_response = await auth_client_with_session.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) - # Test MFA enrollment - enroll_response = await client.mfa.enroll( - {"issuer": "test-issuer", "factor_type": "totp", "friendly_name": "test-factor"} + enroll_response = await auth_client_with_session.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) assert enroll_response.id is not None @@ -244,206 +238,118 @@ async def test_mfa_enroll() -> None: assert enroll_response.totp.qr_code is not None -async def test_mfa_challenge() -> None: - client = auth_client() +async def test_mfa_challenge(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - 
enroll_response = await client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + enroll_response = await auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA challenge - challenge_response = await client.mfa.challenge({"factor_id": enroll_response.id}) + challenge_response = await auth_client.mfa.challenge(factor_id=enroll_response.id) assert challenge_response.id is not None assert challenge_response.expires_at is not None -async def test_mfa_unenroll() -> None: - client = auth_client() +async def test_mfa_unenroll(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - enroll_response = await client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + enroll_response = await auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA unenroll - unenroll_response = await client.mfa.unenroll({"factor_id": enroll_response.id}) + unenroll_response = await auth_client.mfa.unenroll(factor_id=enroll_response.id) assert unenroll_response.id == enroll_response.id -async def test_mfa_list_factors() -> None: - client = auth_client() +async def test_mfa_list_factors(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, 
password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - await client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + await auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA list factors - list_response = await client.mfa.list_factors() + list_response = await auth_client.mfa.list_factors() assert len(list_response.all) == 1 -async def test_exchange_code_for_session() -> None: - client = auth_client() - - # We'll test the flow type setting instead of the actual exchange, since the - # actual exchange requires a live OAuth flow which isn't practical in tests - assert client._flow_type in ["implicit", "pkce"] - - # This part would normally need a live OAuth flow, so we verify the logic paths - # Get the storage key for PKCE flow - storage_key = f"{client._storage_key}-code-verifier" - - # Set the flow type to pkce - client._flow_type = "pkce" - - # Test the PKCE URL generation which is needed for exchange_code_for_session - url, params = await client._get_url_for_provider( - f"{client._url}/authorize", "github", {} - ) - - # Verify PKCE parameters were added - assert "code_challenge" in params - assert "code_challenge_method" in params - - # Verify the code verifier was stored - code_verifier = await client._storage.get_item(storage_key) - assert code_verifier is not None - - -async def test_get_authenticator_assurance_level() -> None: - client = auth_client() +async def test_get_authenticator_assurance_level( + auth_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() # Without a session, should return null values - aal_response = await client.mfa.get_authenticator_assurance_level() + aal_response = await auth_client.mfa.get_authenticator_assurance_level() assert aal_response.current_level is None assert aal_response.next_level is None assert aal_response.current_authentication_methods == 
[] # Sign up to get a valid session - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # With a session, should return authentication methods - aal_response = await client.mfa.get_authenticator_assurance_level() + aal_response = await auth_client.mfa.get_authenticator_assurance_level() # Basic auth will have password as an authentication method assert aal_response.current_authentication_methods is not None -async def test_link_identity() -> None: - client = auth_client() - credentials = mock_user_credentials() - - # Sign up to get a valid session - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - assert signup_response.session is not None - - from unittest.mock import patch - - from httpx import Response - - # Since the test server has manual linking disabled, we'll mock the URL generation - with patch.object(client, "_get_url_for_provider") as mock_url_provider: - mock_url = "http://example.com/authorize?provider=github" - mock_params = {"provider": "github"} - mock_url_provider.return_value = (mock_url, mock_params) - - # Also mock the _request method since the server would reject it - with patch.object(client, "_request") as mock_request: - mock_request.return_value = Response( - content=f'{{"url":"{mock_url}"}}', status_code=200 - ) - - # Call the method - response = await client.link_identity({"provider": "github"}) - - # Verify the response - assert response.provider == "github" - assert response.url == mock_url - - -async def test_get_user_identities() -> None: - client = auth_client() +async def test_get_user_identities(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # Sign up to get a valid session - 
signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # New users won't have any identities yet, but the call should work - identities_response = await client.get_user_identities() + identities_response = await auth_client.get_user_identities() assert identities_response is not None # For a new user, identities will be an empty list or None assert hasattr(identities_response, "identities") -async def test_sign_in_with_password() -> None: - client = auth_client() +async def test_sign_in_with_password(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - from supabase_auth.errors import AuthApiError, AuthInvalidCredentialsError # First create a user we can sign in with - signup_response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Test signing in with the same credentials (email) - signin_response = await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + signin_response = await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) # Verify the response has a valid session and user @@ -451,263 +357,48 @@ async def test_sign_in_with_password() -> None: assert signin_response.user is not None assert signin_response.user.email == credentials.email - # Test error case: wrong password - - # We need to create a custom client to avoid affecting other tests - test_client = auth_client() - try: - await test_client.sign_in_with_password( - { - "email": 
credentials.email, - "password": "wrong_password", - } + await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password="wrong_password", + ) ) raise AssertionError("Expected AuthApiError for wrong password") except AuthApiError: pass - # Test error case: missing credentials - try: - await test_client.sign_in_with_password({}) # type: ignore - raise AssertionError( - "Expected AuthInvalidCredentialsError for missing credentials" - ) - except AuthInvalidCredentialsError: - pass - - -async def test_sign_in_with_otp() -> None: - client = auth_client() - # Test with email OTP - email = f"test-{uuid4()}@example.com" - - # When sign_in_with_otp is called with valid email, it should return a AuthOtpResponse - # We can't fully test the actual OTP flow since that requires email verification - from unittest.mock import patch - - from httpx import Response - from supabase_auth.types import AuthOtpResponse +async def test_sign_out(auth_client: AsyncSupabaseAuthClient) -> None: + credentials = mock_user_credentials() - # First test for email OTP - auth_otp = AuthOtpResponse( - message_id="mock-message-id", + signup_response = await auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) - with patch.object(client, "_request") as mock_request: - mock_response = Response(content=auth_otp.model_dump_json(), status_code=200) - mock_request.return_value = mock_response - - response = await client.sign_in_with_otp( - { - "email": email, - "options": { - "email_redirect_to": "https://example.com/callback", - "should_create_user": True, - "data": {"custom": "data"}, - "captcha_token": "mock-captcha-token", - }, - } - ) - - # Verify request parameters - mock_request.assert_called_once() - args, kwargs = mock_request.call_args - assert args[0] == "POST" - assert args[1] == "otp" - assert kwargs["body"]["email"] == email - assert kwargs["body"]["create_user"] - assert kwargs["body"]["data"] == 
{"custom": "data"} - assert ( - kwargs["body"]["gotrue_meta_security"]["captcha_token"] - == "mock-captcha-token" - ) - assert kwargs["redirect_to"] == "https://example.com/callback" - - # Verify response - assert response == auth_otp - - # Test with phone OTP - phone = "+11234567890" - auth_otp = AuthOtpResponse(message_id="mock-message-id") - with patch.object(client, "_request") as mock_request: - mock_response = Response(content=auth_otp.model_dump_json(), status_code=200) - mock_request.return_value = mock_response - - response = await client.sign_in_with_otp( - { - "phone": phone, - "options": { - "should_create_user": True, - "data": {"custom": "data"}, - "channel": "whatsapp", # Test alternate channel - "captcha_token": "mock-captcha-token", - }, - } - ) + assert signup_response.session is not None - # Verify request parameters - mock_request.assert_called_once() - args, kwargs = mock_request.call_args - assert args[0] == "POST" - assert args[1] == "otp" - assert kwargs["body"]["phone"] == phone - assert kwargs["body"]["create_user"] - assert kwargs["body"]["data"] == {"custom": "data"} - assert kwargs["body"]["channel"] == "whatsapp" - assert ( - kwargs["body"]["gotrue_meta_security"]["captcha_token"] - == "mock-captcha-token" + signin_response = await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, ) - assert kwargs.get("redirect_to") is None # No redirect for phone - - # Verify response - assert response == auth_otp - - # Test with invalid parameters (missing both email and phone) - from supabase_auth.errors import AuthInvalidCredentialsError - - try: - await client.sign_in_with_otp({}) # type: ignore - raise AssertionError("Expected AuthInvalidCredentialsError") - except AuthInvalidCredentialsError: - pass - - -async def test_sign_out() -> None: - from datetime import datetime - from unittest.mock import patch - - from supabase_auth.types import Session, User - - client = 
auth_client() - - # Create a mock user and session - date = datetime(year=2023, month=1, day=1, hour=0, minute=0, second=0) - mock_user = User( - id="user123", - email="test@example.com", - app_metadata={}, - user_metadata={}, - aud="authenticated", - created_at=date, - confirmed_at=date, - last_sign_in_at=date, - role="authenticated", - updated_at=date, ) - mock_session = Session( - access_token="mock_access_token", - refresh_token="mock_refresh_token", - expires_in=3600, - token_type="bearer", - user=mock_user, - ) + # Verify the response has a valid session and user + assert signin_response.session is not None + assert signin_response.user is not None + assert signin_response.user.email == credentials.email - # Test sign_out with "global" scope (default) - # This should call admin.sign_out, _remove_session, and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope (global) - await client.sign_out() - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "global" - ) - - # Verify that _remove_session was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test sign_out with "local" scope - # Should behave the same as "global" for client-side - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, 
"_notify_all_subscribers") as mock_notify: - # Call sign_out with local scope - await client.sign_out({"scope": "local"}) - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "local" - ) - - # Verify that _remove_session was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test sign_out with "others" scope - # This should only call admin.sign_out but not _remove_session or _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with others scope - await client.sign_out({"scope": "others"}) - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "others" - ) - - # Verify that _remove_session was NOT called - mock_remove_session.assert_not_called() - - # Verify that _notify_all_subscribers was NOT called - mock_notify.assert_not_called() - - # Test sign_out with no session - # This should not call admin.sign_out but still call _remove_session and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = None - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope - await client.sign_out() - - # Verify that admin.sign_out was NOT called - mock_admin_sign_out.assert_not_called() - - # Verify that _remove_session 
was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test when admin.sign_out raises an error - # This should suppress the error and continue with _remove_session and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - mock_admin_sign_out.side_effect = AuthApiError( - "Test error", 401, "validation_failed" - ) + called = False + + def sign_out_callback(auth_event: AuthChangeEvent, session: Session | None) -> None: + nonlocal called + called = True - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope - await client.sign_out() + auth_client.on_auth_state_change(sign_out_callback) - # Verify that _remove_session was still called despite the error - mock_remove_session.assert_called_once() + await auth_client.sign_out() - # Verify that _notify_all_subscribers was still called despite the error - mock_notify.assert_called_once_with("SIGNED_OUT", None) + no_more_session = await auth_client.get_session() + assert no_more_session is None + assert called diff --git a/src/auth/tests/_async/test_gotrue_admin_api.py b/src/auth/tests/_async/test_gotrue_admin_api.py index 798eced9..d153b413 100644 --- a/src/auth/tests/_async/test_gotrue_admin_api.py +++ b/src/auth/tests/_async/test_gotrue_admin_api.py @@ -1,627 +1,422 @@ -import pytest -from supabase_auth.errors import ( - AuthApiError, - AuthError, - AuthSessionMissingError, - AuthWeakPasswordError, +from supabase_auth import AsyncSupabaseAuthAdmin, AsyncSupabaseAuthClient +from supabase_auth.types import ( + AdminUserAttributes, + CreateOAuthClientParams, + GenerateLinkParams, + MFAEnroll, + Resend, + 
SignInWithPassword, + SignUpWithPassword, + UpdateOAuthClientParams, + User, + UserAttributes, ) -from supabase_auth.types import CreateOAuthClientParams, UpdateOAuthClientParams - -from .clients import ( - auth_client, - auth_client_with_session, - client_api_auto_confirm_disabled_client, - client_api_auto_confirm_off_signups_enabled_client, - create_new_user_with_email, + +from .conftest import ( + Credentials, mock_app_metadata, mock_user_credentials, mock_user_metadata, - mock_verification_otp, - service_role_api_client, ) -async def test_create_user_should_create_a_new_user() -> None: - credentials = mock_user_credentials() - response = await create_new_user_with_email(email=credentials.email) - assert response.email == credentials.email +async def test_create_user_should_create_a_new_user( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (client, user, credentials) = admin_client_with_user_and_credentials + assert user.email == credentials.email -async def test_create_user_with_user_metadata() -> None: +async def test_create_user_with_user_metadata( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: user_metadata = mock_user_metadata() credentials = mock_user_credentials() - response = await service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "user_metadata": user_metadata, - } + response = await service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + user_metadata=user_metadata, + ) ) assert response.user.email == credentials.email assert response.user.user_metadata == user_metadata assert "profile_image" in response.user.user_metadata + await service_role_api_client.delete_user(response.user.id) -async def test_create_user_with_user_and_app_metadata() -> None: +async def test_create_user_with_user_and_app_metadata( + service_role_api_client: 
AsyncSupabaseAuthAdmin, +) -> None: user_metadata = mock_user_metadata() app_metadata = mock_app_metadata() credentials = mock_user_credentials() - response = await service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "user_metadata": user_metadata, - "app_metadata": app_metadata, - } + response = await service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + user_metadata=user_metadata, + app_metadata=app_metadata, + ) ) assert response.user.email == credentials.email assert "profile_image" in response.user.user_metadata assert "provider" in response.user.app_metadata assert "providers" in response.user.app_metadata + await service_role_api_client.delete_user(response.user.id) -async def test_list_users_should_return_registered_users() -> None: - credentials = mock_user_credentials() - await create_new_user_with_email(email=credentials.email) - users = await service_role_api_client().list_users() +async def test_list_users_should_return_registered_users( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials + users = await admin_client.list_users() assert users emails = [user.email for user in users] assert emails assert credentials.email in emails -async def test_get_user_by_id_should_a_registered_user_given_its_user_identifier() -> ( - None -): - credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) +async def test_get_user_by_id_should_a_registered_user_given_its_user_identifier( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials assert user.id - response = await service_role_api_client().get_user_by_id(user.id) + response = 
await admin_client.get_user_by_id(user.id) assert response.user.email == credentials.email -async def test_modify_email_using_update_user_by_id() -> None: - credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) - response = await service_role_api_client().update_user_by_id( +async def test_modify_email_using_update_user_by_id( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials + response = await admin_client.update_user_by_id( user.id, - { - "email": f"new_{user.email}", - }, + AdminUserAttributes( + email=f"new_{user.email}", + ), ) assert response.user.email == f"new_{user.email}" -async def test_modify_user_metadata_using_update_user_by_id() -> None: - credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) +async def test_modify_user_metadata_using_update_user_by_id( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials user_metadata = {"favorite_color": "yellow"} - response = await service_role_api_client().update_user_by_id( + response = await admin_client.update_user_by_id( user.id, - { - "user_metadata": user_metadata, - }, + AdminUserAttributes( + user_metadata=user_metadata, + ), ) assert response.user.email == user.email assert response.user.user_metadata == user_metadata -async def test_modify_app_metadata_using_update_user_by_id() -> None: - credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) +async def test_modify_app_metadata_using_update_user_by_id( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials 
app_metadata = {"roles": ["admin", "publisher"]} - response = await service_role_api_client().update_user_by_id( + response = await admin_client.update_user_by_id( user.id, - { - "app_metadata": app_metadata, - }, + AdminUserAttributes( + app_metadata=app_metadata, + ), ) assert response.user.email == user.email assert "roles" in response.user.app_metadata -async def test_modify_confirm_email_using_update_user_by_id() -> None: +async def test_modify_confirm_email_using_update_user_by_id( + service_role_api_client: AsyncSupabaseAuthAdmin, + client_api_auto_confirm_off_signups_enabled_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - response = await client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + response = await client_api_auto_confirm_off_signups_enabled_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) assert response.user assert not response.user.email_confirmed_at - auth_response = await service_role_api_client().update_user_by_id( + auth_response = await service_role_api_client.update_user_by_id( response.user.id, - { - "email_confirm": True, - }, + AdminUserAttributes( + email_confirm=True, + ), ) assert auth_response.user.email_confirmed_at + await service_role_api_client.delete_user(response.user.id) -async def test_invalid_credential_sign_in_with_phone() -> None: - try: - await ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_password( - { - "phone": "+123456789", - "password": "strong_pwd", - } - ) - ) - except AuthApiError as e: - assert e.to_dict() - - -async def test_invalid_credential_sign_in_with_email() -> None: - try: - await ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_password( - { - "email": "unknown_user@unknowndomain.com", - "password": "strong_pwd", - } - ) - ) - except AuthApiError as e: - assert e.to_dict() - - 
-async def test_sign_in_with_otp_email() -> None: - try: - await client_api_auto_confirm_off_signups_enabled_client().sign_in_with_otp( - { - "email": "unknown_user@unknowndomain.com", - } - ) - except AuthApiError as e: - assert e.to_dict() - - -async def test_sign_in_with_otp_phone() -> None: - try: - await client_api_auto_confirm_off_signups_enabled_client().sign_in_with_otp( - { - "phone": "+112345678", - } - ) - except AuthApiError as e: - assert e.to_dict() - - -async def test_resend() -> None: - await client_api_auto_confirm_off_signups_enabled_client().resend( - {"phone": "+112345678", "type": "sms"} +async def test_resend( + client_api_auto_confirm_off_signups_enabled_client: AsyncSupabaseAuthClient, +) -> None: + await client_api_auto_confirm_off_signups_enabled_client.resend( + Resend.phone(phone="+112345678", type="sms") ) -async def test_reauthenticate() -> None: - client = await auth_client_with_session() - await client.reauthenticate() +async def test_reauthenticate( + auth_client_with_session: AsyncSupabaseAuthClient, +) -> None: + await auth_client_with_session.reauthenticate() -async def test_refresh_session() -> None: - client = await auth_client_with_session() - await client.refresh_session() +async def test_refresh_session( + auth_client_with_session: AsyncSupabaseAuthClient, +) -> None: + await auth_client_with_session.refresh_session() -async def test_reset_password_for_email() -> None: +async def test_reset_password_for_email( + auth_client_with_session: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - client = await auth_client_with_session() - await client.reset_password_email(email=credentials.email) + await auth_client_with_session.reset_password_for_email(email=credentials.email) -async def test_resend_missing_credentials() -> None: +async def test_resend_missing_credentials( + client_api_auto_confirm_off_signups_enabled_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - 
await client_api_auto_confirm_off_signups_enabled_client().resend( - {"type": "email_change", "email": credentials.email} + await client_api_auto_confirm_off_signups_enabled_client.resend( + Resend.email(type="email_change", email=credentials.email) ) -async def test_sign_in_anonymously() -> None: - client = await auth_client_with_session() - await client.sign_in_anonymously() +async def test_sign_in_anonymously( + auth_client_with_session: AsyncSupabaseAuthClient, +) -> None: + await auth_client_with_session.sign_in_anonymously() -async def test_delete_user_should_be_able_delete_an_existing_user() -> None: +async def test_delete_user_should_be_able_delete_an_existing_user( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) - await service_role_api_client().delete_user(user.id) - users = await service_role_api_client().list_users() - emails = [user.email for user in users] - assert credentials.email not in emails + response = await service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + ) + ) + assert response.user.email == credentials.email + users = await service_role_api_client.list_users() + assert response.user.email in [user.email for user in users] + await service_role_api_client.delete_user(response.user.id) + users = await service_role_api_client.list_users() + assert response.user.email not in [user.email for user in users] -async def test_generate_link_supports_sign_up_with_generate_confirmation_signup_link() -> ( - None -): +async def test_generate_link_supports_sign_up_with_generate_confirmation_signup_link( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" user_metadata = {"status": "alpha"} - response = await service_role_api_client().generate_link( - { - "type": "signup", - 
"email": credentials.email, - "password": credentials.password, - "options": { - "data": user_metadata, - "redirect_to": redirect_to, - }, - }, + response = await service_role_api_client.generate_link( + GenerateLinkParams.sign_up( + email=credentials.email, + password=credentials.password, + data=user_metadata, + redirect_to=redirect_to, + ) ) assert response.user.user_metadata == user_metadata -async def test_generate_link_supports_updating_emails_with_generate_email_change_links() -> ( - None -): # noqa: E501 - credentials = mock_user_credentials() - user = await create_new_user_with_email(email=credentials.email) +async def test_generate_link_supports_updating_emails_with_generate_email_change_links( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: # noqa: E501 + (admin_client, user, credentials) = admin_client_with_user_and_credentials assert user.email assert user.email == credentials.email credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" - response = await service_role_api_client().generate_link( - { - "type": "email_change_current", - "email": user.email, - "new_email": credentials.email, - "options": { - "redirect_to": redirect_to, - }, - }, + response = await admin_client.generate_link( + GenerateLinkParams.email_change_current( + email=user.email, + new_email=credentials.email, + redirect_to=redirect_to, + ) ) assert response.user.new_email == credentials.email -async def test_invite_user_by_email_creates_a_new_user_with_an_invited_at_timestamp() -> ( - None -): +async def test_invite_user_by_email_creates_a_new_user_with_an_invited_at_timestamp( + admin_client_with_user_and_credentials: tuple[ + AsyncSupabaseAuthAdmin, User, Credentials + ], +) -> None: + (admin_client, user, credentials) = admin_client_with_user_and_credentials credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" user_metadata = {"status": "alpha"} - response = 
await service_role_api_client().invite_user_by_email( + response = await admin_client.invite_user_by_email( credentials.email, - { - "data": user_metadata, - "redirect_to": redirect_to, - }, + data=user_metadata, + redirect_to=redirect_to, ) assert response.user.invited_at -async def test_sign_out_with_an_valid_access_token() -> None: - credentials = mock_user_credentials() - client = await auth_client_with_session() - response = await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - }, +async def test_sign_in_with_oauth( + client_api_auto_confirm_off_signups_enabled_client: AsyncSupabaseAuthClient, +) -> None: + assert await client_api_auto_confirm_off_signups_enabled_client.sign_in_with_oauth( + provider="google" ) - assert response.session - await service_role_api_client().sign_out(response.session.access_token) - - -async def test_sign_out_with_an_invalid_access_token() -> None: - try: - await service_role_api_client().sign_out("this-is-a-bad-token") - raise AssertionError() - except AuthError: - pass -async def test_verify_otp_with_non_existent_phone_number() -> None: +async def test_get_item_from_memory_storage( + auth_client: AsyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - otp = mock_verification_otp() - try: - await client_api_auto_confirm_disabled_client().verify_otp( - { - "phone": credentials.phone, - "token": otp, - "type": "sms", - }, - ) - raise AssertionError() - except AuthError as e: - assert e.message == "Token has expired or is invalid" - - -async def test_verify_otp_with_invalid_phone_number() -> None: - credentials = mock_user_credentials() - otp = mock_verification_otp() - try: - await client_api_auto_confirm_disabled_client().verify_otp( - { - "phone": f"{credentials.phone}-invalid", - "token": otp, - "type": "sms", - }, - ) - raise AssertionError() - except AuthError as e: - assert e.message == "Invalid phone number format (E.164 required)" - - -async def 
test_sign_in_with_id_token() -> None: - try: - await ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_id_token( - { - "provider": "google", - "token": "123456", - } - ) - ) - except AuthApiError as e: - assert e.to_dict() - - -async def test_sign_in_with_sso() -> None: - with pytest.raises(AuthApiError, match=r"SAML 2.0 is disabled") as exc: - await client_api_auto_confirm_off_signups_enabled_client().sign_in_with_sso( - { - "domain": "google", - } - ) - assert exc.value is not None - - -async def test_sign_in_with_oauth() -> None: - assert ( - await client_api_auto_confirm_off_signups_enabled_client().sign_in_with_oauth( - { - "provider": "google", - } + await auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, ) ) - -async def test_link_identity_missing_session() -> None: - with pytest.raises(AuthSessionMissingError) as exc: - await client_api_auto_confirm_off_signups_enabled_client().link_identity( - { - "provider": "google", - } + await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, ) - assert exc.value is not None - - -async def test_get_item_from_memory_storage() -> None: - credentials = mock_user_credentials() - client = auth_client() - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - assert await client._storage.get_item(client._storage_key) is not None - - -async def test_remove_item_from_memory_storage() -> None: - credentials = mock_user_credentials() - client = auth_client() - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - await 
client._storage.remove_item(client._storage_key) - - -async def test_list_factors() -> None: - credentials = mock_user_credentials() - client = auth_client() - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - factors = await client._list_factors() - assert factors - assert isinstance(factors.totp, list) and isinstance(factors.phone, list) - - -async def test_start_auto_refresh_token() -> None: - credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } ) - - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + assert ( + await auth_client.session_manager.storage.get_item( + auth_client.session_manager.storage_key + ) + is not None ) -async def test_recover_and_refresh() -> None: +async def test_recover_and_refresh(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + auth_client.session_manager.auto_refresh_token = True + await auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - await client._recover_and_refresh() - - -async def test_get_user_identities() -> None: - credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - await client.sign_up( - { - "email": 
credentials.email, - "password": credentials.password, - } - ) - - await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - assert (await client.get_user_identities()).identities[0].identity_data[ + await auth_client.session_manager.recover_and_refresh() + assert (await auth_client.get_user_identities()).identities[0].identity_data[ "email" ] == credentials.email -async def test_update_user() -> None: +async def test_update_user(auth_client: AsyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + auth_client.session_manager.auto_refresh_token = True + await auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - await client.update_user({"password": "123e5a"}) - await client.sign_in_with_password( - { - "email": credentials.email, - "password": "123e5a", - } + await auth_client.update_user(UserAttributes(password="123e5a")) + await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password="123e5a", + ) ) -async def test_create_user_with_app_metadata() -> None: +async def test_create_user_with_app_metadata( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: app_metadata = mock_app_metadata() credentials = mock_user_credentials() - response = await service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "app_metadata": app_metadata, - } + response = await service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + app_metadata=app_metadata, + ) ) assert response.user.email == credentials.email assert "provider" in response.user.app_metadata assert "providers" in response.user.app_metadata -async def 
test_weak_email_password_error() -> None: - credentials = mock_user_credentials() - try: - await client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "email": credentials.email, - "password": "123", - } - ) - except (AuthWeakPasswordError, AuthApiError) as e: - assert e.to_dict() - - -async def test_weak_phone_password_error() -> None: - credentials = mock_user_credentials() - try: - await client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "phone": credentials.phone, - "password": "123", - } - ) - except (AuthWeakPasswordError, AuthApiError) as e: - assert e.to_dict() - - -async def test_admin_list_factors() -> None: +async def test_admin_list_factors( + auth_client: AsyncSupabaseAuthClient, + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: import pyotp credentials = mock_user_credentials() - client = auth_client() - await client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + await auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - auth_response = await client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + auth_response = await auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) assert auth_response.user - enroll_response = await client.mfa.enroll( - { - "factor_type": "totp", - "friendly_name": "test_otp", - } + enroll_response = await auth_client.mfa.enroll( + MFAEnroll.totp(friendly_name="test_otp") ) assert enroll_response.totp totp = pyotp.TOTP(enroll_response.totp.secret) - res = await client.mfa.challenge_and_verify( - { - "factor_id": enroll_response.id, - "code": totp.now(), - } - ) - admin_client = service_role_api_client() - factors = await admin_client.mfa.list_factors( - { - "user_id": res.user.id, - } + res = await auth_client.mfa.challenge_and_verify( + 
factor_id=enroll_response.id, + code=totp.now(), + ) + factors = await service_role_api_client.mfa.list_factors( + user_id=res.user.id, ) assert factors[0].friendly_name == "test_otp" assert factors[0].factor_type == "totp" assert factors[0].status == "verified" - await admin_client.mfa.delete_factor( - { - "id": factors[0].id, - "user_id": res.user.id, - } + await service_role_api_client.mfa.delete_factor( + factor_id=factors[0].id, + user_id=res.user.id, ) - factors = await admin_client.mfa.list_factors({"user_id": res.user.id}) + factors = await service_role_api_client.mfa.list_factors(user_id=res.user.id) assert len(factors) == 0 -async def test_create_oauth_client() -> None: +async def test_create_oauth_client( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: """Test creating an OAuth client.""" - response = await service_role_api_client().oauth.create_client( + response = await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client", redirect_uris=["https://example.com/callback"], @@ -632,25 +427,28 @@ async def test_create_oauth_client() -> None: assert response.client.client_id is not None -async def test_list_oauth_clients() -> None: +async def test_list_oauth_clients( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: """Test listing OAuth clients.""" - client = service_role_api_client() - await client.oauth.create_client( + await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client", redirect_uris=["https://example.com/callback"], ) ) - response = await client.oauth.list_clients() + response = await service_role_api_client.oauth.list_clients() assert len(response.clients) > 0 assert any(client.client_name == "Test OAuth Client" for client in response.clients) assert any(client.client_id is not None for client in response.clients) -async def test_get_oauth_client() -> None: +async def test_get_oauth_client( + service_role_api_client: 
AsyncSupabaseAuthAdmin, +) -> None: """Test getting an OAuth client by ID.""" # First create a client - create_response = await service_role_api_client().oauth.create_client( + create_response = await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Get", redirect_uris=["https://example.com/callback"], @@ -658,17 +456,18 @@ async def test_get_oauth_client() -> None: ) if create_response.client: client_id = create_response.client.client_id - response = await service_role_api_client().oauth.get_client(client_id) + response = await service_role_api_client.oauth.get_client(client_id) assert response.client is not None assert response.client.client_id == client_id # Server is not yet released, so this test is not yet relevant. -async def test_update_oauth_client() -> None: +async def test_update_oauth_client( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: """Test updating an OAuth client.""" # First create a client - client = service_role_api_client() - create_response = await client.oauth.create_client( + create_response = await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Update", redirect_uris=["https://example.com/callback"], @@ -676,7 +475,7 @@ async def test_update_oauth_client() -> None: ) assert create_response.client is not None client_id = create_response.client.client_id - response = await client.oauth.update_client( + response = await service_role_api_client.oauth.update_client( client_id, UpdateOAuthClientParams( client_name="Updated Test OAuth Client", @@ -686,11 +485,12 @@ async def test_update_oauth_client() -> None: assert response.client.client_name == "Updated Test OAuth Client" -async def test_delete_oauth_client() -> None: +async def test_delete_oauth_client( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: """Test deleting an OAuth client.""" # First create a client - client = service_role_api_client() - 
create_response = await client.oauth.create_client( + create_response = await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Delete", redirect_uris=["https://example.com/callback"], @@ -698,13 +498,15 @@ async def test_delete_oauth_client() -> None: ) assert create_response.client is not None client_id = create_response.client.client_id - await client.oauth.delete_client(client_id) + await service_role_api_client.oauth.delete_client(client_id) -async def test_regenerate_oauth_client_secret() -> None: +async def test_regenerate_oauth_client_secret( + service_role_api_client: AsyncSupabaseAuthAdmin, +) -> None: """Test regenerating an OAuth client secret.""" # First create a client - create_response = await service_role_api_client().oauth.create_client( + create_response = await service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Regenerate", redirect_uris=["https://example.com/callback"], @@ -712,7 +514,7 @@ async def test_regenerate_oauth_client_secret() -> None: ) if create_response.client: client_id = create_response.client.client_id - response = await service_role_api_client().oauth.regenerate_client_secret( + response = await service_role_api_client.oauth.regenerate_client_secret( client_id ) assert response.client is not None diff --git a/src/auth/tests/_async/test_utils.py b/src/auth/tests/_async/test_utils.py deleted file mode 100644 index ed0a22ff..00000000 --- a/src/auth/tests/_async/test_utils.py +++ /dev/null @@ -1,25 +0,0 @@ -from time import time - -from .clients import ( - create_new_user_with_email, - mock_app_metadata, - mock_user_metadata, -) - - -async def test_create_new_user_with_email() -> None: - email = f"user+{int(time())}@example.com" - user = await create_new_user_with_email(email=email) - assert user.email == email - - -def test_mock_user_metadata() -> None: - user_metadata = mock_user_metadata() - assert user_metadata - assert 
user_metadata.get("profile_image") - - -def test_mock_app_metadata() -> None: - app_metadata = mock_app_metadata() - assert app_metadata - assert app_metadata.get("roles") diff --git a/src/auth/tests/_sync/clients.py b/src/auth/tests/_sync/clients.py deleted file mode 100644 index 162e5f9a..00000000 --- a/src/auth/tests/_sync/clients.py +++ /dev/null @@ -1,232 +0,0 @@ -from dataclasses import dataclass -from random import random -from time import time -from typing import Optional - -from faker import Faker -from jwt import encode -from supabase_auth import SyncGoTrueAdminAPI, SyncGoTrueClient -from supabase_auth.types import User -from typing_extensions import NotRequired, TypedDict - - -def mock_access_token() -> str: - return encode( - { - "sub": "1234567890", - "role": "anon_key", - }, - GOTRUE_JWT_SECRET, - ) - - -class OptionalCredentials(TypedDict): - email: NotRequired[Optional[str]] - phone: NotRequired[Optional[str]] - password: NotRequired[Optional[str]] - - -@dataclass -class Credentials: - email: str - phone: str - password: str - - -def mock_user_credentials( - options: Optional[OptionalCredentials] = None, -) -> Credentials: - fake = Faker() - user_options = options or {} - rand_numbers = str(int(time())) - return Credentials( - email=user_options.get("email") or fake.email(), - phone=user_options.get("phone") or f"1{rand_numbers[-11:]}", - password=user_options.get("password") or fake.password(), - ) - - -def mock_verification_otp() -> str: - return str(int(100000 + random() * 900000)) - - -class UserMetadata(TypedDict): - profile_image: str - - -def mock_user_metadata() -> UserMetadata: - fake = Faker() - return { - "profile_image": fake.url(), - } - - -class AppMetadata(TypedDict): - roles: list[str] - - -def mock_app_metadata() -> AppMetadata: - return { - "roles": ["editor", "publisher"], - } - - -def create_new_user_with_email( - *, - email: Optional[str] = None, - password: Optional[str] = None, -) -> User: - credentials = 
mock_user_credentials( - { - "email": email, - "password": password, - } - ) - response = service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - } - ) - return response.user - - -SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT = 9999 -SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT = 9998 -SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT = 9997 -SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT = 9996 - -GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF = ( - f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT}" -) -GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON = ( - f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT}" -) -GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON = ( - f"http://localhost:{SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT}" -) -GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF = ( - f"http://localhost:{SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT}" -) - -GOTRUE_JWT_SECRET = "37c304f8-51aa-419a-a1af-06154e63707a" - -AUTH_ADMIN_JWT = encode( - { - "sub": "1234567890", - "role": "supabase_admin", - }, - GOTRUE_JWT_SECRET, -) - - -def auth_client() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -def auth_client_with_session() -> SyncGoTrueClient: - client = SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=False, - ) - credentials = mock_user_credentials() - client.sign_up({"email": credentials.email, "password": credentials.password}) - return client - - -def auth_client_with_asymmetric_session() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=False, - ) - - -def auth_subscription_client() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -def 
client_api_auto_confirm_enabled_client() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - auto_refresh_token=False, - persist_session=True, - ) - - -def client_api_auto_confirm_off_signups_enabled_client() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - auto_refresh_token=False, - persist_session=True, - ) - - -def client_api_auto_confirm_disabled_client() -> SyncGoTrueClient: - return SyncGoTrueClient( - url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, - auto_refresh_token=False, - persist_session=True, - ) - - -def auth_admin_api_auto_confirm_enabled_client() -> SyncGoTrueAdminAPI: - return SyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - headers={ - "Authorization": f"Bearer {AUTH_ADMIN_JWT}", - }, - ) - - -def auth_admin_api_auto_confirm_disabled_client() -> SyncGoTrueAdminAPI: - return SyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - headers={ - "Authorization": f"Bearer {AUTH_ADMIN_JWT}", - }, - ) - - -SERVICE_ROLE_JWT = encode( - { - "role": "service_role", - }, - GOTRUE_JWT_SECRET, -) - - -def service_role_api_client() -> SyncGoTrueAdminAPI: - return SyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, - headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) - - -def service_role_api_client_with_sms() -> SyncGoTrueAdminAPI: - return SyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, - headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) - - -def service_role_api_client_no_sms() -> SyncGoTrueAdminAPI: - return SyncGoTrueAdminAPI( - url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, - headers={ - "Authorization": f"Bearer {SERVICE_ROLE_JWT}", - }, - ) diff --git a/src/auth/tests/_sync/conftest.py b/src/auth/tests/_sync/conftest.py new file mode 100644 index 00000000..91f92647 --- /dev/null +++ b/src/auth/tests/_sync/conftest.py @@ -0,0 +1,304 @@ +from 
dataclasses import dataclass +from random import random +from time import time +from typing import Iterator + +import pytest +from faker import Faker +from httpx import Client +from jwt import encode +from supabase_utils.http.adapters.httpx import HttpxSession +from typing_extensions import NotRequired, TypedDict + +from supabase_auth import SyncSupabaseAuthAdmin, SyncSupabaseAuthClient +from supabase_auth.types import ( + AdminUserAttributes, + SignInWithPassword, + SignUpWithPassword, + User, +) + + +def mock_access_token() -> str: + return encode( + { + "sub": "1234567890", + "role": "anon_key", + }, + GOTRUE_JWT_SECRET, + ) + + +class OptionalCredentials(TypedDict): + email: NotRequired[str | None] + phone: NotRequired[str | None] + password: NotRequired[str | None] + + +@dataclass +class Credentials: + email: str + phone: str + password: str + + +def mock_user_credentials( + options: OptionalCredentials | None = None, +) -> Credentials: + fake = Faker() + user_options = options or {} + rand_numbers = str(int(time())) + return Credentials( + email=user_options.get("email") or fake.email(), + phone=user_options.get("phone") or f"1{rand_numbers[-11:]}", + password=user_options.get("password") or fake.password(), + ) + + +def mock_verification_otp() -> str: + return str(int(100000 + random() * 900000)) + + +def mock_user_metadata() -> dict[str, str]: + fake = Faker() + return { + "profile_image": fake.url(), + } + + +def mock_app_metadata() -> dict[str, list[str]]: + return { + "roles": ["editor", "publisher"], + } + + +def create_new_user_with_email( + service_role_api_client: SyncSupabaseAuthAdmin, + *, + email: str | None = None, + password: str | None = None, +) -> User: + credentials = mock_user_credentials( + { + "email": email, + "password": password, + } + ) + response = service_role_api_client.create_user( + AdminUserAttributes(email=credentials.email, password=credentials.password) + ) + return response.user + + +SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT = 
9999 +SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT = 9998 +SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT = 9997 +SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT = 9996 + +GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF = ( + f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_OFF_PORT}" +) +GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON = ( + f"http://localhost:{SIGNUP_ENABLED_AUTO_CONFIRM_ON_PORT}" +) +GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON = ( + f"http://localhost:{SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON_PORT}" +) +GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF = ( + f"http://localhost:{SIGNUP_DISABLED_AUTO_CONFIRM_OFF_PORT}" +) + +GOTRUE_JWT_SECRET = "37c304f8-51aa-419a-a1af-06154e63707a" + +AUTH_ADMIN_JWT = encode( + { + "sub": "1234567890", + "role": "supabase_admin", + }, + GOTRUE_JWT_SECRET, +) + + +def httpx() -> HttpxSession: + return HttpxSession(client=Client(http2=True, verify=True)) + + +http_sessions = [httpx] + + +@pytest.fixture(params=http_sessions) +def auth_client(request: pytest.FixtureRequest) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def auth_client_with_session( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=False, + http_session=request.param(), + ) as client: + credentials = mock_user_credentials() + client.sign_up( + SignUpWithPassword.email( + email=credentials.email, password=credentials.password + ) + ) + client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, password=credentials.password + ) + ) + yield client + + +@pytest.fixture(params=http_sessions) +def auth_client_with_asymmetric_session( + request: pytest.FixtureRequest, +) -> 
Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_ASYMMETRIC_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=False, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def auth_subscription_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def client_api_auto_confirm_enabled_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def client_api_auto_confirm_off_signups_enabled_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def client_api_auto_confirm_disabled_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthClient]: + with SyncSupabaseAuthClient( + url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, + auto_refresh_token=False, + persist_session=True, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def auth_admin_api_auto_confirm_enabled_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthAdmin]: + with SyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + default_headers={ + "Authorization": f"Bearer {AUTH_ADMIN_JWT}", + }, + 
http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def auth_admin_api_auto_confirm_disabled_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthAdmin]: + with SyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + default_headers={ + "Authorization": f"Bearer {AUTH_ADMIN_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +SERVICE_ROLE_JWT = encode( + { + "role": "service_role", + }, + GOTRUE_JWT_SECRET, +) + + +@pytest.fixture(params=http_sessions) +def service_role_api_client( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthAdmin]: + with SyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_ON, + default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def service_role_api_client_with_sms( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthAdmin]: + with SyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_ENABLED_AUTO_CONFIRM_OFF, + default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client + + +@pytest.fixture(params=http_sessions) +def service_role_api_client_no_sms( + request: pytest.FixtureRequest, +) -> Iterator[SyncSupabaseAuthAdmin]: + with SyncSupabaseAuthAdmin( + url=GOTRUE_URL_SIGNUP_DISABLED_AUTO_CONFIRM_OFF, + default_headers={ + "Authorization": f"Bearer {SERVICE_ROLE_JWT}", + }, + http_session=request.param(), + ) as client: + yield client diff --git a/src/auth/tests/_sync/test_gotrue.py b/src/auth/tests/_sync/test_gotrue.py index f260622d..b487e7e1 100644 --- a/src/auth/tests/_sync/test_gotrue.py +++ b/src/auth/tests/_sync/test_gotrue.py @@ -1,96 +1,102 @@ import time -from uuid import uuid4 import pytest from jwt import encode + +from supabase_auth import SyncSupabaseAuthClient from 
supabase_auth.errors import ( AuthApiError, AuthInvalidJwtError, AuthSessionMissingError, ) from supabase_auth.helpers import decode_jwt -from supabase_auth.types import SignUpWithEmailAndPasswordCredentials +from supabase_auth.types import ( + AuthChangeEvent, + MFAEnroll, + Session, + SignInWithPassword, + SignUpWithPassword, +) -from .clients import ( +from .conftest import ( GOTRUE_JWT_SECRET, - auth_client, - auth_client_with_asymmetric_session, - auth_client_with_session, mock_user_credentials, ) -def test_get_claims_returns_none_when_session_is_none() -> None: - claims = auth_client().get_claims() +def test_get_claims_returns_none_when_session_is_none( + auth_client: SyncSupabaseAuthClient, +) -> None: + claims = auth_client.get_claims() assert claims is None -def test_get_claims_calls_get_user_if_symmetric_jwt(mocker) -> None: - client = auth_client() - spy = mocker.spy(client, "get_user") +def test_get_claims_calls_get_user_if_symmetric_jwt( + mocker, auth_client: SyncSupabaseAuthClient +) -> None: + spy = mocker.spy(auth_client.session_manager, "_get_user") credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (auth_client.sign_up(options)).user assert user is not None - response = client.get_claims() + response = auth_client.get_claims() assert response - claims = response["claims"] + claims = response.claims - assert claims.get("email") == user.email + assert claims.model_extra + assert claims.model_extra["email"] == user.email spy.assert_called_once() -def test_get_claims_fetches_jwks_to_verify_asymmetric_jwt(mocker) -> None: - client = auth_client_with_asymmetric_session() +def test_get_claims_fetches_jwks_to_verify_asymmetric_jwt( + mocker, auth_client_with_asymmetric_session: 
SyncSupabaseAuthClient +) -> None: credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (auth_client_with_asymmetric_session.sign_up(options)).user assert user is not None - spy = mocker.spy(client, "_request") - - response = client.get_claims() + response = auth_client_with_asymmetric_session.get_claims() assert response - claims = response["claims"] - assert claims.get("email") == user.email + claims = response.claims - spy.assert_called_once() - spy.assert_called_with("GET", ".well-known/jwks.json") + assert claims.model_extra + assert claims.model_extra["email"] == user.email expected_keyid = "638c54b8-28c2-4b12-9598-ba12ef610a29" - assert len(client._jwks["keys"]) == 1 - assert client._jwks["keys"][0]["kid"] == expected_keyid - + assert len(auth_client_with_asymmetric_session._jwks.keys) == 1 + assert auth_client_with_asymmetric_session._jwks.keys[0].kid == expected_keyid -def test_jwks_ttl_cache_behavior(mocker) -> None: - client = auth_client_with_asymmetric_session() - spy = mocker.spy(client, "_request") +def test_jwks_ttl_cache_behavior( + mocker, auth_client_with_asymmetric_session: SyncSupabaseAuthClient +) -> None: + spy = mocker.spy(auth_client_with_asymmetric_session.executor.session, "send") # First call should fetch JWKS from endpoint credentials = mock_user_credentials() - options: SignUpWithEmailAndPasswordCredentials = { - "email": credentials.email, - "password": credentials.password, - } - user = (client.sign_up(options)).user + options = SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) + user = (auth_client_with_asymmetric_session.sign_up(options)).user assert user is not None - client.get_claims() - spy.assert_called_with("GET", 
".well-known/jwks.json") + auth_client_with_asymmetric_session.get_claims() + first_call_count = spy.call_count # Second call within TTL should use cache - client.get_claims() + auth_client_with_asymmetric_session.get_claims() assert spy.call_count == first_call_count # No additional JWKS request # Mock time to be after TTL expiry @@ -100,23 +106,19 @@ def test_jwks_ttl_cache_behavior(mocker) -> None: mock_time.return_value = original_time() + 601 # TTL is 600 seconds # Call after TTL expiry should fetch fresh JWKS - client.get_claims() + auth_client_with_asymmetric_session.get_claims() assert spy.call_count == first_call_count + 1 # One more JWKS request finally: # Restore original time function mocker.patch("time.time", original_time) -def test_set_session_with_valid_tokens() -> None: - client = auth_client() +def test_set_session_with_valid_tokens(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get valid tokens - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -125,10 +127,10 @@ def test_set_session_with_valid_tokens() -> None: refresh_token = signup_response.session.refresh_token # Clear the session - client._remove_session() + auth_client.session_manager.remove_session() # Set the session with the tokens - response = client.set_session(access_token, refresh_token) + response = auth_client.set_session(access_token, refresh_token) # Verify the response assert response.session is not None @@ -138,16 +140,12 @@ def test_set_session_with_valid_tokens() -> None: assert response.user.email == credentials.email -def test_set_session_with_expired_token() -> None: - client = auth_client() +def test_set_session_with_expired_token(auth_client: SyncSupabaseAuthClient) -> None: credentials = 
mock_user_credentials() # First sign up to get valid tokens - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -156,19 +154,19 @@ def test_set_session_with_expired_token() -> None: refresh_token = signup_response.session.refresh_token # Clear the session - client._remove_session() + auth_client.session_manager.remove_session() # Create an expired token by modifying the JWT expired_token = access_token.split(".") - payload = decode_jwt(access_token)["payload"] - payload["exp"] = int(time.time()) - 3600 # Set expiry to 1 hour ago + payload = decode_jwt(access_token).payload + payload.exp = int(time.time()) - 3600 # Set expiry to 1 hour ago expired_token[1] = encode( dict(payload), GOTRUE_JWT_SECRET, algorithm="HS256" ).split(".")[1] expired_access_token = ".".join(expired_token) # Set the session with the expired token - response = client.set_session(expired_access_token, refresh_token) + response = auth_client.set_session(expired_access_token, refresh_token) # Verify the response has a new access token (refreshed) assert response.session is not None @@ -178,16 +176,12 @@ def test_set_session_with_expired_token() -> None: assert response.user.email == credentials.email -def test_set_session_without_refresh_token() -> None: - client = auth_client() +def test_set_session_without_refresh_token(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get valid tokens - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None @@ -195,12 +189,12 @@ def 
test_set_session_without_refresh_token() -> None: access_token = signup_response.session.access_token # Clear the session - client._remove_session() + auth_client.session_manager.remove_session() # Create an expired token expired_token = access_token.split(".") - payload = decode_jwt(access_token)["payload"] - payload["exp"] = int(time.time()) - 3600 # Set expiry to 1 hour ago + payload = decode_jwt(access_token).payload + payload.exp = int(time.time()) - 3600 # Set expiry to 1 hour ago expired_token[1] = encode( dict(payload), GOTRUE_JWT_SECRET, algorithm="HS256" ).split(".")[1] @@ -208,33 +202,25 @@ def test_set_session_without_refresh_token() -> None: # Try to set the session with an expired token but no refresh token with pytest.raises(AuthSessionMissingError): - client.set_session(expired_access_token, "") + auth_client.set_session(expired_access_token, "") -def test_set_session_with_invalid_token() -> None: - client = auth_client() - +def test_set_session_with_invalid_token(auth_client: SyncSupabaseAuthClient) -> None: # Try to set the session with invalid tokens with pytest.raises(AuthInvalidJwtError): - client.set_session("invalid.token.here", "invalid_refresh_token") - + auth_client.set_session("invalid.token.here", "invalid_refresh_token") -def test_mfa_enroll() -> None: - client = auth_client_with_session() +def test_mfa_enroll(auth_client_with_session: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + _signup_response = auth_client_with_session.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) - # Test MFA enrollment - enroll_response = client.mfa.enroll( - {"issuer": "test-issuer", "factor_type": "totp", "friendly_name": "test-factor"} + enroll_response = auth_client_with_session.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") 
) assert enroll_response.id is not None @@ -244,204 +230,116 @@ def test_mfa_enroll() -> None: assert enroll_response.totp.qr_code is not None -def test_mfa_challenge() -> None: - client = auth_client() +def test_mfa_challenge(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - enroll_response = client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + enroll_response = auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA challenge - challenge_response = client.mfa.challenge({"factor_id": enroll_response.id}) + challenge_response = auth_client.mfa.challenge(factor_id=enroll_response.id) assert challenge_response.id is not None assert challenge_response.expires_at is not None -def test_mfa_unenroll() -> None: - client = auth_client() +def test_mfa_unenroll(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - enroll_response = client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + enroll_response = auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA unenroll - unenroll_response = client.mfa.unenroll({"factor_id": enroll_response.id}) 
+ unenroll_response = auth_client.mfa.unenroll(factor_id=enroll_response.id) assert unenroll_response.id == enroll_response.id -def test_mfa_list_factors() -> None: - client = auth_client() +def test_mfa_list_factors(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # First sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Enroll a factor first - client.mfa.enroll( - {"factor_type": "totp", "issuer": "test-issuer", "friendly_name": "test-factor"} + auth_client.mfa.enroll( + MFAEnroll.totp(issuer="test-issuer", friendly_name="test-factor") ) # Test MFA list factors - list_response = client.mfa.list_factors() + list_response = auth_client.mfa.list_factors() assert len(list_response.all) == 1 -def test_exchange_code_for_session() -> None: - client = auth_client() - - # We'll test the flow type setting instead of the actual exchange, since the - # actual exchange requires a live OAuth flow which isn't practical in tests - assert client._flow_type in ["implicit", "pkce"] - - # This part would normally need a live OAuth flow, so we verify the logic paths - # Get the storage key for PKCE flow - storage_key = f"{client._storage_key}-code-verifier" - - # Set the flow type to pkce - client._flow_type = "pkce" - - # Test the PKCE URL generation which is needed for exchange_code_for_session - url, params = client._get_url_for_provider(f"{client._url}/authorize", "github", {}) - - # Verify PKCE parameters were added - assert "code_challenge" in params - assert "code_challenge_method" in params - - # Verify the code verifier was stored - code_verifier = client._storage.get_item(storage_key) - assert code_verifier is not None - - -def test_get_authenticator_assurance_level() -> None: - 
client = auth_client() +def test_get_authenticator_assurance_level(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # Without a session, should return null values - aal_response = client.mfa.get_authenticator_assurance_level() + aal_response = auth_client.mfa.get_authenticator_assurance_level() assert aal_response.current_level is None assert aal_response.next_level is None assert aal_response.current_authentication_methods == [] # Sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # With a session, should return authentication methods - aal_response = client.mfa.get_authenticator_assurance_level() + aal_response = auth_client.mfa.get_authenticator_assurance_level() # Basic auth will have password as an authentication method assert aal_response.current_authentication_methods is not None -def test_link_identity() -> None: - client = auth_client() - credentials = mock_user_credentials() - - # Sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - assert signup_response.session is not None - - from unittest.mock import patch - - from httpx import Response - - # Since the test server has manual linking disabled, we'll mock the URL generation - with patch.object(client, "_get_url_for_provider") as mock_url_provider: - mock_url = "http://example.com/authorize?provider=github" - mock_params = {"provider": "github"} - mock_url_provider.return_value = (mock_url, mock_params) - - # Also mock the _request method since the server would reject it - with patch.object(client, "_request") as mock_request: - mock_request.return_value = Response( - content=f'{{"url":"{mock_url}"}}', status_code=200 - ) - 
- # Call the method - response = client.link_identity({"provider": "github"}) - - # Verify the response - assert response.provider == "github" - assert response.url == mock_url - - -def test_get_user_identities() -> None: - client = auth_client() +def test_get_user_identities(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() # Sign up to get a valid session - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # New users won't have any identities yet, but the call should work - identities_response = client.get_user_identities() + identities_response = auth_client.get_user_identities() assert identities_response is not None # For a new user, identities will be an empty list or None assert hasattr(identities_response, "identities") -def test_sign_in_with_password() -> None: - client = auth_client() +def test_sign_in_with_password(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - from supabase_auth.errors import AuthApiError, AuthInvalidCredentialsError # First create a user we can sign in with - signup_response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) assert signup_response.session is not None # Test signing in with the same credentials (email) - signin_response = client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + signin_response = auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) # Verify the response has a valid session and user @@ -449,263 +347,48 @@ def 
test_sign_in_with_password() -> None: assert signin_response.user is not None assert signin_response.user.email == credentials.email - # Test error case: wrong password - - # We need to create a custom client to avoid affecting other tests - test_client = auth_client() - try: - test_client.sign_in_with_password( - { - "email": credentials.email, - "password": "wrong_password", - } + auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password="wrong_password", + ) ) raise AssertionError("Expected AuthApiError for wrong password") except AuthApiError: pass - # Test error case: missing credentials - try: - test_client.sign_in_with_password({}) # type: ignore - raise AssertionError( - "Expected AuthInvalidCredentialsError for missing credentials" - ) - except AuthInvalidCredentialsError: - pass - - -def test_sign_in_with_otp() -> None: - client = auth_client() - # Test with email OTP - email = f"test-{uuid4()}@example.com" - - # When sign_in_with_otp is called with valid email, it should return a AuthOtpResponse - # We can't fully test the actual OTP flow since that requires email verification - from unittest.mock import patch - - from httpx import Response - from supabase_auth.types import AuthOtpResponse +def test_sign_out(auth_client: SyncSupabaseAuthClient) -> None: + credentials = mock_user_credentials() - # First test for email OTP - auth_otp = AuthOtpResponse( - message_id="mock-message-id", + signup_response = auth_client.sign_up( + SignUpWithPassword.email(email=credentials.email, password=credentials.password) ) - with patch.object(client, "_request") as mock_request: - mock_response = Response(content=auth_otp.model_dump_json(), status_code=200) - mock_request.return_value = mock_response - - response = client.sign_in_with_otp( - { - "email": email, - "options": { - "email_redirect_to": "https://example.com/callback", - "should_create_user": True, - "data": {"custom": "data"}, - "captcha_token": "mock-captcha-token", 
- }, - } - ) - - # Verify request parameters - mock_request.assert_called_once() - args, kwargs = mock_request.call_args - assert args[0] == "POST" - assert args[1] == "otp" - assert kwargs["body"]["email"] == email - assert kwargs["body"]["create_user"] - assert kwargs["body"]["data"] == {"custom": "data"} - assert ( - kwargs["body"]["gotrue_meta_security"]["captcha_token"] - == "mock-captcha-token" - ) - assert kwargs["redirect_to"] == "https://example.com/callback" - - # Verify response - assert response == auth_otp - - # Test with phone OTP - phone = "+11234567890" - auth_otp = AuthOtpResponse(message_id="mock-message-id") - with patch.object(client, "_request") as mock_request: - mock_response = Response(content=auth_otp.model_dump_json(), status_code=200) - mock_request.return_value = mock_response - - response = client.sign_in_with_otp( - { - "phone": phone, - "options": { - "should_create_user": True, - "data": {"custom": "data"}, - "channel": "whatsapp", # Test alternate channel - "captcha_token": "mock-captcha-token", - }, - } - ) + assert signup_response.session is not None - # Verify request parameters - mock_request.assert_called_once() - args, kwargs = mock_request.call_args - assert args[0] == "POST" - assert args[1] == "otp" - assert kwargs["body"]["phone"] == phone - assert kwargs["body"]["create_user"] - assert kwargs["body"]["data"] == {"custom": "data"} - assert kwargs["body"]["channel"] == "whatsapp" - assert ( - kwargs["body"]["gotrue_meta_security"]["captcha_token"] - == "mock-captcha-token" + signin_response = auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, ) - assert kwargs.get("redirect_to") is None # No redirect for phone - - # Verify response - assert response == auth_otp - - # Test with invalid parameters (missing both email and phone) - from supabase_auth.errors import AuthInvalidCredentialsError - - try: - client.sign_in_with_otp({}) # type: ignore - raise 
AssertionError("Expected AuthInvalidCredentialsError") - except AuthInvalidCredentialsError: - pass - - -def test_sign_out() -> None: - from datetime import datetime - from unittest.mock import patch - - from supabase_auth.types import Session, User - - client = auth_client() - - # Create a mock user and session - date = datetime(year=2023, month=1, day=1, hour=0, minute=0, second=0) - mock_user = User( - id="user123", - email="test@example.com", - app_metadata={}, - user_metadata={}, - aud="authenticated", - created_at=date, - confirmed_at=date, - last_sign_in_at=date, - role="authenticated", - updated_at=date, ) - mock_session = Session( - access_token="mock_access_token", - refresh_token="mock_refresh_token", - expires_in=3600, - token_type="bearer", - user=mock_user, - ) + # Verify the response has a valid session and user + assert signin_response.session is not None + assert signin_response.user is not None + assert signin_response.user.email == credentials.email - # Test sign_out with "global" scope (default) - # This should call admin.sign_out, _remove_session, and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope (global) - client.sign_out() - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "global" - ) - - # Verify that _remove_session was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test sign_out with "local" scope - # Should behave the same as "global" for client-side - with patch.object(client, 
"get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with local scope - client.sign_out({"scope": "local"}) - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "local" - ) - - # Verify that _remove_session was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test sign_out with "others" scope - # This should only call admin.sign_out but not _remove_session or _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with others scope - client.sign_out({"scope": "others"}) - - # Verify that admin.sign_out was called with correct parameters - mock_admin_sign_out.assert_called_once_with( - "mock_access_token", "others" - ) - - # Verify that _remove_session was NOT called - mock_remove_session.assert_not_called() - - # Verify that _notify_all_subscribers was NOT called - mock_notify.assert_not_called() - - # Test sign_out with no session - # This should not call admin.sign_out but still call _remove_session and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = None - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - with patch.object(client, "_remove_session") as mock_remove_session: - with 
patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope - client.sign_out() - - # Verify that admin.sign_out was NOT called - mock_admin_sign_out.assert_not_called() - - # Verify that _remove_session was called - mock_remove_session.assert_called_once() - - # Verify that _notify_all_subscribers was called with SIGNED_OUT - mock_notify.assert_called_once_with("SIGNED_OUT", None) - - # Test when admin.sign_out raises an error - # This should suppress the error and continue with _remove_session and _notify_all_subscribers - with patch.object(client, "get_session") as mock_get_session: - mock_get_session.return_value = mock_session - - with patch.object(client.admin, "sign_out") as mock_admin_sign_out: - mock_admin_sign_out.side_effect = AuthApiError( - "Test error", 401, "validation_failed" - ) + called = False + + def sign_out_callback(auth_event: AuthChangeEvent, session: Session | None) -> None: + nonlocal called + called = True - with patch.object(client, "_remove_session") as mock_remove_session: - with patch.object(client, "_notify_all_subscribers") as mock_notify: - # Call sign_out with default scope - client.sign_out() + auth_client.on_auth_state_change(sign_out_callback) - # Verify that _remove_session was still called despite the error - mock_remove_session.assert_called_once() + auth_client.sign_out() - # Verify that _notify_all_subscribers was still called despite the error - mock_notify.assert_called_once_with("SIGNED_OUT", None) + no_more_session = auth_client.get_session() + assert no_more_session is None + assert called diff --git a/src/auth/tests/_sync/test_gotrue_admin_api.py b/src/auth/tests/_sync/test_gotrue_admin_api.py index 100445a1..cac847e4 100644 --- a/src/auth/tests/_sync/test_gotrue_admin_api.py +++ b/src/auth/tests/_sync/test_gotrue_admin_api.py @@ -1,58 +1,64 @@ -import pytest -from supabase_auth.errors import ( - AuthApiError, - AuthError, - AuthSessionMissingError, - AuthWeakPasswordError, 
+from supabase_auth import SyncSupabaseAuthAdmin, SyncSupabaseAuthClient +from supabase_auth.types import ( + AdminUserAttributes, + CreateOAuthClientParams, + GenerateLinkParams, + MFAEnroll, + Resend, + SignInWithPassword, + SignUpWithPassword, + UpdateOAuthClientParams, + UserAttributes, ) -from supabase_auth.types import CreateOAuthClientParams, UpdateOAuthClientParams -from .clients import ( - auth_client, - auth_client_with_session, - client_api_auto_confirm_disabled_client, - client_api_auto_confirm_off_signups_enabled_client, +from .conftest import ( create_new_user_with_email, mock_app_metadata, mock_user_credentials, mock_user_metadata, - mock_verification_otp, - service_role_api_client, ) -def test_create_user_should_create_a_new_user() -> None: +def test_create_user_should_create_a_new_user( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - response = create_new_user_with_email(email=credentials.email) + response = create_new_user_with_email( + service_role_api_client, email=credentials.email + ) assert response.email == credentials.email -def test_create_user_with_user_metadata() -> None: +def test_create_user_with_user_metadata( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: user_metadata = mock_user_metadata() credentials = mock_user_credentials() - response = service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "user_metadata": user_metadata, - } + response = service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + user_metadata=user_metadata, + ) ) assert response.user.email == credentials.email assert response.user.user_metadata == user_metadata assert "profile_image" in response.user.user_metadata -def test_create_user_with_user_and_app_metadata() -> None: +def test_create_user_with_user_and_app_metadata( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> 
None: user_metadata = mock_user_metadata() app_metadata = mock_app_metadata() credentials = mock_user_credentials() - response = service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "user_metadata": user_metadata, - "app_metadata": app_metadata, - } + response = service_role_api_client.create_user( + AdminUserAttributes( + email=credentials.email, + password=credentials.password, + user_metadata=user_metadata, + app_metadata=app_metadata, + ) ) assert response.user.email == credentials.email assert "profile_image" in response.user.user_metadata @@ -60,562 +66,325 @@ def test_create_user_with_user_and_app_metadata() -> None: assert "providers" in response.user.app_metadata -def test_list_users_should_return_registered_users() -> None: +def test_list_users_should_return_registered_users( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - create_new_user_with_email(email=credentials.email) - users = service_role_api_client().list_users() + create_new_user_with_email(service_role_api_client, email=credentials.email) + users = service_role_api_client.list_users() assert users emails = [user.email for user in users] assert emails assert credentials.email in emails -def test_get_user_by_id_should_a_registered_user_given_its_user_identifier() -> None: +def test_get_user_by_id_should_a_registered_user_given_its_user_identifier( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) + user = create_new_user_with_email(service_role_api_client, email=credentials.email) assert user.id - response = service_role_api_client().get_user_by_id(user.id) + response = service_role_api_client.get_user_by_id(user.id) assert response.user.email == credentials.email -def test_modify_email_using_update_user_by_id() -> None: +def test_modify_email_using_update_user_by_id( + 
service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) - response = service_role_api_client().update_user_by_id( + user = create_new_user_with_email(service_role_api_client, email=credentials.email) + response = service_role_api_client.update_user_by_id( user.id, - { - "email": f"new_{user.email}", - }, + AdminUserAttributes( + email=f"new_{user.email}", + ), ) assert response.user.email == f"new_{user.email}" -def test_modify_user_metadata_using_update_user_by_id() -> None: +def test_modify_user_metadata_using_update_user_by_id( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) + user = create_new_user_with_email(service_role_api_client, email=credentials.email) user_metadata = {"favorite_color": "yellow"} - response = service_role_api_client().update_user_by_id( + response = service_role_api_client.update_user_by_id( user.id, - { - "user_metadata": user_metadata, - }, + AdminUserAttributes( + user_metadata=user_metadata, + ), ) assert response.user.email == user.email assert response.user.user_metadata == user_metadata -def test_modify_app_metadata_using_update_user_by_id() -> None: +def test_modify_app_metadata_using_update_user_by_id( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) + user = create_new_user_with_email(service_role_api_client, email=credentials.email) app_metadata = {"roles": ["admin", "publisher"]} - response = service_role_api_client().update_user_by_id( + response = service_role_api_client.update_user_by_id( user.id, - { - "app_metadata": app_metadata, - }, + AdminUserAttributes( + app_metadata=app_metadata, + ), ) assert response.user.email == user.email assert "roles" in response.user.app_metadata -def 
test_modify_confirm_email_using_update_user_by_id() -> None: +def test_modify_confirm_email_using_update_user_by_id( + service_role_api_client: SyncSupabaseAuthAdmin, + client_api_auto_confirm_off_signups_enabled_client: SyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - response = client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + response = client_api_auto_confirm_off_signups_enabled_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) assert response.user assert not response.user.email_confirmed_at - auth_response = service_role_api_client().update_user_by_id( + auth_response = service_role_api_client.update_user_by_id( response.user.id, - { - "email_confirm": True, - }, + AdminUserAttributes( + email_confirm=True, + ), ) assert auth_response.user.email_confirmed_at -def test_invalid_credential_sign_in_with_phone() -> None: - try: - ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_password( - { - "phone": "+123456789", - "password": "strong_pwd", - } - ) - ) - except AuthApiError as e: - assert e.to_dict() - - -def test_invalid_credential_sign_in_with_email() -> None: - try: - ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_password( - { - "email": "unknown_user@unknowndomain.com", - "password": "strong_pwd", - } - ) - ) - except AuthApiError as e: - assert e.to_dict() - - -def test_sign_in_with_otp_email() -> None: - try: - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_otp( - { - "email": "unknown_user@unknowndomain.com", - } - ) - except AuthApiError as e: - assert e.to_dict() - - -def test_sign_in_with_otp_phone() -> None: - try: - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_otp( - { - "phone": "+112345678", - } - ) - except AuthApiError as e: - assert e.to_dict() - - -def test_resend() -> None: - 
client_api_auto_confirm_off_signups_enabled_client().resend( - {"phone": "+112345678", "type": "sms"} +def test_resend( + client_api_auto_confirm_off_signups_enabled_client: SyncSupabaseAuthClient, +) -> None: + client_api_auto_confirm_off_signups_enabled_client.resend( + Resend.phone(phone="+112345678", type="sms") ) -def test_reauthenticate() -> None: - client = auth_client_with_session() - client.reauthenticate() +def test_reauthenticate(auth_client_with_session: SyncSupabaseAuthClient) -> None: + auth_client_with_session.reauthenticate() -def test_refresh_session() -> None: - client = auth_client_with_session() - client.refresh_session() +def test_refresh_session(auth_client_with_session: SyncSupabaseAuthClient) -> None: + auth_client_with_session.refresh_session() -def test_reset_password_for_email() -> None: +def test_reset_password_for_email( + auth_client_with_session: SyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - client = auth_client_with_session() - client.reset_password_email(email=credentials.email) + auth_client_with_session.reset_password_for_email(email=credentials.email) -def test_resend_missing_credentials() -> None: +def test_resend_missing_credentials( + client_api_auto_confirm_off_signups_enabled_client: SyncSupabaseAuthClient, +) -> None: credentials = mock_user_credentials() - client_api_auto_confirm_off_signups_enabled_client().resend( - {"type": "email_change", "email": credentials.email} + client_api_auto_confirm_off_signups_enabled_client.resend( + Resend.email(type="email_change", email=credentials.email) ) -def test_sign_in_anonymously() -> None: - client = auth_client_with_session() - client.sign_in_anonymously() +def test_sign_in_anonymously(auth_client_with_session: SyncSupabaseAuthClient) -> None: + auth_client_with_session.sign_in_anonymously() -def test_delete_user_should_be_able_delete_an_existing_user() -> None: +def test_delete_user_should_be_able_delete_an_existing_user( + service_role_api_client: 
SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) - service_role_api_client().delete_user(user.id) - users = service_role_api_client().list_users() + user = create_new_user_with_email(service_role_api_client, email=credentials.email) + service_role_api_client.delete_user(user.id) + users = service_role_api_client.list_users() emails = [user.email for user in users] assert credentials.email not in emails -def test_generate_link_supports_sign_up_with_generate_confirmation_signup_link() -> ( - None -): +def test_generate_link_supports_sign_up_with_generate_confirmation_signup_link( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" user_metadata = {"status": "alpha"} - response = service_role_api_client().generate_link( - { - "type": "signup", - "email": credentials.email, - "password": credentials.password, - "options": { - "data": user_metadata, - "redirect_to": redirect_to, - }, - }, + response = service_role_api_client.generate_link( + GenerateLinkParams.sign_up( + email=credentials.email, + password=credentials.password, + data=user_metadata, + redirect_to=redirect_to, + ) ) assert response.user.user_metadata == user_metadata -def test_generate_link_supports_updating_emails_with_generate_email_change_links() -> ( - None -): # noqa: E501 +def test_generate_link_supports_updating_emails_with_generate_email_change_links( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: # noqa: E501 credentials = mock_user_credentials() - user = create_new_user_with_email(email=credentials.email) + user = create_new_user_with_email(service_role_api_client, email=credentials.email) assert user.email assert user.email == credentials.email credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" - response = service_role_api_client().generate_link( - { - "type": 
"email_change_current", - "email": user.email, - "new_email": credentials.email, - "options": { - "redirect_to": redirect_to, - }, - }, + response = service_role_api_client.generate_link( + GenerateLinkParams.email_change_current( + email=user.email, + new_email=credentials.email, + redirect_to=redirect_to, + ) ) assert response.user.new_email == credentials.email -def test_invite_user_by_email_creates_a_new_user_with_an_invited_at_timestamp() -> None: +def test_invite_user_by_email_creates_a_new_user_with_an_invited_at_timestamp( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: credentials = mock_user_credentials() redirect_to = "http://localhost:9999/welcome" user_metadata = {"status": "alpha"} - response = service_role_api_client().invite_user_by_email( + response = service_role_api_client.invite_user_by_email( credentials.email, - { - "data": user_metadata, - "redirect_to": redirect_to, - }, + data=user_metadata, + redirect_to=redirect_to, ) assert response.user.invited_at -def test_sign_out_with_an_valid_access_token() -> None: - credentials = mock_user_credentials() - client = auth_client_with_session() - response = client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - }, +def test_sign_in_with_oauth( + client_api_auto_confirm_off_signups_enabled_client: SyncSupabaseAuthClient, +) -> None: + assert client_api_auto_confirm_off_signups_enabled_client.sign_in_with_oauth( + provider="google" ) - assert response.session - service_role_api_client().sign_out(response.session.access_token) - - -def test_sign_out_with_an_invalid_access_token() -> None: - try: - service_role_api_client().sign_out("this-is-a-bad-token") - raise AssertionError() - except AuthError: - pass - - -def test_verify_otp_with_non_existent_phone_number() -> None: - credentials = mock_user_credentials() - otp = mock_verification_otp() - try: - client_api_auto_confirm_disabled_client().verify_otp( - { - "phone": credentials.phone, - "token": otp, - 
"type": "sms", - }, - ) - raise AssertionError() - except AuthError as e: - assert e.message == "Token has expired or is invalid" -def test_verify_otp_with_invalid_phone_number() -> None: +def test_get_item_from_memory_storage(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - otp = mock_verification_otp() - try: - client_api_auto_confirm_disabled_client().verify_otp( - { - "phone": f"{credentials.phone}-invalid", - "token": otp, - "type": "sms", - }, - ) - raise AssertionError() - except AuthError as e: - assert e.message == "Invalid phone number format (E.164 required)" - - -def test_sign_in_with_id_token() -> None: - try: - ( - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_id_token( - { - "provider": "google", - "token": "123456", - } - ) + auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, ) - except AuthApiError as e: - assert e.to_dict() - - -def test_sign_in_with_sso() -> None: - with pytest.raises(AuthApiError, match=r"SAML 2.0 is disabled") as exc: - client_api_auto_confirm_off_signups_enabled_client().sign_in_with_sso( - { - "domain": "google", - } - ) - assert exc.value is not None - - -def test_sign_in_with_oauth() -> None: - assert client_api_auto_confirm_off_signups_enabled_client().sign_in_with_oauth( - { - "provider": "google", - } ) - -def test_link_identity_missing_session() -> None: - with pytest.raises(AuthSessionMissingError) as exc: - client_api_auto_confirm_off_signups_enabled_client().link_identity( - { - "provider": "google", - } + auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, ) - assert exc.value is not None - - -def test_get_item_from_memory_storage() -> None: - credentials = mock_user_credentials() - client = auth_client() - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - client.sign_in_with_password( 
- { - "email": credentials.email, - "password": credentials.password, - } ) - assert client._storage.get_item(client._storage_key) is not None - - -def test_remove_item_from_memory_storage() -> None: - credentials = mock_user_credentials() - client = auth_client() - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - client._storage.remove_item(client._storage_key) - - -def test_list_factors() -> None: - credentials = mock_user_credentials() - client = auth_client() - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - factors = client._list_factors() - assert factors - assert isinstance(factors.totp, list) and isinstance(factors.phone, list) - - -def test_start_auto_refresh_token() -> None: - credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + assert ( + auth_client.session_manager.storage.get_item( + auth_client.session_manager.storage_key + ) + is not None ) -def test_recover_and_refresh() -> None: +def test_recover_and_refresh(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + auth_client.session_manager.auto_refresh_token = True + auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - client.sign_in_with_password( - { - "email": credentials.email, - "password": 
credentials.password, - } + auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - client._recover_and_refresh() - - -def test_get_user_identities() -> None: - credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } - ) - - client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } - ) - assert (client.get_user_identities()).identities[0].identity_data[ + auth_client.session_manager.recover_and_refresh() + assert (auth_client.get_user_identities()).identities[0].identity_data[ "email" ] == credentials.email -def test_update_user() -> None: +def test_update_user(auth_client: SyncSupabaseAuthClient) -> None: credentials = mock_user_credentials() - client = auth_client() - client._auto_refresh_token = True - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + auth_client.session_manager.auto_refresh_token = True + auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - client.update_user({"password": "123e5a"}) - client.sign_in_with_password( - { - "email": credentials.email, - "password": "123e5a", - } + auth_client.update_user(UserAttributes(password="123e5a")) + auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password="123e5a", + ) ) -def test_create_user_with_app_metadata() -> None: +def test_create_user_with_app_metadata( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: app_metadata = mock_app_metadata() credentials = mock_user_credentials() - response = service_role_api_client().create_user( - { - "email": credentials.email, - "password": credentials.password, - "app_metadata": app_metadata, - } + response = service_role_api_client.create_user( + 
AdminUserAttributes( + email=credentials.email, + password=credentials.password, + app_metadata=app_metadata, + ) ) assert response.user.email == credentials.email assert "provider" in response.user.app_metadata assert "providers" in response.user.app_metadata -def test_weak_email_password_error() -> None: - credentials = mock_user_credentials() - try: - client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "email": credentials.email, - "password": "123", - } - ) - except (AuthWeakPasswordError, AuthApiError) as e: - assert e.to_dict() - - -def test_weak_phone_password_error() -> None: - credentials = mock_user_credentials() - try: - client_api_auto_confirm_off_signups_enabled_client().sign_up( - { - "phone": credentials.phone, - "password": "123", - } - ) - except (AuthWeakPasswordError, AuthApiError) as e: - assert e.to_dict() - - -def test_admin_list_factors() -> None: +def test_admin_list_factors( + auth_client: SyncSupabaseAuthClient, service_role_api_client: SyncSupabaseAuthAdmin +) -> None: import pyotp credentials = mock_user_credentials() - client = auth_client() - client.sign_up( - { - "email": credentials.email, - "password": credentials.password, - } + auth_client.sign_up( + SignUpWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) - auth_response = client.sign_in_with_password( - { - "email": credentials.email, - "password": credentials.password, - } + auth_response = auth_client.sign_in_with_password( + SignInWithPassword.email( + email=credentials.email, + password=credentials.password, + ) ) assert auth_response.user - enroll_response = client.mfa.enroll( - { - "factor_type": "totp", - "friendly_name": "test_otp", - } - ) + enroll_response = auth_client.mfa.enroll(MFAEnroll.totp(friendly_name="test_otp")) assert enroll_response.totp totp = pyotp.TOTP(enroll_response.totp.secret) - res = client.mfa.challenge_and_verify( - { - "factor_id": enroll_response.id, - "code": totp.now(), - } - ) - admin_client 
= service_role_api_client() - factors = admin_client.mfa.list_factors( - { - "user_id": res.user.id, - } + res = auth_client.mfa.challenge_and_verify( + factor_id=enroll_response.id, + code=totp.now(), + ) + factors = service_role_api_client.mfa.list_factors( + user_id=res.user.id, ) assert factors[0].friendly_name == "test_otp" assert factors[0].factor_type == "totp" assert factors[0].status == "verified" - admin_client.mfa.delete_factor( - { - "id": factors[0].id, - "user_id": res.user.id, - } + service_role_api_client.mfa.delete_factor( + factor_id=factors[0].id, + user_id=res.user.id, ) - factors = admin_client.mfa.list_factors({"user_id": res.user.id}) + factors = service_role_api_client.mfa.list_factors(user_id=res.user.id) assert len(factors) == 0 -def test_create_oauth_client() -> None: +def test_create_oauth_client(service_role_api_client: SyncSupabaseAuthAdmin) -> None: """Test creating an OAuth client.""" - response = service_role_api_client().oauth.create_client( + response = service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client", redirect_uris=["https://example.com/callback"], @@ -626,25 +395,24 @@ def test_create_oauth_client() -> None: assert response.client.client_id is not None -def test_list_oauth_clients() -> None: +def test_list_oauth_clients(service_role_api_client: SyncSupabaseAuthAdmin) -> None: """Test listing OAuth clients.""" - client = service_role_api_client() - client.oauth.create_client( + service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client", redirect_uris=["https://example.com/callback"], ) ) - response = client.oauth.list_clients() + response = service_role_api_client.oauth.list_clients() assert len(response.clients) > 0 assert any(client.client_name == "Test OAuth Client" for client in response.clients) assert any(client.client_id is not None for client in response.clients) -def test_get_oauth_client() -> None: +def 
test_get_oauth_client(service_role_api_client: SyncSupabaseAuthAdmin) -> None: """Test getting an OAuth client by ID.""" # First create a client - create_response = service_role_api_client().oauth.create_client( + create_response = service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Get", redirect_uris=["https://example.com/callback"], @@ -652,17 +420,16 @@ def test_get_oauth_client() -> None: ) if create_response.client: client_id = create_response.client.client_id - response = service_role_api_client().oauth.get_client(client_id) + response = service_role_api_client.oauth.get_client(client_id) assert response.client is not None assert response.client.client_id == client_id # Server is not yet released, so this test is not yet relevant. -def test_update_oauth_client() -> None: +def test_update_oauth_client(service_role_api_client: SyncSupabaseAuthAdmin) -> None: """Test updating an OAuth client.""" # First create a client - client = service_role_api_client() - create_response = client.oauth.create_client( + create_response = service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Update", redirect_uris=["https://example.com/callback"], @@ -670,7 +437,7 @@ def test_update_oauth_client() -> None: ) assert create_response.client is not None client_id = create_response.client.client_id - response = client.oauth.update_client( + response = service_role_api_client.oauth.update_client( client_id, UpdateOAuthClientParams( client_name="Updated Test OAuth Client", @@ -680,11 +447,10 @@ def test_update_oauth_client() -> None: assert response.client.client_name == "Updated Test OAuth Client" -def test_delete_oauth_client() -> None: +def test_delete_oauth_client(service_role_api_client: SyncSupabaseAuthAdmin) -> None: """Test deleting an OAuth client.""" # First create a client - client = service_role_api_client() - create_response = client.oauth.create_client( + 
create_response = service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Delete", redirect_uris=["https://example.com/callback"], @@ -692,13 +458,15 @@ def test_delete_oauth_client() -> None: ) assert create_response.client is not None client_id = create_response.client.client_id - client.oauth.delete_client(client_id) + service_role_api_client.oauth.delete_client(client_id) -def test_regenerate_oauth_client_secret() -> None: +def test_regenerate_oauth_client_secret( + service_role_api_client: SyncSupabaseAuthAdmin, +) -> None: """Test regenerating an OAuth client secret.""" # First create a client - create_response = service_role_api_client().oauth.create_client( + create_response = service_role_api_client.oauth.create_client( CreateOAuthClientParams( client_name="Test OAuth Client for Regenerate", redirect_uris=["https://example.com/callback"], @@ -706,6 +474,6 @@ def test_regenerate_oauth_client_secret() -> None: ) if create_response.client: client_id = create_response.client.client_id - response = service_role_api_client().oauth.regenerate_client_secret(client_id) + response = service_role_api_client.oauth.regenerate_client_secret(client_id) assert response.client is not None assert response.client.client_secret is not None diff --git a/src/auth/tests/_sync/test_utils.py b/src/auth/tests/_sync/test_utils.py deleted file mode 100644 index 9f237520..00000000 --- a/src/auth/tests/_sync/test_utils.py +++ /dev/null @@ -1,25 +0,0 @@ -from time import time - -from .clients import ( - create_new_user_with_email, - mock_app_metadata, - mock_user_metadata, -) - - -def test_create_new_user_with_email() -> None: - email = f"user+{int(time())}@example.com" - user = create_new_user_with_email(email=email) - assert user.email == email - - -def test_mock_user_metadata() -> None: - user_metadata = mock_user_metadata() - assert user_metadata - assert user_metadata.get("profile_image") - - -def test_mock_app_metadata() -> None: - 
app_metadata = mock_app_metadata() - assert app_metadata - assert app_metadata.get("roles") diff --git a/src/auth/tests/test_helpers.py b/src/auth/tests/test_helpers.py index e8ee6635..36a26a50 100644 --- a/src/auth/tests/test_helpers.py +++ b/src/auth/tests/test_helpers.py @@ -1,104 +1,45 @@ from datetime import datetime -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock -import httpx import pytest -import respx -from httpx import Headers, HTTPStatusError, Response -from pydantic import BaseModel +from supabase_utils.http.headers import Headers +from supabase_utils.http.request import Request, Response +from yarl import URL + from supabase_auth.constants import ( API_VERSION_HEADER_NAME, ) from supabase_auth.errors import ( - AuthApiError, AuthInvalidJwtError, - AuthRetryableError, - AuthUnknownError, - AuthWeakPasswordError, ) from supabase_auth.helpers import ( decode_jwt, generate_pkce_challenge, generate_pkce_verifier, - handle_exception, - model_dump, - model_dump_json, - model_validate, parse_link_identity_response, parse_response_api_version, validate_exp, ) -from ._sync.clients import mock_access_token +from ._sync.conftest import mock_access_token TEST_URL = "http://localhost" -def test_handle_exception_with_api_version_and_error_code() -> None: - err = { - "name": "without API version and error code", - "code": "unexpected_failure", - "ename": "AuthApiError", - } - - with respx.mock: - respx.get(f"{TEST_URL}/hello-world").mock( - return_value=Response(status_code=200), - side_effect=AuthApiError("Error code message", 400, "unexpected_failure"), - ) - with pytest.raises(AuthApiError, match=r"Error code message") as exc: - httpx.get(f"{TEST_URL}/hello-world") - assert exc.value is not None - assert exc.value.message == "Error code message" - assert exc.value.code == err["code"] - assert exc.value.name == err["ename"] - - -def test_handle_exception_without_api_version_and_weak_password_error_code() -> None: - err = { - "name": 
"without API version and weak password error code with payload", - "code": "weak_password", - "ename": "AuthWeakPasswordError", - } - - with respx.mock: - respx.get(f"{TEST_URL}/hello-world").mock( - return_value=Response(status_code=200), - side_effect=AuthWeakPasswordError( - "Error code message", 400, ["characters"] - ), - ) - with pytest.raises(AuthWeakPasswordError, match=r"Error code message") as exc: - httpx.get(f"{TEST_URL}/hello-world") - assert exc.value is not None - assert exc.value.message == "Error code message" - assert exc.value.code == err["code"] - assert exc.value.name == err["ename"] - - -def test_handle_exception_with_api_version_2024_01_01_and_error_code() -> None: - err = { - "name": "with API version 2024-01-01 and error code", - "code": "unexpected_failure", - "ename": "AuthApiError", - } - - with respx.mock: - respx.get(f"{TEST_URL}/hello-world").mock( - return_value=Response(status_code=200), - side_effect=AuthApiError("Error code message", 400, "unexpected_failure"), - ) - with pytest.raises(AuthApiError, match=r"Error code message") as exc: - httpx.get(f"{TEST_URL}/hello-world") - assert exc.value is not None - assert exc.value.message == "Error code message" - assert exc.value.code == err["code"] - assert exc.value.name == err["ename"] +def valid_request() -> Request: + return Request( + url=URL(TEST_URL), + method="GET", + headers=Headers.empty(), + content=None, + ) def test_parse_response_api_version_with_valid_date() -> None: - headers = Headers({API_VERSION_HEADER_NAME: "2024-01-01"}) - response = Response(headers=headers, status_code=200) + headers = Headers.from_mapping({API_VERSION_HEADER_NAME: "2024-01-01"}) + response = Response( + headers=headers, status=200, content=b"", request=valid_request() + ) api_ver = parse_response_api_version(response) assert api_ver assert datetime.timestamp(api_ver) == datetime.timestamp( @@ -109,14 +50,21 @@ def test_parse_response_api_version_with_valid_date() -> None: def 
test_parse_response_api_version_with_invalid_dates() -> None: dates = ["2024-01-32", "", "notadate", "Sat Feb 24 2024 17:59:17 GMT+0100"] for date in dates: - headers = Headers({API_VERSION_HEADER_NAME: date}) - response = Response(headers=headers, status_code=200) + headers = Headers.from_mapping({API_VERSION_HEADER_NAME: date}) + response = Response( + headers=headers, status=200, content=b"", request=valid_request() + ) api_ver = parse_response_api_version(response) assert api_ver is None def test_parse_link_identity_response() -> None: - resp = Response(content=f'{{"url": "{TEST_URL}/hello-world"}}', status_code=200) + resp = Response( + content=f'{{"url": "{TEST_URL}/hello-world"}}'.encode(), + status=200, + headers=Headers.empty(), + request=valid_request(), + ) assert parse_link_identity_response(resp) @@ -150,155 +98,6 @@ def test_parse_response_api_version_invalid_date() -> None: assert result is None -# Test for pydantic v1 compatibility in model_validate -def test_model_validate_pydantic_v1() -> None: - # Mock the behavior of the try block to raise AttributeError - mock_model = MagicMock() - mock_model.model_validate_json.side_effect = AttributeError - mock_model.parse_raw.return_value = "parsed_obj_result" - - # Use the patched model in the actual function - result = model_validate(mock_model, {"test": "data"}) # type: ignore - - # Check that parse_obj was called - mock_model.parse_raw.assert_called_once_with({"test": "data"}) - assert result == "parsed_obj_result" - - -# Test for pydantic v1 compatibility in model_dump -def test_model_dump_pydantic_v1() -> None: - # Create a mock model with necessary behavior - mock_model = MagicMock(spec=BaseModel) - mock_model.model_dump.side_effect = AttributeError - mock_model.dict.return_value = {"test": "data"} - - # Call the function - result = model_dump(mock_model) - - # Check the results - assert result == {"test": "data"} - mock_model.dict.assert_called_once() - - -# Test for pydantic v1 compatibility in 
model_dump_json -def test_model_dump_json_pydantic_v1() -> None: - # Create a mock model with necessary behavior - mock_model = MagicMock(spec=BaseModel) - mock_model.model_dump_json.side_effect = AttributeError - mock_model.json.return_value = '{"test": "data"}' - - # Call the function - result = model_dump_json(mock_model) - - # Check the results - assert result == '{"test": "data"}' - mock_model.json.assert_called_once() - - -def test_handle_exception_network_error() -> None: - # Test case for network errors (502, 503, 504) - mock_response = MagicMock(spec=Response) - mock_response.status_code = 503 - - exception = HTTPStatusError( - "Network error", request=MagicMock(), response=mock_response - ) - result = handle_exception(exception) - - assert isinstance(result, AuthRetryableError) - assert result.status == 503 - - -def test_handle_exception_with_weak_password_attribute() -> None: - # In the implementation there's a logical error in the code: - # It checks if data.get("weak_password") is BOTH a dict AND a list - # This can never be true. Let's just test the error_code path which works. 
- - # Test case with error_code=None, so we take the alternate default path - mock_response = MagicMock(spec=Response) - mock_response.status_code = 400 - mock_response.json.return_value = { - "message": "Invalid request", - "error_description": "Something went wrong", - } - - exception = HTTPStatusError("Error", request=MagicMock(), response=mock_response) - - with patch("supabase_auth.helpers.parse_response_api_version", return_value=None): - result = handle_exception(exception) - - # Will return a normal AuthApiError - assert isinstance(result, AuthApiError) - assert result.message == "Invalid request" - assert result.status == 400 - assert result.code is None - - -def test_handle_exception_weak_password_with_error_code() -> None: - # Test case for weak password identified by error_code - mock_response = MagicMock(spec=Response) - mock_response.status_code = 400 - mock_response.json.return_value = { - "message": "Password too weak", - "error_code": "weak_password", - "weak_password": {"reasons": ["Password too simple"]}, - } - - exception = HTTPStatusError( - "Password error", request=MagicMock(), response=mock_response - ) - - with patch("supabase_auth.helpers.parse_response_api_version", return_value=None): - result = handle_exception(exception) - - assert isinstance(result, AuthWeakPasswordError) - assert result.message == "Password too weak" - assert result.status == 400 - assert result.reasons == ["Password too simple"] - - -def test_handle_exception_with_new_api_version() -> None: - # Test case for new API version with "code" field - mock_response = MagicMock(spec=Response) - mock_response.status_code = 400 - mock_response.json.return_value = { - "message": "Password too weak", - "code": "weak_password", - "weak_password": {"reasons": ["Password too simple"]}, - } - - # Mock datetime for January 2, 2024 (after 2024-01-01 API version) - mock_date = datetime(2024, 1, 2) - - exception = HTTPStatusError( - "Password error", request=MagicMock(), 
response=mock_response - ) - - with patch( - "supabase_auth.helpers.parse_response_api_version", return_value=mock_date - ): - result = handle_exception(exception) - - assert isinstance(result, AuthWeakPasswordError) - assert result.message == "Password too weak" - assert result.status == 400 - - -def test_handle_exception_unknown_error() -> None: - # Test case for when json() raises an exception - mock_response = MagicMock(spec=Response) - mock_response.status_code = 500 - mock_response.json.side_effect = ValueError("Invalid JSON") - - exception = HTTPStatusError( - "Server error", request=MagicMock(), response=mock_response - ) - result = handle_exception(exception) - - assert isinstance(result, AuthUnknownError) - assert "Server error" in result.message - - def test_validate_exp_with_expired_exp() -> None: # Set expiry to 1 hour ago exp = int(datetime.now().timestamp()) - 3600 @@ -329,57 +128,3 @@ def test_is_http_url() -> None: assert is_http_url("example.com") is False # Missing scheme assert is_http_url("") is False assert is_http_url("not a url") is False - - -def test_handle_exception_weak_password_branch() -> None: - """Specifically targeting the unreachable branch in handle_exception with weak_password. - - This test attempts to test the branch where weak_password needs to be both a dict and a list, - which is logically impossible, so we'll test it by mocking the implementation details. 
- """ - import httpx - from supabase_auth.errors import AuthWeakPasswordError - from supabase_auth.helpers import handle_exception - - # Create a proper mock Response with headers - mock_response = MagicMock(spec=httpx.Response) - mock_response.status_code = 400 - mock_response.headers = {} - - # Create a special mock dict that pretends to be both a dict and a list - class WeirdDict(dict): - def __init__(self, *args, **kwargs) -> None: - super().__init__(*args, **kwargs) - self.reasons = ["Password too short"] - - # Mock json response with our special dict - mock_response.json.return_value = { - "message": "Password too weak", - "weak_password": {"reasons": ["Password too short"]}, - } - - # Create a proper HTTPStatusError - exception = httpx.HTTPStatusError( - "Password error", request=MagicMock(spec=httpx.Request), response=mock_response - ) - - # We need to directly target the specific branch handling weak passwords - # First, we need to monkey patch the implementation temporarily to reach our branch - original_isinstance = isinstance - - def patched_isinstance(obj, cls): # noqa - # Make weak_password appear as both dict and list when needed - if obj == mock_response.json()["weak_password"] and cls in (dict, list): - return True - return original_isinstance(obj, cls) - - with ( - patch("supabase_auth.helpers.isinstance", side_effect=patched_isinstance), - patch("supabase_auth.helpers.len", return_value=1), - ): - result = handle_exception(exception) - - # Check if our test coverage reached the AuthWeakPasswordError branch - assert isinstance(result, AuthWeakPasswordError) - assert result.message == "Password too weak" - assert result.status == 400 diff --git a/src/functions/Makefile b/src/functions/Makefile index c466e637..d2b12059 100644 --- a/src/functions/Makefile +++ b/src/functions/Makefile @@ -12,22 +12,10 @@ help:: @echo " pytest -- run pytest on supabase_functions package" mypy: - uv run --package supabase_functions mypy src/supabase_functions tests + uv 
run --package supabase_functions mypy --strict src/supabase_functions tests help:: @echo " mypy -- run mypy on supabase_functions package" -unasync: - uv run --package supabase_functions run-unasync.py -help:: - @echo " unasync -- invoke run-unasync.py helper" - -build-sync: unasync - sed -i '0,/SyncMock, /{s/SyncMock, //}' tests/_sync/test_function_client.py - sed -i 's/SyncMock/Mock/g' tests/_sync/test_function_client.py - sed -i 's/SyncClient/Client/g' src/supabase_functions/_sync/functions_client.py tests/_sync/test_function_client.py -help:: - @echo " build-sync -- generate _sync from _async implementation" - clean: rm -rf htmlcov .pytest_cache .mypy_cache .ruff_cache rm -f .coverage coverage.xml diff --git a/src/functions/pyproject.toml b/src/functions/pyproject.toml index f583851e..7c8ccffd 100644 --- a/src/functions/pyproject.toml +++ b/src/functions/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "supabase_functions" -version = "2.28.3" # {x-release-please-version} +version = "3.0.0a1" # {x-release-please-version} description = "Library for Supabase Functions" authors = [ { name = "Joel Lee", email = "joel@joellee.org" }, @@ -11,10 +11,11 @@ maintainers = [ ] license = "MIT" readme = "README.md" -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "httpx[http2] >=0.26,<0.29", + "pydantic>=2.12.2", "strenum >=0.4.15", + "supabase-utils==3.0.0a1", # x-release-please-version "yarl>=1.20.1", ] @@ -30,9 +31,9 @@ tests = [ "pytest >=7.4.2,<9.0.0", "pytest-cov >=4,<7", "pytest-asyncio >=0.21.1,<1.2.0", + "supabase_utils[all]", ] lints = [ - "unasync>=0.6.0", "ruff >=0.12.1", "python-lsp-server (>=1.12.2,<2.0.0)", "pylsp-mypy (>=0.7.0,<0.8.0)", @@ -43,6 +44,9 @@ dev = [{ include-group = "lints" }, {include-group = "tests" }] [tool.uv] default-groups = [ "dev" ] +[tool.uv.sources] +supabase-utils = { workspace = true } + [tool.pytest.ini_options] asyncio_mode = "auto" addopts = "tests" @@ -50,6 +54,32 @@ filterwarnings = [ 
"ignore::DeprecationWarning", # ignore deprecation warnings globally ] +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + # "B", + # flake8-simplify + # "SIM", + # isort + "I", +] +ignore = ["E712", "E501", "E402", "UP006", "UP035"] + [build-system] requires = ["uv_build>=0.8.3,<0.9.0"] build-backend = "uv_build" + +[tool.mypy] +strict = true +allow_redefinition = true + +no_warn_no_return = true +warn_return_any = true +warn_unused_configs = true +warn_redundant_casts = true diff --git a/src/functions/run-unasync.py b/src/functions/run-unasync.py deleted file mode 100644 index 61709f54..00000000 --- a/src/functions/run-unasync.py +++ /dev/null @@ -1,13 +0,0 @@ -from pathlib import Path - -import unasync - -paths = Path("src/supabase_functions").glob("**/*.py") -tests = Path("tests").glob("**/*.py") - -rules = (unasync._DEFAULT_RULE,) - -files = [str(p) for p in list(paths) + list(tests)] - -if __name__ == "__main__": - unasync.unasync_files(files, rules=rules) diff --git a/src/functions/src/supabase_functions/__init__.py b/src/functions/src/supabase_functions/__init__.py index 7faa9521..e3f578b5 100644 --- a/src/functions/src/supabase_functions/__init__.py +++ b/src/functions/src/supabase_functions/__init__.py @@ -1,9 +1,6 @@ from __future__ import annotations -from typing import Literal, Union, overload - -from ._async.functions_client import AsyncFunctionsClient -from ._sync.functions_client import SyncFunctionsClient +from .client import AsyncFunctionsClient, SyncFunctionsClient, create_client from .utils import FunctionRegion __all__ = [ @@ -12,28 +9,3 @@ "AsyncFunctionsClient", "SyncFunctionsClient", ] - - -@overload -def create_client( - url: str, headers: dict[str, str], *, is_async: Literal[True], verify: bool -) -> AsyncFunctionsClient: ... - - -@overload -def create_client( - url: str, headers: dict[str, str], *, is_async: Literal[False], verify: bool -) -> SyncFunctionsClient: ... 
- - -def create_client( - url: str, - headers: dict[str, str], - *, - is_async: bool, - verify: bool = True, -) -> Union[AsyncFunctionsClient, SyncFunctionsClient]: - if is_async: - return AsyncFunctionsClient(url, headers, verify) - else: - return SyncFunctionsClient(url, headers, verify) diff --git a/src/functions/src/supabase_functions/_async/__init__.py b/src/functions/src/supabase_functions/_async/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/functions/src/supabase_functions/_async/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/functions/src/supabase_functions/_async/functions_client.py b/src/functions/src/supabase_functions/_async/functions_client.py deleted file mode 100644 index f7958deb..00000000 --- a/src/functions/src/supabase_functions/_async/functions_client.py +++ /dev/null @@ -1,175 +0,0 @@ -import platform -import sys -from typing import Any, Dict, Literal, Optional, Union -from warnings import warn - -from httpx import AsyncClient, HTTPError, QueryParams, Response -from yarl import URL - -from ..errors import FunctionsHttpError, FunctionsRelayError -from ..utils import ( - FunctionRegion, - is_http_url, - is_valid_str_arg, -) -from ..version import __version__ - - -class AsyncFunctionsClient: - def __init__( - self, - url: str, - headers: Dict, - timeout: Optional[int] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[AsyncClient] = None, - ) -> None: - if not is_http_url(url): - raise ValueError("url must be a valid HTTP URL string") - self.url = URL(url) - self.headers = { - "X-Client-Info": f"supabase-py/supabase_functions v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python 
versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = int(abs(timeout)) if timeout is not None else 60 - self._client = http_client or AsyncClient( - verify=self.verify, - timeout=self.timeout, - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - async def _request( - self, - method: Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"], - path: list[str], - headers: Optional[Dict[str, str]] = None, - json: Optional[Dict[Any, Any]] = None, - params: Optional[QueryParams] = None, - ) -> Response: - url = self.url.joinpath(*path) - headers = headers or dict() - headers.update(self.headers) - response = ( - await self._client.request( - method, str(url), data=json, headers=headers, params=params - ) - if isinstance(json, str) - else await self._client.request( - method, str(url), json=json, headers=headers, params=params - ) - ) - try: - response.raise_for_status() - except HTTPError as exc: - status_code = None - if hasattr(response, "status_code"): - status_code = response.status_code - - raise FunctionsHttpError( - response.json().get("error") - or f"An error occurred while requesting your edge function at {exc.request.url!r}.", - status_code, - ) from exc - - return response - - def set_auth(self, token: str) -> None: - """Updates the authorization header - - 
Parameters - ---------- - token : str - the new jwt token sent in the authorization header - """ - - self.headers["Authorization"] = f"Bearer {token}" - - async def invoke( - self, function_name: str, invoke_options: Optional[Dict] = None - ) -> Union[Dict, bytes]: - """Invokes a function - - Parameters - ---------- - function_name : the name of the function to invoke - invoke_options : object with the following properties - `headers`: object representing the headers to send with the request - `body`: the body of the request - `responseType`: how the response should be parsed. The default is `json` - """ - if not is_valid_str_arg(function_name): - raise ValueError("function_name must a valid string value.") - headers = self.headers - params = QueryParams() - body = None - response_type = "text/plain" - - if invoke_options is not None: - headers.update(invoke_options.get("headers", {})) - response_type = invoke_options.get("responseType", "text/plain") - - region = invoke_options.get("region") - if region: - if not isinstance(region, FunctionRegion): - warn(f"Use FunctionRegion({region})", stacklevel=2) - region = FunctionRegion(region) - - if region.value != "any": - headers["x-region"] = region.value - # Add region as query parameter - params = params.set("forceFunctionRegion", region.value) - - body = invoke_options.get("body") - if isinstance(body, str): - headers["Content-Type"] = "text/plain" - elif isinstance(body, dict): - headers["Content-Type"] = "application/json" - - response = await self._request( - "POST", [function_name], headers=headers, json=body, params=params - ) - is_relay_error = response.headers.get("x-relay-header") - - if is_relay_error and is_relay_error == "true": - raise FunctionsRelayError(response.json().get("error")) - - if response_type == "json": - data = response.json() - else: - data = response.content - return data diff --git a/src/functions/src/supabase_functions/_sync/__init__.py 
b/src/functions/src/supabase_functions/_sync/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/functions/src/supabase_functions/_sync/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/functions/src/supabase_functions/_sync/functions_client.py b/src/functions/src/supabase_functions/_sync/functions_client.py deleted file mode 100644 index 65268592..00000000 --- a/src/functions/src/supabase_functions/_sync/functions_client.py +++ /dev/null @@ -1,175 +0,0 @@ -import platform -import sys -from typing import Any, Dict, Literal, Optional, Union -from warnings import warn - -from httpx import Client, HTTPError, QueryParams, Response -from yarl import URL - -from ..errors import FunctionsHttpError, FunctionsRelayError -from ..utils import ( - FunctionRegion, - is_http_url, - is_valid_str_arg, -) -from ..version import __version__ - - -class SyncFunctionsClient: - def __init__( - self, - url: str, - headers: Dict, - timeout: Optional[int] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[Client] = None, - ) -> None: - if not is_http_url(url): - raise ValueError("url must be a valid HTTP URL string") - self.url = URL(url) - self.headers = { - "X-Client-Info": f"supabase-py/supabase_functions v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. 
Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = int(abs(timeout)) if timeout is not None else 60 - self._client = http_client or Client( - verify=self.verify, - timeout=self.timeout, - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - def _request( - self, - method: Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"], - path: list[str], - headers: Optional[Dict[str, str]] = None, - json: Optional[Dict[Any, Any]] = None, - params: Optional[QueryParams] = None, - ) -> Response: - url = self.url.joinpath(*path) - headers = headers or dict() - headers.update(self.headers) - response = ( - self._client.request( - method, str(url), data=json, headers=headers, params=params - ) - if isinstance(json, str) - else self._client.request( - method, str(url), json=json, headers=headers, params=params - ) - ) - try: - response.raise_for_status() - except HTTPError as exc: - status_code = None - if hasattr(response, "status_code"): - status_code = response.status_code - - raise FunctionsHttpError( - response.json().get("error") - or f"An error occurred while requesting your edge function at {exc.request.url!r}.", - status_code, - ) from exc - - return response - - def set_auth(self, token: str) -> None: - """Updates the authorization header - - Parameters - ---------- - token : str - the new jwt token sent in the authorization header - """ - - self.headers["Authorization"] = f"Bearer {token}" - - def invoke( - self, function_name: str, invoke_options: Optional[Dict] = None - ) -> Union[Dict, bytes]: - 
"""Invokes a function - - Parameters - ---------- - function_name : the name of the function to invoke - invoke_options : object with the following properties - `headers`: object representing the headers to send with the request - `body`: the body of the request - `responseType`: how the response should be parsed. The default is `json` - """ - if not is_valid_str_arg(function_name): - raise ValueError("function_name must a valid string value.") - headers = self.headers - params = QueryParams() - body = None - response_type = "text/plain" - - if invoke_options is not None: - headers.update(invoke_options.get("headers", {})) - response_type = invoke_options.get("responseType", "text/plain") - - region = invoke_options.get("region") - if region: - if not isinstance(region, FunctionRegion): - warn(f"Use FunctionRegion({region})", stacklevel=2) - region = FunctionRegion(region) - - if region.value != "any": - headers["x-region"] = region.value - # Add region as query parameter - params = params.set("forceFunctionRegion", region.value) - - body = invoke_options.get("body") - if isinstance(body, str): - headers["Content-Type"] = "text/plain" - elif isinstance(body, dict): - headers["Content-Type"] = "application/json" - - response = self._request( - "POST", [function_name], headers=headers, json=body, params=params - ) - is_relay_error = response.headers.get("x-relay-header") - - if is_relay_error and is_relay_error == "true": - raise FunctionsRelayError(response.json().get("error")) - - if response_type == "json": - data = response.json() - else: - data = response.content - return data diff --git a/src/functions/src/supabase_functions/client.py b/src/functions/src/supabase_functions/client.py new file mode 100644 index 00000000..c54be02e --- /dev/null +++ b/src/functions/src/supabase_functions/client.py @@ -0,0 +1,198 @@ +import platform +from typing import Dict, Generic, Literal, overload + +from httpx import AsyncClient, Client +from supabase_utils.http.adapters.httpx 
import AsyncHttpxSession, HttpxSession +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + HttpIO, + HttpMethod, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import ( + BytesRequest, + EmptyRequest, + HTTPRequestMethod, + JSONRequest, + Response, + TextRequest, + ToRequest, +) +from supabase_utils.types import JSON +from yarl import URL + +from .errors import on_error_response +from .utils import ( + FunctionRegion, + is_valid_str_arg, +) +from .version import __version__ + + +class FunctionsClient(Generic[HttpIO]): + def __init__(self, url: URL, headers: Dict[str, str], executor: HttpIO) -> None: + if not (url.scheme == "http" or url.scheme == "https"): + raise ValueError("url must be a valid HTTP URL string") + self.default_headers = Headers.from_mapping( + { + "X-Client-Info": f"supabase-py/supabase_functions v{__version__}", + "X-Supabase-Client-Platform": platform.system(), + "X-Supabase-Client-Platform-Version": platform.release(), + "X-Supabase-Client-Runtime": "python", + "X-Supabase-Client-Runtime-Version": platform.python_version(), + **headers, + } + ) + + self.executor: HttpIO = executor + self.base_url = url + + def set_auth(self, token: str) -> None: + """Updates the authorization header + + Parameters + ---------- + token : str + the new jwt token sent in the authorization header + """ + + self.default_headers = self.default_headers.override( + "Authorization", f"Bearer {token}" + ) + + def _invoke_options_to_request( + self, + function_name: str, + body: bytes | str | Dict[str, JSON] | None, + region: FunctionRegion | None, + headers: Dict[str, str] | None, + method: HTTPRequestMethod, + ) -> ToRequest: + if not is_valid_str_arg(function_name): + raise ValueError("function_name must a valid string value.") + + path = [function_name] + new_headers = Headers.from_mapping(headers) if headers else Headers.empty() + query_params = 
URLQuery.empty() + + if region and region != FunctionRegion.Any: + new_headers = new_headers.set("x-region", region.value) + # Add region as query parameter + query_params = query_params.set("forceFunctionRegion", region.value) + if isinstance(body, str): + return TextRequest( + text=body, + method=method, + path=path, + headers=new_headers, + query=query_params, + ) + elif isinstance(body, dict): + return JSONRequest( + body=body, + method=method, + path=path, + headers=new_headers, + query=query_params, + exclude_none=False, + ) + elif isinstance(body, bytes): + return BytesRequest( + body=body, + method=method, + path=path, + headers=new_headers, + query=query_params, + ) + else: + return EmptyRequest( + method=method, path=path, headers=new_headers, query=query_params + ) + + @handle_http_io + def invoke( + self, + function_name: str, + body: bytes | str | Dict[str, JSON] | None = None, + region: FunctionRegion | None = None, + headers: Dict[str, str] | None = None, + method: HTTPRequestMethod = "POST", + ) -> HttpMethod[Response]: + """Invokes a function + + Parameters + ---------- + function_name : the name of the function to invoke + invoke_options : object with the following properties + `headers`: object representing the headers to send with the request + `body`: the body of the request + `responseType`: how the response should be parsed. 
The default is `json` + """ + response = yield self._invoke_options_to_request( + function_name, body, region, headers, method + ) + if not response.is_success: + raise on_error_response(response) + return response + + +class AsyncFunctionsClient(FunctionsClient[AsyncHttpIO]): + def __init__( + self, + url: str, + headers: Dict[str, str], + http_client: AsyncClient | None = None, + ) -> None: + http_client = http_client or AsyncClient( + follow_redirects=True, + http2=True, + ) + FunctionsClient.__init__( + self, + url=URL(url), + executor=AsyncHttpIO(session=AsyncHttpxSession(client=http_client)), + headers=headers, + ) + + +class SyncFunctionsClient(FunctionsClient[SyncHttpIO]): + def __init__( + self, + url: str, + headers: Dict[str, str], + http_client: Client | None = None, + ) -> None: + http_client = http_client or Client( + follow_redirects=True, + http2=True, + ) + FunctionsClient.__init__( + self, + url=URL(url), + executor=SyncHttpIO(session=HttpxSession(client=http_client)), + headers=headers, + ) + + +@overload +def create_client( + url: str, headers: dict[str, str], *, is_async: Literal[True] +) -> AsyncFunctionsClient: ... + + +@overload +def create_client( + url: str, headers: dict[str, str], *, is_async: Literal[False] +) -> SyncFunctionsClient: ... 
+ + +def create_client( + url: str, headers: dict[str, str], *, is_async: bool +) -> AsyncFunctionsClient | SyncFunctionsClient: + if is_async: + return AsyncFunctionsClient(url, headers) + else: + return SyncFunctionsClient(url, headers) diff --git a/src/functions/src/supabase_functions/errors.py b/src/functions/src/supabase_functions/errors.py index 0529e344..bfc29251 100644 --- a/src/functions/src/supabase_functions/errors.py +++ b/src/functions/src/supabase_functions/errors.py @@ -2,6 +2,8 @@ from typing import TypedDict +from supabase_utils.http.request import Response + class FunctionsApiErrorDict(TypedDict): name: str @@ -33,6 +35,15 @@ def __init__(self, message: str, code: int | None = None) -> None: ) +def on_error_response(response: Response) -> FunctionsHttpError | FunctionsRelayError: + is_relay_error = response.headers.get("x-relay-header") + if is_relay_error == "true": + return FunctionsRelayError( + response.content.decode("utf-8"), code=response.status + ) + return FunctionsHttpError(response.content.decode("utf-8"), response.status) + + class FunctionsRelayError(FunctionsError): """Base exception for relay errors.""" diff --git a/src/functions/src/supabase_functions/utils.py b/src/functions/src/supabase_functions/utils.py index a2a2fdf9..a6b18272 100644 --- a/src/functions/src/supabase_functions/utils.py +++ b/src/functions/src/supabase_functions/utils.py @@ -1,5 +1,4 @@ import sys -from urllib.parse import urlparse from httpx import AsyncClient as AsyncClient # noqa: F401 @@ -10,7 +9,6 @@ DEFAULT_FUNCTION_CLIENT_TIMEOUT = 5 -BASE64URL_REGEX = r"^([a-z0-9_-]{4})*($|[a-z0-9_-]{3}$|[a-z0-9_-]{2}$)$" class FunctionRegion(StrEnum): @@ -33,7 +31,3 @@ class FunctionRegion(StrEnum): def is_valid_str_arg(target: str) -> bool: return isinstance(target, str) and len(target.strip()) > 0 - - -def is_http_url(url: str) -> bool: - return urlparse(url).scheme in {"https", "http"} diff --git a/src/functions/tests/_async/test_function_client.py 
b/src/functions/tests/_async/test_function_client.py index c991aed2..4466c71e 100644 --- a/src/functions/tests/_async/test_function_client.py +++ b/src/functions/tests/_async/test_function_client.py @@ -1,230 +1,128 @@ -from typing import Dict -from unittest.mock import AsyncMock, Mock, patch +from types import TracebackType import pytest -from httpx import AsyncClient, HTTPError, Response, Timeout +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import AsyncHttpIO # Import the class to test -from supabase_functions import AsyncFunctionsClient +from supabase_utils.http.request import Request, Response +from yarl import URL + +from supabase_functions.client import AsyncFunctionsClient, FunctionsClient from supabase_functions.errors import FunctionsHttpError, FunctionsRelayError from supabase_functions.utils import FunctionRegion from supabase_functions.version import __version__ -@pytest.fixture -def valid_url() -> str: - return "https://example.com" - - -@pytest.fixture -def default_headers() -> Dict[str, str]: - return {"Authorization": "Bearer valid.jwt.token"} - - -@pytest.fixture -def client(valid_url: str, default_headers: Dict[str, str]) -> AsyncFunctionsClient: - return AsyncFunctionsClient( - url=valid_url, headers=default_headers, timeout=10, verify=True +def client_returning( + content: bytes, status: int, headers: Headers | None = None +) -> FunctionsClient[AsyncHttpIO]: + class MockHttpClient: + async def send(self, request: Request) -> Response: + return Response( + headers=headers or Headers.empty(), + status=status, + content=content, + request=request, + ) + + async def __aenter__(self) -> "MockHttpClient": + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + pass + + return FunctionsClient( + url=URL("https://supabase.com"), + headers={}, + executor=AsyncHttpIO(session=MockHttpClient()), ) -async def 
test_init_with_valid_params( - valid_url: str, default_headers: Dict[str, str] -) -> None: - client = AsyncFunctionsClient( - url=valid_url, headers=default_headers, timeout=10, verify=True - ) - assert str(client.url) == valid_url - assert "X-Client-Info" in client.headers +async def test_init_with_valid_params() -> None: + valid_url = "https://supabase.com" + client = AsyncFunctionsClient(url=valid_url, headers={}) + assert str(client.base_url) == valid_url + assert "X-Client-Info" in client.default_headers assert ( - client.headers["X-Client-Info"] + client.default_headers["X-Client-Info"] == f"supabase-py/supabase_functions v{__version__}" ) - assert client._client.timeout == Timeout(10) -@pytest.mark.parametrize("invalid_url", ["not-a-url", "ftp://invalid.com", "", None]) -def test_init_with_invalid_url( - invalid_url: str, default_headers: Dict[str, str] -) -> None: - with pytest.raises(ValueError, match="url must be a valid HTTP URL string"): - AsyncFunctionsClient(url=invalid_url, headers=default_headers, timeout=10) +@pytest.mark.parametrize("invalid_url", ["not-a-url", "ftp://invalid.com", ""]) +def test_init_with_invalid_url(invalid_url: str) -> None: + with pytest.raises(Exception, match="url must be a valid HTTP URL string"): + AsyncFunctionsClient(url=invalid_url, headers={}) -async def test_set_auth_valid_token(client: AsyncFunctionsClient) -> None: +async def test_set_auth_valid_token() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) valid_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U" + assert client.default_headers.get("Authorization") is None + client.set_auth("") + assert ( + client.default_headers["Authorization"] == "Bearer " + ) # just to ensure that authorization field is non-empty beforehand client.set_auth(valid_token) - assert client.headers["Authorization"] == f"Bearer {valid_token}" - - -async def test_invoke_success_json(client: 
AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - result = await client.invoke( - "test-function", {"responseType": "json", "body": {"test": "data"}} - ) - - assert result == {"message": "success"} - mock_request.assert_called_once() - _, kwargs = mock_request.call_args - assert kwargs["json"] == {"test": "data"} - - -async def test_invoke_success_binary(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.content = b"binary content" - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - result = await client.invoke("test-function") - - assert result == b"binary content" - mock_request.assert_called_once() - - -async def test_invoke_with_region(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response + assert client.default_headers["Authorization"] == f"Bearer {valid_token}" - await client.invoke("test-function", {"region": FunctionRegion("us-east-1")}) - args, kwargs = mock_request.call_args - # Check that x-region header is present - assert kwargs["headers"]["x-region"] == "us-east-1" - # Check that the URL contains the forceFunctionRegion query parameter - assert kwargs["params"]["forceFunctionRegion"] == "us-east-1" +async def test_invoke_success_json() -> None: + client = 
client_returning(content=b'{"message": "success"}', status=200) + response = await client.invoke("test-function", body={"test": "data"}) + assert response.content == b'{"message": "success"}' + assert response.request.headers["Content-Type"] == "application/json" -async def test_invoke_with_region_string(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} +async def test_invoke_success_binary() -> None: + client = client_returning(content=b"binary content", status=200) + response = await client.invoke("test-function") + assert response.content == b"binary content" - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - with pytest.warns(UserWarning, match=r"Use FunctionRegion\(us-east-1\)"): - await client.invoke("test-function", {"region": "us-east-1"}) +async def test_invoke_with_region() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = await client.invoke("test-function", region=FunctionRegion.UsEast1) + assert response.request.headers["x-region"] == "us-east-1" + # Check that the URL contains the forceFunctionRegion query parameter + assert response.request.url.query["forceFunctionRegion"] == "us-east-1" - args, kwargs = mock_request.call_args - # Check that x-region header is present - assert kwargs["headers"]["x-region"] == "us-east-1" - # Check that the URL contains the forceFunctionRegion query parameter - assert kwargs["params"]["forceFunctionRegion"] == "us-east-1" +async def test_invoke_with_http_error() -> None: + client = client_returning(content=b'{"error": "Custom error message"}', status=400) + with pytest.raises(FunctionsHttpError): + await client.invoke("test-function") -async def test_invoke_with_http_error(client: AsyncFunctionsClient) -> None: - mock_response = 
Mock(spec=Response) - mock_response.json.return_value = {"error": "Custom error message"} - mock_response.raise_for_status.side_effect = HTTPError("HTTP Error") - mock_response.headers = {} - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(FunctionsHttpError, match="Custom error message"): - await client.invoke("test-function") - - -async def test_invoke_with_relay_error(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"error": "Relay error message"} - mock_response.raise_for_status = Mock() - mock_response.headers = {"x-relay-header": "true"} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(FunctionsRelayError, match="Relay error message"): - await client.invoke("test-function") +async def test_invoke_with_relay_error() -> None: + client = client_returning( + content=b'{"error": "Relay error message"}', + status=400, + headers=Headers.from_mapping({"x-relay-header": "true"}), + ) + with pytest.raises(FunctionsRelayError): + await client.invoke("test-function") -async def test_invoke_invalid_function_name(client: AsyncFunctionsClient) -> None: +async def test_invoke_invalid_function_name() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) with pytest.raises(ValueError, match="function_name must a valid string value."): await client.invoke("") -async def test_invoke_with_string_body(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - await 
client.invoke("test-function", {"body": "string data"}) - - _, kwargs = mock_request.call_args - assert kwargs["headers"]["Content-Type"] == "text/plain" - - -async def test_invoke_with_json_body(client: AsyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object( - client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - await client.invoke("test-function", {"body": {"key": "value"}}) - - _, kwargs = mock_request.call_args - assert kwargs["headers"]["Content-Type"] == "application/json" - - -async def test_init_with_httpx_client() -> None: - # Create a custom httpx client with specific options - headers = {"x-user-agent": "my-app/0.0.1"} - custom_client = AsyncClient( - timeout=Timeout(30), follow_redirects=True, max_redirects=5, headers=headers - ) - - # Initialize the functions client with the custom httpx client - client = AsyncFunctionsClient( - url="https://example.com", - headers={"Authorization": "Bearer token"}, - timeout=10, - http_client=custom_client, - ) +async def test_invoke_with_string_body() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = await client.invoke("test-function", body="string data") + assert response.request.headers["Content-Type"] == "text/plain; charset=utf-8" - # Verify the custom client options are preserved - assert client._client.timeout == Timeout(30) - assert client._client.follow_redirects is True - assert client._client.max_redirects == 5 - assert client._client.headers.get("x-user-agent") == "my-app/0.0.1" - # Verify the client is properly configured with our custom client - assert client._client is custom_client +async def test_invoke_with_json_body() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = await 
client.invoke("test-function", body={"key": "value"}) + assert response.request.headers["Content-Type"] == "application/json" diff --git a/src/functions/tests/_sync/test_function_client.py b/src/functions/tests/_sync/test_function_client.py index 469f7d36..4a3bdc30 100644 --- a/src/functions/tests/_sync/test_function_client.py +++ b/src/functions/tests/_sync/test_function_client.py @@ -1,214 +1,128 @@ -from typing import Dict -from unittest.mock import Mock, patch +from types import TracebackType import pytest -from httpx import Client, HTTPError, Response, Timeout +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import SyncHttpIO # Import the class to test -from supabase_functions import SyncFunctionsClient +from supabase_utils.http.request import Request, Response +from yarl import URL + +from supabase_functions.client import AsyncFunctionsClient, FunctionsClient from supabase_functions.errors import FunctionsHttpError, FunctionsRelayError from supabase_functions.utils import FunctionRegion from supabase_functions.version import __version__ -@pytest.fixture -def valid_url() -> str: - return "https://example.com" - - -@pytest.fixture -def default_headers() -> Dict[str, str]: - return {"Authorization": "Bearer valid.jwt.token"} - - -@pytest.fixture -def client(valid_url: str, default_headers: Dict[str, str]) -> SyncFunctionsClient: - return SyncFunctionsClient( - url=valid_url, headers=default_headers, timeout=10, verify=True +def client_returning( + content: bytes, status: int, headers: Headers | None = None +) -> FunctionsClient[SyncHttpIO]: + class MockHttpClient: + def send(self, request: Request) -> Response: + return Response( + headers=headers or Headers.empty(), + status=status, + content=content, + request=request, + ) + + def __enter__(self) -> "MockHttpClient": + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + pass + + return 
FunctionsClient( + url=URL("https://supabase.com"), + headers={}, + executor=SyncHttpIO(session=MockHttpClient()), ) -def test_init_with_valid_params( - valid_url: str, default_headers: Dict[str, str] -) -> None: - client = SyncFunctionsClient( - url=valid_url, headers=default_headers, timeout=10, verify=True - ) - assert str(client.url) == valid_url - assert "X-Client-Info" in client.headers +def test_init_with_valid_params() -> None: + valid_url = "https://supabase.com" + client = AsyncFunctionsClient(url=valid_url, headers={}) + assert str(client.base_url) == valid_url + assert "X-Client-Info" in client.default_headers assert ( - client.headers["X-Client-Info"] + client.default_headers["X-Client-Info"] == f"supabase-py/supabase_functions v{__version__}" ) - assert client._client.timeout == Timeout(10) -@pytest.mark.parametrize("invalid_url", ["not-a-url", "ftp://invalid.com", "", None]) -def test_init_with_invalid_url( - invalid_url: str, default_headers: Dict[str, str] -) -> None: - with pytest.raises(ValueError, match="url must be a valid HTTP URL string"): - SyncFunctionsClient(url=invalid_url, headers=default_headers, timeout=10) +@pytest.mark.parametrize("invalid_url", ["not-a-url", "ftp://invalid.com", ""]) +def test_init_with_invalid_url(invalid_url: str) -> None: + with pytest.raises(Exception, match="url must be a valid HTTP URL string"): + AsyncFunctionsClient(url=invalid_url, headers={}) -def test_set_auth_valid_token(client: SyncFunctionsClient) -> None: +def test_set_auth_valid_token() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) valid_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U" + assert client.default_headers.get("Authorization") is None + client.set_auth("") + assert ( + client.default_headers["Authorization"] == "Bearer " + ) # just to ensure that authorization field is non-empty beforehand client.set_auth(valid_token) - assert 
client.headers["Authorization"] == f"Bearer {valid_token}" - - -def test_invoke_success_json(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - result = client.invoke( - "test-function", {"responseType": "json", "body": {"test": "data"}} - ) - - assert result == {"message": "success"} - mock_request.assert_called_once() - _, kwargs = mock_request.call_args - assert kwargs["json"] == {"test": "data"} - - -def test_invoke_success_binary(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.content = b"binary content" - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - result = client.invoke("test-function") - - assert result == b"binary content" - mock_request.assert_called_once() - - -def test_invoke_with_region(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response + assert client.default_headers["Authorization"] == f"Bearer {valid_token}" - client.invoke("test-function", {"region": FunctionRegion("us-east-1")}) - args, kwargs = mock_request.call_args - # Check that x-region header is present - assert kwargs["headers"]["x-region"] == "us-east-1" - # Check that the URL contains the forceFunctionRegion query parameter - assert kwargs["params"]["forceFunctionRegion"] == "us-east-1" +def test_invoke_success_json() -> None: + 
client = client_returning(content=b'{"message": "success"}', status=200) + response = client.invoke("test-function", body={"test": "data"}) + assert response.content == b'{"message": "success"}' + assert response.request.headers["Content-Type"] == "application/json" -def test_invoke_with_region_string(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} +def test_invoke_success_binary() -> None: + client = client_returning(content=b"binary content", status=200) + response = client.invoke("test-function") + assert response.content == b"binary content" - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - with pytest.warns(UserWarning, match=r"Use FunctionRegion\(us-east-1\)"): - client.invoke("test-function", {"region": "us-east-1"}) +def test_invoke_with_region() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = client.invoke("test-function", region=FunctionRegion.UsEast1) + assert response.request.headers["x-region"] == "us-east-1" + # Check that the URL contains the forceFunctionRegion query parameter + assert response.request.url.query["forceFunctionRegion"] == "us-east-1" - args, kwargs = mock_request.call_args - # Check that x-region header is present - assert kwargs["headers"]["x-region"] == "us-east-1" - # Check that the URL contains the forceFunctionRegion query parameter - assert kwargs["params"]["forceFunctionRegion"] == "us-east-1" +def test_invoke_with_http_error() -> None: + client = client_returning(content=b'{"error": "Custom error message"}', status=400) + with pytest.raises(FunctionsHttpError): + client.invoke("test-function") -def test_invoke_with_http_error(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"error": 
"Custom error message"} - mock_response.raise_for_status.side_effect = HTTPError("HTTP Error") - mock_response.headers = {} - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(FunctionsHttpError, match="Custom error message"): - client.invoke("test-function") - - -def test_invoke_with_relay_error(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"error": "Relay error message"} - mock_response.raise_for_status = Mock() - mock_response.headers = {"x-relay-header": "true"} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(FunctionsRelayError, match="Relay error message"): - client.invoke("test-function") +def test_invoke_with_relay_error() -> None: + client = client_returning( + content=b'{"error": "Relay error message"}', + status=400, + headers=Headers.from_mapping({"x-relay-header": "true"}), + ) + with pytest.raises(FunctionsRelayError): + client.invoke("test-function") -def test_invoke_invalid_function_name(client: SyncFunctionsClient) -> None: +def test_invoke_invalid_function_name() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) with pytest.raises(ValueError, match="function_name must a valid string value."): client.invoke("") -def test_invoke_with_string_body(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - client.invoke("test-function", {"body": "string data"}) - - _, kwargs = mock_request.call_args - assert kwargs["headers"]["Content-Type"] == "text/plain" - - -def 
test_invoke_with_json_body(client: SyncFunctionsClient) -> None: - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"message": "success"} - mock_response.raise_for_status = Mock() - mock_response.headers = {} - - with patch.object(client._client, "request", new_callable=Mock) as mock_request: - mock_request.return_value = mock_response - - client.invoke("test-function", {"body": {"key": "value"}}) - - _, kwargs = mock_request.call_args - assert kwargs["headers"]["Content-Type"] == "application/json" - - -def test_init_with_httpx_client() -> None: - # Create a custom httpx client with specific options - headers = {"x-user-agent": "my-app/0.0.1"} - custom_client = Client( - timeout=Timeout(30), follow_redirects=True, max_redirects=5, headers=headers - ) - - # Initialize the functions client with the custom httpx client - client = SyncFunctionsClient( - url="https://example.com", - headers={"Authorization": "Bearer token"}, - timeout=10, - http_client=custom_client, - ) +def test_invoke_with_string_body() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = client.invoke("test-function", body="string data") + assert response.request.headers["Content-Type"] == "text/plain; charset=utf-8" - # Verify the custom client options are preserved - assert client._client.timeout == Timeout(30) - assert client._client.follow_redirects is True - assert client._client.max_redirects == 5 - assert client._client.headers.get("x-user-agent") == "my-app/0.0.1" - # Verify the client is properly configured with our custom client - assert client._client is custom_client +def test_invoke_with_json_body() -> None: + client = client_returning(content=b'{"message": "success"}', status=200) + response = client.invoke("test-function", body={"key": "value"}) + assert response.request.headers["Content-Type"] == "application/json" diff --git a/src/functions/tests/test_client.py b/src/functions/tests/test_client.py index 
80a200e4..de590b4a 100644 --- a/src/functions/tests/test_client.py +++ b/src/functions/tests/test_client.py @@ -1,6 +1,7 @@ from typing import Dict import pytest + from supabase_functions import AsyncFunctionsClient, SyncFunctionsClient, create_client @@ -16,33 +17,31 @@ def valid_headers() -> Dict[str, str]: def test_create_async_client(valid_url: str, valid_headers: Dict[str, str]) -> None: # Test creating async client with explicit verify=True - client = create_client( - url=valid_url, headers=valid_headers, is_async=True, verify=True - ) + client = create_client(url=valid_url, headers=valid_headers, is_async=True) assert isinstance(client, AsyncFunctionsClient) - assert str(client.url) == valid_url - assert all(client.headers[key] == value for key, value in valid_headers.items()) + assert str(client.base_url) == valid_url + assert all( + client.default_headers[key] == value for key, value in valid_headers.items() + ) def test_create_sync_client(valid_url: str, valid_headers: Dict[str, str]) -> None: - # Test creating sync client with explicit verify=True - client = create_client( - url=valid_url, headers=valid_headers, is_async=False, verify=True - ) + client = create_client(url=valid_url, headers=valid_headers, is_async=False) assert isinstance(client, SyncFunctionsClient) - assert str(client.url) == valid_url - assert all(client.headers[key] == value for key, value in valid_headers.items()) + assert str(client.base_url) == valid_url + assert all( + client.default_headers[key] == value for key, value in valid_headers.items() + ) def test_type_hints() -> None: - from typing import Union, get_type_hints + from typing import get_type_hints hints = get_type_hints(create_client) assert hints["url"] is str assert hints["headers"] == dict[str, str] assert hints["is_async"] is bool - assert hints["verify"] is bool - assert hints["return"] == Union[AsyncFunctionsClient, SyncFunctionsClient] + assert hints["return"] == AsyncFunctionsClient | SyncFunctionsClient diff 
--git a/src/functions/tests/test_errors.py b/src/functions/tests/test_errors.py index 11eca4e0..3d8cfcc2 100644 --- a/src/functions/tests/test_errors.py +++ b/src/functions/tests/test_errors.py @@ -1,6 +1,7 @@ from typing import Type import pytest + from supabase_functions.errors import ( FunctionsApiErrorDict, FunctionsError, diff --git a/src/functions/tests/test_utils.py b/src/functions/tests/test_utils.py index cbaaa736..c83fc203 100644 --- a/src/functions/tests/test_utils.py +++ b/src/functions/tests/test_utils.py @@ -2,10 +2,9 @@ from typing import Any import pytest + from supabase_functions.utils import ( - BASE64URL_REGEX, FunctionRegion, - is_http_url, is_valid_str_arg, ) @@ -45,61 +44,10 @@ def test_is_valid_str_arg(test_input: Any, expected: bool) -> None: assert is_valid_str_arg(test_input) == expected -@pytest.mark.parametrize( - "test_input,expected", - [ - ("https://example.com", True), - ("http://localhost", True), - ("http://127.0.0.1:8000", True), - ("https://api.supabase.com", True), - ("ftp://example.com", False), - ("ws://example.com", False), - ("not-a-url", False), - ("", False), - ], -) -def test_is_http_url(test_input: str, expected: bool) -> None: - assert is_http_url(test_input) == expected - - -def test_base64url_regex() -> None: - import re - - pattern = re.compile(BASE64URL_REGEX, re.IGNORECASE) - - # Valid base64url strings - assert pattern.match("abcd") - assert pattern.match("1234") - assert pattern.match("abc") - assert pattern.match("12") - assert pattern.match("ab") - assert pattern.match("ABCD") - assert pattern.match("ABC") - assert pattern.match("AB") - assert pattern.match("a-b_") - - # Invalid base64url strings - assert not pattern.match("a") # too short - assert not pattern.match("abcde") # invalid length - assert not pattern.match("abc!") # invalid character - assert not pattern.match("abc@") # invalid character - - -@pytest.mark.skipif( - sys.version_info < (3, 11), - reason="StrEnum import test only relevant for Python 
3.11+", -) def test_strenum_import_python_311_plus() -> None: - from enum import StrEnum as BuiltinStrEnum # type: ignore - - assert isinstance(FunctionRegion.Any, BuiltinStrEnum) - - -@pytest.mark.skipif( - sys.version_info >= (3, 11), - reason="strenum import test only relevant for Python < 3.11", -) -def test_strenum_import_python_310_and_below() -> None: - from strenum import StrEnum as ExternalStrEnum + if sys.version_info >= (3, 11): + from enum import StrEnum as StrEnum + else: + from strenum import StrEnum - assert isinstance(FunctionRegion.Any, ExternalStrEnum) + assert isinstance(FunctionRegion.Any, StrEnum) diff --git a/src/postgrest/pyproject.toml b/src/postgrest/pyproject.toml index 178d059c..d5fa6126 100644 --- a/src/postgrest/pyproject.toml +++ b/src/postgrest/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "postgrest" -version = "2.28.3" # {x-release-please-version} +version = "3.0.0a1" # {x-release-please-version} description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
authors = [ { name = "Lương Quang Mạnh", email = "luongquangmanh85@gmail.com"}, @@ -19,13 +19,13 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent" ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "httpx[http2] >=0.26,<0.29", "deprecation >=2.1.0", "pydantic >=1.9,<3.0", "strenum >=0.4.9; python_version < \"3.11\"", "yarl>=1.20.1", + "supabase_utils==3.0.0a1", # x-release-please-version ] [project.urls] @@ -41,6 +41,7 @@ test = [ "pytest-depends >=1.0.1", "pytest-asyncio >=1.0.0", "unasync >= 0.6.0", + "supabase_utils[all]", ] lints = [ "ruff >=0.12.1", @@ -72,8 +73,7 @@ select = [ # isort "I", ] -ignore = ["F401", "F403", "F841", "E712", "E501", "E402", "UP006", "UP035"] -# isort.required-imports = ["from __future__ import annotations"] +ignore = ["E712", "E501", "E402", "UP006", "UP035"] [tool.ruff.lint.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. diff --git a/src/postgrest/src/postgrest/__init__.py b/src/postgrest/src/postgrest/__init__.py index edb87f2e..e7c81803 100644 --- a/src/postgrest/src/postgrest/__init__.py +++ b/src/postgrest/src/postgrest/__init__.py @@ -2,29 +2,9 @@ from httpx import Timeout -from ._async.client import AsyncPostgrestClient -from ._async.request_builder import ( - AsyncFilterRequestBuilder, - AsyncMaybeSingleRequestBuilder, - AsyncQueryRequestBuilder, - AsyncRequestBuilder, - AsyncRPCFilterRequestBuilder, - AsyncSelectRequestBuilder, - AsyncSingleRequestBuilder, -) -from ._sync.client import SyncPostgrestClient -from ._sync.request_builder import ( - SyncFilterRequestBuilder, - SyncMaybeSingleRequestBuilder, - SyncQueryRequestBuilder, - SyncRequestBuilder, - SyncRPCFilterRequestBuilder, - SyncSelectRequestBuilder, - SyncSingleRequestBuilder, -) -from .base_request_builder import APIResponse -from .constants import DEFAULT_POSTGREST_CLIENT_HEADERS +from .client import AsyncPostgrestClient, SyncPostgrestClient from 
.exceptions import APIError +from .request_builder import APIResponse from .types import ( CountMethod, Filters, @@ -35,23 +15,8 @@ __all__ = [ "AsyncPostgrestClient", - "AsyncFilterRequestBuilder", - "AsyncQueryRequestBuilder", - "AsyncRequestBuilder", - "AsyncRPCFilterRequestBuilder", - "AsyncSelectRequestBuilder", - "AsyncSingleRequestBuilder", - "AsyncMaybeSingleRequestBuilder", "SyncPostgrestClient", - "SyncFilterRequestBuilder", - "SyncMaybeSingleRequestBuilder", - "SyncQueryRequestBuilder", - "SyncRequestBuilder", - "SyncRPCFilterRequestBuilder", - "SyncSelectRequestBuilder", - "SyncSingleRequestBuilder", "APIResponse", - "DEFAULT_POSTGREST_CLIENT_HEADERS", "APIError", "CountMethod", "Filters", diff --git a/src/postgrest/src/postgrest/_async/__init__.py b/src/postgrest/src/postgrest/_async/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/postgrest/src/postgrest/_async/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/postgrest/src/postgrest/_async/client.py b/src/postgrest/src/postgrest/_async/client.py deleted file mode 100644 index 00962e0b..00000000 --- a/src/postgrest/src/postgrest/_async/client.py +++ /dev/null @@ -1,195 +0,0 @@ -from __future__ import annotations - -import platform -import sys -from typing import Any, Dict, Optional, Union, cast -from warnings import warn - -from deprecation import deprecated -from httpx import AsyncClient, Headers, QueryParams, Timeout -from yarl import URL - -from ..base_client import BasePostgrestClient -from ..constants import ( - DEFAULT_POSTGREST_CLIENT_HEADERS, - DEFAULT_POSTGREST_CLIENT_TIMEOUT, -) -from ..types import CountMethod -from ..version import __version__ -from .request_builder import ( - AsyncRequestBuilder, - AsyncRPCFilterRequestBuilder, - RequestConfig, -) - - -class AsyncPostgrestClient(BasePostgrestClient): - """PostgREST client.""" - - def __init__( - self, - base_url: str, - *, - schema: str = "public", - headers: Dict[str, str] 
= DEFAULT_POSTGREST_CLIENT_HEADERS, - timeout: Union[int, float, Timeout, None] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[AsyncClient] = None, - ) -> None: - headers = { - "X-Client-Info": f"supabase-py/postgrest-py v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. 
Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = ( - timeout - if isinstance(timeout, Timeout) - else ( - int(abs(timeout)) - if timeout is not None - else DEFAULT_POSTGREST_CLIENT_TIMEOUT - ) - ) - BasePostgrestClient.__init__( - self, - URL(base_url), - schema=schema, - headers=headers, - timeout=self.timeout, - verify=self.verify, - proxy=proxy, - ) - - self.session = http_client or AsyncClient( - base_url=base_url, - headers=self.headers, - timeout=timeout, - verify=self.verify, - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - def schema(self, schema: str) -> AsyncPostgrestClient: - """Switch to another schema.""" - return AsyncPostgrestClient( - base_url=str(self.base_url), - schema=schema, - headers=dict(self.headers), - timeout=self.timeout, - verify=self.verify, - proxy=self.proxy, - ) - - async def __aenter__(self) -> AsyncPostgrestClient: - return self - - async def __aexit__(self, exc_type, exc, tb) -> None: - await self.aclose() - - async def aclose(self) -> None: - """Close the underlying HTTP connections.""" - await self.session.aclose() - - def from_(self, table: str) -> AsyncRequestBuilder: - """Perform a table operation. 
- - Args: - table: The name of the table - Returns: - :class:`AsyncRequestBuilder` - """ - return AsyncRequestBuilder( - self.session, self.base_url.joinpath(table), self.headers, self.basic_auth - ) - - def table(self, table: str) -> AsyncRequestBuilder: - """Alias to :meth:`from_`.""" - return self.from_(table) - - @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead") - def from_table(self, table: str) -> AsyncRequestBuilder: - """Alias to :meth:`from_`.""" - return self.from_(table) - - def rpc( - self, - func: str, - params: dict[str, str], - count: Optional[CountMethod] = None, - head: bool = False, - get: bool = False, - ) -> AsyncRPCFilterRequestBuilder: - """Perform a stored procedure call. - - Args: - func: The name of the remote procedure to run. - params: The parameters to be passed to the remote procedure. - count: The method to use to get the count of rows returned. - head: When set to `true`, `data` will not be returned. Useful if you only need the count. - get: When set to `true`, the function will be called with read-only access mode. - Returns: - :class:`AsyncRPCFilterRequestBuilder` - Example: - .. code-block:: python - - await client.rpc("foobar", {"arg": "value"}).execute() - - .. versionchanged:: 0.10.9 - This method now returns a :class:`AsyncRPCFilterRequestBuilder`. - .. versionchanged:: 0.10.2 - This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to - filter on the RPC's resultset. - """ - method = "HEAD" if head else "GET" if get else "POST" - - headers = Headers({"Prefer": f"count={count}"}) if count else Headers() - headers.update(self.headers) - # the params here are params to be sent to the RPC and not the queryparams! 
- json, http_params = ( - ({}, QueryParams(params)) - if method in ("HEAD", "GET") - else (params, QueryParams()) - ) - request = RequestConfig( - self.session, - self.base_url.joinpath("rpc", func), - method, - headers, - http_params, - self.basic_auth, - json, - ) - return AsyncRPCFilterRequestBuilder(request) diff --git a/src/postgrest/src/postgrest/_async/request_builder.py b/src/postgrest/src/postgrest/_async/request_builder.py deleted file mode 100644 index 9f8efb4b..00000000 --- a/src/postgrest/src/postgrest/_async/request_builder.py +++ /dev/null @@ -1,419 +0,0 @@ -from __future__ import annotations - -from typing import Any, Generic, Literal, Optional, TypeVar, Union, overload - -from httpx import AsyncClient, BasicAuth, Headers, QueryParams, Response -from pydantic import ValidationError -from typing_extensions import override -from yarl import URL - -from ..base_request_builder import ( - APIResponse, - BaseFilterRequestBuilder, - BaseRPCRequestBuilder, - BaseSelectRequestBuilder, - CountMethod, - RequestConfig, - SingleAPIResponse, - pre_delete, - pre_insert, - pre_select, - pre_update, - pre_upsert, -) -from ..exceptions import APIError, APIErrorFromJSON, generate_default_error_message -from ..types import JSON, ReturnMethod -from ..utils import model_validate_json - -ReqConfig = RequestConfig[AsyncClient] - - -class AsyncQueryRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - async def execute(self) -> APIResponse: - """Execute the query. - - .. tip:: - This is the last method called, after the query is built. - - Returns: - :class:`APIResponse` - - Raises: - :class:`APIError` If the API raised an error. 
- """ - r = await self.request.send() - try: - if r.is_success: - return APIResponse.from_http_request_response(r) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class AsyncSingleRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - async def execute(self) -> SingleAPIResponse: - """Execute the query. - - .. tip:: - This is the last method called, after the query is built. - - Returns: - :class:`SingleAPIResponse` - na - Raises: - :class:`APIError` If the API raised an error. - """ - r = await self.request.send() - try: - if ( - 200 <= r.status_code <= 299 - ): # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok) - return SingleAPIResponse.from_http_request_response(r) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class AsyncExplainRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - async def execute(self) -> str: - r = await self.request.send() - try: - if r.is_success: - return r.text - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class AsyncMaybeSingleRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - async def execute(self) -> Optional[SingleAPIResponse]: - r = await self.request.send() - try: - if r.is_success: - parsed = APIResponse.from_http_request_response(r) - if len(parsed.data) == 0: - return None - if len(parsed.data) == 1: - return SingleAPIResponse(data=parsed.data[0], count=parsed.count) - else: - raise APIError( - { - "message": "Cannot coerce the result to a single JSON object", - "code": 
"406", - "hint": "Please check traceback of the code", - "details": "The result contains more than one row.", - } - ) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class AsyncFilterRequestBuilder( - BaseFilterRequestBuilder[AsyncClient], AsyncQueryRequestBuilder -): - def __init__(self, request: ReqConfig) -> None: - BaseFilterRequestBuilder.__init__(self, request) - AsyncQueryRequestBuilder.__init__(self, request) - - -class AsyncRPCFilterRequestBuilder(BaseRPCRequestBuilder, AsyncSingleRequestBuilder): - def __init__(self, request: ReqConfig) -> None: - BaseFilterRequestBuilder.__init__(self, request) - AsyncSingleRequestBuilder.__init__(self, request) - - -class AsyncSelectRequestBuilder( - AsyncQueryRequestBuilder, BaseSelectRequestBuilder[AsyncClient] -): - def __init__(self, request: ReqConfig) -> None: - BaseSelectRequestBuilder.__init__(self, request) - AsyncQueryRequestBuilder.__init__(self, request) - - def single(self) -> AsyncSingleRequestBuilder: - """Specify that the query will only return a single row in response. - - .. caution:: - The API will raise an error if the query returned more than one row. - """ - self.request.headers["Accept"] = "application/vnd.pgrst.object+json" - return AsyncSingleRequestBuilder(self.request) - - def maybe_single(self) -> AsyncMaybeSingleRequestBuilder: - """Retrieves at most one row from the result. Result must be at most one row (e.g. 
using `eq` on a UNIQUE column), otherwise this will result in an error.""" - return AsyncMaybeSingleRequestBuilder(self.request) - - def text_search( - self, column: str, query: str, options: dict[str, Any] = {} - ) -> AsyncQueryRequestBuilder: - type_ = options.get("type") - type_part = "" - if type_ == "plain": - type_part = "pl" - elif type_ == "phrase": - type_part = "ph" - elif type_ == "web_search": - type_part = "w" - config_part = f"({options.get('config')})" if options.get("config") else "" - self.request.params = self.request.params.add( - column, f"{type_part}fts{config_part}.{query}" - ) - - return AsyncQueryRequestBuilder(self.request) - - def csv(self) -> AsyncSingleRequestBuilder: - """Specify that the query must retrieve data as a single CSV string.""" - self.request.headers["Accept"] = "text/csv" - return AsyncSingleRequestBuilder(self.request) - - @overload - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - format: Literal["text"] = "text", - ) -> AsyncExplainRequestBuilder: ... - - @overload - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - *, - format: Literal["json"], - ) -> AsyncSingleRequestBuilder: ... 
- - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - format: Literal["text", "json"] = "text", - ) -> AsyncExplainRequestBuilder | AsyncSingleRequestBuilder: - options = [ - key - for key, value in locals().items() - if key not in ["self", "format"] and value - ] - options_str = "|".join(options) - self.request.headers["Accept"] = ( - f"application/vnd.pgrst.plan+{format}; options={options_str}" - ) - if format == "text": - return AsyncExplainRequestBuilder(self.request) - else: - return AsyncSingleRequestBuilder(self.request) - - -class AsyncRequestBuilder: # - def __init__( - self, session: AsyncClient, path: URL, headers: Headers, auth: BasicAuth | None - ) -> None: - self.session = session - self.path = path - self.headers = headers - self.auth = auth - - def select( - self, - *columns: str, - count: Optional[CountMethod] = None, - head: Optional[bool] = None, - ) -> AsyncSelectRequestBuilder: - """Run a SELECT query. - - Args: - *columns: The names of the columns to fetch. - count: The method to use to get the count of rows returned. - Returns: - :class:`AsyncSelectRequestBuilder` - """ - method, params, headers, json = pre_select(*columns, count=count, head=head) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return AsyncSelectRequestBuilder(request) - - def insert( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - upsert: bool = False, - default_to_null: bool = True, - ) -> AsyncQueryRequestBuilder: - """Run an INSERT query. - - Args: - json: The row to be inserted. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - upsert: Whether the query should be an upsert. 
- default_to_null: Make missing fields default to `null`. - Otherwise, use the default value for the column. - Only applies for bulk inserts. - Returns: - :class:`AsyncQueryRequestBuilder` - """ - method, params, headers, json = pre_insert( - json, - count=count, - returning=returning, - upsert=upsert, - default_to_null=default_to_null, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return AsyncQueryRequestBuilder(request) - - def upsert( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ignore_duplicates: bool = False, - on_conflict: str = "", - default_to_null: bool = True, - ) -> AsyncQueryRequestBuilder: - """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. - - Args: - json: The row to be inserted. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - ignore_duplicates: Whether duplicate rows should be ignored. - on_conflict: Specified columns to be made to work with UNIQUE constraint. - default_to_null: Make missing fields default to `null`. Otherwise, use the - default value for the column. This only applies when inserting new rows, - not when merging with existing rows under `ignoreDuplicates: false`. - This also only applies when doing bulk upserts. 
- Returns: - :class:`AsyncQueryRequestBuilder` - """ - method, params, headers, json = pre_upsert( - json, - count=count, - returning=returning, - ignore_duplicates=ignore_duplicates, - on_conflict=on_conflict, - default_to_null=default_to_null, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return AsyncQueryRequestBuilder(request) - - def update( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ) -> AsyncFilterRequestBuilder: - """Run an UPDATE query. - - Args: - json: The updated fields. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - Returns: - :class:`AsyncFilterRequestBuilder` - """ - method, params, headers, json = pre_update( - json, - count=count, - returning=returning, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return AsyncFilterRequestBuilder(request) - - def delete( - self, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ) -> AsyncFilterRequestBuilder: - """Run a DELETE query. - - Args: - count: The method to use to get the count of rows returned. 
- returning: Either 'minimal' or 'representation' - Returns: - :class:`AsyncFilterRequestBuilder` - """ - method, params, headers, json = pre_delete( - count=count, - returning=returning, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return AsyncFilterRequestBuilder(request) diff --git a/src/postgrest/src/postgrest/_sync/__init__.py b/src/postgrest/src/postgrest/_sync/__init__.py deleted file mode 100644 index 9d48db4f..00000000 --- a/src/postgrest/src/postgrest/_sync/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from __future__ import annotations diff --git a/src/postgrest/src/postgrest/_sync/client.py b/src/postgrest/src/postgrest/_sync/client.py deleted file mode 100644 index 14f48ea7..00000000 --- a/src/postgrest/src/postgrest/_sync/client.py +++ /dev/null @@ -1,195 +0,0 @@ -from __future__ import annotations - -import platform -import sys -from typing import Any, Dict, Optional, Union, cast -from warnings import warn - -from deprecation import deprecated -from httpx import Client, Headers, QueryParams, Timeout -from yarl import URL - -from ..base_client import BasePostgrestClient -from ..constants import ( - DEFAULT_POSTGREST_CLIENT_HEADERS, - DEFAULT_POSTGREST_CLIENT_TIMEOUT, -) -from ..types import CountMethod -from ..version import __version__ -from .request_builder import ( - RequestConfig, - SyncRequestBuilder, - SyncRPCFilterRequestBuilder, -) - - -class SyncPostgrestClient(BasePostgrestClient): - """PostgREST client.""" - - def __init__( - self, - base_url: str, - *, - schema: str = "public", - headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS, - timeout: Union[int, float, Timeout, None] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[Client] = None, - ) -> None: - headers = { - "X-Client-Info": f"supabase-py/postgrest-py v{__version__}", - 
"X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = ( - timeout - if isinstance(timeout, Timeout) - else ( - int(abs(timeout)) - if timeout is not None - else DEFAULT_POSTGREST_CLIENT_TIMEOUT - ) - ) - BasePostgrestClient.__init__( - self, - URL(base_url), - schema=schema, - headers=headers, - timeout=self.timeout, - verify=self.verify, - proxy=proxy, - ) - - self.session = http_client or Client( - base_url=base_url, - headers=self.headers, - timeout=timeout, - verify=self.verify, - proxy=proxy, - follow_redirects=True, - http2=True, - ) - - def schema(self, schema: str) -> SyncPostgrestClient: - """Switch to another schema.""" - return SyncPostgrestClient( - base_url=str(self.base_url), - schema=schema, - headers=dict(self.headers), - timeout=self.timeout, - verify=self.verify, - proxy=self.proxy, - ) - - def __enter__(self) -> SyncPostgrestClient: - return self - - def __exit__(self, exc_type, exc, tb) -> None: - self.aclose() - - def aclose(self) -> None: - """Close the underlying HTTP connections.""" 
- self.session.close() - - def from_(self, table: str) -> SyncRequestBuilder: - """Perform a table operation. - - Args: - table: The name of the table - Returns: - :class:`AsyncRequestBuilder` - """ - return SyncRequestBuilder( - self.session, self.base_url.joinpath(table), self.headers, self.basic_auth - ) - - def table(self, table: str) -> SyncRequestBuilder: - """Alias to :meth:`from_`.""" - return self.from_(table) - - @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead") - def from_table(self, table: str) -> SyncRequestBuilder: - """Alias to :meth:`from_`.""" - return self.from_(table) - - def rpc( - self, - func: str, - params: dict[str, str], - count: Optional[CountMethod] = None, - head: bool = False, - get: bool = False, - ) -> SyncRPCFilterRequestBuilder: - """Perform a stored procedure call. - - Args: - func: The name of the remote procedure to run. - params: The parameters to be passed to the remote procedure. - count: The method to use to get the count of rows returned. - head: When set to `true`, `data` will not be returned. Useful if you only need the count. - get: When set to `true`, the function will be called with read-only access mode. - Returns: - :class:`AsyncRPCFilterRequestBuilder` - Example: - .. code-block:: python - - await client.rpc("foobar", {"arg": "value"}).execute() - - .. versionchanged:: 0.10.9 - This method now returns a :class:`AsyncRPCFilterRequestBuilder`. - .. versionchanged:: 0.10.2 - This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to - filter on the RPC's resultset. - """ - method = "HEAD" if head else "GET" if get else "POST" - - headers = Headers({"Prefer": f"count={count}"}) if count else Headers() - headers.update(self.headers) - # the params here are params to be sent to the RPC and not the queryparams! 
- json, http_params = ( - ({}, QueryParams(params)) - if method in ("HEAD", "GET") - else (params, QueryParams()) - ) - request = RequestConfig( - self.session, - self.base_url.joinpath("rpc", func), - method, - headers, - http_params, - self.basic_auth, - json, - ) - return SyncRPCFilterRequestBuilder(request) diff --git a/src/postgrest/src/postgrest/_sync/request_builder.py b/src/postgrest/src/postgrest/_sync/request_builder.py deleted file mode 100644 index bc88385a..00000000 --- a/src/postgrest/src/postgrest/_sync/request_builder.py +++ /dev/null @@ -1,419 +0,0 @@ -from __future__ import annotations - -from typing import Any, Generic, Literal, Optional, TypeVar, Union, overload - -from httpx import BasicAuth, Client, Headers, QueryParams, Response -from pydantic import ValidationError -from typing_extensions import override -from yarl import URL - -from ..base_request_builder import ( - APIResponse, - BaseFilterRequestBuilder, - BaseRPCRequestBuilder, - BaseSelectRequestBuilder, - CountMethod, - RequestConfig, - SingleAPIResponse, - pre_delete, - pre_insert, - pre_select, - pre_update, - pre_upsert, -) -from ..exceptions import APIError, APIErrorFromJSON, generate_default_error_message -from ..types import JSON, ReturnMethod -from ..utils import model_validate_json - -ReqConfig = RequestConfig[Client] - - -class SyncQueryRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - def execute(self) -> APIResponse: - """Execute the query. - - .. tip:: - This is the last method called, after the query is built. - - Returns: - :class:`APIResponse` - - Raises: - :class:`APIError` If the API raised an error. 
- """ - r = self.request.send() - try: - if r.is_success: - return APIResponse.from_http_request_response(r) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class SyncSingleRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - def execute(self) -> SingleAPIResponse: - """Execute the query. - - .. tip:: - This is the last method called, after the query is built. - - Returns: - :class:`SingleAPIResponse` - na - Raises: - :class:`APIError` If the API raised an error. - """ - r = self.request.send() - try: - if ( - 200 <= r.status_code <= 299 - ): # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok) - return SingleAPIResponse.from_http_request_response(r) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class SyncExplainRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - def execute(self) -> str: - r = self.request.send() - try: - if r.is_success: - return r.text - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class SyncMaybeSingleRequestBuilder: - def __init__(self, request: ReqConfig): - self.request = request - - def execute(self) -> Optional[SingleAPIResponse]: - r = self.request.send() - try: - if r.is_success: - parsed = APIResponse.from_http_request_response(r) - if len(parsed.data) == 0: - return None - if len(parsed.data) == 1: - return SingleAPIResponse(data=parsed.data[0], count=parsed.count) - else: - raise APIError( - { - "message": "Cannot coerce the result to a single JSON object", - "code": "406", - "hint": "Please check traceback of 
the code", - "details": "The result contains more than one row.", - } - ) - else: - json_obj = model_validate_json(APIErrorFromJSON, r.content) - raise APIError(dict(json_obj)) - except ValidationError as e: - raise APIError(generate_default_error_message(r)) - - -class SyncFilterRequestBuilder( - BaseFilterRequestBuilder[Client], SyncQueryRequestBuilder -): - def __init__(self, request: ReqConfig) -> None: - BaseFilterRequestBuilder.__init__(self, request) - SyncQueryRequestBuilder.__init__(self, request) - - -class SyncRPCFilterRequestBuilder(BaseRPCRequestBuilder, SyncSingleRequestBuilder): - def __init__(self, request: ReqConfig) -> None: - BaseFilterRequestBuilder.__init__(self, request) - SyncSingleRequestBuilder.__init__(self, request) - - -class SyncSelectRequestBuilder( - SyncQueryRequestBuilder, BaseSelectRequestBuilder[Client] -): - def __init__(self, request: ReqConfig) -> None: - BaseSelectRequestBuilder.__init__(self, request) - SyncQueryRequestBuilder.__init__(self, request) - - def single(self) -> SyncSingleRequestBuilder: - """Specify that the query will only return a single row in response. - - .. caution:: - The API will raise an error if the query returned more than one row. - """ - self.request.headers["Accept"] = "application/vnd.pgrst.object+json" - return SyncSingleRequestBuilder(self.request) - - def maybe_single(self) -> SyncMaybeSingleRequestBuilder: - """Retrieves at most one row from the result. Result must be at most one row (e.g. 
using `eq` on a UNIQUE column), otherwise this will result in an error.""" - return SyncMaybeSingleRequestBuilder(self.request) - - def text_search( - self, column: str, query: str, options: dict[str, Any] = {} - ) -> SyncQueryRequestBuilder: - type_ = options.get("type") - type_part = "" - if type_ == "plain": - type_part = "pl" - elif type_ == "phrase": - type_part = "ph" - elif type_ == "web_search": - type_part = "w" - config_part = f"({options.get('config')})" if options.get("config") else "" - self.request.params = self.request.params.add( - column, f"{type_part}fts{config_part}.{query}" - ) - - return SyncQueryRequestBuilder(self.request) - - def csv(self) -> SyncSingleRequestBuilder: - """Specify that the query must retrieve data as a single CSV string.""" - self.request.headers["Accept"] = "text/csv" - return SyncSingleRequestBuilder(self.request) - - @overload - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - format: Literal["text"] = "text", - ) -> SyncExplainRequestBuilder: ... - - @overload - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - *, - format: Literal["json"], - ) -> SyncSingleRequestBuilder: ... 
- - def explain( - self, - analyze: bool = False, - verbose: bool = False, - settings: bool = False, - buffers: bool = False, - wal: bool = False, - format: Literal["text", "json"] = "text", - ) -> SyncExplainRequestBuilder | SyncSingleRequestBuilder: - options = [ - key - for key, value in locals().items() - if key not in ["self", "format"] and value - ] - options_str = "|".join(options) - self.request.headers["Accept"] = ( - f"application/vnd.pgrst.plan+{format}; options={options_str}" - ) - if format == "text": - return SyncExplainRequestBuilder(self.request) - else: - return SyncSingleRequestBuilder(self.request) - - -class SyncRequestBuilder: # - def __init__( - self, session: Client, path: URL, headers: Headers, auth: BasicAuth | None - ) -> None: - self.session = session - self.path = path - self.headers = headers - self.auth = auth - - def select( - self, - *columns: str, - count: Optional[CountMethod] = None, - head: Optional[bool] = None, - ) -> SyncSelectRequestBuilder: - """Run a SELECT query. - - Args: - *columns: The names of the columns to fetch. - count: The method to use to get the count of rows returned. - Returns: - :class:`SyncSelectRequestBuilder` - """ - method, params, headers, json = pre_select(*columns, count=count, head=head) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return SyncSelectRequestBuilder(request) - - def insert( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - upsert: bool = False, - default_to_null: bool = True, - ) -> SyncQueryRequestBuilder: - """Run an INSERT query. - - Args: - json: The row to be inserted. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - upsert: Whether the query should be an upsert. 
- default_to_null: Make missing fields default to `null`. - Otherwise, use the default value for the column. - Only applies for bulk inserts. - Returns: - :class:`SyncQueryRequestBuilder` - """ - method, params, headers, json = pre_insert( - json, - count=count, - returning=returning, - upsert=upsert, - default_to_null=default_to_null, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return SyncQueryRequestBuilder(request) - - def upsert( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ignore_duplicates: bool = False, - on_conflict: str = "", - default_to_null: bool = True, - ) -> SyncQueryRequestBuilder: - """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. - - Args: - json: The row to be inserted. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - ignore_duplicates: Whether duplicate rows should be ignored. - on_conflict: Specified columns to be made to work with UNIQUE constraint. - default_to_null: Make missing fields default to `null`. Otherwise, use the - default value for the column. This only applies when inserting new rows, - not when merging with existing rows under `ignoreDuplicates: false`. - This also only applies when doing bulk upserts. 
- Returns: - :class:`SyncQueryRequestBuilder` - """ - method, params, headers, json = pre_upsert( - json, - count=count, - returning=returning, - ignore_duplicates=ignore_duplicates, - on_conflict=on_conflict, - default_to_null=default_to_null, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return SyncQueryRequestBuilder(request) - - def update( - self, - json: JSON, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ) -> SyncFilterRequestBuilder: - """Run an UPDATE query. - - Args: - json: The updated fields. - count: The method to use to get the count of rows returned. - returning: Either 'minimal' or 'representation' - Returns: - :class:`SyncFilterRequestBuilder` - """ - method, params, headers, json = pre_update( - json, - count=count, - returning=returning, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return SyncFilterRequestBuilder(request) - - def delete( - self, - *, - count: Optional[CountMethod] = None, - returning: ReturnMethod = ReturnMethod.representation, - ) -> SyncFilterRequestBuilder: - """Run a DELETE query. - - Args: - count: The method to use to get the count of rows returned. 
- returning: Either 'minimal' or 'representation' - Returns: - :class:`SyncFilterRequestBuilder` - """ - method, params, headers, json = pre_delete( - count=count, - returning=returning, - ) - headers.update(self.headers) - request = RequestConfig( - session=self.session, - path=self.path, - auth=self.auth, - params=params, - http_method=method, - headers=headers, - json=json, - ) - return SyncFilterRequestBuilder(request) diff --git a/src/postgrest/src/postgrest/base_client.py b/src/postgrest/src/postgrest/base_client.py deleted file mode 100644 index 0f27f1a0..00000000 --- a/src/postgrest/src/postgrest/base_client.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from typing import Dict, Optional, Union - -from httpx import AsyncClient, BasicAuth, Client, Headers, Timeout -from yarl import URL - -from .utils import is_http_url - - -class BasePostgrestClient(ABC): - """Base PostgREST client.""" - - def __init__( - self, - base_url: URL, - *, - schema: str, - headers: Dict[str, str], - timeout: Union[int, float, Timeout], - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - if not is_http_url(base_url): - ValueError("base_url must be a valid HTTP URL string") - - self.base_url = base_url - self.headers = Headers(headers) - self.headers["Accept-Profile"] = schema - self.headers["Content-Profile"] = schema - self.timeout = timeout - self.verify = verify - self.proxy = proxy - self.basic_auth: BasicAuth | None = None - - def auth( - self, - token: Optional[str], - *, - username: Union[str, bytes, None] = None, - password: Union[str, bytes] = "", - ): - """ - Authenticate the client with either bearer token or basic authentication. - - Raises: - `ValueError`: If neither authentication scheme is provided. - - .. note:: - Bearer token is preferred if both ones are provided. 
- """ - if token: - self.headers["Authorization"] = f"Bearer {token}" - elif username: - self.basic_auth = BasicAuth(username, password) - else: - raise ValueError( - "Neither bearer token or basic authentication scheme is provided" - ) - return self diff --git a/src/postgrest/src/postgrest/base_request_builder.py b/src/postgrest/src/postgrest/base_request_builder.py deleted file mode 100644 index 155dc4e2..00000000 --- a/src/postgrest/src/postgrest/base_request_builder.py +++ /dev/null @@ -1,664 +0,0 @@ -from __future__ import annotations - -import json -import sys -from json import JSONDecodeError -from re import search -from typing import ( - Any, - Awaitable, - Dict, - Generic, - Iterable, - List, - Literal, - NamedTuple, - Optional, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -from httpx import AsyncClient, BasicAuth, Client, Headers, QueryParams -from httpx import Response as RequestResponse -from pydantic import BaseModel, ValidationError -from yarl import URL - -if sys.version_info >= (3, 11): - from typing import Self -else: - from typing_extensions import Self - -try: - # >= 2.0.0 - from pydantic import field_validator -except ImportError: - # < 2.0.0 - from pydantic import validator as field_validator # type: ignore - -from .base_client import BasePostgrestClient -from .types import JSON, CountMethod, Filters, JSONAdapter, RequestMethod, ReturnMethod -from .utils import sanitize_param - - -class QueryArgs(NamedTuple): - # groups the method, json, headers and params for a query in a single object - method: RequestMethod - params: QueryParams - headers: Headers - json: JSON - - -C = TypeVar("C", Client, AsyncClient) - - -class RequestConfig(Generic[C]): - def __init__( - self, - session: C, - path: URL, - http_method: str, - headers: Headers, - params: QueryParams, - auth: BasicAuth | None, - json: JSON, - ) -> None: - self.session: C = session - self.path = path - self.http_method = http_method - self.headers = headers - self.params = params - 
self.json = None if http_method in {"GET", "HEAD"} else json - self.auth = auth - - @overload - def send(self: RequestConfig[Client]) -> RequestResponse: ... - @overload - def send(self: RequestConfig[AsyncClient]) -> Awaitable[RequestResponse]: ... - - def send(self: RequestConfig[C]): - return self.session.request( - self.http_method, - str(self.path), - json=self.json, - params=self.params, - headers=self.headers, - auth=self.auth, - ) - - -def _unique_columns(json: List[Dict[str, JSON]]): - unique_keys = {key for row in json for key in row.keys()} - columns = ",".join([f'"{k}"' for k in unique_keys]) - return columns - - -def _cleaned_columns(columns: Tuple[str, ...]) -> str: - quoted = False - cleaned = [] - - for column in columns: - clean_column = "" - for char in column: - if char.isspace() and not quoted: - continue - if char == '"': - quoted = not quoted - clean_column += char - cleaned.append(clean_column) - - return ",".join(cleaned) - - -def pre_select( - *columns: str, - count: Optional[CountMethod] = None, - head: Optional[bool] = None, -) -> QueryArgs: - method = RequestMethod.HEAD if head else RequestMethod.GET - cleaned_columns = _cleaned_columns(columns or ("*",)) - params = QueryParams({"select": cleaned_columns}) - - headers = Headers({"Prefer": f"count={count}"}) if count else Headers() - return QueryArgs(method, params, headers, {}) - - -def pre_insert( - json: JSON, - *, - count: Optional[CountMethod], - returning: ReturnMethod, - upsert: bool, - default_to_null: bool = True, -) -> QueryArgs: - prefer_headers = [f"return={returning}"] - if count: - prefer_headers.append(f"count={count}") - if upsert: - prefer_headers.append("resolution=merge-duplicates") - if not default_to_null: - prefer_headers.append("missing=default") - headers = Headers({"Prefer": ",".join(prefer_headers)}) - # Adding 'columns' query parameters - query_params = {} - if isinstance(json, list): - query_params = {"columns": _unique_columns(json)} - return 
QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) - - -def pre_upsert( - json: JSON, - *, - count: Optional[CountMethod], - returning: ReturnMethod, - ignore_duplicates: bool, - on_conflict: str = "", - default_to_null: bool = True, -) -> QueryArgs: - query_params = {} - prefer_headers = [f"return={returning}"] - if count: - prefer_headers.append(f"count={count}") - resolution = "ignore" if ignore_duplicates else "merge" - prefer_headers.append(f"resolution={resolution}-duplicates") - if not default_to_null: - prefer_headers.append("missing=default") - headers = Headers({"Prefer": ",".join(prefer_headers)}) - if on_conflict: - query_params["on_conflict"] = on_conflict - # Adding 'columns' query parameters - if isinstance(json, list): - query_params["columns"] = _unique_columns(json) - return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) - - -def pre_update( - json: JSON, - *, - count: Optional[CountMethod], - returning: ReturnMethod, -) -> QueryArgs: - prefer_headers = [f"return={returning}"] - if count: - prefer_headers.append(f"count={count}") - headers = Headers({"Prefer": ",".join(prefer_headers)}) - return QueryArgs(RequestMethod.PATCH, QueryParams(), headers, json) - - -def pre_delete( - *, - count: Optional[CountMethod], - returning: ReturnMethod, -) -> QueryArgs: - prefer_headers = [f"return={returning}"] - if count: - prefer_headers.append(f"count={count}") - headers = Headers({"Prefer": ",".join(prefer_headers)}) - return QueryArgs(RequestMethod.DELETE, QueryParams(), headers, {}) - - -class APIResponse(BaseModel): - data: List[JSON] - """The data returned by the query.""" - count: Optional[int] = None - """The number of rows returned.""" - - @staticmethod - def _get_count_from_content_range_header( - content_range_header: str, - ) -> Optional[int]: - content_range = content_range_header.split("/") - return None if len(content_range) < 2 else int(content_range[1]) - - @staticmethod - def 
_is_count_in_prefer_header(prefer_header: str) -> bool: - pattern = f"count=({'|'.join([cm.value for cm in CountMethod])})" - return bool(search(pattern, prefer_header)) - - @staticmethod - def _get_count_from_http_request_response( - request_response: RequestResponse, - ) -> Optional[int]: - prefer_header: Optional[str] = request_response.request.headers.get("prefer") - if not prefer_header: - return None - is_count_in_prefer_header = APIResponse._is_count_in_prefer_header( - prefer_header - ) - content_range_header: Optional[str] = request_response.headers.get( - "content-range" - ) - if is_count_in_prefer_header and content_range_header: - return APIResponse._get_count_from_content_range_header( - content_range_header - ) - return None - - @staticmethod - def from_http_request_response(request_response: RequestResponse) -> APIResponse: - count = APIResponse._get_count_from_http_request_response(request_response) - try: - data = JSONAdapter.validate_json(request_response.content) - except ValidationError: - data = request_response.text if len(request_response.text) > 0 else [] - return APIResponse(data=data, count=count) - - -class SingleAPIResponse(APIResponse): - data: JSON # type: ignore - """The data returned by the query.""" - - @staticmethod - def from_http_request_response( - request_response: RequestResponse, - ) -> SingleAPIResponse: - count = APIResponse._get_count_from_http_request_response(request_response) - try: - data = request_response.json() - except JSONDecodeError: - data = request_response.text if len(request_response.text) > 0 else [] - return SingleAPIResponse(data=data, count=count) - - -class BaseFilterRequestBuilder(Generic[C]): - def __init__(self, request: RequestConfig[C]) -> None: - self.request: RequestConfig[C] = request - self.negate_next = False - - @property - def not_(self: Self) -> Self: - """Whether the filter applied next should be negated.""" - self.negate_next = True - return self - - def filter(self: Self, column: str, 
operator: str, criteria: str) -> Self: - """Apply filters on a query. - - Args: - column: The name of the column to apply a filter on - operator: The operator to use while filtering - criteria: The value to filter by - """ - if self.negate_next is True: - self.negate_next = False - operator = f"{Filters.NOT}.{operator}" - key, val = sanitize_param(column), f"{operator}.{criteria}" - self.request.params = self.request.params.add(key, val) - return self - - def eq(self: Self, column: str, value: Any) -> Self: - """An 'equal to' filter. - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.EQ, value) - - def neq(self: Self, column: str, value: Any) -> Self: - """A 'not equal to' filter - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.NEQ, value) - - def gt(self: Self, column: str, value: Any) -> Self: - """A 'greater than' filter - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.GT, value) - - def gte(self: Self, column: str, value: Any) -> Self: - """A 'greater than or equal to' filter - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.GTE, value) - - def lt(self: Self, column: str, value: Any) -> Self: - """A 'less than' filter - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.LT, value) - - def lte(self: Self, column: str, value: Any) -> Self: - """A 'less than or equal to' filter - - Args: - column: The name of the column to apply a filter on - value: The value to filter by - """ - return self.filter(column, Filters.LTE, value) - - def is_(self: Self, column: str, value: Any) -> Self: - """An 'is' filter - - Args: - column: The 
name of the column to apply a filter on - value: The value to filter by - """ - if value is None: - value = "null" - return self.filter(column, Filters.IS, value) - - def like(self: Self, column: str, pattern: str) -> Self: - """A 'LIKE' filter, to use for pattern matching. - - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - return self.filter(column, Filters.LIKE, pattern) - - def like_all_of(self: Self, column: str, pattern: str) -> Self: - """A 'LIKE' filter, to use for pattern matching. - - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - - return self.filter(column, Filters.LIKE_ALL, f"{{{pattern}}}") - - def like_any_of(self: Self, column: str, pattern: str) -> Self: - """A 'LIKE' filter, to use for pattern matching. - - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - - return self.filter(column, Filters.LIKE_ANY, f"{{{pattern}}}") - - def ilike_all_of(self: Self, column: str, pattern: str) -> Self: - """A 'ILIKE' filter, to use for pattern matching (case insensitive). - - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - - return self.filter(column, Filters.ILIKE_ALL, f"{{{pattern}}}") - - def ilike_any_of(self: Self, column: str, pattern: str) -> Self: - """A 'ILIKE' filter, to use for pattern matching (case insensitive). - - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - - return self.filter(column, Filters.ILIKE_ANY, f"{{{pattern}}}") - - def ilike(self: Self, column: str, pattern: str) -> Self: - """An 'ILIKE' filter, to use for pattern matching (case insensitive). 
- - Args: - column: The name of the column to apply a filter on - pattern: The pattern to filter by - """ - return self.filter(column, Filters.ILIKE, pattern) - - def or_(self: Self, filters: str, reference_table: Optional[str] = None) -> Self: - """An 'or' filter - - Args: - filters: The filters to use, following PostgREST syntax - reference_table: Set this to filter on referenced tables instead of the parent table - """ - key = f"{sanitize_param(reference_table)}.or" if reference_table else "or" - self.request.params = self.request.params.add(key, f"({filters})") - return self - - def fts(self: Self, column: str, query: Any) -> Self: - return self.filter(column, Filters.FTS, query) - - def plfts(self: Self, column: str, query: Any) -> Self: - return self.filter(column, Filters.PLFTS, query) - - def phfts(self: Self, column: str, query: Any) -> Self: - return self.filter(column, Filters.PHFTS, query) - - def wfts(self: Self, column: str, query: Any) -> Self: - return self.filter(column, Filters.WFTS, query) - - def in_(self: Self, column: str, values: Iterable[Any]) -> Self: - values = map(sanitize_param, values) - values = ",".join(values) - return self.filter(column, Filters.IN, f"({values})") - - def cs(self: Self, column: str, values: Iterable[Any]) -> Self: - values = ",".join(values) - return self.filter(column, Filters.CS, f"{{{values}}}") - - def cd(self: Self, column: str, values: Iterable[Any]) -> Self: - values = ",".join(values) - return self.filter(column, Filters.CD, f"{{{values}}}") - - def contains( - self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]] - ) -> Self: - if isinstance(value, str): - # range types can be inclusive '[', ']' or exclusive '(', ')' so just - # keep it simple and accept a string - return self.filter(column, Filters.CS, value) - if not isinstance(value, dict) and isinstance(value, Iterable): - # Expected to be some type of iterable - stringified_values = ",".join(value) - return self.filter(column, 
Filters.CS, f"{{{stringified_values}}}") - - return self.filter(column, Filters.CS, json.dumps(value)) - - def contained_by( - self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]] - ) -> Self: - if isinstance(value, str): - # range - return self.filter(column, Filters.CD, value) - if not isinstance(value, dict) and isinstance(value, Iterable): - stringified_values = ",".join(value) - return self.filter(column, Filters.CD, f"{{{stringified_values}}}") - return self.filter(column, Filters.CD, json.dumps(value)) - - def ov(self: Self, column: str, value: Iterable[Any]) -> Self: - if isinstance(value, str): - # range types can be inclusive '[', ']' or exclusive '(', ')' so just - # keep it simple and accept a string - return self.filter(column, Filters.OV, value) - if not isinstance(value, dict) and isinstance(value, Iterable): - # Expected to be some type of iterable - stringified_values = ",".join(value) - return self.filter(column, Filters.OV, f"{{{stringified_values}}}") - return self.filter(column, Filters.OV, json.dumps(value)) - - def sl(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.filter(column, Filters.SL, f"({range[0]},{range[1]})") - - def sr(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.filter(column, Filters.SR, f"({range[0]},{range[1]})") - - def nxl(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.filter(column, Filters.NXL, f"({range[0]},{range[1]})") - - def nxr(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.filter(column, Filters.NXR, f"({range[0]},{range[1]})") - - def adj(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.filter(column, Filters.ADJ, f"({range[0]},{range[1]})") - - def range_gt(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.sr(column, range) - - def range_gte(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.nxl(column, range) - - def 
range_lt(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.sl(column, range) - - def range_lte(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.nxr(column, range) - - def range_adjacent(self: Self, column: str, range: Tuple[int, int]) -> Self: - return self.adj(column, range) - - def overlaps(self: Self, column: str, values: Iterable[Any]) -> Self: - return self.ov(column, values) - - def match(self: Self, query: Dict[str, Any]) -> Self: - updated_query = self - - if not query: - raise ValueError( - "query dictionary should contain at least one key-value pair" - ) - - for key, value in query.items(): - updated_query = self.eq(key, value) - - return updated_query - - def max_affected(self: Self, value: int) -> Self: - """Set the maximum number of rows that can be affected by the query. - - Only available in PostgREST v13+ and only works with PATCH and DELETE methods. - - Args: - value: The maximum number of rows that can be affected - """ - prefer_header = self.request.headers.get("Prefer", "") - if prefer_header: - if "handling=strict" not in prefer_header: - prefer_header += ",handling=strict" - else: - prefer_header = "handling=strict" - - prefer_header += f",max-affected={value}" - - self.request.headers["Prefer"] = prefer_header - return self - - -class BaseSelectRequestBuilder(BaseFilterRequestBuilder[C]): - def order( - self: Self, - column: str, - *, - desc: bool = False, - nullsfirst: Optional[bool] = None, - foreign_table: Optional[str] = None, - ) -> Self: - """Sort the returned rows in some specific order. - - Args: - column: The column to order by - desc: Whether the rows should be ordered in descending order or not. - nullsfirst: nullsfirst - foreign_table: Foreign table name whose results are to be ordered. - .. versionchanged:: 0.10.3 - Allow ordering results for foreign tables with the foreign_table parameter. 
- """ - key = f"{foreign_table}.order" if foreign_table else "order" - existing_order = self.request.params.get(key) - - self.request.params = self.request.params.set( - key, - f"{existing_order + ',' if existing_order else ''}" - + f"{column}.{'desc' if desc else 'asc'}" - + ( - f".{'nullsfirst' if nullsfirst else 'nullslast'}" - if nullsfirst is not None - else "" - ), - ) - return self - - def limit(self: Self, size: int, *, foreign_table: Optional[str] = None) -> Self: - """Limit the number of rows returned by a query. - - Args: - size: The number of rows to be returned - foreign_table: Foreign table name to limit - .. versionchanged:: 0.10.3 - Allow limiting results returned for foreign tables with the foreign_table parameter. - """ - self.request.params = self.request.params.add( - f"{foreign_table}.limit" if foreign_table else "limit", - size, - ) - return self - - def offset(self: Self, size: int) -> Self: - """Set the starting row index returned by a query. - Args: - size: The number of the row to start at - """ - self.request.params = self.request.params.add( - "offset", - size, - ) - return self - - def range( - self: Self, start: int, end: int, foreign_table: Optional[str] = None - ) -> Self: - self.request.params = self.request.params.add( - f"{foreign_table}.offset" if foreign_table else "offset", start - ) - self.request.params = self.request.params.add( - f"{foreign_table}.limit" if foreign_table else "limit", - end - start + 1, - ) - return self - - -class BaseRPCRequestBuilder(BaseSelectRequestBuilder): - def select( - self, - *columns: str, - ) -> Self: - """Run a SELECT query. - - Args: - *columns: The names of the columns to fetch. 
- Returns: - :class:`BaseSelectRequestBuilder` - """ - method, params, headers, json = pre_select(*columns, count=None) - self.request.params = self.request.params.add("select", params.get("select")) - if self.request.headers.get("Prefer"): - self.request.headers["Prefer"] += ",return=representation" - else: - self.request.headers["Prefer"] = "return=representation" - - return self - - def single(self) -> Self: - """Specify that the query will only return a single row in response. - - .. caution:: - The API will raise an error if the query returned more than one row. - """ - self.request.headers["Accept"] = "application/vnd.pgrst.object+json" - return self - - def maybe_single(self) -> Self: - """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" - self.request.headers["Accept"] = "application/vnd.pgrst.object+json" - return self - - def csv(self) -> Self: - """Specify that the query must retrieve data as a single CSV string.""" - self.request.headers["Accept"] = "text/csv" - return self diff --git a/src/postgrest/src/postgrest/client.py b/src/postgrest/src/postgrest/client.py new file mode 100644 index 00000000..69d2c77b --- /dev/null +++ b/src/postgrest/src/postgrest/client.py @@ -0,0 +1,311 @@ +from __future__ import annotations + +from base64 import b64encode +from types import TracebackType +from typing import Generic, Literal, overload + +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + AsyncHttpSession, + HttpIO, + HttpSession, + SyncHttpIO, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import HTTPRequestMethod, JSONRequest +from typing_extensions import Self +from yarl import URL + +from .request_builder import ( + RequestBuilder, + RPCCountRequestBuilder, + RPCFilterRequestBuilder, +) +from .types import CountMethod + + +class PostgrestClient(Generic[HttpIO]): + 
"""PostgREST client.""" + + def __init__( + self, + executor: HttpIO, + base_url: URL, + default_headers: Headers, + *, + schema: str = "public", + ) -> None: + self.executor: HttpIO = executor + self.base_url = base_url + self.default_headers = default_headers.set("Accept-Profile", schema).set( + "Content-Profile", schema + ) + + def set_auth( + self, + token: str, + ) -> Self: + """ + Authenticate the client with either bearer token or basic authentication. + + Raises: + `ValueError`: If neither authentication scheme is provided. + + .. note:: + Bearer token is preferred if both ones are provided. + """ + self.default_headers = self.default_headers.override( + "Authorization", f"Bearer {token}" + ) + return self + + def set_auth_with_password( + self, + username: str, + password: str, + ) -> Self: + userpass = f"{username}:{password}" + token = b64encode(userpass.encode("utf8")).decode() + self.default_headers = self.default_headers.override( + "Authorization", f"Basic {token}" + ) + return self + + def from_(self, table: str) -> RequestBuilder[HttpIO]: + """Perform a table operation. + + Args: + table: The name of the table + Returns: + :class:`AsyncRequestBuilder` + """ + return RequestBuilder( + executor=self.executor, + base_url=self.base_url.joinpath(table), + default_headers=self.default_headers, + ) + + def table(self, table: str) -> RequestBuilder[HttpIO]: + """Alias to :meth:`from_`.""" + return self.from_(table) + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + head: Literal[False], + count: CountMethod | None, + get: bool = ..., + ) -> RPCFilterRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + head: Literal[True], + count: CountMethod | None, + get: bool, + ) -> RPCCountRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + ) -> RPCFilterRequestBuilder[HttpIO]: ... 
+ + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + get: Literal[True], + ) -> RPCFilterRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + count: CountMethod | None, + get: Literal[False], + ) -> RPCFilterRequestBuilder[HttpIO] | RPCCountRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + count: CountMethod | None, + get: Literal[True], + ) -> RPCFilterRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + count: CountMethod | None, + head: Literal[True], + ) -> RPCFilterRequestBuilder[HttpIO]: ... + + @overload + def rpc( + self, + func: str, + params: dict[str, str], + *, + head: bool = ..., + count: CountMethod | None = ..., + get: bool = ..., + ) -> RPCFilterRequestBuilder[HttpIO] | RPCCountRequestBuilder[HttpIO]: ... + + def rpc( + self, + func: str, + params: dict[str, str], + *, + head: bool = False, + count: CountMethod | None = None, + get: bool = False, + ) -> RPCFilterRequestBuilder[HttpIO] | RPCCountRequestBuilder[HttpIO]: + """Perform a stored procedure call. + + Args: + func: The name of the remote procedure to run. + params: The parameters to be passed to the remote procedure. + count: The method to use to get the count of rows returned. + head: When set to `true`, `data` will not be returned. Useful if you only need the count. + get: When set to `true`, the function will be called with read-only access mode. + Returns: + :class:`AsyncRPCFilterRequestBuilder` + Example: + .. code-block:: python + + await client.rpc("foobar", {"arg": "value"}).execute() + """ + method: HTTPRequestMethod = "HEAD" if head else "GET" if get else "POST" + + headers = ( + Headers.from_mapping({"Prefer": f"count={count}"}) + if count + else Headers.empty() + ) + + # the params here are params to be sent to the RPC and not the queryparams! 
+ json, http_params = ( + ({}, URLQuery.from_mapping(params)) + if method in ("HEAD", "GET") + else (params, URLQuery.empty()) + ) + request = JSONRequest( + path=["rpc", func], + method=method, + headers=headers, + query=http_params, + body=json, + ) + if not head: + return RPCFilterRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + else: + return RPCCountRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + + +class AsyncPostgrestClient(PostgrestClient[AsyncHttpIO]): + def __init__( + self, + base_url: str, + http_session: AsyncHttpSession, + headers: dict[str, str] | None = None, + schema: str = "public", + ) -> None: + PostgrestClient.__init__( + self, + executor=AsyncHttpIO(session=http_session), + base_url=URL(base_url), + default_headers=Headers.from_mapping(headers) + if headers + else Headers.empty(), + schema=schema, + ) + + def schema(self, schema: str) -> AsyncPostgrestClient: + """Switch to another schema.""" + return AsyncPostgrestClient( + http_session=self.executor.session, + base_url=str(self.base_url), + headers=dict(self.default_headers), + schema=schema, + ) + + async def __aenter__(self) -> AsyncPostgrestClient: + await self.executor.session.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.executor.session.__aexit__(exc_type, exc, tb) + + +class SyncPostgrestClient(PostgrestClient[SyncHttpIO]): + def __init__( + self, + base_url: str, + http_session: HttpSession, + headers: dict[str, str] | None = None, + *, + schema: str = "public", + ) -> None: + PostgrestClient.__init__( + self, + executor=SyncHttpIO(session=http_session), + base_url=URL(base_url), + default_headers=Headers.from_mapping(headers) + if headers + else Headers.empty(), + schema=schema, + ) + + def 
__enter__(self) -> SyncPostgrestClient: + self.executor.session.__enter__() + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + self.executor.session.__exit__(exc_type, exc, tb) + + def schema(self, schema: str) -> SyncPostgrestClient: + """Switch to another schema.""" + return SyncPostgrestClient( + base_url=str(self.base_url), + headers=dict(self.default_headers), + schema=schema, + http_session=self.executor.session, + ) diff --git a/src/postgrest/src/postgrest/constants.py b/src/postgrest/src/postgrest/constants.py deleted file mode 100644 index 4c3c17c8..00000000 --- a/src/postgrest/src/postgrest/constants.py +++ /dev/null @@ -1,6 +0,0 @@ -DEFAULT_POSTGREST_CLIENT_HEADERS = { - "Accept": "application/json", - "Content-Type": "application/json", -} - -DEFAULT_POSTGREST_CLIENT_TIMEOUT = 120 diff --git a/src/postgrest/src/postgrest/exceptions.py b/src/postgrest/src/postgrest/exceptions.py index d4ef668d..dd80ef31 100644 --- a/src/postgrest/src/postgrest/exceptions.py +++ b/src/postgrest/src/postgrest/exceptions.py @@ -1,6 +1,7 @@ -from typing import Any, Dict, Optional +from typing import Any, Dict from pydantic import BaseModel +from supabase_utils.http.request import Response class APIErrorFromJSON(BaseModel): @@ -9,13 +10,13 @@ class APIErrorFromJSON(BaseModel): from a json string. 
""" - message: Optional[str] + message: str | None """The error message.""" - code: Optional[str] + code: str | None """The error code.""" - hint: Optional[str] + hint: str | None """The error hint.""" - details: Optional[str] + details: str | None """The error details.""" @@ -25,13 +26,13 @@ class APIError(Exception): """ _raw_error: Dict[str, str] - message: Optional[str] + message: str | None """The error message.""" - code: Optional[str] + code: str | None """The error code.""" - hint: Optional[str] + hint: str | None """The error hint.""" - details: Optional[str] + details: str | None """The error details.""" def __init__(self, error: Dict[str, Any]) -> None: @@ -59,10 +60,10 @@ def json(self) -> Dict[str, str]: return self._raw_error -def generate_default_error_message(r): +def generate_default_error_message(r: Response) -> dict[str, str]: return { "message": "JSON could not be generated", - "code": r.status_code, + "code": str(r.status), "hint": "Refer to full message for details", "details": str(r.content), } diff --git a/src/postgrest/src/postgrest/request_builder.py b/src/postgrest/src/postgrest/request_builder.py new file mode 100644 index 00000000..389bb63a --- /dev/null +++ b/src/postgrest/src/postgrest/request_builder.py @@ -0,0 +1,1076 @@ +from __future__ import annotations + +import json +import sys +from dataclasses import dataclass +from re import search +from typing import ( + Any, + Dict, + Generic, + Iterable, + List, + Literal, + NamedTuple, + Tuple, + TypeVar, + overload, +) + +from httpx import AsyncClient, Client +from pydantic import TypeAdapter, ValidationError +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + HttpIO, + HttpMethod, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import HTTPRequestMethod, JSONRequest, Response +from supabase_utils.types import JSON +from typing_extensions import Self +from yarl import URL + +from .exceptions 
import APIError, APIErrorFromJSON, generate_default_error_message +from .types import CountMethod, Filters, ReturnMethod +from .utils import model_validate_json, sanitize_param + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + + +class QueryArgs(NamedTuple): + # groups the method, json, headers and params for a query in a single object + method: HTTPRequestMethod + params: URLQuery + headers: Headers + json: JSON + + +C = TypeVar("C", Client, AsyncClient) + + +def _unique_columns(json: List[Dict[str, JSON]]): + unique_keys = {key for row in json for key in row.keys()} + columns = ",".join([f'"{k}"' for k in unique_keys]) + return columns + + +def _cleaned_columns(columns: Tuple[str, ...]) -> str: + quoted = False + cleaned = [] + + for column in columns: + clean_column = "" + for char in column: + if char.isspace() and not quoted: + continue + if char == '"': + quoted = not quoted + clean_column += char + cleaned.append(clean_column) + + return ",".join(cleaned) + + +def pre_select( + *columns: str, + count: CountMethod | None = None, + head: bool | None = None, +) -> QueryArgs: + method: HTTPRequestMethod = "HEAD" if head else "GET" + cleaned_columns = _cleaned_columns(columns or ("*",)) + params = URLQuery.from_mapping({"select": cleaned_columns}) + + headers = ( + Headers.from_mapping({"Prefer": f"count={count}"}) if count else Headers.empty() + ) + return QueryArgs(method, params, headers, {}) + + +def pre_insert( + json: JSON, + *, + count: CountMethod | None, + returning: ReturnMethod, + upsert: bool, + default_to_null: bool = True, +) -> QueryArgs: + prefer_headers = Headers.from_mapping({"Prefer": f"return={returning}"}) + if count: + prefer_headers = prefer_headers.set("Prefer", f"count={count}") + if upsert: + prefer_headers = prefer_headers.set("Prefer", "resolution=merge-duplicates") + if not default_to_null: + prefer_headers = prefer_headers.set("Prefer", "missing=default") + # Adding 'columns' 
query parameters + query_params = {} + if isinstance(json, list): + query_params = {"columns": _unique_columns(json)} + return QueryArgs("POST", URLQuery.from_mapping(query_params), prefer_headers, json) + + +def pre_upsert( + json: JSON, + *, + count: CountMethod | None, + returning: ReturnMethod, + ignore_duplicates: bool, + on_conflict: str = "", + default_to_null: bool = True, +) -> QueryArgs: + query_params = {} + prefer_headers = Headers.from_mapping({"Prefer": f"return={returning}"}) + if count: + prefer_headers = prefer_headers.set("Prefer", f"count={count}") + resolution = "ignore" if ignore_duplicates else "merge" + prefer_headers = prefer_headers.set("Prefer", f"resolution={resolution}-duplicates") + if not default_to_null: + prefer_headers = prefer_headers.set("Prefer", "missing=default") + if on_conflict: + query_params["on_conflict"] = on_conflict + # Adding 'columns' query parameters + if isinstance(json, list): + query_params["columns"] = _unique_columns(json) + return QueryArgs("POST", URLQuery.from_mapping(query_params), prefer_headers, json) + + +def pre_update( + json: JSON, + *, + count: CountMethod | None, + returning: ReturnMethod, +) -> QueryArgs: + prefer_headers = Headers.from_mapping({"Prefer": f"return={returning}"}) + if count: + prefer_headers = prefer_headers.set("Prefer", f"count={count}") + return QueryArgs("PATCH", URLQuery.empty(), prefer_headers, json) + + +def pre_delete( + *, + count: CountMethod | None, + returning: ReturnMethod, +) -> QueryArgs: + prefer_headers = Headers.from_mapping({"Prefer": f"return={returning}"}) + if count: + prefer_headers = prefer_headers.set("Prefer", f"count={count}") + return QueryArgs("DELETE", URLQuery.empty(), prefer_headers, {}) + + +JSONListParser = TypeAdapter(List[Dict[str, JSON]]) +JSONDictParser = TypeAdapter(Dict[str, JSON]) + + +@dataclass +class APIResponse: + data: List[Dict[str, JSON]] + """The data returned by the query.""" + count: int | None = None + """The number of rows 
returned.""" + + @staticmethod + def _get_count_from_content_range_header( + content_range_header: str, + ) -> int | None: + content_range = content_range_header.split("/") + return None if len(content_range) < 2 else int(content_range[1]) + + @staticmethod + def _is_count_in_prefer_header(prefer_header: str) -> bool: + pattern = f"count=({'|'.join([cm.value for cm in CountMethod])})" + return bool(search(pattern, prefer_header)) + + @staticmethod + def _get_count_from_http_request_response( + response: Response, + ) -> int | None: + prefer_header: str | None = response.request.headers.get("prefer") + if not prefer_header: + return None + is_count_in_prefer_header = APIResponse._is_count_in_prefer_header( + prefer_header + ) + content_range_header: str | None = response.headers.get("content-range") + if is_count_in_prefer_header and content_range_header: + return APIResponse._get_count_from_content_range_header( + content_range_header + ) + return None + + @staticmethod + def from_http_request_response(response: Response) -> APIResponse: + count = APIResponse._get_count_from_http_request_response(response) + data = JSONListParser.validate_json(response.content) + return APIResponse(data=data, count=count) + + +@dataclass +class SingleAPIResponse: + data: Dict[str, JSON] + count: int | None + + @staticmethod + def from_http_request_response( + response: Response, + ) -> SingleAPIResponse: + count = APIResponse._get_count_from_http_request_response(response) + data = JSONDictParser.validate_json(response.content) + return SingleAPIResponse(data=data, count=count) + + +class BaseFilterRequestBuilder: + request: JSONRequest + negate_next: bool = False + + def __init__(self, request: JSONRequest, negate_next: bool = False) -> None: + self.request = request + self.negate_next = negate_next + + @property + def not_(self: Self) -> Self: + """Whether the filter applied next should be negated.""" + self.negate_next = True + return self + + def filter(self: Self, column: str, 
operator: str, criteria: str | int) -> Self: + """Apply filters on a query. + + Args: + column: The name of the column to apply a filter on + operator: The operator to use while filtering + criteria: The value to filter by + """ + if self.negate_next is True: + self.negate_next = False + operator = f"{Filters.NOT}.{operator}" + key, val = sanitize_param(column), f"{operator}.{criteria}" + self.request.query = self.request.query.set(key, val) + return self + + def eq(self: Self, column: str, value: str | int) -> Self: + """An 'equal to' filter. + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.EQ, value) + + def neq(self: Self, column: str, value: Any) -> Self: + """A 'not equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.NEQ, value) + + def gt(self: Self, column: str, value: Any) -> Self: + """A 'greater than' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.GT, value) + + def gte(self: Self, column: str, value: Any) -> Self: + """A 'greater than or equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.GTE, value) + + def lt(self: Self, column: str, value: Any) -> Self: + """A 'less than' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.LT, value) + + def lte(self: Self, column: str, value: Any) -> Self: + """A 'less than or equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.LTE, value) + + def is_(self: Self, column: str, value: Any) -> Self: + """An 'is' filter + + Args: + 
column: The name of the column to apply a filter on + value: The value to filter by + """ + if value is None: + value = "null" + return self.filter(column, Filters.IS, value) + + def like(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + return self.filter(column, Filters.LIKE, pattern) + + def like_all_of(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.LIKE_ALL, f"{{{pattern}}}") + + def like_any_of(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.LIKE_ANY, f"{{{pattern}}}") + + def ilike_all_of(self: Self, column: str, pattern: str) -> Self: + """A 'ILIKE' filter, to use for pattern matching (case insensitive). + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.ILIKE_ALL, f"{{{pattern}}}") + + def ilike_any_of(self: Self, column: str, pattern: str) -> Self: + """A 'ILIKE' filter, to use for pattern matching (case insensitive). + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.ILIKE_ANY, f"{{{pattern}}}") + + def ilike(self: Self, column: str, pattern: str) -> Self: + """An 'ILIKE' filter, to use for pattern matching (case insensitive). 
+ + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + return self.filter(column, Filters.ILIKE, pattern) + + def or_(self: Self, filters: str, reference_table: str | None = None) -> Self: + """An 'or' filter + + Args: + filters: The filters to use, following PostgREST syntax + reference_table: Set this to filter on referenced tables instead of the parent table + """ + key = f"{sanitize_param(reference_table)}.or" if reference_table else "or" + self.request.query = self.request.query.set(key, f"({filters})") + return self + + def fts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.FTS, query) + + def plfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.PLFTS, query) + + def phfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.PHFTS, query) + + def wfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.WFTS, query) + + def in_(self: Self, column: str, values: Iterable[Any]) -> Self: + values = map(sanitize_param, values) + values = ",".join(values) + return self.filter(column, Filters.IN, f"({values})") + + def cs(self: Self, column: str, values: Iterable[Any]) -> Self: + values = ",".join(values) + return self.filter(column, Filters.CS, f"{{{values}}}") + + def cd(self: Self, column: str, values: Iterable[Any]) -> Self: + values = ",".join(values) + return self.filter(column, Filters.CD, f"{{{values}}}") + + def contains( + self: Self, column: str, value: Iterable[str] | str | Dict[str, JSON] + ) -> Self: + if isinstance(value, str): + # range types can be inclusive '[', ']' or exclusive '(', ')' so just + # keep it simple and accept a string + return self.filter(column, Filters.CS, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + # Expected to be some type of iterable + stringified_values = ",".join(value) + return self.filter(column, Filters.CS, 
f"{{{stringified_values}}}") + + return self.filter(column, Filters.CS, json.dumps(value)) + + def contained_by( + self: Self, column: str, value: Iterable[str] | str | Dict[str, JSON] + ) -> Self: + if isinstance(value, str): + # range + return self.filter(column, Filters.CD, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + stringified_values = ",".join(value) + return self.filter(column, Filters.CD, f"{{{stringified_values}}}") + return self.filter(column, Filters.CD, json.dumps(value)) + + def ov( + self: Self, column: str, value: Iterable[str] | str | Dict[str, JSON] + ) -> Self: + if isinstance(value, str): + # range types can be inclusive '[', ']' or exclusive '(', ')' so just + # keep it simple and accept a string + return self.filter(column, Filters.OV, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + # Expected to be some type of iterable + stringified_values = ",".join(value) + return self.filter(column, Filters.OV, f"{{{stringified_values}}}") + return self.filter(column, Filters.OV, json.dumps(value)) + + def sl(self: Self, column: str, min: str, max: str) -> Self: + return self.filter(column, Filters.SL, f"({min},{max})") + + def sr(self: Self, column: str, min: str, max: str) -> Self: + return self.filter(column, Filters.SR, f"({min},{max})") + + def nxl(self: Self, column: str, min: str, max: str) -> Self: + return self.filter(column, Filters.NXL, f"({min},{max})") + + def nxr(self: Self, column: str, min: str, max: str) -> Self: + return self.filter(column, Filters.NXR, f"({min},{max})") + + def adj(self: Self, column: str, min: str, max: str) -> Self: + return self.filter(column, Filters.ADJ, f"({min},{max})") + + def range_gt(self: Self, column: str, min: str, max: str) -> Self: + return self.sr(column, min, max) + + def range_gte(self: Self, column: str, min: str, max: str) -> Self: + return self.nxl(column, min, max) + + def range_lt(self: Self, column: str, min: str, max: str) -> Self: + 
return self.sl(column, min, max) + + def range_lte(self: Self, column: str, min: str, max: str) -> Self: + return self.nxr(column, min, max) + + def range_adjacent(self: Self, column: str, min: str, max: str) -> Self: + return self.adj(column, min, max) + + def overlaps(self: Self, column: str, values: str | Iterable[str]) -> Self: + return self.ov(column, values) + + def match(self: Self, query: Dict[str, str | int]) -> Self: + updated_query = self + + if not query: + raise ValueError( + "query dictionary should contain at least one key-value pair" + ) + + for key, value in query.items(): + updated_query = self.eq(key, value) + + return updated_query + + def max_affected(self: Self, value: int) -> Self: + """Set the maximum number of rows that can be affected by the query. + + Only available in PostgREST v13+ and only works with PATCH and DELETE methods. + + Args: + value: The maximum number of rows that can be affected + """ + self.request.headers = self.request.headers.set( + "Prefer", "handling=strict" + ).set("Prefer", f"max-affected={value}") + + return self + + +class BaseSelectRequestBuilder(BaseFilterRequestBuilder): + def order( + self: Self, + column: str, + *, + desc: bool = False, + nullsfirst: bool | None = None, + foreign_table: str | None = None, + ) -> Self: + """Sort the returned rows in some specific order. + + Args: + column: The column to order by + desc: Whether the rows should be ordered in descending order or not. + nullsfirst: nullsfirst + foreign_table: Foreign table name whose results are to be ordered. + .. versionchanged:: 0.10.3 + Allow ordering results for foreign tables with the foreign_table parameter. 
+ """ + key = f"{foreign_table}.order" if foreign_table else "order" + order = f"{column}.{'desc' if desc else 'asc'}" + nullsfirst_str = ( + f".{'nullsfirst' if nullsfirst else 'nullslast'}" + if nullsfirst is not None + else "" + ) + val = f"{order}{nullsfirst_str}" + self.request.query = self.request.query.set(key, val) + return self + + def limit(self: Self, size: int, *, foreign_table: str | None = None) -> Self: + """Limit the number of rows returned by a query. + + Args: + size: The number of rows to be returned + foreign_table: Foreign table name to limit + .. versionchanged:: 0.10.3 + Allow limiting results returned for foreign tables with the foreign_table parameter. + """ + self.request.query = self.request.query.set( + f"{foreign_table}.limit" if foreign_table else "limit", + size, + ) + return self + + def offset(self: Self, size: int) -> Self: + """Set the starting row index returned by a query. + Args: + size: The number of the row to start at + """ + self.request.query = self.request.query.set( + "offset", + size, + ) + return self + + def range( + self: Self, start: int, end: int, foreign_table: str | None = None + ) -> Self: + self.request.query = self.request.query.set( + f"{foreign_table}.offset" if foreign_table else "offset", start + ) + self.request.query = self.request.query.set( + f"{foreign_table}.limit" if foreign_table else "limit", + end - start + 1, + ) + return self + + +class BaseRPCRequestBuilder(BaseSelectRequestBuilder): + def select( + self, + *columns: str, + ) -> Self: + """Run a SELECT query. + + Args: + *columns: The names of the columns to fetch. 
+ Returns: + :class:`BaseSelectRequestBuilder` + """ + method, params, headers, json = pre_select(*columns, count=None) + self.request.query = self.request.query.merge(params) + self.request.headers = self.request.headers.set( + "Prefer", "return=representation" + ) + + return self + + def csv(self) -> Self: + """Specify that the query must retrieve data as a single CSV string.""" + self.request.headers = self.request.headers.set("Accept", "text/csv") + return self + + +@dataclass +class BaseRequestClient(Generic[HttpIO]): + executor: HttpIO + base_url: URL + default_headers: Headers + request: JSONRequest + + +class QueryRequestBuilder(BaseRequestClient[HttpIO]): + @handle_http_io + def execute(self) -> HttpMethod[APIResponse]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`APIResponse` + + Raises: + :class:`APIError` If the API raised an error. + """ + r = yield self.request + try: + if r.is_success: + return APIResponse.from_http_request_response(r) + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError: + raise APIError(generate_default_error_message(r)) + + +class SingleRequestBuilder(BaseRequestClient[HttpIO]): + @handle_http_io + def execute(self) -> HttpMethod[SingleAPIResponse]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`SingleAPIResponse` + na + Raises: + :class:`APIError` If the API raised an error. + """ + response = yield self.request + if response.is_success: + return SingleAPIResponse.from_http_request_response(response) + else: + json_obj = model_validate_json(APIErrorFromJSON, response.content) + raise APIError(dict(json_obj)) + + +class TextRequestBuilder(BaseRequestClient[HttpIO]): + @handle_http_io + def execute(self) -> HttpMethod[str]: + """Execute the query. + + .. 
tip:: + This is the last method called, after the query is built. + + Returns: + :class:`SingleAPIResponse` + na + Raises: + :class:`APIError` If the API raised an error. + """ + response = yield self.request + if response.is_success: + return response.content.decode("utf8") + else: + json_obj = model_validate_json(APIErrorFromJSON, response.content) + raise APIError(dict(json_obj)) + + +class ExplainRequestBuilder(BaseRequestClient[HttpIO]): + @handle_http_io + def execute(self) -> HttpMethod[str]: + r = yield self.request + try: + if r.is_success: + return r.content.decode("utf-8") + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError: + raise APIError(generate_default_error_message(r)) + + +class MaybeSingleRequestBuilder(BaseRequestClient[HttpIO]): + @handle_http_io + def execute(self) -> HttpMethod[SingleAPIResponse | None]: + response = yield self.request + if response.is_success: + parsed = APIResponse.from_http_request_response(response) + if len(parsed.data) == 0: + return None + if len(parsed.data) == 1: + return SingleAPIResponse(data=parsed.data[0], count=parsed.count) + else: + raise APIError(dict()) + else: + json_obj = model_validate_json(APIErrorFromJSON, response.content) + raise APIError(dict(json_obj)) + + +class FilterRequestBuilder(QueryRequestBuilder[HttpIO], BaseFilterRequestBuilder): + pass + + +class RPCFilterRequestBuilder(QueryRequestBuilder[HttpIO], BaseRPCRequestBuilder): + def single(self) -> SingleRequestBuilder[HttpIO]: + """Specify that the query will only return a single row in response. + + .. caution:: + The API will raise an error if the query returned more than one row. 
+ """ + self.request.headers = self.request.headers.set( + "Accept", "application/vnd.pgrst.object+json" + ) + return SingleRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + +class RPCCountRequestBuilder(BaseRequestClient[HttpIO], BaseRPCRequestBuilder): + @handle_http_io + def execute(self) -> HttpMethod[int | None]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`APIResponse` + + Raises: + :class:`APIError` If the API raised an error. + """ + response = yield self.request + if response.is_success: + count = APIResponse._get_count_from_http_request_response(response) + return count + else: + json_obj = model_validate_json(APIErrorFromJSON, response.content) + raise APIError(dict(json_obj)) + + +class SelectRequestBuilder(QueryRequestBuilder[HttpIO], BaseSelectRequestBuilder): + def single(self) -> SingleRequestBuilder[HttpIO]: + """Specify that the query will only return a single row in response. + + .. caution:: + The API will raise an error if the query returned more than one row. + """ + self.request.headers = self.request.headers.set( + "Accept", "application/vnd.pgrst.object+json" + ) + return SingleRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + def maybe_single(self) -> MaybeSingleRequestBuilder[HttpIO]: + """Retrieves at most one row from the result. Result must be at most one row (e.g. 
using `eq` on a UNIQUE column), otherwise this will result in an error.""" + return MaybeSingleRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + def text_search( + self, column: str, query: str, options: dict[str, Any] = {} + ) -> QueryRequestBuilder[HttpIO]: + type_ = options.get("type") + type_part = "" + if type_ == "plain": + type_part = "pl" + elif type_ == "phrase": + type_part = "ph" + elif type_ == "web_search": + type_part = "w" + config_part = f"({options.get('config')})" if options.get("config") else "" + self.request.query = self.request.query.set( + column, f"{type_part}fts{config_part}.{query}" + ) + + return QueryRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + def csv(self) -> TextRequestBuilder[HttpIO]: + """Specify that the query must retrieve data as a single CSV string.""" + self.request.headers = self.request.headers.set("Accept", "text/csv") + return TextRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + @overload + def explain(self) -> ExplainRequestBuilder[HttpIO]: ... + + @overload + def explain( + self, + *, + analyze: bool, + verbose: bool, + settings: bool, + buffers: bool, + wal: bool, + format: Literal["text"], + ) -> ExplainRequestBuilder[HttpIO]: ... + + @overload + def explain( + self, + *, + analyze: bool, + verbose: bool, + settings: bool, + buffers: bool, + wal: bool, + format: Literal["json"], + ) -> QueryRequestBuilder[HttpIO]: ... 
+ + def explain( + self, + *, + analyze: bool = False, + verbose: bool = False, + settings: bool = False, + buffers: bool = False, + wal: bool = False, + format: Literal["text", "json"] = "text", + ) -> ExplainRequestBuilder[HttpIO] | QueryRequestBuilder[HttpIO]: + options = [ + key + for key, value in locals().items() + if key not in ["self", "format"] and value + ] + options_str = "|".join(options) + self.request.headers = self.request.headers.set( + "Accept", f"application/vnd.pgrst.plan+{format}; options={options_str}" + ) + if format == "text": + return ExplainRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + else: + return QueryRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=self.request, + ) + + +class RequestBuilder(Generic[HttpIO]): # + def __init__( + self, + executor: HttpIO, + base_url: URL, + default_headers: Headers, + ) -> None: + self.executor: HttpIO = executor + self.base_url = base_url + self.default_headers = default_headers + + def select( + self, + *columns: str, + count: CountMethod | None = None, + head: bool | None = None, + ) -> SelectRequestBuilder[HttpIO]: + """Run a SELECT query. + + Args: + *columns: The names of the columns to fetch. + count: The method to use to get the count of rows returned. 
+ Returns: + :class:`SelectRequestBuilder` + """ + method, params, headers, json = pre_select(*columns, count=count, head=head) + request = JSONRequest( + path=[], + query=params, + method=method, + headers=headers, + body=json, + ) + return SelectRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + + def insert( + self, + json: JSON, + *, + count: CountMethod | None = None, + returning: ReturnMethod = ReturnMethod.representation, + upsert: bool = False, + default_to_null: bool = True, + ) -> QueryRequestBuilder[HttpIO]: + """Run an INSERT query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + upsert: Whether the query should be an upsert. + default_to_null: Make missing fields default to `null`. + Otherwise, use the default value for the column. + Only applies for bulk inserts. + Returns: + :class:`AsyncQueryRequestBuilder` + """ + method, params, headers, json = pre_insert( + json, + count=count, + returning=returning, + upsert=upsert, + default_to_null=default_to_null, + ) + + request = JSONRequest( + path=[], + query=params, + method=method, + headers=headers, + body=json, + ) + return QueryRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + + def upsert( + self, + json: JSON, + *, + count: CountMethod | None = None, + returning: ReturnMethod = ReturnMethod.representation, + ignore_duplicates: bool = False, + on_conflict: str = "", + default_to_null: bool = True, + ) -> QueryRequestBuilder[HttpIO]: + """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + ignore_duplicates: Whether duplicate rows should be ignored. 
+ on_conflict: Specified columns to be made to work with UNIQUE constraint. + default_to_null: Make missing fields default to `null`. Otherwise, use the + default value for the column. This only applies when inserting new rows, + not when merging with existing rows under `ignoreDuplicates: false`. + This also only applies when doing bulk upserts. + Returns: + :class:`AsyncQueryRequestBuilder` + """ + method, params, headers, json = pre_upsert( + json, + count=count, + returning=returning, + ignore_duplicates=ignore_duplicates, + on_conflict=on_conflict, + default_to_null=default_to_null, + ) + request = JSONRequest( + path=[], + query=params, + method=method, + headers=headers, + body=json, + ) + return QueryRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + + def update( + self, + json: JSON, + *, + count: CountMethod | None = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> FilterRequestBuilder[HttpIO]: + """Run an UPDATE query. + + Args: + json: The updated fields. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + Returns: + :class:`AsyncFilterRequestBuilder` + """ + method, params, headers, json = pre_update( + json, + count=count, + returning=returning, + ) + request = JSONRequest( + path=[], + query=params, + method=method, + headers=headers, + body=json, + ) + return FilterRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) + + def delete( + self, + *, + count: CountMethod | None = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> FilterRequestBuilder[HttpIO]: + """Run a DELETE query. + + Args: + count: The method to use to get the count of rows returned. 
+ returning: Either 'minimal' or 'representation' + Returns: + :class:`AsyncFilterRequestBuilder` + """ + method, params, headers, json = pre_delete( + count=count, + returning=returning, + ) + request = JSONRequest( + path=[], + query=params, + method=method, + headers=headers, + body=json, + ) + return FilterRequestBuilder( + executor=self.executor, + base_url=self.base_url, + default_headers=self.default_headers, + request=request, + ) diff --git a/src/postgrest/src/postgrest/types.py b/src/postgrest/src/postgrest/types.py index 748f87e4..fa6f94ce 100644 --- a/src/postgrest/src/postgrest/types.py +++ b/src/postgrest/src/postgrest/types.py @@ -1,25 +1,12 @@ from __future__ import annotations import sys -from collections.abc import Mapping, Sequence -from typing import Union - -from httpx import AsyncClient, BasicAuth, Client, Headers, QueryParams -from pydantic import TypeAdapter -from typing_extensions import TypeAliasType -from yarl import URL if sys.version_info >= (3, 11): from enum import StrEnum else: from strenum import StrEnum -# https://docs.pydantic.dev/2.11/concepts/types/#named-recursive-types -JSON = TypeAliasType( - "JSON", "Union[None, bool, str, int, float, Sequence[JSON], Mapping[str, JSON]]" -) -JSONAdapter: TypeAdapter = TypeAdapter(JSON) - class CountMethod(StrEnum): exact = "exact" diff --git a/src/postgrest/src/postgrest/utils.py b/src/postgrest/src/postgrest/utils.py index 0fa2cbec..c570897d 100644 --- a/src/postgrest/src/postgrest/utils.py +++ b/src/postgrest/src/postgrest/utils.py @@ -1,7 +1,6 @@ from __future__ import annotations -from typing import Any, Type, TypeVar, cast, get_origin -from urllib.parse import urlparse +from typing import Any, Type, TypeVar from deprecation import deprecated from httpx import AsyncClient # noqa: F401 diff --git a/src/postgrest/tests/_async/client.py b/src/postgrest/tests/_async/client.py deleted file mode 100644 index fd585bbb..00000000 --- a/src/postgrest/tests/_async/client.py +++ /dev/null @@ -1,28 
+0,0 @@ -from httpx import AsyncClient, AsyncHTTPTransport, Limits - -from postgrest import AsyncPostgrestClient - -REST_URL = "http://127.0.0.1:3000" - - -def rest_client(): - return AsyncPostgrestClient( - base_url=REST_URL, - ) - - -def rest_client_httpx() -> AsyncPostgrestClient: - transport = AsyncHTTPTransport( - retries=4, - limits=Limits( - max_connections=1, - max_keepalive_connections=1, - keepalive_expiry=None, - ), - ) - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = AsyncClient(transport=transport, headers=headers) - return AsyncPostgrestClient( - base_url=REST_URL, - http_client=http_client, - ) diff --git a/src/postgrest/tests/_async/conftest.py b/src/postgrest/tests/_async/conftest.py new file mode 100644 index 00000000..b01fa025 --- /dev/null +++ b/src/postgrest/tests/_async/conftest.py @@ -0,0 +1,46 @@ +from typing import AsyncIterable + +import pytest +from aiohttp import ClientSession +from httpx import AsyncClient, AsyncHTTPTransport, Limits +from supabase_utils.http.adapters.aiohttp import AsyncAiohttpSession +from supabase_utils.http.adapters.httpx import AsyncHttpxSession + +from postgrest import AsyncPostgrestClient + +REST_URL = "http://127.0.0.1:3000" + + +def httpx_client() -> AsyncClient: + transport = AsyncHTTPTransport( + retries=4, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = AsyncClient( + transport=transport, headers=headers, http2=True, verify=True + ) + return http_client + + +def httpx() -> AsyncHttpxSession: + return AsyncHttpxSession(client=httpx_client()) + + +def aiohttp() -> AsyncAiohttpSession: + return AsyncAiohttpSession(client=ClientSession()) + + +@pytest.fixture(params=[httpx, aiohttp]) +async def postgrest_client( + request: pytest.FixtureRequest, +) -> AsyncIterable[AsyncPostgrestClient]: + async with AsyncPostgrestClient( + base_url=REST_URL, + http_session=request.param(), + ) as 
client: + yield client diff --git a/src/postgrest/tests/_async/test_client.py b/src/postgrest/tests/_async/test_client.py deleted file mode 100644 index 349a0345..00000000 --- a/src/postgrest/tests/_async/test_client.py +++ /dev/null @@ -1,167 +0,0 @@ -from unittest.mock import patch - -import pytest -from httpx import ( - AsyncClient, - AsyncHTTPTransport, - BasicAuth, - Headers, - Limits, - Request, - Response, - Timeout, -) - -from postgrest import AsyncPostgrestClient -from postgrest.exceptions import APIError - - -@pytest.fixture -async def postgrest_client(): - async with AsyncPostgrestClient("https://example.com") as client: - yield client - - -class TestConstructor: - def test_simple(self, postgrest_client: AsyncPostgrestClient): - session = postgrest_client.session - - assert session.base_url == "https://example.com" - headers = Headers( - { - "Accept": "application/json", - "Content-Type": "application/json", - "Accept-Profile": "public", - "Content-Profile": "public", - } - ) - assert session.headers.items() >= headers.items() - - @pytest.mark.asyncio - async def test_custom_headers(self): - async with AsyncPostgrestClient( - "https://example.com", schema="pub", headers={"Custom-Header": "value"} - ) as client: - session = client.session - - assert session.base_url == "https://example.com" - headers = Headers( - { - "Accept-Profile": "pub", - "Content-Profile": "pub", - "Custom-Header": "value", - } - ) - assert session.headers.items() >= headers.items() - - -class TestHttpxClientConstructor: - @pytest.mark.asyncio - async def test_custom_httpx_client(self) -> None: - transport = AsyncHTTPTransport( - retries=10, - limits=Limits( - max_connections=1, - max_keepalive_connections=1, - keepalive_expiry=None, - ), - ) - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = AsyncClient(transport=transport, headers=headers) - async with AsyncPostgrestClient( - "https://example.com", http_client=http_client, timeout=20.0 - ) as client: - assert 
str(client.base_url) == "https://example.com" - assert client.session.timeout == Timeout( - timeout=5.0 - ) # Should be the default 5 since we use custom httpx client - assert client.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert isinstance(client.session, AsyncClient) - - -class TestAuth: - def test_auth_token(self, postgrest_client: AsyncPostgrestClient): - postgrest_client.auth("s3cr3t") - assert postgrest_client.headers["Authorization"] == "Bearer s3cr3t" - - def test_auth_basic(self, postgrest_client: AsyncPostgrestClient): - postgrest_client.auth(None, username="admin", password="s3cr3t") - - assert isinstance(postgrest_client.basic_auth, BasicAuth) - assert ( - postgrest_client.basic_auth._auth_header - == BasicAuth("admin", "s3cr3t")._auth_header - ) - - -def test_schema(postgrest_client: AsyncPostgrestClient): - client = postgrest_client.schema("private") - subheaders = { - "accept-profile": "private", - "content-profile": "private", - } - - assert subheaders.items() < client.headers.items() - - -# @pytest.mark.asyncio -# async def test_params_purged_after_execute(postgrest_client: AsyncPostgrestClient): -# assert len(postgrest_client.session.params) == 0 -# with pytest.raises(APIError): -# await postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() -# assert len(postgrest_client.session.params) == 0 - - -@pytest.mark.asyncio -async def test_response_status_code_outside_ok(postgrest_client: AsyncPostgrestClient): - with patch( - "postgrest._async.request_builder.AsyncSelectRequestBuilder.execute", - side_effect=APIError( - { - "message": "mock error", - "code": "400", - "hint": "mock", - "details": "mock", - "errors": [{"code": 400}], - } - ), - ): - with pytest.raises(APIError) as exc_info: - await ( - postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() - ) # gives status_code = 400 - exc_response = exc_info.value.json() - assert not exc_response.get("success") - assert 
isinstance(exc_response.get("errors"), list) - assert ( - isinstance(exc_response["errors"][0], dict) - and "code" in exc_response["errors"][0] - ) - assert exc_response["errors"][0].get("code") == 400 - - -# https://github.com/supabase/postgrest-py/issues/595 -@pytest.mark.asyncio -async def test_response_client_invalid_response_but_valid_json( - postgrest_client: AsyncPostgrestClient, -): - with patch( - "httpx._client.AsyncClient.request", - return_value=Response( - status_code=502, - text='"gateway error: Error: Network connection lost."', # quotes makes this text a valid non-dict JSON object - request=Request(method="GET", url="http://example.com"), - ), - ): - client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single() - assert "Accept" in client.request.headers - assert ( - client.request.headers.get("Accept") == "application/vnd.pgrst.object+json" - ) - with pytest.raises(APIError) as exc_info: - await client.execute() - assert isinstance(exc_info, pytest.ExceptionInfo) - exc_response = exc_info.value.json() - assert isinstance(exc_response.get("message"), str) - assert exc_response.get("message") == "JSON could not be generated" - assert "code" in exc_response and int(exc_response["code"]) == 502 diff --git a/src/postgrest/tests/_async/test_filter_request_builder.py b/src/postgrest/tests/_async/test_filter_request_builder.py deleted file mode 100644 index e4a63eb9..00000000 --- a/src/postgrest/tests/_async/test_filter_request_builder.py +++ /dev/null @@ -1,300 +0,0 @@ -from typing import AsyncIterable - -import pytest -from httpx import AsyncClient, Headers, QueryParams -from yarl import URL - -from postgrest import AsyncFilterRequestBuilder -from postgrest._async.request_builder import RequestConfig - - -@pytest.fixture -async def filter_request_builder() -> AsyncIterable[AsyncFilterRequestBuilder]: - async with AsyncClient() as client: - request = RequestConfig( - client, URL("/example_table"), "GET", Headers(), QueryParams(), None, {} 
- ) - yield AsyncFilterRequestBuilder(request) - - -def test_constructor(filter_request_builder: AsyncFilterRequestBuilder): - builder = filter_request_builder - - assert str(builder.request.path) == "/example_table" - assert len(builder.request.headers) == 0 - assert len(builder.request.params) == 0 - assert builder.request.http_method == "GET" - assert builder.request.json is None - assert not builder.negate_next - - -def test_not_(filter_request_builder): - builder = filter_request_builder.not_ - - assert builder.negate_next - - -def test_filter(filter_request_builder): - builder = filter_request_builder.filter(":col.name", "eq", "val") - - assert builder.request.params['":col.name"'] == "eq.val" - - -@pytest.mark.parametrize( - "col_name, expected_query_prefix", - [ - ("col:name", "%22col%3Aname%22"), - ("col.name", "col.name"), - ], -) -def test_filter_special_characters( - filter_request_builder, col_name, expected_query_prefix -): - builder = filter_request_builder.filter(col_name, "eq", "val") - - assert str(builder.request.params) == f"{expected_query_prefix}=eq.val" - - -def test_multivalued_param(filter_request_builder): - builder = filter_request_builder.lte("x", "a").gte("x", "b") - - assert str(builder.request.params) == "x=lte.a&x=gte.b" - - -def test_match(filter_request_builder): - builder = filter_request_builder.match({"id": "1", "done": "false"}) - assert str(builder.request.params) == "id=eq.1&done=eq.false" - - -def test_equals(filter_request_builder): - builder = filter_request_builder.eq("x", "a") - - assert str(builder.request.params) == "x=eq.a" - - -def test_not_equal(filter_request_builder): - builder = filter_request_builder.neq("x", "a") - - assert str(builder.request.params) == "x=neq.a" - - -def test_greater_than(filter_request_builder): - builder = filter_request_builder.gt("x", "a") - - assert str(builder.request.params) == "x=gt.a" - - -def test_greater_than_or_equals_to(filter_request_builder): - builder = 
filter_request_builder.gte("x", "a") - - assert str(builder.request.params) == "x=gte.a" - - -def test_contains(filter_request_builder): - builder = filter_request_builder.contains("x", "a") - - assert str(builder.request.params) == "x=cs.a" - - -def test_contains_dictionary(filter_request_builder): - builder = filter_request_builder.contains("x", {"a": "b"}) - - # {"a":"b"} - assert str(builder.request.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D" - - -def test_contains_any_item(filter_request_builder): - builder = filter_request_builder.contains("x", ["a", "b"]) - - # {a,b} - assert str(builder.request.params) == "x=cs.%7Ba%2Cb%7D" - - -def test_contains_in_list(filter_request_builder): - builder = filter_request_builder.contains("x", '[{"a": "b"}]') - - # [{"a":+"b"}] (the + represents the space) - assert str(builder.request.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D" - - -def test_contained_by_mixed_items(filter_request_builder): - builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) - - # {a,["b",+"c"]} - assert str(builder.request.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D" - - -def test_range_greater_than(filter_request_builder): - builder = filter_request_builder.range_gt( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_greater_than_or_equal_to(filter_request_builder): - builder = filter_request_builder.range_gte( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_less_than(filter_request_builder): - builder = filter_request_builder.range_lt( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def 
test_range_less_than_or_equal_to(filter_request_builder): - builder = filter_request_builder.range_lte( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_adjacent(filter_request_builder): - builder = filter_request_builder.range_adjacent( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_overlaps(filter_request_builder): - builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"]) - - # {a,["b",+"c"]} - assert str(builder.request.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D" - - -def test_overlaps_with_timestamp_range(filter_request_builder): - builder = filter_request_builder.overlaps( - "x", "[2000-01-01 12:45, 2000-01-01 13:15)" - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29" - ) - - -def test_like(filter_request_builder): - builder = filter_request_builder.like("x", "%a%") - - assert str(builder.request.params) == "x=like.%25a%25" - - -def test_ilike(filter_request_builder): - builder = filter_request_builder.ilike("x", "%a%") - - assert str(builder.request.params) == "x=ilike.%25a%25" - - -def test_like_all_of(filter_request_builder): - builder = filter_request_builder.like_all_of("x", "A*,*b") - - assert str(builder.request.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D" - - -def test_like_any_of(filter_request_builder): - builder = filter_request_builder.like_any_of("x", "a*,*b") - - assert str(builder.request.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D" - - -def test_ilike_all_of(filter_request_builder): - builder = filter_request_builder.ilike_all_of("x", "A*,*b") - - assert str(builder.request.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D" - - -def 
test_ilike_any_of(filter_request_builder): - builder = filter_request_builder.ilike_any_of("x", "A*,*b") - - assert str(builder.request.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D" - - -def test_is_(filter_request_builder): - builder = filter_request_builder.is_("x", "a") - - assert str(builder.request.params) == "x=is.a" - - -def test_in_(filter_request_builder): - builder = filter_request_builder.in_("x", ["a", "b"]) - - assert str(builder.request.params) == "x=in.%28a%2Cb%29" - - -def test_or_(filter_request_builder): - builder = filter_request_builder.or_("x.eq.1") - - assert str(builder.request.params) == "or=%28x.eq.1%29" - - -def test_or_in_contain(filter_request_builder): - builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") - - assert ( - str(builder.request.params) - == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29" - ) - - -def test_max_affected(filter_request_builder): - builder = filter_request_builder.max_affected(5) - - assert builder.request.headers["prefer"] == "handling=strict,max-affected=5" - - -def test_max_affected_with_existing_prefer_header(filter_request_builder): - # Set an existing prefer header - filter_request_builder.request.headers["prefer"] = "return=representation" - builder = filter_request_builder.max_affected(10) - - assert ( - builder.request.headers["prefer"] - == "return=representation,handling=strict,max-affected=10" - ) - - -def test_max_affected_with_existing_handling_strict(filter_request_builder): - # Set an existing prefer header with handling=strict - filter_request_builder.request.headers["prefer"] = "handling=strict,return=minimal" - builder = filter_request_builder.max_affected(3) - - assert ( - builder.request.headers["prefer"] - == "handling=strict,return=minimal,max-affected=3" - ) - - -def test_max_affected_returns_self(filter_request_builder): - builder = filter_request_builder.max_affected(1) - - assert builder is filter_request_builder diff --git 
a/src/postgrest/tests/_async/test_filter_request_builder_integration.py b/src/postgrest/tests/_async/test_filter_request_builder_integration.py index 53404ec2..04455714 100644 --- a/src/postgrest/tests/_async/test_filter_request_builder_integration.py +++ b/src/postgrest/tests/_async/test_filter_request_builder_integration.py @@ -1,12 +1,9 @@ -from postgrest import CountMethod +from postgrest import AsyncPostgrestClient, CountMethod -from .client import rest_client, rest_client_httpx - -async def test_multivalued_param_httpx(): - res = ( - await rest_client_httpx() - .from_("countries") +async def test_multivalued_param_httpx(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso", count=CountMethod.exact) .lte("numcode", 8) .gte("numcode", 4) @@ -20,10 +17,9 @@ async def test_multivalued_param_httpx(): ] -async def test_multivalued_param(): - res = ( - await rest_client() - .from_("countries") +async def test_multivalued_param(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso", count=CountMethod.exact) .lte("numcode", 8) .gte("numcode", 4) @@ -37,10 +33,9 @@ async def test_multivalued_param(): ] -async def test_match(): - res = ( - await rest_client() - .from_("countries") +async def test_match(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 8, "nicename": "Albania"}) .single() @@ -50,23 +45,21 @@ async def test_match(): assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -async def test_match_maybe_single(): - res = ( - await rest_client() - .from_("countries") +async def test_match_maybe_single(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 8, "nicename": "Albania"}) .maybe_single() .execute() ) - + 
assert res assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -async def test_no_match_maybe_single(): +async def test_no_match_maybe_single(postgrest_client: AsyncPostgrestClient) -> None: res = ( - await rest_client() - .from_("countries") + await postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 100, "nicename": "Wonderland"}) .maybe_single() @@ -76,10 +69,9 @@ async def test_no_match_maybe_single(): assert res is None -async def test_equals(): - res = ( - await rest_client() - .from_("countries") +async def test_equals(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .eq("nicename", "Albania") .single() @@ -89,10 +81,9 @@ async def test_equals(): assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -async def test_not_equal(): - res = ( - await rest_client() - .from_("users") +async def test_not_equal(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("users") .select("id, name") .neq("name", "Jane") .single() @@ -102,10 +93,9 @@ async def test_not_equal(): assert res.data == {"id": 1, "name": "Michael"} -async def test_greater_than(): +async def test_greater_than(postgrest_client: AsyncPostgrestClient) -> None: res = ( - await rest_client() - .from_("users") + await postgrest_client.from_("users") .select("id, name") .gt("id", 1) .single() @@ -115,16 +105,19 @@ async def test_greater_than(): assert res.data == {"id": 2, "name": "Jane"} -async def test_greater_than_or_equals_to(): - res = await rest_client().from_("users").select("id, name").gte("id", 1).execute() +async def test_greater_than_or_equals_to( + postgrest_client: AsyncPostgrestClient, +) -> None: + res = ( + await postgrest_client.from_("users").select("id, name").gte("id", 1).execute() + ) assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}] -async def test_contains_dictionary(): - res = ( - await 
rest_client() - .from_("users") +async def test_contains_dictionary(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("users") .select("name") .contains("address", {"postcode": 90210}) .single() @@ -134,10 +127,9 @@ async def test_contains_dictionary(): assert res.data == {"name": "Michael"} -async def test_contains_any_item(): - res = ( - await rest_client() - .from_("issues") +async def test_contains_any_item(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("issues") .select("title") .contains("tags", ["is:open", "priority:low"]) .execute() @@ -146,10 +138,9 @@ async def test_contains_any_item(): assert res.data == [{"title": "Cache invalidation is not working"}] -async def test_contains_on_range(): - res = ( - await rest_client() - .from_("reservations") +async def test_contains_on_range(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)") .execute() @@ -158,10 +149,9 @@ async def test_contains_on_range(): assert res.data == [{"id": 1, "room_name": "Emerald"}] -async def test_contained_by_mixed_items(): - res = ( - await rest_client() - .from_("reservations") +async def test_contained_by_mixed_items(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") .contained_by("during", "[2000-01-01 00:00, 2000-01-01 23:59)") .execute() @@ -170,70 +160,68 @@ async def test_contained_by_mixed_items(): assert res.data == [{"id": 1, "room_name": "Emerald"}] -async def test_range_greater_than(): - res = ( - await rest_client() - .from_("reservations") +async def test_range_greater_than(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") - .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"]) + 
.range_gt("during", "2000-01-02 08:00", "2000-01-02 09:00") .execute() ) assert res.data == [{"id": 2, "room_name": "Topaz"}] -async def test_range_greater_than_or_equal_to(): - res = ( - await rest_client() - .from_("reservations") +async def test_range_greater_than_or_equal_to( + postgrest_client: AsyncPostgrestClient, +) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") - .range_gte("during", ["2000-01-02 08:30", "2000-01-02 09:30"]) + .range_gte("during", "2000-01-02 08:30", "2000-01-02 09:30") .execute() ) assert res.data == [{"id": 2, "room_name": "Topaz"}] -async def test_range_less_than(): - res = ( - await rest_client() - .from_("reservations") +async def test_range_less_than(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") - .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"]) + .range_lt("during", "2000-01-01 15:00", "2000-01-02 16:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] -async def test_range_less_than_or_equal_to(): - res = ( - await rest_client() - .from_("reservations") +async def test_range_less_than_or_equal_to( + postgrest_client: AsyncPostgrestClient, +) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") - .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"]) + .range_lte("during", "2000-01-01 14:00", "2000-01-01 16:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] -async def test_range_adjacent(): - res = ( - await rest_client() - .from_("reservations") +async def test_range_adjacent(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("reservations") .select("id, room_name") - .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"]) + .range_adjacent("during", "2000-01-01 12:00", "2000-01-01 13:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] 
-async def test_overlaps(): - res = ( - await rest_client() - .from_("issues") +async def test_overlaps(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("issues") .select("title") .overlaps("tags", ["is:closed", "severity:high"]) .execute() @@ -245,10 +233,11 @@ async def test_overlaps(): ] -async def test_overlaps_with_timestamp_range(): - res = ( - await rest_client() - .from_("reservations") +async def test_overlaps_with_timestamp_range( + postgrest_client: AsyncPostgrestClient, +) -> None: + res = await ( + postgrest_client.from_("reservations") .select("room_name") .overlaps("during", "[2000-01-01 12:45, 2000-01-01 13:15)") .execute() @@ -259,10 +248,9 @@ async def test_overlaps_with_timestamp_range(): ] -async def test_like(): - res = ( - await rest_client() - .from_("countries") +async def test_like(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .like("nicename", "%Alba%") .execute() @@ -271,10 +259,9 @@ async def test_like(): assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] -async def test_ilike(): - res = ( - await rest_client() - .from_("countries") +async def test_ilike(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .ilike("nicename", "%alban%") .execute() @@ -283,10 +270,9 @@ async def test_ilike(): assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] -async def test_like_all_of(): - res = ( - await rest_client() - .from_("countries") +async def test_like_all_of(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("nicename, iso") .like_all_of("nicename", "A*,*n") .execute() @@ -295,10 +281,9 @@ async def test_like_all_of(): assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] -async def test_like_any_of(): - res = ( - await rest_client() - 
.from_("countries") +async def test_like_any_of(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("nicename, iso") .like_any_of("nicename", "Al*,*ia") .execute() @@ -310,10 +295,9 @@ async def test_like_any_of(): ] -async def test_ilike_all_of(): - res = ( - await rest_client() - .from_("countries") +async def test_ilike_all_of(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("nicename, iso") .ilike_all_of("nicename", "a*,*n") .execute() @@ -322,10 +306,9 @@ async def test_ilike_all_of(): assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] -async def test_ilike_any_of(): - res = ( - await rest_client() - .from_("countries") +async def test_ilike_any_of(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("nicename, iso") .ilike_any_of("nicename", "al*,*ia") .execute() @@ -337,10 +320,9 @@ async def test_ilike_any_of(): ] -async def test_is_(): - res = ( - await rest_client() - .from_("countries") +async def test_is_(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .is_("numcode", "null") .limit(1) @@ -351,10 +333,9 @@ async def test_is_(): assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}] -async def test_is_not(): - res = ( - await rest_client() - .from_("countries") +async def test_is_not(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .not_.is_("numcode", "null") .limit(1) @@ -365,10 +346,9 @@ async def test_is_not(): assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}] -async def test_in_(): - res = ( - await rest_client() - .from_("countries") +async def test_in_(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") 
.select("country_name, iso") .in_("nicename", ["Albania", "Algeria"]) .execute() @@ -380,10 +360,9 @@ async def test_in_(): ] -async def test_or_(): - res = ( - await rest_client() - .from_("countries") +async def test_or_(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .or_("iso.eq.DZ,nicename.eq.Albania") .execute() @@ -395,10 +374,9 @@ async def test_or_(): ] -async def test_or_with_and(): - res = ( - await rest_client() - .from_("countries") +async def test_or_with_and(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)") .execute() @@ -410,10 +388,9 @@ async def test_or_with_and(): ] -async def test_or_in(): - res = ( - await rest_client() - .from_("issues") +async def test_or_in(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("issues") .select("id, title") .or_("id.in.(1,4),tags.cs.{is:open,priority:high}") .execute() @@ -426,10 +403,9 @@ async def test_or_in(): ] -async def test_or_on_reference_table(): - res = ( - await rest_client() - .from_("countries") +async def test_or_on_reference_table(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") .execute() @@ -448,37 +424,51 @@ async def test_or_on_reference_table(): ] -async def test_explain_json(): - res = ( - await rest_client() - .from_("countries") +async def test_explain_json(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") - .explain(format="json", analyze=True) + .explain( + format="json", + analyze=True, + verbose=False, + 
settings=False, + wal=False, + buffers=False, + ) .execute() ) + assert isinstance(res.data, list) + assert isinstance(res.data[0], dict) + assert isinstance(res.data[0]["Plan"], dict) assert res.data[0]["Plan"]["Node Type"] == "Aggregate" -async def test_csv(): - res = ( - await rest_client() - .from_("countries") +async def test_csv(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .in_("nicename", ["Albania", "Algeria"]) .csv() .execute() ) - assert "ALBANIA,AL\nALGERIA,DZ" in res.data + assert "ALBANIA,AL\nALGERIA,DZ" in res -async def test_explain_text(): - res = ( - await rest_client() - .from_("countries") +async def test_explain_text(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") - .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True) + .explain( + analyze=True, + verbose=True, + settings=True, + wal=True, + buffers=True, + format="text", + ) .execute() ) assert ( @@ -487,10 +477,9 @@ async def test_explain_text(): ) -async def test_rpc_with_single(): - res = ( - await rest_client() - .rpc("list_stored_countries", {}) +async def test_rpc_with_single(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, country_name, iso") .eq("nicename", "Albania") .single() @@ -500,10 +489,9 @@ async def test_rpc_with_single(): assert res.data == {"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"} -async def test_rpc_with_limit(): - res = ( - await rest_client() - .rpc("list_stored_countries", {}) +async def test_rpc_with_limit(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, country_name, iso") .eq("nicename", "Albania") .limit(1) 
@@ -513,10 +501,9 @@ async def test_rpc_with_limit(): assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}] -async def test_rpc_with_range(): - res = ( - await rest_client() - .rpc("list_stored_countries", {}) +async def test_rpc_with_range(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, iso") .range(1, 2) .execute() @@ -528,30 +515,29 @@ async def test_rpc_with_range(): ] -async def test_rpc_post_with_args(): - res = ( - await rest_client() - .rpc("search_countries_by_name", {"search_name": "Alban"}) +async def test_rpc_post_with_args(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc("search_countries_by_name", {"search_name": "Alban"}) .select("nicename, iso") .execute() ) assert res.data == [{"nicename": "Albania", "iso": "AL"}] -async def test_rpc_get_with_args(): - res = ( - await rest_client() - .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True) +async def test_rpc_get_with_args(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc( + "search_countries_by_name", {"search_name": "Alger"}, get=True + ) .select("nicename, iso") .execute() ) assert res.data == [{"nicename": "Algeria", "iso": "DZ"}] -async def test_rpc_get_with_count(): - res = ( - await rest_client() - .rpc( +async def test_rpc_get_with_count(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.rpc( "search_countries_by_name", {"search_name": "Al"}, get=True, @@ -564,26 +550,20 @@ async def test_rpc_get_with_count(): assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] -async def test_rpc_head_count(): - res = ( - await rest_client() - .rpc( - "search_countries_by_name", - {"search_name": "Al"}, - head=True, - count=CountMethod.exact, - ) - .execute() - ) +async def test_rpc_head_count(postgrest_client: AsyncPostgrestClient) -> None: + res = 
await postgrest_client.rpc( + "search_countries_by_name", + {"search_name": "Al"}, + head=True, + count=CountMethod.exact, + ).execute() - assert res.count == 2 - assert res.data == [] + assert res == 2 -async def test_order(): - res = ( - await rest_client() - .from_("countries") +async def test_order(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("countries") .select("country_name, iso") .limit(3) .order("nicename", desc=True) @@ -597,10 +577,9 @@ async def test_order(): ] -async def test_order_on_foreign_table(): - res = ( - await rest_client() - .from_("orchestral_sections") +async def test_order_on_foreign_table(postgrest_client: AsyncPostgrestClient) -> None: + res = await ( + postgrest_client.from_("orchestral_sections") .select("name, instruments(name)") .order("name", desc=True, foreign_table="instruments") .execute() diff --git a/src/postgrest/tests/_async/test_query_request_builder.py b/src/postgrest/tests/_async/test_query_request_builder.py deleted file mode 100644 index 78edc2da..00000000 --- a/src/postgrest/tests/_async/test_query_request_builder.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import AsyncIterable - -import pytest -from httpx import AsyncClient, Headers, QueryParams -from yarl import URL - -from postgrest import AsyncQueryRequestBuilder -from postgrest._async.request_builder import RequestConfig - - -@pytest.fixture -async def query_request_builder() -> AsyncIterable[AsyncQueryRequestBuilder]: - async with AsyncClient() as client: - request = RequestConfig( - client, URL("/example_table"), "GET", Headers(), QueryParams(), None, {} - ) - yield AsyncQueryRequestBuilder(request) - - -def test_constructor(query_request_builder: AsyncQueryRequestBuilder): - builder = query_request_builder - - assert str(builder.request.path) == "/example_table" - assert len(builder.request.headers) == 0 - assert len(builder.request.params) == 0 - assert builder.request.http_method == "GET" - assert 
builder.request.json is None diff --git a/src/postgrest/tests/_async/test_request_builder.py b/src/postgrest/tests/_async/test_request_builder.py index 356be18f..c4f23ddd 100644 --- a/src/postgrest/tests/_async/test_request_builder.py +++ b/src/postgrest/tests/_async/test_request_builder.py @@ -1,206 +1,230 @@ from typing import Any, AsyncIterable, Dict, List import pytest -from httpx import AsyncClient, Headers, QueryParams, Request, Response +from httpx import AsyncClient +from supabase_utils.http.adapters.httpx import AsyncHttpxSession +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import AsyncHttpIO +from supabase_utils.http.request import Request, Response +from supabase_utils.types import JSON, JSONParser from yarl import URL -from postgrest import AsyncRequestBuilder, AsyncSingleRequestBuilder -from postgrest._async.request_builder import RequestConfig -from postgrest.base_request_builder import APIResponse, SingleAPIResponse -from postgrest.types import JSON, CountMethod +from postgrest.request_builder import ( + APIResponse, + RequestBuilder, + SingleAPIResponse, + TextRequestBuilder, +) +from postgrest.types import CountMethod @pytest.fixture -async def request_builder() -> AsyncIterable[AsyncRequestBuilder]: +async def request_builder() -> AsyncIterable[RequestBuilder[AsyncHttpIO]]: async with AsyncClient() as client: - yield AsyncRequestBuilder(client, URL("/example_table"), Headers(), None) - - -def test_constructor(request_builder): - assert str(request_builder.path) == "/example_table" + yield RequestBuilder( + executor=AsyncHttpIO(session=AsyncHttpxSession(client=client)), + base_url=URL("/example_table"), + default_headers=Headers.empty(), + ) class TestSelect: - def test_select(self, request_builder: AsyncRequestBuilder): + def test_select(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.select("col1", "col2") - assert builder.request.params["select"] == "col1,col2" + assert 
builder.request.query["select"] == "col1,col2" assert builder.request.headers.get("prefer") is None - assert builder.request.http_method == "GET" - assert builder.request.json is None + assert builder.request.method == "GET" + assert builder.request.body == {} - def test_select_with_count(self, request_builder: AsyncRequestBuilder): + def test_select_with_count( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.select(count=CountMethod.exact) - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert builder.request.headers["prefer"] == "count=exact" - assert builder.request.http_method == "GET" - assert builder.request.json is None + assert builder.request.method == "GET" + assert builder.request.body == {} - def test_select_with_head(self, request_builder: AsyncRequestBuilder): + def test_select_with_head( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.select("col1", "col2", head=True) - assert builder.request.params.get("select") == "col1,col2" + assert builder.request.query.get("select") == "col1,col2" assert builder.request.headers.get("prefer") is None - assert builder.request.http_method == "HEAD" - assert builder.request.json is None + assert builder.request.method == "HEAD" + assert builder.request.body == {} - def test_select_as_csv(self, request_builder: AsyncRequestBuilder): + def test_select_as_csv(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.select("*").csv() assert builder.request.headers["Accept"] == "text/csv" - assert isinstance(builder, AsyncSingleRequestBuilder) + assert isinstance(builder, TextRequestBuilder) class TestInsert: - def test_insert(self, request_builder: AsyncRequestBuilder): + def test_insert(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.insert({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) 
== [ - "return=representation" - ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_insert_with_count(self, request_builder: AsyncRequestBuilder): + def test_insert_with_count( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_insert_with_upsert(self, request_builder: AsyncRequestBuilder): + def test_insert_with_upsert( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.insert({"key1": "val1"}, upsert=True) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_upsert_with_default_single(self, request_builder: AsyncRequestBuilder): + def test_upsert_with_default_single( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.upsert([{"key1": "val1"}], default_to_null=False) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", "missing=default", ] - assert 
builder.request.http_method == "POST" - assert builder.request.json == [{"key1": "val1"}] - assert builder.request.params.get("columns") == '"key1"' + assert builder.request.method == "POST" + assert builder.request.body == [{"key1": "val1"}] + assert builder.request.query.get("columns") == '"key1"' - def test_bulk_insert_using_default(self, request_builder: AsyncRequestBuilder): + def test_bulk_insert_using_default( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.insert( [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False ) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "missing=default", ] - assert builder.request.http_method == "POST" - assert builder.request.json == [ + assert builder.request.method == "POST" + assert builder.request.body == [ {"key1": "val1", "key2": "val2"}, {"key3": "val3"}, ] - assert set(builder.request.params["columns"].split(",")) == set( + assert set(builder.request.query["columns"].split(",")) == set( '"key1","key2","key3"'.split(",") ) - def test_upsert(self, request_builder: AsyncRequestBuilder): + def test_upsert(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.upsert({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_bulk_upsert_with_default(self, request_builder: AsyncRequestBuilder): + def test_bulk_upsert_with_default( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.upsert( [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], 
default_to_null=False ) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", "missing=default", ] - assert builder.request.http_method == "POST" - assert builder.request.json == [ + assert builder.request.method == "POST" + assert builder.request.body == [ {"key1": "val1", "key2": "val2"}, {"key3": "val3"}, ] - assert set(builder.request.params["columns"].split(",")) == set( + assert set(builder.request.query["columns"].split(",")) == set( '"key1","key2","key3"'.split(",") ) class TestUpdate: - def test_update(self, request_builder: AsyncRequestBuilder): + def test_update(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.update({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) == [ - "return=representation" - ] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} - def test_update_with_count(self, request_builder: AsyncRequestBuilder): + def test_update_with_count( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.update({"key1": "val1"}, count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} - def test_update_with_max_affected(self, request_builder: AsyncRequestBuilder): + def test_update_with_max_affected( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = 
request_builder.update({"key1": "val1"}).max_affected(5) assert "handling=strict" in builder.request.headers["prefer"] assert "max-affected=5" in builder.request.headers["prefer"] assert "return=representation" in builder.request.headers["prefer"] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} class TestDelete: - def test_delete(self, request_builder: AsyncRequestBuilder): + def test_delete(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.delete() - assert builder.request.headers.get_list("prefer", True) == [ - "return=representation" - ] - assert builder.request.http_method == "DELETE" - assert builder.request.json == {} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "DELETE" + assert builder.request.body == {} - def test_delete_with_count(self, request_builder: AsyncRequestBuilder): + def test_delete_with_count( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.delete(count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "DELETE" - assert builder.request.json == {} + assert builder.request.method == "DELETE" + assert builder.request.body == {} - def test_delete_with_max_affected(self, request_builder: AsyncRequestBuilder): + def test_delete_with_max_affected( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.delete().max_affected(10) assert "handling=strict" in builder.request.headers["prefer"] assert "max-affected=10" in builder.request.headers["prefer"] assert "return=representation" in builder.request.headers["prefer"] - assert builder.request.http_method == 
"DELETE" - assert builder.request.json == {} + assert builder.request.method == "DELETE" + assert builder.request.body == {} class TestTextSearch: - def test_text_search(self, request_builder: AsyncRequestBuilder): + def test_text_search(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.select("catchphrase").text_search( "catchphrase", "'fat' & 'cat'", @@ -209,24 +233,32 @@ def test_text_search(self, request_builder: AsyncRequestBuilder): "config": "english", }, ) - assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str( - builder.request.params + assert builder.request.query.get("select") == "catchphrase" + assert ( + builder.request.query.get("catchphrase") == "plfts(english).'fat' & 'cat'" ) class TestExplain: - def test_explain_plain(self, request_builder: AsyncRequestBuilder): + def test_explain_plain(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = request_builder.select("*").explain() - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert "application/vnd.pgrst.plan" in str( builder.request.headers.get("accept") ) - def test_explain_options(self, request_builder: AsyncRequestBuilder): + def test_explain_options( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.select("*").explain( - format="json", analyze=True, verbose=True, buffers=True, wal=True + analyze=True, + verbose=True, + buffers=True, + settings=False, + wal=True, + format="json", ) - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert "application/vnd.pgrst.plan+json;" in str( builder.request.headers.get("accept") ) @@ -236,49 +268,58 @@ def test_explain_options(self, request_builder: AsyncRequestBuilder): class TestOrder: - def test_order(self, request_builder: AsyncRequestBuilder): + def test_order(self, request_builder: RequestBuilder[AsyncHttpIO]) -> None: builder = 
request_builder.select().order("country_name", desc=True) - assert str(builder.request.params) == "select=%2A&order=country_name.desc" + assert builder.request.query.get("select") == "*" + assert builder.request.query.get("order") == "country_name.desc" - def test_multiple_orders(self, request_builder: AsyncRequestBuilder): + def test_multiple_orders( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = ( request_builder.select() .order("country_name", desc=True) .order("iso", desc=True) ) - assert ( - str(builder.request.params) - == "select=%2A&order=country_name.desc%2Ciso.desc" - ) + assert builder.request.query.get("select") == "*" + assert builder.request.query.get_list("order") == [ + "country_name.desc", + "iso.desc", + ] def test_multiple_orders_on_foreign_table( - self, request_builder: AsyncRequestBuilder - ): + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: foreign_table = "cities" builder = ( request_builder.select() .order("city_name", desc=True, foreign_table=foreign_table) .order("id", desc=True, foreign_table=foreign_table) ) - assert ( - str(builder.request.params) - == "select=%2A&cities.order=city_name.desc%2Cid.desc" - ) + assert builder.request.query.get("select") == "*" + assert builder.request.query.get_list("cities.order") == [ + "city_name.desc", + "id.desc", + ] class TestRange: - def test_range_on_own_table(self, request_builder: AsyncRequestBuilder): + def test_range_on_own_table( + self, request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: builder = request_builder.select("*").range(0, 1) - assert builder.request.params["select"] == "*" - assert builder.request.params["limit"] == "2" - assert builder.request.params["offset"] == "0" + assert builder.request.query["select"] == "*" + assert builder.request.query["limit"] == "2" + assert builder.request.query["offset"] == "0" - def test_range_on_foreign_table(self, request_builder: AsyncRequestBuilder): + def test_range_on_foreign_table( + self, 
request_builder: RequestBuilder[AsyncHttpIO] + ) -> None: foreign_table = "cities" builder = request_builder.select("*").range(1, 2, foreign_table) - assert builder.request.params["select"] == "*" - assert builder.request.params[f"{foreign_table}.limit"] == "2" - assert builder.request.params[f"{foreign_table}.offset"] == "1" + assert builder.request.query["select"] == "*" + assert builder.request.query[f"{foreign_table}.limit"] == "2" + assert builder.request.query[f"{foreign_table}.offset"] == "1" @pytest.fixture @@ -296,7 +337,7 @@ def api_response_with_error() -> Dict[str, Any]: @pytest.fixture -def api_response() -> List[Dict[str, Any]]: +def api_response() -> List[Dict[str, JSON]]: return [ { "id": 1, @@ -318,7 +359,7 @@ def api_response() -> List[Dict[str, Any]]: @pytest.fixture -def single_api_response() -> Dict[str, Any]: +def single_api_response() -> Dict[str, JSON]: return { "id": 1, "name": "Bonaire, Sint Eustatius and Saba", @@ -352,7 +393,15 @@ def prefer_header_without_count() -> str: @pytest.fixture def request_response_without_prefer_header() -> Response: return Response( - status_code=200, request=Request(method="GET", url="http://example.com") + status=200, + request=Request( + method="GET", + url=URL("http://example.com"), + content=None, + headers=Headers.empty(), + ), + content=b"", + headers=Headers.empty(), ) @@ -361,11 +410,14 @@ def request_response_with_prefer_header_without_count( prefer_header_without_count: str, ) -> Response: return Response( - status_code=200, + status=200, + headers=Headers.empty(), + content=b"", request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_without_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_without_count}), + content=None, ), ) @@ -375,12 +427,16 @@ def request_response_with_prefer_header_with_count_and_content_range( prefer_header_with_count: str, content_range_header_with_count: str ) -> Response: return 
Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=b"", request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ -389,16 +445,19 @@ def request_response_with_prefer_header_with_count_and_content_range( def request_response_with_data( prefer_header_with_count: str, content_range_header_with_count: str, - api_response: List[Dict[str, Any]], + api_response: List[Dict[str, JSON]], ) -> Response: return Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, - json=api_response, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=JSONParser.dump_json(api_response), request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ -407,16 +466,19 @@ def request_response_with_data( def request_response_with_single_data( prefer_header_with_count: str, content_range_header_with_count: str, - single_api_response: Dict[str, Any], + single_api_response: Dict[str, JSON], ) -> Response: return Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, - json=single_api_response, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=JSONParser.dump_json(single_api_response), request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ 
-424,22 +486,28 @@ def request_response_with_single_data( @pytest.fixture def request_response_with_csv_data(csv_api_response: str) -> Response: return Response( - status_code=200, - text=csv_api_response, - request=Request(method="GET", url="http://example.com"), + status=200, + content=csv_api_response.encode("utf-8"), + headers=Headers.empty(), + request=Request( + method="GET", + url=URL("http://example.com"), + headers=Headers.empty(), + content=None, + ), ) class TestApiResponse: - def test_response_raises_when_api_error(self, api_response_with_error: List[JSON]): - with pytest.raises(ValueError): - APIResponse(data=api_response_with_error) - - def test_parses_valid_response_only_data(self, api_response: List[JSON]): + def test_parses_valid_response_only_data( + self, api_response: List[Dict[str, JSON]] + ) -> None: result = APIResponse(data=api_response) assert result.data == api_response - def test_parses_valid_response_data_and_count(self, api_response: List[JSON]): + def test_parses_valid_response_data_and_count( + self, api_response: List[Dict[str, JSON]] + ) -> None: count = len(api_response) result = APIResponse(data=api_response, count=count) assert result.data == api_response @@ -447,7 +515,7 @@ def test_parses_valid_response_data_and_count(self, api_response: List[JSON]): def test_get_count_from_content_range_header_with_count( self, content_range_header_with_count: str - ): + ) -> None: assert ( APIResponse._get_count_from_content_range_header( content_range_header_with_count @@ -457,7 +525,7 @@ def test_get_count_from_content_range_header_with_count( def test_get_count_from_content_range_header_without_count( self, content_range_header_without_count: str - ): + ) -> None: assert ( APIResponse._get_count_from_content_range_header( content_range_header_without_count @@ -465,15 +533,19 @@ def test_get_count_from_content_range_header_without_count( is None ) - def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str): + def 
test_is_count_in_prefer_header_true( + self, prefer_header_with_count: str + ) -> None: assert APIResponse._is_count_in_prefer_header(prefer_header_with_count) - def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str): + def test_is_count_in_prefer_header_false( + self, prefer_header_without_count: str + ) -> None: assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count) def test_get_count_from_http_request_response_without_prefer_header( self, request_response_without_prefer_header: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_without_prefer_header @@ -483,7 +555,7 @@ def test_get_count_from_http_request_response_without_prefer_header( def test_get_count_from_http_request_response_with_prefer_header_without_count( self, request_response_with_prefer_header_without_count: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_with_prefer_header_without_count @@ -493,7 +565,7 @@ def test_get_count_from_http_request_response_with_prefer_header_without_count( def test_get_count_from_http_request_response_with_count_and_content_range( self, request_response_with_prefer_header_with_count_and_content_range: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_with_prefer_header_with_count_and_content_range @@ -503,7 +575,7 @@ def test_get_count_from_http_request_response_with_count_and_content_range( def test_from_http_request_response_constructor( self, request_response_with_data: Response, api_response: List[Dict[str, Any]] - ): + ) -> None: result = APIResponse.from_http_request_response(request_response_with_data) assert result.data == api_response assert result.count == 2 @@ -512,19 +584,10 @@ def test_single_from_http_request_response_constructor( self, request_response_with_single_data: Response, single_api_response: Dict[str, Any], - ): + ) -> None: result = 
SingleAPIResponse.from_http_request_response( request_response_with_single_data ) assert isinstance(result.data, dict) assert result.data == single_api_response assert result.count == 2 - - def test_single_with_csv_data( - self, request_response_with_csv_data: Response, csv_api_response: str - ): - result = SingleAPIResponse.from_http_request_response( - request_response_with_csv_data - ) - assert isinstance(result.data, str) - assert result.data == csv_api_response diff --git a/src/postgrest/tests/_sync/client.py b/src/postgrest/tests/_sync/client.py deleted file mode 100644 index 832095aa..00000000 --- a/src/postgrest/tests/_sync/client.py +++ /dev/null @@ -1,28 +0,0 @@ -from httpx import Client, HTTPTransport, Limits - -from postgrest import SyncPostgrestClient - -REST_URL = "http://127.0.0.1:3000" - - -def rest_client(): - return SyncPostgrestClient( - base_url=REST_URL, - ) - - -def rest_client_httpx() -> SyncPostgrestClient: - transport = HTTPTransport( - retries=4, - limits=Limits( - max_connections=1, - max_keepalive_connections=1, - keepalive_expiry=None, - ), - ) - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = Client(transport=transport, headers=headers) - return SyncPostgrestClient( - base_url=REST_URL, - http_client=http_client, - ) diff --git a/src/postgrest/tests/_sync/conftest.py b/src/postgrest/tests/_sync/conftest.py new file mode 100644 index 00000000..eab79fcb --- /dev/null +++ b/src/postgrest/tests/_sync/conftest.py @@ -0,0 +1,38 @@ +from typing import Iterable + +import pytest +from httpx import Client, HTTPTransport, Limits +from supabase_utils.http.adapters.httpx import HttpxSession + +from postgrest import SyncPostgrestClient + +REST_URL = "http://127.0.0.1:3000" + + +def httpx_client() -> Client: + transport = HTTPTransport( + retries=4, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = Client(transport=transport, 
headers=headers, http2=True, verify=True) + return http_client + + +def httpx() -> HttpxSession: + return HttpxSession(client=httpx_client()) + + +@pytest.fixture(params=[httpx]) +def postgrest_client( + request: pytest.FixtureRequest, +) -> Iterable[SyncPostgrestClient]: + with SyncPostgrestClient( + base_url=REST_URL, + http_session=request.param(), + ) as client: + yield client diff --git a/src/postgrest/tests/_sync/test_client.py b/src/postgrest/tests/_sync/test_client.py deleted file mode 100644 index 9a7f117c..00000000 --- a/src/postgrest/tests/_sync/test_client.py +++ /dev/null @@ -1,165 +0,0 @@ -from unittest.mock import patch - -import pytest -from httpx import ( - BasicAuth, - Client, - Headers, - HTTPTransport, - Limits, - Request, - Response, - Timeout, -) - -from postgrest import SyncPostgrestClient -from postgrest.exceptions import APIError - - -@pytest.fixture -def postgrest_client(): - with SyncPostgrestClient("https://example.com") as client: - yield client - - -class TestConstructor: - def test_simple(self, postgrest_client: SyncPostgrestClient): - session = postgrest_client.session - - assert session.base_url == "https://example.com" - headers = Headers( - { - "Accept": "application/json", - "Content-Type": "application/json", - "Accept-Profile": "public", - "Content-Profile": "public", - } - ) - assert session.headers.items() >= headers.items() - - def test_custom_headers(self): - with SyncPostgrestClient( - "https://example.com", schema="pub", headers={"Custom-Header": "value"} - ) as client: - session = client.session - - assert session.base_url == "https://example.com" - headers = Headers( - { - "Accept-Profile": "pub", - "Content-Profile": "pub", - "Custom-Header": "value", - } - ) - assert session.headers.items() >= headers.items() - - -class TestHttpxClientConstructor: - def test_custom_httpx_client(self) -> None: - transport = HTTPTransport( - retries=10, - limits=Limits( - max_connections=1, - max_keepalive_connections=1, - 
keepalive_expiry=None, - ), - ) - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = Client(transport=transport, headers=headers) - with SyncPostgrestClient( - "https://example.com", http_client=http_client, timeout=20.0 - ) as client: - assert str(client.base_url) == "https://example.com" - assert client.session.timeout == Timeout( - timeout=5.0 - ) # Should be the default 5 since we use custom httpx client - assert client.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert isinstance(client.session, Client) - - -class TestAuth: - def test_auth_token(self, postgrest_client: SyncPostgrestClient): - postgrest_client.auth("s3cr3t") - assert postgrest_client.headers["Authorization"] == "Bearer s3cr3t" - - def test_auth_basic(self, postgrest_client: SyncPostgrestClient): - postgrest_client.auth(None, username="admin", password="s3cr3t") - - assert isinstance(postgrest_client.basic_auth, BasicAuth) - assert ( - postgrest_client.basic_auth._auth_header - == BasicAuth("admin", "s3cr3t")._auth_header - ) - - -def test_schema(postgrest_client: SyncPostgrestClient): - client = postgrest_client.schema("private") - subheaders = { - "accept-profile": "private", - "content-profile": "private", - } - - assert subheaders.items() < client.headers.items() - - -# -# async def test_params_purged_after_execute(postgrest_client: SyncPostgrestClient): -# assert len(postgrest_client.session.params) == 0 -# with pytest.raises(APIError): -# await postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() -# assert len(postgrest_client.session.params) == 0 - - -def test_response_status_code_outside_ok(postgrest_client: SyncPostgrestClient): - with patch( - "postgrest._sync.request_builder.SyncSelectRequestBuilder.execute", - side_effect=APIError( - { - "message": "mock error", - "code": "400", - "hint": "mock", - "details": "mock", - "errors": [{"code": 400}], - } - ), - ): - with pytest.raises(APIError) as exc_info: - ( - 
postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() - ) # gives status_code = 400 - exc_response = exc_info.value.json() - assert not exc_response.get("success") - assert isinstance(exc_response.get("errors"), list) - assert ( - isinstance(exc_response["errors"][0], dict) - and "code" in exc_response["errors"][0] - ) - assert exc_response["errors"][0].get("code") == 400 - - -# https://github.com/supabase/postgrest-py/issues/595 - - -def test_response_client_invalid_response_but_valid_json( - postgrest_client: SyncPostgrestClient, -): - with patch( - "httpx._client.Client.request", - return_value=Response( - status_code=502, - text='"gateway error: Error: Network connection lost."', # quotes makes this text a valid non-dict JSON object - request=Request(method="GET", url="http://example.com"), - ), - ): - client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single() - assert "Accept" in client.request.headers - assert ( - client.request.headers.get("Accept") == "application/vnd.pgrst.object+json" - ) - with pytest.raises(APIError) as exc_info: - client.execute() - assert isinstance(exc_info, pytest.ExceptionInfo) - exc_response = exc_info.value.json() - assert isinstance(exc_response.get("message"), str) - assert exc_response.get("message") == "JSON could not be generated" - assert "code" in exc_response and int(exc_response["code"]) == 502 diff --git a/src/postgrest/tests/_sync/test_filter_request_builder.py b/src/postgrest/tests/_sync/test_filter_request_builder.py deleted file mode 100644 index c5c5c1d8..00000000 --- a/src/postgrest/tests/_sync/test_filter_request_builder.py +++ /dev/null @@ -1,300 +0,0 @@ -from typing import Iterable - -import pytest -from httpx import Client, Headers, QueryParams -from yarl import URL - -from postgrest import SyncFilterRequestBuilder -from postgrest._sync.request_builder import RequestConfig - - -@pytest.fixture -def filter_request_builder() -> Iterable[SyncFilterRequestBuilder]: - with 
Client() as client: - request = RequestConfig( - client, URL("/example_table"), "GET", Headers(), QueryParams(), None, {} - ) - yield SyncFilterRequestBuilder(request) - - -def test_constructor(filter_request_builder: SyncFilterRequestBuilder): - builder = filter_request_builder - - assert str(builder.request.path) == "/example_table" - assert len(builder.request.headers) == 0 - assert len(builder.request.params) == 0 - assert builder.request.http_method == "GET" - assert builder.request.json is None - assert not builder.negate_next - - -def test_not_(filter_request_builder): - builder = filter_request_builder.not_ - - assert builder.negate_next - - -def test_filter(filter_request_builder): - builder = filter_request_builder.filter(":col.name", "eq", "val") - - assert builder.request.params['":col.name"'] == "eq.val" - - -@pytest.mark.parametrize( - "col_name, expected_query_prefix", - [ - ("col:name", "%22col%3Aname%22"), - ("col.name", "col.name"), - ], -) -def test_filter_special_characters( - filter_request_builder, col_name, expected_query_prefix -): - builder = filter_request_builder.filter(col_name, "eq", "val") - - assert str(builder.request.params) == f"{expected_query_prefix}=eq.val" - - -def test_multivalued_param(filter_request_builder): - builder = filter_request_builder.lte("x", "a").gte("x", "b") - - assert str(builder.request.params) == "x=lte.a&x=gte.b" - - -def test_match(filter_request_builder): - builder = filter_request_builder.match({"id": "1", "done": "false"}) - assert str(builder.request.params) == "id=eq.1&done=eq.false" - - -def test_equals(filter_request_builder): - builder = filter_request_builder.eq("x", "a") - - assert str(builder.request.params) == "x=eq.a" - - -def test_not_equal(filter_request_builder): - builder = filter_request_builder.neq("x", "a") - - assert str(builder.request.params) == "x=neq.a" - - -def test_greater_than(filter_request_builder): - builder = filter_request_builder.gt("x", "a") - - assert 
str(builder.request.params) == "x=gt.a" - - -def test_greater_than_or_equals_to(filter_request_builder): - builder = filter_request_builder.gte("x", "a") - - assert str(builder.request.params) == "x=gte.a" - - -def test_contains(filter_request_builder): - builder = filter_request_builder.contains("x", "a") - - assert str(builder.request.params) == "x=cs.a" - - -def test_contains_dictionary(filter_request_builder): - builder = filter_request_builder.contains("x", {"a": "b"}) - - # {"a":"b"} - assert str(builder.request.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D" - - -def test_contains_any_item(filter_request_builder): - builder = filter_request_builder.contains("x", ["a", "b"]) - - # {a,b} - assert str(builder.request.params) == "x=cs.%7Ba%2Cb%7D" - - -def test_contains_in_list(filter_request_builder): - builder = filter_request_builder.contains("x", '[{"a": "b"}]') - - # [{"a":+"b"}] (the + represents the space) - assert str(builder.request.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D" - - -def test_contained_by_mixed_items(filter_request_builder): - builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) - - # {a,["b",+"c"]} - assert str(builder.request.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D" - - -def test_range_greater_than(filter_request_builder): - builder = filter_request_builder.range_gt( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_greater_than_or_equal_to(filter_request_builder): - builder = filter_request_builder.range_gte( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_less_than(filter_request_builder): - builder = filter_request_builder.range_lt( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( 
- str(builder.request.params) - == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_less_than_or_equal_to(filter_request_builder): - builder = filter_request_builder.range_lte( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_range_adjacent(filter_request_builder): - builder = filter_request_builder.range_adjacent( - "x", ["2000-01-02 08:30", "2000-01-02 09:30"] - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" - ) - - -def test_overlaps(filter_request_builder): - builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"]) - - # {a,["b",+"c"]} - assert str(builder.request.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D" - - -def test_overlaps_with_timestamp_range(filter_request_builder): - builder = filter_request_builder.overlaps( - "x", "[2000-01-01 12:45, 2000-01-01 13:15)" - ) - - # {a,["b",+"c"]} - assert ( - str(builder.request.params) - == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29" - ) - - -def test_like(filter_request_builder): - builder = filter_request_builder.like("x", "%a%") - - assert str(builder.request.params) == "x=like.%25a%25" - - -def test_ilike(filter_request_builder): - builder = filter_request_builder.ilike("x", "%a%") - - assert str(builder.request.params) == "x=ilike.%25a%25" - - -def test_like_all_of(filter_request_builder): - builder = filter_request_builder.like_all_of("x", "A*,*b") - - assert str(builder.request.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D" - - -def test_like_any_of(filter_request_builder): - builder = filter_request_builder.like_any_of("x", "a*,*b") - - assert str(builder.request.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D" - - -def test_ilike_all_of(filter_request_builder): - builder = filter_request_builder.ilike_all_of("x", "A*,*b") - - 
assert str(builder.request.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D" - - -def test_ilike_any_of(filter_request_builder): - builder = filter_request_builder.ilike_any_of("x", "A*,*b") - - assert str(builder.request.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D" - - -def test_is_(filter_request_builder): - builder = filter_request_builder.is_("x", "a") - - assert str(builder.request.params) == "x=is.a" - - -def test_in_(filter_request_builder): - builder = filter_request_builder.in_("x", ["a", "b"]) - - assert str(builder.request.params) == "x=in.%28a%2Cb%29" - - -def test_or_(filter_request_builder): - builder = filter_request_builder.or_("x.eq.1") - - assert str(builder.request.params) == "or=%28x.eq.1%29" - - -def test_or_in_contain(filter_request_builder): - builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") - - assert ( - str(builder.request.params) - == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29" - ) - - -def test_max_affected(filter_request_builder): - builder = filter_request_builder.max_affected(5) - - assert builder.request.headers["prefer"] == "handling=strict,max-affected=5" - - -def test_max_affected_with_existing_prefer_header(filter_request_builder): - # Set an existing prefer header - filter_request_builder.request.headers["prefer"] = "return=representation" - builder = filter_request_builder.max_affected(10) - - assert ( - builder.request.headers["prefer"] - == "return=representation,handling=strict,max-affected=10" - ) - - -def test_max_affected_with_existing_handling_strict(filter_request_builder): - # Set an existing prefer header with handling=strict - filter_request_builder.request.headers["prefer"] = "handling=strict,return=minimal" - builder = filter_request_builder.max_affected(3) - - assert ( - builder.request.headers["prefer"] - == "handling=strict,return=minimal,max-affected=3" - ) - - -def test_max_affected_returns_self(filter_request_builder): - builder = 
filter_request_builder.max_affected(1) - - assert builder is filter_request_builder diff --git a/src/postgrest/tests/_sync/test_filter_request_builder_integration.py b/src/postgrest/tests/_sync/test_filter_request_builder_integration.py index b5a9d79d..57c80fb7 100644 --- a/src/postgrest/tests/_sync/test_filter_request_builder_integration.py +++ b/src/postgrest/tests/_sync/test_filter_request_builder_integration.py @@ -1,12 +1,9 @@ -from postgrest import CountMethod +from postgrest import CountMethod, SyncPostgrestClient -from .client import rest_client, rest_client_httpx - -def test_multivalued_param_httpx(): +def test_multivalued_param_httpx(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client_httpx() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso", count=CountMethod.exact) .lte("numcode", 8) .gte("numcode", 4) @@ -20,10 +17,9 @@ def test_multivalued_param_httpx(): ] -def test_multivalued_param(): +def test_multivalued_param(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso", count=CountMethod.exact) .lte("numcode", 8) .gte("numcode", 4) @@ -37,10 +33,9 @@ def test_multivalued_param(): ] -def test_match(): +def test_match(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 8, "nicename": "Albania"}) .single() @@ -50,23 +45,21 @@ def test_match(): assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -def test_match_maybe_single(): +def test_match_maybe_single(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 8, "nicename": "Albania"}) .maybe_single() .execute() ) - + assert res assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -def 
test_no_match_maybe_single(): +def test_no_match_maybe_single(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .match({"numcode": 100, "nicename": "Wonderland"}) .maybe_single() @@ -76,10 +69,9 @@ def test_no_match_maybe_single(): assert res is None -def test_equals(): +def test_equals(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .eq("nicename", "Albania") .single() @@ -89,10 +81,9 @@ def test_equals(): assert res.data == {"country_name": "ALBANIA", "iso": "AL"} -def test_not_equal(): +def test_not_equal(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("users") + postgrest_client.from_("users") .select("id, name") .neq("name", "Jane") .single() @@ -102,22 +93,29 @@ def test_not_equal(): assert res.data == {"id": 1, "name": "Michael"} -def test_greater_than(): - res = rest_client().from_("users").select("id, name").gt("id", 1).single().execute() +def test_greater_than(postgrest_client: SyncPostgrestClient) -> None: + res = ( + postgrest_client.from_("users") + .select("id, name") + .gt("id", 1) + .single() + .execute() + ) assert res.data == {"id": 2, "name": "Jane"} -def test_greater_than_or_equals_to(): - res = rest_client().from_("users").select("id, name").gte("id", 1).execute() +def test_greater_than_or_equals_to( + postgrest_client: SyncPostgrestClient, +) -> None: + res = postgrest_client.from_("users").select("id, name").gte("id", 1).execute() assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}] -def test_contains_dictionary(): +def test_contains_dictionary(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("users") + postgrest_client.from_("users") .select("name") .contains("address", {"postcode": 90210}) .single() @@ -127,10 +125,9 @@ def 
test_contains_dictionary(): assert res.data == {"name": "Michael"} -def test_contains_any_item(): +def test_contains_any_item(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("issues") + postgrest_client.from_("issues") .select("title") .contains("tags", ["is:open", "priority:low"]) .execute() @@ -139,10 +136,9 @@ def test_contains_any_item(): assert res.data == [{"title": "Cache invalidation is not working"}] -def test_contains_on_range(): +def test_contains_on_range(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)") .execute() @@ -151,10 +147,9 @@ def test_contains_on_range(): assert res.data == [{"id": 1, "room_name": "Emerald"}] -def test_contained_by_mixed_items(): +def test_contained_by_mixed_items(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") .contained_by("during", "[2000-01-01 00:00, 2000-01-01 23:59)") .execute() @@ -163,70 +158,68 @@ def test_contained_by_mixed_items(): assert res.data == [{"id": 1, "room_name": "Emerald"}] -def test_range_greater_than(): +def test_range_greater_than(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") - .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"]) + .range_gt("during", "2000-01-02 08:00", "2000-01-02 09:00") .execute() ) assert res.data == [{"id": 2, "room_name": "Topaz"}] -def test_range_greater_than_or_equal_to(): +def test_range_greater_than_or_equal_to( + postgrest_client: SyncPostgrestClient, +) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") - .range_gte("during", ["2000-01-02 08:30", "2000-01-02 
09:30"]) + .range_gte("during", "2000-01-02 08:30", "2000-01-02 09:30") .execute() ) assert res.data == [{"id": 2, "room_name": "Topaz"}] -def test_range_less_than(): +def test_range_less_than(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") - .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"]) + .range_lt("during", "2000-01-01 15:00", "2000-01-02 16:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] -def test_range_less_than_or_equal_to(): +def test_range_less_than_or_equal_to( + postgrest_client: SyncPostgrestClient, +) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") - .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"]) + .range_lte("during", "2000-01-01 14:00", "2000-01-01 16:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] -def test_range_adjacent(): +def test_range_adjacent(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("id, room_name") - .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"]) + .range_adjacent("during", "2000-01-01 12:00", "2000-01-01 13:00") .execute() ) assert res.data == [{"id": 1, "room_name": "Emerald"}] -def test_overlaps(): +def test_overlaps(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("issues") + postgrest_client.from_("issues") .select("title") .overlaps("tags", ["is:closed", "severity:high"]) .execute() @@ -238,10 +231,11 @@ def test_overlaps(): ] -def test_overlaps_with_timestamp_range(): +def test_overlaps_with_timestamp_range( + postgrest_client: SyncPostgrestClient, +) -> None: res = ( - rest_client() - .from_("reservations") + postgrest_client.from_("reservations") .select("room_name") .overlaps("during", "[2000-01-01 12:45, 
2000-01-01 13:15)") .execute() @@ -252,10 +246,9 @@ def test_overlaps_with_timestamp_range(): ] -def test_like(): +def test_like(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .like("nicename", "%Alba%") .execute() @@ -264,10 +257,9 @@ def test_like(): assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] -def test_ilike(): +def test_ilike(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .ilike("nicename", "%alban%") .execute() @@ -276,10 +268,9 @@ def test_ilike(): assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] -def test_like_all_of(): +def test_like_all_of(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("nicename, iso") .like_all_of("nicename", "A*,*n") .execute() @@ -288,10 +279,9 @@ def test_like_all_of(): assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] -def test_like_any_of(): +def test_like_any_of(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("nicename, iso") .like_any_of("nicename", "Al*,*ia") .execute() @@ -303,10 +293,9 @@ def test_like_any_of(): ] -def test_ilike_all_of(): +def test_ilike_all_of(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("nicename, iso") .ilike_all_of("nicename", "a*,*n") .execute() @@ -315,10 +304,9 @@ def test_ilike_all_of(): assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] -def test_ilike_any_of(): +def test_ilike_any_of(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("nicename, iso") 
.ilike_any_of("nicename", "al*,*ia") .execute() @@ -330,10 +318,9 @@ def test_ilike_any_of(): ] -def test_is_(): +def test_is_(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .is_("numcode", "null") .limit(1) @@ -344,10 +331,9 @@ def test_is_(): assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}] -def test_is_not(): +def test_is_not(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .not_.is_("numcode", "null") .limit(1) @@ -358,10 +344,9 @@ def test_is_not(): assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}] -def test_in_(): +def test_in_(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .in_("nicename", ["Albania", "Algeria"]) .execute() @@ -373,10 +358,9 @@ def test_in_(): ] -def test_or_(): +def test_or_(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .or_("iso.eq.DZ,nicename.eq.Albania") .execute() @@ -388,10 +372,9 @@ def test_or_(): ] -def test_or_with_and(): +def test_or_with_and(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)") .execute() @@ -403,10 +386,9 @@ def test_or_with_and(): ] -def test_or_in(): +def test_or_in(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("issues") + postgrest_client.from_("issues") .select("id, title") .or_("id.in.(1,4),tags.cs.{is:open,priority:high}") .execute() @@ -419,10 +401,9 @@ def test_or_in(): ] -def test_or_on_reference_table(): +def 
test_or_on_reference_table(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") .execute() @@ -441,37 +422,51 @@ def test_or_on_reference_table(): ] -def test_explain_json(): +def test_explain_json(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") - .explain(format="json", analyze=True) + .explain( + format="json", + analyze=True, + verbose=False, + settings=False, + wal=False, + buffers=False, + ) .execute() ) + assert isinstance(res.data, list) + assert isinstance(res.data[0], dict) + assert isinstance(res.data[0]["Plan"], dict) assert res.data[0]["Plan"]["Node Type"] == "Aggregate" -def test_csv(): +def test_csv(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .in_("nicename", ["Albania", "Algeria"]) .csv() .execute() ) - assert "ALBANIA,AL\nALGERIA,DZ" in res.data + assert "ALBANIA,AL\nALGERIA,DZ" in res -def test_explain_text(): +def test_explain_text(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, cities!inner(name)") .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") - .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True) + .explain( + analyze=True, + verbose=True, + settings=True, + wal=True, + buffers=True, + format="text", + ) .execute() ) assert ( @@ -480,10 +475,9 @@ def test_explain_text(): ) -def test_rpc_with_single(): +def test_rpc_with_single(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - 
.rpc("list_stored_countries", {}) + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, country_name, iso") .eq("nicename", "Albania") .single() @@ -493,10 +487,9 @@ def test_rpc_with_single(): assert res.data == {"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"} -def test_rpc_with_limit(): +def test_rpc_with_limit(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .rpc("list_stored_countries", {}) + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, country_name, iso") .eq("nicename", "Albania") .limit(1) @@ -506,10 +499,9 @@ def test_rpc_with_limit(): assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}] -def test_rpc_with_range(): +def test_rpc_with_range(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .rpc("list_stored_countries", {}) + postgrest_client.rpc("list_stored_countries", {}) .select("nicename, iso") .range(1, 2) .execute() @@ -521,30 +513,29 @@ def test_rpc_with_range(): ] -def test_rpc_post_with_args(): +def test_rpc_post_with_args(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .rpc("search_countries_by_name", {"search_name": "Alban"}) + postgrest_client.rpc("search_countries_by_name", {"search_name": "Alban"}) .select("nicename, iso") .execute() ) assert res.data == [{"nicename": "Albania", "iso": "AL"}] -def test_rpc_get_with_args(): +def test_rpc_get_with_args(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True) + postgrest_client.rpc( + "search_countries_by_name", {"search_name": "Alger"}, get=True + ) .select("nicename, iso") .execute() ) assert res.data == [{"nicename": "Algeria", "iso": "DZ"}] -def test_rpc_get_with_count(): +def test_rpc_get_with_count(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .rpc( + postgrest_client.rpc( "search_countries_by_name", {"search_name": 
"Al"}, get=True, @@ -557,26 +548,20 @@ def test_rpc_get_with_count(): assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] -def test_rpc_head_count(): - res = ( - rest_client() - .rpc( - "search_countries_by_name", - {"search_name": "Al"}, - head=True, - count=CountMethod.exact, - ) - .execute() - ) +def test_rpc_head_count(postgrest_client: SyncPostgrestClient) -> None: + res = postgrest_client.rpc( + "search_countries_by_name", + {"search_name": "Al"}, + head=True, + count=CountMethod.exact, + ).execute() - assert res.count == 2 - assert res.data == [] + assert res == 2 -def test_order(): +def test_order(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("countries") + postgrest_client.from_("countries") .select("country_name, iso") .limit(3) .order("nicename", desc=True) @@ -590,10 +575,9 @@ def test_order(): ] -def test_order_on_foreign_table(): +def test_order_on_foreign_table(postgrest_client: SyncPostgrestClient) -> None: res = ( - rest_client() - .from_("orchestral_sections") + postgrest_client.from_("orchestral_sections") .select("name, instruments(name)") .order("name", desc=True, foreign_table="instruments") .execute() diff --git a/src/postgrest/tests/_sync/test_query_request_builder.py b/src/postgrest/tests/_sync/test_query_request_builder.py deleted file mode 100644 index 58fbf2fc..00000000 --- a/src/postgrest/tests/_sync/test_query_request_builder.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Iterable - -import pytest -from httpx import Client, Headers, QueryParams -from yarl import URL - -from postgrest import SyncQueryRequestBuilder -from postgrest._sync.request_builder import RequestConfig - - -@pytest.fixture -def query_request_builder() -> Iterable[SyncQueryRequestBuilder]: - with Client() as client: - request = RequestConfig( - client, URL("/example_table"), "GET", Headers(), QueryParams(), None, {} - ) - yield SyncQueryRequestBuilder(request) - - -def test_constructor(query_request_builder: 
SyncQueryRequestBuilder): - builder = query_request_builder - - assert str(builder.request.path) == "/example_table" - assert len(builder.request.headers) == 0 - assert len(builder.request.params) == 0 - assert builder.request.http_method == "GET" - assert builder.request.json is None diff --git a/src/postgrest/tests/_sync/test_request_builder.py b/src/postgrest/tests/_sync/test_request_builder.py index 5217de24..ce8b8c7e 100644 --- a/src/postgrest/tests/_sync/test_request_builder.py +++ b/src/postgrest/tests/_sync/test_request_builder.py @@ -1,206 +1,230 @@ from typing import Any, Dict, Iterable, List import pytest -from httpx import Client, Headers, QueryParams, Request, Response +from httpx import Client +from supabase_utils.http.adapters.httpx import HttpxSession +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import SyncHttpIO +from supabase_utils.http.request import Request, Response +from supabase_utils.types import JSON, JSONParser from yarl import URL -from postgrest import SyncRequestBuilder, SyncSingleRequestBuilder -from postgrest._async.request_builder import RequestConfig -from postgrest.base_request_builder import APIResponse, SingleAPIResponse -from postgrest.types import JSON, CountMethod +from postgrest.request_builder import ( + APIResponse, + RequestBuilder, + SingleAPIResponse, + TextRequestBuilder, +) +from postgrest.types import CountMethod @pytest.fixture -def request_builder() -> Iterable[SyncRequestBuilder]: +def request_builder() -> Iterable[RequestBuilder[SyncHttpIO]]: with Client() as client: - yield SyncRequestBuilder(client, URL("/example_table"), Headers(), None) - - -def test_constructor(request_builder): - assert str(request_builder.path) == "/example_table" + yield RequestBuilder( + executor=SyncHttpIO(session=HttpxSession(client=client)), + base_url=URL("/example_table"), + default_headers=Headers.empty(), + ) class TestSelect: - def test_select(self, request_builder: SyncRequestBuilder): + def 
test_select(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select("col1", "col2") - assert builder.request.params["select"] == "col1,col2" + assert builder.request.query["select"] == "col1,col2" assert builder.request.headers.get("prefer") is None - assert builder.request.http_method == "GET" - assert builder.request.json is None + assert builder.request.method == "GET" + assert builder.request.body == {} - def test_select_with_count(self, request_builder: SyncRequestBuilder): + def test_select_with_count( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.select(count=CountMethod.exact) - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert builder.request.headers["prefer"] == "count=exact" - assert builder.request.http_method == "GET" - assert builder.request.json is None + assert builder.request.method == "GET" + assert builder.request.body == {} - def test_select_with_head(self, request_builder: SyncRequestBuilder): + def test_select_with_head( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.select("col1", "col2", head=True) - assert builder.request.params.get("select") == "col1,col2" + assert builder.request.query.get("select") == "col1,col2" assert builder.request.headers.get("prefer") is None - assert builder.request.http_method == "HEAD" - assert builder.request.json is None + assert builder.request.method == "HEAD" + assert builder.request.body == {} - def test_select_as_csv(self, request_builder: SyncRequestBuilder): + def test_select_as_csv(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select("*").csv() assert builder.request.headers["Accept"] == "text/csv" - assert isinstance(builder, SyncSingleRequestBuilder) + assert isinstance(builder, TextRequestBuilder) class TestInsert: - def test_insert(self, request_builder: SyncRequestBuilder): + def 
test_insert(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.insert({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) == [ - "return=representation" - ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_insert_with_count(self, request_builder: SyncRequestBuilder): + def test_insert_with_count( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_insert_with_upsert(self, request_builder: SyncRequestBuilder): + def test_insert_with_upsert( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.insert({"key1": "val1"}, upsert=True) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_upsert_with_default_single(self, request_builder: SyncRequestBuilder): + def test_upsert_with_default_single( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.upsert([{"key1": "val1"}], default_to_null=False) - assert builder.request.headers.get_list("prefer", 
True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", "missing=default", ] - assert builder.request.http_method == "POST" - assert builder.request.json == [{"key1": "val1"}] - assert builder.request.params.get("columns") == '"key1"' + assert builder.request.method == "POST" + assert builder.request.body == [{"key1": "val1"}] + assert builder.request.query.get("columns") == '"key1"' - def test_bulk_insert_using_default(self, request_builder: SyncRequestBuilder): + def test_bulk_insert_using_default( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.insert( [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False ) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "missing=default", ] - assert builder.request.http_method == "POST" - assert builder.request.json == [ + assert builder.request.method == "POST" + assert builder.request.body == [ {"key1": "val1", "key2": "val2"}, {"key3": "val3"}, ] - assert set(builder.request.params["columns"].split(",")) == set( + assert set(builder.request.query["columns"].split(",")) == set( '"key1","key2","key3"'.split(",") ) - def test_upsert(self, request_builder: SyncRequestBuilder): + def test_upsert(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.upsert({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", ] - assert builder.request.http_method == "POST" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "POST" + assert builder.request.body == {"key1": "val1"} - def test_bulk_upsert_with_default(self, request_builder: SyncRequestBuilder): + def test_bulk_upsert_with_default( + self, request_builder: 
RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.upsert( [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False ) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "resolution=merge-duplicates", "missing=default", ] - assert builder.request.http_method == "POST" - assert builder.request.json == [ + assert builder.request.method == "POST" + assert builder.request.body == [ {"key1": "val1", "key2": "val2"}, {"key3": "val3"}, ] - assert set(builder.request.params["columns"].split(",")) == set( + assert set(builder.request.query["columns"].split(",")) == set( '"key1","key2","key3"'.split(",") ) class TestUpdate: - def test_update(self, request_builder: SyncRequestBuilder): + def test_update(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.update({"key1": "val1"}) - assert builder.request.headers.get_list("prefer", True) == [ - "return=representation" - ] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} - def test_update_with_count(self, request_builder: SyncRequestBuilder): + def test_update_with_count( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.update({"key1": "val1"}, count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} - def test_update_with_max_affected(self, request_builder: SyncRequestBuilder): + def 
test_update_with_max_affected( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.update({"key1": "val1"}).max_affected(5) assert "handling=strict" in builder.request.headers["prefer"] assert "max-affected=5" in builder.request.headers["prefer"] assert "return=representation" in builder.request.headers["prefer"] - assert builder.request.http_method == "PATCH" - assert builder.request.json == {"key1": "val1"} + assert builder.request.method == "PATCH" + assert builder.request.body == {"key1": "val1"} class TestDelete: - def test_delete(self, request_builder: SyncRequestBuilder): + def test_delete(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.delete() - assert builder.request.headers.get_list("prefer", True) == [ - "return=representation" - ] - assert builder.request.http_method == "DELETE" - assert builder.request.json == {} + assert builder.request.headers.get_list("prefer") == ["return=representation"] + assert builder.request.method == "DELETE" + assert builder.request.body == {} - def test_delete_with_count(self, request_builder: SyncRequestBuilder): + def test_delete_with_count( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.delete(count=CountMethod.exact) - assert builder.request.headers.get_list("prefer", True) == [ + assert builder.request.headers.get_list("prefer") == [ "return=representation", "count=exact", ] - assert builder.request.http_method == "DELETE" - assert builder.request.json == {} + assert builder.request.method == "DELETE" + assert builder.request.body == {} - def test_delete_with_max_affected(self, request_builder: SyncRequestBuilder): + def test_delete_with_max_affected( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.delete().max_affected(10) assert "handling=strict" in builder.request.headers["prefer"] assert "max-affected=10" in builder.request.headers["prefer"] assert 
"return=representation" in builder.request.headers["prefer"] - assert builder.request.http_method == "DELETE" - assert builder.request.json == {} + assert builder.request.method == "DELETE" + assert builder.request.body == {} class TestTextSearch: - def test_text_search(self, request_builder: SyncRequestBuilder): + def test_text_search(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select("catchphrase").text_search( "catchphrase", "'fat' & 'cat'", @@ -209,24 +233,30 @@ def test_text_search(self, request_builder: SyncRequestBuilder): "config": "english", }, ) - assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str( - builder.request.params + assert builder.request.query.get("select") == "catchphrase" + assert ( + builder.request.query.get("catchphrase") == "plfts(english).'fat' & 'cat'" ) class TestExplain: - def test_explain_plain(self, request_builder: SyncRequestBuilder): + def test_explain_plain(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select("*").explain() - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert "application/vnd.pgrst.plan" in str( builder.request.headers.get("accept") ) - def test_explain_options(self, request_builder: SyncRequestBuilder): + def test_explain_options(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select("*").explain( - format="json", analyze=True, verbose=True, buffers=True, wal=True + analyze=True, + verbose=True, + buffers=True, + settings=False, + wal=True, + format="json", ) - assert builder.request.params["select"] == "*" + assert builder.request.query["select"] == "*" assert "application/vnd.pgrst.plan+json;" in str( builder.request.headers.get("accept") ) @@ -236,49 +266,56 @@ def test_explain_options(self, request_builder: SyncRequestBuilder): class TestOrder: - def test_order(self, request_builder: SyncRequestBuilder): + def 
test_order(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = request_builder.select().order("country_name", desc=True) - assert str(builder.request.params) == "select=%2A&order=country_name.desc" + assert builder.request.query.get("select") == "*" + assert builder.request.query.get("order") == "country_name.desc" - def test_multiple_orders(self, request_builder: SyncRequestBuilder): + def test_multiple_orders(self, request_builder: RequestBuilder[SyncHttpIO]) -> None: builder = ( request_builder.select() .order("country_name", desc=True) .order("iso", desc=True) ) - assert ( - str(builder.request.params) - == "select=%2A&order=country_name.desc%2Ciso.desc" - ) + assert builder.request.query.get("select") == "*" + assert builder.request.query.get_list("order") == [ + "country_name.desc", + "iso.desc", + ] def test_multiple_orders_on_foreign_table( - self, request_builder: SyncRequestBuilder - ): + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: foreign_table = "cities" builder = ( request_builder.select() .order("city_name", desc=True, foreign_table=foreign_table) .order("id", desc=True, foreign_table=foreign_table) ) - assert ( - str(builder.request.params) - == "select=%2A&cities.order=city_name.desc%2Cid.desc" - ) + assert builder.request.query.get("select") == "*" + assert builder.request.query.get_list("cities.order") == [ + "city_name.desc", + "id.desc", + ] class TestRange: - def test_range_on_own_table(self, request_builder: SyncRequestBuilder): + def test_range_on_own_table( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: builder = request_builder.select("*").range(0, 1) - assert builder.request.params["select"] == "*" - assert builder.request.params["limit"] == "2" - assert builder.request.params["offset"] == "0" + assert builder.request.query["select"] == "*" + assert builder.request.query["limit"] == "2" + assert builder.request.query["offset"] == "0" - def test_range_on_foreign_table(self, 
request_builder: SyncRequestBuilder): + def test_range_on_foreign_table( + self, request_builder: RequestBuilder[SyncHttpIO] + ) -> None: foreign_table = "cities" builder = request_builder.select("*").range(1, 2, foreign_table) - assert builder.request.params["select"] == "*" - assert builder.request.params[f"{foreign_table}.limit"] == "2" - assert builder.request.params[f"{foreign_table}.offset"] == "1" + assert builder.request.query["select"] == "*" + assert builder.request.query[f"{foreign_table}.limit"] == "2" + assert builder.request.query[f"{foreign_table}.offset"] == "1" @pytest.fixture @@ -296,7 +333,7 @@ def api_response_with_error() -> Dict[str, Any]: @pytest.fixture -def api_response() -> List[Dict[str, Any]]: +def api_response() -> List[Dict[str, JSON]]: return [ { "id": 1, @@ -318,7 +355,7 @@ def api_response() -> List[Dict[str, Any]]: @pytest.fixture -def single_api_response() -> Dict[str, Any]: +def single_api_response() -> Dict[str, JSON]: return { "id": 1, "name": "Bonaire, Sint Eustatius and Saba", @@ -352,7 +389,15 @@ def prefer_header_without_count() -> str: @pytest.fixture def request_response_without_prefer_header() -> Response: return Response( - status_code=200, request=Request(method="GET", url="http://example.com") + status=200, + request=Request( + method="GET", + url=URL("http://example.com"), + content=None, + headers=Headers.empty(), + ), + content=b"", + headers=Headers.empty(), ) @@ -361,11 +406,14 @@ def request_response_with_prefer_header_without_count( prefer_header_without_count: str, ) -> Response: return Response( - status_code=200, + status=200, + headers=Headers.empty(), + content=b"", request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_without_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_without_count}), + content=None, ), ) @@ -375,12 +423,16 @@ def request_response_with_prefer_header_with_count_and_content_range( 
prefer_header_with_count: str, content_range_header_with_count: str ) -> Response: return Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=b"", request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ -389,16 +441,19 @@ def request_response_with_prefer_header_with_count_and_content_range( def request_response_with_data( prefer_header_with_count: str, content_range_header_with_count: str, - api_response: List[Dict[str, Any]], + api_response: List[Dict[str, JSON]], ) -> Response: return Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, - json=api_response, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=JSONParser.dump_json(api_response), request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ -407,16 +462,19 @@ def request_response_with_data( def request_response_with_single_data( prefer_header_with_count: str, content_range_header_with_count: str, - single_api_response: Dict[str, Any], + single_api_response: Dict[str, JSON], ) -> Response: return Response( - status_code=200, - headers={"content-range": content_range_header_with_count}, - json=single_api_response, + status=200, + headers=Headers.from_mapping( + {"content-range": content_range_header_with_count} + ), + content=JSONParser.dump_json(single_api_response), request=Request( method="GET", - url="http://example.com", - headers={"prefer": prefer_header_with_count}, + url=URL("http://example.com"), + 
headers=Headers.from_mapping({"prefer": prefer_header_with_count}), + content=None, ), ) @@ -424,22 +482,28 @@ def request_response_with_single_data( @pytest.fixture def request_response_with_csv_data(csv_api_response: str) -> Response: return Response( - status_code=200, - text=csv_api_response, - request=Request(method="GET", url="http://example.com"), + status=200, + content=csv_api_response.encode("utf-8"), + headers=Headers.empty(), + request=Request( + method="GET", + url=URL("http://example.com"), + headers=Headers.empty(), + content=None, + ), ) class TestApiResponse: - def test_response_raises_when_api_error(self, api_response_with_error: List[JSON]): - with pytest.raises(ValueError): - APIResponse(data=api_response_with_error) - - def test_parses_valid_response_only_data(self, api_response: List[JSON]): + def test_parses_valid_response_only_data( + self, api_response: List[Dict[str, JSON]] + ) -> None: result = APIResponse(data=api_response) assert result.data == api_response - def test_parses_valid_response_data_and_count(self, api_response: List[JSON]): + def test_parses_valid_response_data_and_count( + self, api_response: List[Dict[str, JSON]] + ) -> None: count = len(api_response) result = APIResponse(data=api_response, count=count) assert result.data == api_response @@ -447,7 +511,7 @@ def test_parses_valid_response_data_and_count(self, api_response: List[JSON]): def test_get_count_from_content_range_header_with_count( self, content_range_header_with_count: str - ): + ) -> None: assert ( APIResponse._get_count_from_content_range_header( content_range_header_with_count @@ -457,7 +521,7 @@ def test_get_count_from_content_range_header_with_count( def test_get_count_from_content_range_header_without_count( self, content_range_header_without_count: str - ): + ) -> None: assert ( APIResponse._get_count_from_content_range_header( content_range_header_without_count @@ -465,15 +529,19 @@ def test_get_count_from_content_range_header_without_count( is None ) - 
def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str): + def test_is_count_in_prefer_header_true( + self, prefer_header_with_count: str + ) -> None: assert APIResponse._is_count_in_prefer_header(prefer_header_with_count) - def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str): + def test_is_count_in_prefer_header_false( + self, prefer_header_without_count: str + ) -> None: assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count) def test_get_count_from_http_request_response_without_prefer_header( self, request_response_without_prefer_header: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_without_prefer_header @@ -483,7 +551,7 @@ def test_get_count_from_http_request_response_without_prefer_header( def test_get_count_from_http_request_response_with_prefer_header_without_count( self, request_response_with_prefer_header_without_count: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_with_prefer_header_without_count @@ -493,7 +561,7 @@ def test_get_count_from_http_request_response_with_prefer_header_without_count( def test_get_count_from_http_request_response_with_count_and_content_range( self, request_response_with_prefer_header_with_count_and_content_range: Response - ): + ) -> None: assert ( APIResponse._get_count_from_http_request_response( request_response_with_prefer_header_with_count_and_content_range @@ -503,7 +571,7 @@ def test_get_count_from_http_request_response_with_count_and_content_range( def test_from_http_request_response_constructor( self, request_response_with_data: Response, api_response: List[Dict[str, Any]] - ): + ) -> None: result = APIResponse.from_http_request_response(request_response_with_data) assert result.data == api_response assert result.count == 2 @@ -512,19 +580,10 @@ def test_single_from_http_request_response_constructor( self, 
request_response_with_single_data: Response, single_api_response: Dict[str, Any], - ): + ) -> None: result = SingleAPIResponse.from_http_request_response( request_response_with_single_data ) assert isinstance(result.data, dict) assert result.data == single_api_response assert result.count == 2 - - def test_single_with_csv_data( - self, request_response_with_csv_data: Response, csv_api_response: str - ): - result = SingleAPIResponse.from_http_request_response( - request_response_with_csv_data - ) - assert isinstance(result.data, str) - assert result.data == csv_api_response diff --git a/src/postgrest/tests/test_filter_builder.py b/src/postgrest/tests/test_filter_builder.py new file mode 100644 index 00000000..596ef9cf --- /dev/null +++ b/src/postgrest/tests/test_filter_builder.py @@ -0,0 +1,290 @@ +from typing import Iterable + +import pytest +from supabase_utils.http.headers import Headers +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import JSONRequest + +from postgrest.request_builder import BaseFilterRequestBuilder +from postgrest.utils import sanitize_param + + +@pytest.fixture +def filter_request_builder() -> Iterable[BaseFilterRequestBuilder]: + request = JSONRequest( + path=["example_table"], + method="GET", + headers=Headers.empty(), + query=URLQuery.empty(), + body={}, + ) + yield BaseFilterRequestBuilder( + request=request, + negate_next=False, + ) + + +def test_filter(filter_request_builder: BaseFilterRequestBuilder) -> None: + builder = filter_request_builder.filter(":col.name", "eq", "val") + + assert builder.request.query['":col.name"'] == "eq.val" + + +@pytest.mark.parametrize( + "col_name, expected_query_prefix", + [ + ("col:name", "%22col%3Aname%22"), + ("col.name", "col.name"), + ], +) +def test_filter_special_characters( + filter_request_builder: BaseFilterRequestBuilder, + col_name: str, + expected_query_prefix: str, +): + builder = filter_request_builder.filter(col_name, "eq", "val") + + assert 
builder.request.query.get(sanitize_param(col_name)) == "eq.val" + + +def test_multivalued_param( + filter_request_builder: BaseFilterRequestBuilder, +) -> None: + builder = filter_request_builder.lte("x", "a").gte("x", "b") + + assert builder.request.query.get_list("x") == ["lte.a", "gte.b"] + + +def test_match(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.match({"id": "1", "done": "false"}) + assert builder.request.query.get("id") == "eq.1" + assert builder.request.query.get("done") == "eq.false" + + +def test_equals(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.eq("x", "a") + + assert builder.request.query.get("x") == "eq.a" + + +def test_not_equal(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.neq("x", "a") + + assert builder.request.query.get("x") == "neq.a" + + +def test_greater_than(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.gt("x", "a") + + assert builder.request.query.get("x") == "gt.a" + + +def test_greater_than_or_equals_to( + filter_request_builder: BaseFilterRequestBuilder, +): + builder = filter_request_builder.gte("x", "a") + + assert builder.request.query.get("x") == "gte.a" + + +def test_contains(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.contains("x", "a") + + assert builder.request.query.get("x") == "cs.a" + + +def test_contains_dictionary(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.contains("x", {"a": "b"}) + + # {"a":"b"} + assert builder.request.query.get("x") == 'cs.{"a": "b"}' + + +def test_contains_any_item(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.contains("x", ["a", "b"]) + + # {a,b} + assert builder.request.query.get("x") == "cs.{a,b}" + + +def test_contains_in_list(filter_request_builder: BaseFilterRequestBuilder): + builder = 
filter_request_builder.contains("x", '[{"a": "b"}]') + + # [{"a":+"b"}] (the + represents the space) + assert builder.request.query.get("x") == 'cs.[{"a": "b"}]' + + +def test_contained_by_mixed_items( + filter_request_builder: BaseFilterRequestBuilder, +): + builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == 'cd.{a,["b", "c"]}' + + +def test_range_greater_than(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.range_gt( + "x", "2000-01-02 08:30", "2000-01-02 09:30" + ) + + assert builder.request.query.get("x") == "sr.(2000-01-02 08:30,2000-01-02 09:30)" + + +def test_range_greater_than_or_equal_to( + filter_request_builder: BaseFilterRequestBuilder, +): + builder = filter_request_builder.range_gte( + "x", "2000-01-02 08:30", "2000-01-02 09:30" + ) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "nxl.(2000-01-02 08:30,2000-01-02 09:30)" + + +def test_range_less_than(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.range_lt( + "x", "2000-01-02 08:30", "2000-01-02 09:30" + ) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "sl.(2000-01-02 08:30,2000-01-02 09:30)" + + +def test_range_less_than_or_equal_to( + filter_request_builder: BaseFilterRequestBuilder, +): + builder = filter_request_builder.range_lte( + "x", "2000-01-02 08:30", "2000-01-02 09:30" + ) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "nxr.(2000-01-02 08:30,2000-01-02 09:30)" + + +def test_range_adjacent(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.range_adjacent( + "x", "2000-01-02 08:30", "2000-01-02 09:30" + ) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "adj.(2000-01-02 08:30,2000-01-02 09:30)" + + +def test_overlaps(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.overlaps("x", ["is:closed", 
"severity:high"]) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "ov.{is:closed,severity:high}" + + +def test_overlaps_with_timestamp_range( + filter_request_builder: BaseFilterRequestBuilder, +): + builder = filter_request_builder.overlaps( + "x", "[2000-01-01 12:45, 2000-01-01 13:15)" + ) + + # {a,["b",+"c"]} + assert builder.request.query.get("x") == "ov.[2000-01-01 12:45, 2000-01-01 13:15)" + + +def test_like(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.like("x", "%a%") + + assert builder.request.query.get("x") == "like.%a%" + + +def test_ilike(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.ilike("x", "%a%") + + assert builder.request.query.get("x") == "ilike.%a%" + + +def test_like_all_of(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.like_all_of("x", "A*,*b") + + assert builder.request.query.get("x") == "like(all).{A*,*b}" + + +def test_like_any_of(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.like_any_of("x", "a*,*b") + + assert builder.request.query.get("x") == "like(any).{a*,*b}" + + +def test_ilike_all_of(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.ilike_all_of("x", "A*,*b") + + assert builder.request.query.get("x") == "ilike(all).{A*,*b}" + + +def test_ilike_any_of(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.ilike_any_of("x", "A*,*b") + + assert builder.request.query.get("x") == "ilike(any).{A*,*b}" + + +def test_is_(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.is_("x", "a") + + assert builder.request.query.get("x") == "is.a" + + +def test_in_(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.in_("x", ["a", "b"]) + + assert builder.request.query.get("x") == "in.(a,b)" + + +def test_or_(filter_request_builder: 
BaseFilterRequestBuilder): + builder = filter_request_builder.or_("x.eq.1") + + assert builder.request.query.get("or") == "(x.eq.1)" + + +def test_or_in_contain(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") + + assert builder.request.query.get("or") == "(id.in.(5,6,7), arraycol.cs.{'a','b'})" + + +def test_max_affected(filter_request_builder: BaseFilterRequestBuilder): + builder = filter_request_builder.max_affected(5) + + assert builder.request.headers.get_list("prefer") == [ + "handling=strict", + "max-affected=5", + ] + + +def test_max_affected_with_existing_prefer_header( + filter_request_builder: BaseFilterRequestBuilder, +): + # Set an existing prefer header + filter_request_builder.request.headers = filter_request_builder.request.headers.set( + "prefer", "return=representation" + ) + builder = filter_request_builder.max_affected(10) + + assert builder.request.headers.get_list("prefer") == [ + "return=representation", + "handling=strict", + "max-affected=10", + ] + + +def test_max_affected_with_existing_handling_strict( + filter_request_builder: BaseFilterRequestBuilder, +): + # Set an existing prefer header with handling=strict + filter_request_builder.request.headers = filter_request_builder.request.headers.set( + "prefer", "handling=strict,return=minimal" + ) + builder = filter_request_builder.max_affected(3) + + assert builder.request.headers.get_list("prefer") == [ + "handling=strict,return=minimal", + "handling=strict", + "max-affected=3", + ] diff --git a/src/postgrest/tests/test_request_builder.py b/src/postgrest/tests/test_request_builder.py new file mode 100644 index 00000000..e69de29b diff --git a/src/realtime/pyproject.toml b/src/realtime/pyproject.toml index 3282301b..3a4aef1f 100644 --- a/src/realtime/pyproject.toml +++ b/src/realtime/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "realtime" -version = "2.28.3" # {x-release-please-version} +version = "3.0.0a1" # 
{x-release-please-version} description = "" authors = [ { name = "Joel Lee", email="joel@joellee.org"}, diff --git a/src/storage/Makefile b/src/storage/Makefile index 93318a88..14fe8a1e 100644 --- a/src/storage/Makefile +++ b/src/storage/Makefile @@ -26,15 +26,6 @@ pytest: start-infra help:: @echo " pytest -- run pytest on storage3" -build-sync: - uv run --package storage3 run-unasync.py - sed -i '0,/SyncMock, /{s/SyncMock, //}' tests/_sync/test_bucket.py tests/_sync/test_client.py - sed -i 's/SyncMock/Mock/g' tests/_sync/test_bucket.py tests/_sync/test_client.py - sed -i 's/SyncClient/Client/g' src/storage3/_sync/client.py src/storage3/_sync/bucket.py src/storage3/_sync/file_api.py tests/_sync/test_bucket.py tests/_sync/test_client.py - sed -i 's/self\.session\.aclose/self\.session\.close/g' src/storage3/_sync/client.py -help:: - @echo " build-sync -- generate _sync from _async implementation" - clean: rm -rf htmlcov .pytest_cache .mypy_cache .ruff_cache rm -f .coverage coverage.xml diff --git a/src/storage/pyproject.toml b/src/storage/pyproject.toml index 0109554e..2a7d11bb 100644 --- a/src/storage/pyproject.toml +++ b/src/storage/pyproject.toml @@ -18,15 +18,16 @@ classifiers = [ ] license = "MIT" readme = "README.md" -version = "2.28.3" # {x-release-please-version} -requires-python = ">=3.9" +version = "3.0.0a1" # {x-release-please-version} +requires-python = ">=3.10" dependencies = [ - "httpx[http2] >=0.26,<0.29", "deprecation >=2.1.0", "pydantic >=2.11.7", "yarl>=1.20.1", "pyiceberg>=0.10.0", + "supabase_utils==3.0.0a1", # x-release-please-version + "typing-extensions>=4.15.0", ] [project.urls] @@ -53,6 +54,7 @@ tests = [ "pytest-asyncio >=0.21.0", "pytest-cov >=6.1.0", "python-dotenv >=1.1.0", + "supabase_utils[all]", ] dev = [ { include-group = "lints" }, @@ -68,16 +70,33 @@ filterwarnings = [ ] [tool.mypy] -python_version = "3.9" +strict = true follow_untyped_imports = true # for deprecation module that does not have stubs check_untyped_defs = true 
allow_redefinition = true -# warn_return_any = true +warn_return_any = true warn_unused_configs = true warn_redundant_casts = true warn_unused_ignores = true +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + # "B", + # flake8-simplify + # "SIM", + # isort + "I", +] +ignore = ["E712", "E501", "E402", "UP006", "UP035"] + [tool.uv] default-groups = [ "dev" ] diff --git a/src/storage/run-unasync.py b/src/storage/run-unasync.py deleted file mode 100644 index 7c1a7702..00000000 --- a/src/storage/run-unasync.py +++ /dev/null @@ -1,21 +0,0 @@ -from pathlib import Path - -import unasync - -paths = Path("src/storage3").glob("**/*.py") -tests = Path("tests").glob("**/*.py") - -rules = ( - unasync.Rule( - fromdir="/_async/", - todir="/_sync/", - additional_replacements={"AsyncClient": "Client"}, - ), - unasync._DEFAULT_RULE, -) - - -files = [str(p) for p in list(paths) + list(tests)] - -if __name__ == "__main__": - unasync.unasync_files(files, rules=rules) diff --git a/src/storage/src/storage3/__init__.py b/src/storage/src/storage3/__init__.py index 48b4cb74..28ece260 100644 --- a/src/storage/src/storage3/__init__.py +++ b/src/storage/src/storage3/__init__.py @@ -1,44 +1,12 @@ from __future__ import annotations -from typing import Literal, Union, overload - -from storage3._async import AsyncStorageClient -from storage3._async.bucket import AsyncStorageBucketAPI -from storage3._async.file_api import AsyncBucket -from storage3._sync import SyncStorageClient -from storage3._sync.bucket import SyncStorageBucketAPI -from storage3._sync.file_api import SyncBucket -from storage3.constants import DEFAULT_TIMEOUT +from storage3.client import AsyncStorageClient, SyncStorageClient +from storage3.file_api import StorageFileApiClient from storage3.version import __version__ __all__ = [ - "create_client", "__version__", "AsyncStorageClient", - "AsyncBucket", - "AsyncStorageBucketAPI", "SyncStorageClient", - "SyncBucket", - 
"SyncStorageBucketAPI", + "StorageFileApiClient", ] - - -@overload -def create_client( - url: str, headers: dict[str, str], *, is_async: Literal[True] -) -> AsyncStorageClient: ... - - -@overload -def create_client( - url: str, headers: dict[str, str], *, is_async: Literal[False] -) -> SyncStorageClient: ... - - -def create_client( - url: str, headers: dict[str, str], *, is_async: bool, timeout: int = DEFAULT_TIMEOUT -) -> Union[AsyncStorageClient, SyncStorageClient]: - if is_async: - return AsyncStorageClient(url, headers, timeout) - else: - return SyncStorageClient(url, headers, timeout) diff --git a/src/storage/src/storage3/_async/__init__.py b/src/storage/src/storage3/_async/__init__.py deleted file mode 100644 index 694f552f..00000000 --- a/src/storage/src/storage3/_async/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .client import AsyncStorageClient as AsyncStorageClient diff --git a/src/storage/src/storage3/_async/analytics.py b/src/storage/src/storage3/_async/analytics.py deleted file mode 100644 index 915a999a..00000000 --- a/src/storage/src/storage3/_async/analytics.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import List, Optional - -from httpx import QueryParams -from pyiceberg.catalog.rest import RestCatalog - -from ..types import ( - AnalyticsBucket, - AnalyticsBucketDeleteResponse, - AnalyticsBucketsParser, - SortColumn, - SortOrder, -) -from .request import AsyncRequestBuilder - - -class AsyncStorageAnalyticsClient: - def __init__(self, request: AsyncRequestBuilder) -> None: - self._request = request - - async def create(self, bucket_name: str) -> AnalyticsBucket: - body = {"name": bucket_name} - data = await self._request.send(http_method="POST", path=["bucket"], body=body) - return AnalyticsBucket.model_validate_json(data.content) - - async def list( - self, - limit: Optional[int] = None, - offset: Optional[int] = None, - sort_column: Optional[SortColumn] = None, - sort_order: Optional[SortOrder] = None, - search: Optional[str] = None, - ) -> 
List[AnalyticsBucket]: - params = dict( - limit=limit, - offset=offset, - sort_column=sort_column, - sort_order=sort_order, - search=search, - ) - filtered_params = QueryParams( - **{k: v for k, v in params.items() if v is not None} - ) - data = await self._request.send( - http_method="GET", path=["bucket"], query_params=filtered_params - ) - return AnalyticsBucketsParser.validate_json(data.content) - - async def delete(self, bucket_name: str) -> AnalyticsBucketDeleteResponse: - data = await self._request.send( - http_method="DELETE", path=["bucket", bucket_name] - ) - return AnalyticsBucketDeleteResponse.model_validate_json(data.content) - - def catalog( - self, catalog_name: str, access_key_id: str, secret_access_key: str - ) -> RestCatalog: - catalog_uri = self._request._base_url - s3_endpoint = self._request._base_url.parent.joinpath("s3") - service_key = self._request.headers.get("apiKey") - assert service_key, "apiKey must be passed in the headers." - return RestCatalog( - catalog_name, - warehouse=catalog_name, - uri=str(catalog_uri), - token=service_key, - **{ - "py-io-impl": "pyiceberg.io.pyarrow.PyArrowFileIO", - "s3.endpoint": str(s3_endpoint), - "s3.access-key-id": access_key_id, - "s3.secret-access-key": secret_access_key, - "s3.force-virtual-addressing": "False", - }, - ) diff --git a/src/storage/src/storage3/_async/bucket.py b/src/storage/src/storage3/_async/bucket.py deleted file mode 100644 index 062c256e..00000000 --- a/src/storage/src/storage3/_async/bucket.py +++ /dev/null @@ -1,136 +0,0 @@ -from __future__ import annotations - -import warnings -from typing import Any, Optional - -from httpx import AsyncClient, Headers, HTTPStatusError, Response -from yarl import URL - -from ..exceptions import StorageApiError -from ..types import CreateOrUpdateBucketOptions, RequestMethod -from .file_api import AsyncBucket - -__all__ = ["AsyncStorageBucketAPI"] - - -class AsyncStorageBucketAPI: - """This class abstracts access to the endpoint to the Get, List, 
Empty, and Delete operations on a bucket""" - - def __init__(self, session: AsyncClient, url: str, headers: Headers) -> None: - if url and url[-1] != "/": - warnings.warn( - "Storage endpoint URL should have a trailing slash. " - "The URL has been automatically corrected.", - UserWarning, - stacklevel=2, - ) - url += "/" - self._base_url = URL(url) - self._client = session - self._headers = headers - - async def _request( - self, - method: RequestMethod, - path: list[str], - json: Optional[dict[Any, Any]] = None, - ) -> Response: - try: - url_path = self._base_url.joinpath(*path) - response = await self._client.request( - method, str(url_path), json=json, headers=self._headers - ) - response.raise_for_status() - except HTTPStatusError as exc: - resp = exc.response.json() - raise StorageApiError( - resp["message"], resp["error"], resp["statusCode"] - ) from exc - - return response - - async def list_buckets(self) -> list[AsyncBucket]: - """Retrieves the details of all storage buckets within an existing product.""" - # if the request doesn't error, it is assured to return a list - res = await self._request("GET", ["bucket"]) - return [AsyncBucket(**bucket) for bucket in res.json()] - - async def get_bucket(self, id: str) -> AsyncBucket: - """Retrieves the details of an existing storage bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to retrieve. - """ - res = await self._request("GET", ["bucket", id]) - json = res.json() - return AsyncBucket(**json) - - async def create_bucket( - self, - id: str, - name: Optional[str] = None, - options: Optional[CreateOrUpdateBucketOptions] = None, - ) -> dict[str, str]: - """Creates a new storage bucket. - - Parameters - ---------- - id - A unique identifier for the bucket you are creating. - name - A name for the bucket you are creating. If not passed, the id is used as the name as well. - options - Extra options to send while creating the bucket. 
Valid options are `public`, `file_size_limit` and - `allowed_mime_types`. - """ - json: dict[str, Any] = {"id": id, "name": name or id} - if options: - json.update(**options) - res = await self._request( - "POST", - ["bucket"], - json=json, - ) - return res.json() - - async def update_bucket( - self, id: str, options: CreateOrUpdateBucketOptions - ) -> dict[str, str]: - """Update a storage bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to update. - options - The properties you want to update. Valid options are `public`, `file_size_limit` and - `allowed_mime_types`. - """ - json = {"id": id, "name": id, **options} - res = await self._request("PUT", ["bucket", id], json=json) - return res.json() - - async def empty_bucket(self, id: str) -> dict[str, str]: - """Removes all objects inside a single bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to empty. - """ - res = await self._request("POST", ["bucket", id, "empty"], json={}) - return res.json() - - async def delete_bucket(self, id: str) -> dict[str, str]: - """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first - `empty()` the bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to delete. 
- """ - res = await self._request("DELETE", ["bucket", id], json={}) - return res.json() diff --git a/src/storage/src/storage3/_async/client.py b/src/storage/src/storage3/_async/client.py deleted file mode 100644 index 2caec5ef..00000000 --- a/src/storage/src/storage3/_async/client.py +++ /dev/null @@ -1,113 +0,0 @@ -from __future__ import annotations - -import platform -import sys -from typing import Optional -from warnings import warn - -from httpx import AsyncClient, Headers - -from storage3.constants import DEFAULT_TIMEOUT - -from ..version import __version__ -from .analytics import AsyncStorageAnalyticsClient -from .bucket import AsyncStorageBucketAPI -from .file_api import AsyncBucketProxy -from .request import AsyncRequestBuilder -from .vectors import AsyncStorageVectorsClient - -__all__ = [ - "AsyncStorageClient", -] - - -class AsyncStorageClient(AsyncStorageBucketAPI): - """Manage storage buckets and files.""" - - def __init__( - self, - url: str, - headers: dict[str, str], - timeout: Optional[int] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[AsyncClient] = None, - ) -> None: - headers = { - "X-Client-Info": f"supabase-py/storage3 v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. 
Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = int(abs(timeout)) if timeout is not None else DEFAULT_TIMEOUT - - self.session = http_client or AsyncClient( - headers=headers, - timeout=self.timeout, - proxy=proxy, - verify=self.verify, - follow_redirects=True, - http2=True, - ) - super().__init__(self.session, url, Headers(headers)) - - async def __aenter__(self) -> AsyncStorageClient: - return self - - async def __aexit__(self, exc_type, exc, tb) -> None: - await self.session.aclose() - - def from_(self, id: str) -> AsyncBucketProxy: - """Run a storage file operation. - - Parameters - ---------- - id - The unique identifier of the bucket - """ - return AsyncBucketProxy(id, self._base_url, self._headers, self._client) - - def vectors(self) -> AsyncStorageVectorsClient: - return AsyncStorageVectorsClient( - url=self._base_url.joinpath("vector"), - headers=self._headers, - session=self.session, - ) - - def analytics(self) -> AsyncStorageAnalyticsClient: - request = AsyncRequestBuilder( - session=self.session, - headers=self._headers, - base_url=self._base_url.joinpath("iceberg"), - ) - return AsyncStorageAnalyticsClient(request=request) diff --git a/src/storage/src/storage3/_async/file_api.py b/src/storage/src/storage3/_async/file_api.py deleted file mode 100644 index a5d7363a..00000000 --- a/src/storage/src/storage3/_async/file_api.py +++ /dev/null @@ -1,617 +0,0 @@ -from __future__ import annotations - -import base64 -import json -import urllib.parse -from dataclasses import dataclass, field -from io import BufferedReader, FileIO -from pathlib import Path -from typing import Any, Dict, List, Literal, Optional, Union, cast - -from httpx import AsyncClient, Headers, HTTPStatusError, 
Response -from yarl import URL - -from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..exceptions import StorageApiError -from ..types import ( - BaseBucket, - CreateSignedUploadUrlOptions, - CreateSignedUrlResponse, - CreateSignedURLsOptions, - DownloadOptions, - FileOptions, - ListBucketFilesOptions, - RequestMethod, - SearchV2Options, - SearchV2Result, - SignedUploadURL, - SignedUrlJsonResponse, - SignedUrlResponse, - SignedUrlsJsonResponse, - TransformOptions, - UploadData, - UploadResponse, - UploadSignedUrlFileOptions, - URLOptions, - transform_to_dict, -) -from ..utils import StorageException - -__all__ = ["AsyncBucket"] - - -def relative_path_to_parts(path: str) -> tuple[str, ...]: - url = URL(path) - if url.absolute or url.parts[0] == "/": - return url.parts[1:] - return url.parts - - -class AsyncBucketActionsMixin: - """Functions needed to access the file API.""" - - id: str - _base_url: URL - _client: AsyncClient - _headers: Headers - - async def _request( - self, - method: RequestMethod, - path: list[str], - headers: Optional[dict[str, Any]] = None, - json: Optional[dict[Any, Any]] = None, - files: Optional[Any] = None, - query_params: Optional[dict[str, str]] = None, - **kwargs: Any, - ) -> Response: - try: - url_path = self._base_url.joinpath(*path).with_query(query_params) - headers = headers or dict() - headers.update(self._headers) - response = await self._client.request( - method, - str(url_path), - headers=headers, - json=json, - files=files, - **kwargs, - ) - response.raise_for_status() - except HTTPStatusError as exc: - try: - resp = exc.response.json() - raise StorageApiError( - resp["message"], resp["error"], resp["statusCode"] - ) from exc - except KeyError as err: - message = f"Unable to parse error message: {resp.text}" - raise StorageApiError(message, "InternalError", 400) from err - - # close the resource before returning the response - if files and "file" in files and isinstance(files["file"][1], BufferedReader): 
- files["file"][1].close() - - return response - - async def create_signed_upload_url( - self, - path: str, - options: Optional[CreateSignedUploadUrlOptions] = None, - ) -> SignedUploadURL: - """ - Creates a signed upload URL. - - Parameters - ---------- - path - The file path, including the file name. For example `folder/image.png`. - options - Additional options for the upload url creation. - """ - headers: dict[str, str] = dict() - if options is not None and options.upsert: - headers.update({"x-upsert": options.upsert}) - - path_parts = relative_path_to_parts(path) - response = await self._request( - "POST", ["object", "upload", "sign", self.id, *path_parts], headers=headers - ) - data = response.json() - full_url: urllib.parse.ParseResult = urllib.parse.urlparse( - str(self._base_url) + cast(str, data["url"]).lstrip("/") - ) - query_params = urllib.parse.parse_qs(full_url.query) - if not query_params.get("token"): - raise StorageException("No token sent by the API") - return { - "signed_url": full_url.geturl(), - "signedUrl": full_url.geturl(), - "token": query_params["token"][0], - "path": path, - } - - async def upload_to_signed_url( - self, - path: str, - token: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[UploadSignedUrlFileOptions] = None, - ) -> UploadResponse: - """ - Upload a file with a token generated from :meth:`.create_signed_url` - - Parameters - ---------- - path - The file path, including the file name - token - The token generated from :meth:`.create_signed_url` - file - The file contents or a file-like object to upload - file_options - Additional options for the uploaded file - """ - path_parts = relative_path_to_parts(path) - query_params = {"token": token} - - final_url = ["object", "upload", "sign", self.id, *path_parts] - - options: UploadSignedUrlFileOptions = file_options or {} - cache_control = options.get("cache-control") - # cacheControl is also passed as form data - # 
https://github.com/supabase/storage-js/blob/fa44be8156295ba6320ffeff96bdf91016536a46/src/packages/StorageFileApi.ts#L89 - _data = {} - if cache_control: - options["cache-control"] = f"max-age={cache_control}" - _data = {"cacheControl": cache_control} - headers = { - **self._client.headers, - **DEFAULT_FILE_OPTIONS, - **options, - } - filename = path_parts[-1] - - if ( - isinstance(file, BufferedReader) - or isinstance(file, bytes) - or isinstance(file, FileIO) - ): - # bytes or byte-stream-like object received - _file = {"file": (filename, file, headers.pop("content-type"))} - else: - # str or pathlib.path received - _file = { - "file": ( - filename, - open(file, "rb"), - headers.pop("content-type"), - ) - } - response = await self._request( - "PUT", - final_url, - files=_file, - headers=headers, - data=_data, - query_params=query_params, - ) - data: UploadData = response.json() - - return UploadResponse(path=path, Key=data["Key"]) - - def _make_signed_url( - self, signed_url: str, download_query: dict[str, str] - ) -> SignedUrlResponse: - url = URL(signed_url[1:]) # ignore starting slash - signedURL = self._base_url.join(url).extend_query(download_query) - return {"signedURL": str(signedURL), "signedUrl": str(signedURL)} - - async def create_signed_url( - self, path: str, expires_in: int, options: Optional[URLOptions] = None - ) -> SignedUrlResponse: - """ - Parameters - ---------- - path - file path to be downloaded, including the current file name. - expires_in - number of seconds until the signed URL expires. - options - options to be passed for downloading or transforming the file. 
- """ - json: dict[str, str | bool | TransformOptions] = {"expiresIn": str(expires_in)} - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - json.update({"download": download}) - download_query = {"download": "" if download is True else download} - if transform := url_options.get("transform"): - json.update({"transform": transform}) - - path_parts = relative_path_to_parts(path) - response = await self._request( - "POST", - ["object", "sign", self.id, *path_parts], - json=json, - ) - - data = SignedUrlJsonResponse.model_validate_json(response.content) - return self._make_signed_url(data.signedURL, download_query) - - async def create_signed_urls( - self, - paths: List[str], - expires_in: int, - options: Optional[CreateSignedURLsOptions] = None, - ) -> List[CreateSignedUrlResponse]: - """ - Parameters - ---------- - path - file path to be downloaded, including the current file name. - expires_in - number of seconds until the signed URL expires. - options - options to be passed for downloading the file. 
- """ - json: dict[str, str | bool | None | list[str]] = { - "paths": paths, - "expiresIn": str(expires_in), - } - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - json.update({"download": download}) - download_query = {"download": "" if download is True else download} - - response = await self._request( - "POST", - ["object", "sign", self.id], - json=json, - ) - data = SignedUrlsJsonResponse.validate_json(response.content) - signed_urls = [] - for item in data: - # Prepare URL - url = self._make_signed_url(item.signedURL, download_query) - signed_item: CreateSignedUrlResponse = { - "error": item.error, - "path": item.path, - "signedURL": url["signedURL"], - "signedUrl": url["signedURL"], - } - signed_urls.append(signed_item) - return signed_urls - - async def get_public_url( - self, path: str, options: Optional[URLOptions] = None - ) -> str: - """ - Parameters - ---------- - path - file path, including the path and file name. For example `folder/image.png`. - """ - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - download_query = {"download": "" if download is True else download} - - render_path = ( - ["render", "image"] if url_options.get("transform") else ["object"] - ) - transformation = ( - transform_to_dict(t) if (t := url_options.get("transform")) else dict() - ) - - path_parts = relative_path_to_parts(path) - url = ( - self._base_url.joinpath(*render_path, "public", self.id, *path_parts) - .with_query(download_query) - .extend_query(transformation) - ) - return str(url) - - async def move(self, from_path: str, to_path: str) -> dict[str, str]: - """ - Moves an existing file, optionally renaming it at the same time. - - Parameters - ---------- - from_path - The original file path, including the current file name. For example `folder/image.png`. - to_path - The new file path, including the new file name. For example `folder/image-copy.png`. 
- """ - res = await self._request( - "POST", - ["object", "move"], - json={ - "bucketId": self.id, - "sourceKey": from_path, - "destinationKey": to_path, - }, - ) - return res.json() - - async def copy(self, from_path: str, to_path: str) -> dict[str, str]: - """ - Copies an existing file to a new path in the same bucket. - - Parameters - ---------- - from_path - The original file path, including the current file name. For example `folder/image.png`. - to_path - The new file path, including the new file name. For example `folder/image-copy.png`. - """ - res = await self._request( - "POST", - ["object", "copy"], - json={ - "bucketId": self.id, - "sourceKey": from_path, - "destinationKey": to_path, - }, - ) - return res.json() - - async def remove(self, paths: list[str]) -> list[dict[str, Any]]: - """ - Deletes files within the same bucket - - Parameters - ---------- - paths - An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. - """ - response = await self._request( - "DELETE", - ["object", self.id], - json={"prefixes": paths}, - ) - return response.json() - - async def info( - self, - path: str, - ) -> dict[str, Any]: - """ - Lists info for a particular file. - - Parameters - ---------- - path - The path to the file. - """ - path_parts = relative_path_to_parts(path) # split paths by / - response = await self._request( - "GET", - ["object", "info", self.id, *path_parts], - ) - return response.json() - - async def exists( - self, - path: str, - ) -> bool: - """ - Returns True if the file exists, False otherwise. - - Parameters - ---------- - path - The path to the file. 
- """ - try: - path_parts = relative_path_to_parts(path) # split paths by / - response = await self._request( - "HEAD", - ["object", self.id, *path_parts], - ) - return response.status_code == 200 - except json.JSONDecodeError: - return False - - async def list( - self, - path: Optional[str] = None, - options: Optional[ListBucketFilesOptions] = None, - ) -> list[dict[str, Any]]: - """ - Lists all the files within a bucket. - - Parameters - ---------- - path - The folder path. - options - Search options, including `limit`, `offset`, `sortBy` and `search`. - """ - extra_options = options or {} - extra_headers = {"Content-Type": "application/json"} - body = { - **DEFAULT_SEARCH_OPTIONS, - **extra_options, - "prefix": path or "", - } - response = await self._request( - "POST", - ["object", "list", self.id], - json=body, - headers=extra_headers, - ) - return response.json() - - async def list_v2( - self, - options: Optional[SearchV2Options] = None, - ) -> SearchV2Result: - body = {**options} if options else {} - response = await self._request( - "POST", - ["object", "list-v2", self.id], - json=body, - ) - return SearchV2Result.model_validate_json(response.content) - - async def download( - self, - path: str, - options: Optional[DownloadOptions] = None, - query_params: Optional[Dict[str, str]] = None, - ) -> bytes: - """ - Downloads a file. - - Parameters - ---------- - path - The file path to be downloaded, including the path and file name. For example `folder/image.png`. 
- """ - url_options = options or DownloadOptions() - render_path = ( - ["render", "image", "authenticated"] - if url_options.get("transform") - else ["object"] - ) - - transform_options = url_options.get("transform") or TransformOptions() - - path_parts = relative_path_to_parts(path) - response = await self._request( - "GET", - [*render_path, self.id, *path_parts], - query_params={ - **transform_to_dict(transform_options), - **(query_params or {}), - }, - ) - return response.content - - async def _upload_or_update( - self, - method: Literal["POST", "PUT"], - path: tuple[str, ...], - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - """ - Uploads a file to an existing bucket. - - Parameters - ---------- - path - The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. - The bucket must already exist before attempting to upload. - file - The File object to be stored in the bucket. or a async generator of chunks - file_options - HTTP headers. 
- """ - if file_options is None: - file_options = {} - cache_control = file_options.pop("cache-control", None) - _data = {} - - upsert = file_options.pop("upsert", None) - if upsert: - file_options.update({"x-upsert": upsert}) - - metadata = file_options.pop("metadata", None) - file_opts_headers = file_options.pop("headers", None) - - headers = { - **self._client.headers, - **DEFAULT_FILE_OPTIONS, - **file_options, - } - - if metadata: - metadata_str = json.dumps(metadata) - headers["x-metadata"] = base64.b64encode(metadata_str.encode()) - _data.update({"metadata": metadata_str}) - - if file_opts_headers: - headers.update({**file_opts_headers}) - - # Only include x-upsert on a POST method - if method != "POST": - del headers["x-upsert"] - - filename = path[-1] - - if cache_control: - headers["cache-control"] = f"max-age={cache_control}" - _data.update({"cacheControl": cache_control}) - - if ( - isinstance(file, BufferedReader) - or isinstance(file, bytes) - or isinstance(file, FileIO) - ): - # bytes or byte-stream-like object received - files = {"file": (filename, file, headers.pop("content-type"))} - else: - # str or pathlib.path received - files = { - "file": ( - filename, - open(file, "rb"), - headers.pop("content-type"), - ) - } - - response = await self._request( - method, ["object", self.id, *path], files=files, headers=headers, data=_data - ) - - data: UploadData = response.json() - - return UploadResponse(path="/".join(path), Key=data["Key"]) - - async def upload( - self, - path: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - """ - Uploads a file to an existing bucket. - - Parameters - ---------- - path - The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. - The bucket must already exist before attempting to upload. - file - The File object to be stored in the bucket. 
or a async generator of chunks - file_options - HTTP headers. - """ - path_parts = relative_path_to_parts(path) - return await self._upload_or_update("POST", path_parts, file, file_options) - - async def update( - self, - path: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - path_parts = relative_path_to_parts(path) - return await self._upload_or_update("PUT", path_parts, file, file_options) - - -class AsyncBucket(BaseBucket): - """Represents a storage bucket.""" - - -@dataclass -class AsyncBucketProxy(AsyncBucketActionsMixin): - """A bucket proxy, this contains the minimum required fields to query the File API.""" - - id: str - _base_url: URL - _headers: Headers - _client: AsyncClient = field(repr=False) diff --git a/src/storage/src/storage3/_async/request.py b/src/storage/src/storage3/_async/request.py deleted file mode 100644 index bf15b5c6..00000000 --- a/src/storage/src/storage3/_async/request.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Optional - -from httpx import AsyncClient, Headers, HTTPStatusError, QueryParams, Response -from pydantic import ValidationError -from yarl import URL - -from ..exceptions import StorageApiError, VectorBucketErrorMessage -from ..types import JSON, RequestMethod - - -class AsyncRequestBuilder: - def __init__(self, session: AsyncClient, base_url: URL, headers: Headers) -> None: - self._session = session - self._base_url = base_url - self.headers = headers - - async def send( - self, - http_method: RequestMethod, - path: list[str], - body: JSON = None, - query_params: Optional[QueryParams] = None, - ) -> Response: - response = await self._session.request( - method=http_method, - json=body, - url=str(self._base_url.joinpath(*path)), - headers=self.headers, - params=query_params or QueryParams(), - ) - try: - response.raise_for_status() - return response - except HTTPStatusError as exc: - try: - error = 
VectorBucketErrorMessage.model_validate_json(response.content) - raise StorageApiError( - message=error.message, - code=error.code or "400", - status=error.statusCode, - ) from exc - except ValidationError as exc: - raise StorageApiError( - message=f"The request failed, but could not parse error message response:'{response.text}'", - code="LibraryError", - status=response.status_code, - ) from exc diff --git a/src/storage/src/storage3/_async/vectors.py b/src/storage/src/storage3/_async/vectors.py deleted file mode 100644 index 8da9b83f..00000000 --- a/src/storage/src/storage3/_async/vectors.py +++ /dev/null @@ -1,212 +0,0 @@ -from __future__ import annotations - -from typing import List, Optional - -from httpx import AsyncClient, Headers -from yarl import URL - -from ..exceptions import StorageApiError, VectorBucketException -from ..types import ( - JSON, - DistanceMetric, - GetVectorBucketResponse, - GetVectorIndexResponse, - GetVectorsResponse, - ListVectorBucketsResponse, - ListVectorIndexesResponse, - ListVectorsResponse, - MetadataConfiguration, - QueryVectorsResponse, - VectorData, - VectorFilter, - VectorObject, -) -from .request import AsyncRequestBuilder - - -# used to not send non-required values as `null` -# for they cannot be null -def remove_none(**kwargs: JSON) -> JSON: - return {key: val for key, val in kwargs.items() if val is not None} - - -class AsyncVectorBucketScope: - def __init__(self, request: AsyncRequestBuilder, bucket_name: str) -> None: - self._request = request - self._bucket_name = bucket_name - - def with_metadata(self, **data: JSON) -> JSON: - return remove_none(vectorBucketName=self._bucket_name, **data) - - async def create_index( - self, - index_name: str, - dimension: int, - distance_metric: DistanceMetric, - data_type: str, - metadata: Optional[MetadataConfiguration] = None, - ) -> None: - body = self.with_metadata( - indexName=index_name, - dimension=dimension, - distanceMetric=distance_metric, - dataType=data_type, - 
metadataConfiguration=metadata.model_dump(by_alias=True) - if metadata - else None, - ) - await self._request.send(http_method="POST", path=["CreateIndex"], body=body) - - async def get_index(self, index_name: str) -> Optional[GetVectorIndexResponse]: - body = self.with_metadata(indexName=index_name) - try: - data = await self._request.send( - http_method="POST", path=["GetIndex"], body=body - ) - return GetVectorIndexResponse.model_validate_json(data.content) - except StorageApiError: - return None - - async def list_indexes( - self, - next_token: Optional[str] = None, - max_results: Optional[int] = None, - prefix: Optional[str] = None, - ) -> ListVectorIndexesResponse: - body = self.with_metadata( - next_token=next_token, max_results=max_results, prefix=prefix - ) - data = await self._request.send( - http_method="POST", path=["ListIndexes"], body=body - ) - return ListVectorIndexesResponse.model_validate_json(data.content) - - async def delete_index(self, index_name: str) -> None: - body = self.with_metadata(indexName=index_name) - await self._request.send(http_method="POST", path=["DeleteIndex"], body=body) - - def index(self, index_name: str) -> AsyncVectorIndexScope: - return AsyncVectorIndexScope(self._request, self._bucket_name, index_name) - - -class AsyncVectorIndexScope: - def __init__( - self, request: AsyncRequestBuilder, bucket_name: str, index_name: str - ) -> None: - self._request = request - self._bucket_name = bucket_name - self._index_name = index_name - - def with_metadata(self, **data: JSON) -> JSON: - return remove_none( - vectorBucketName=self._bucket_name, - indexName=self._index_name, - **data, - ) - - async def put(self, vectors: List[VectorObject]) -> None: - body = self.with_metadata( - vectors=[v.model_dump(exclude_none=True) for v in vectors] - ) - await self._request.send(http_method="POST", path=["PutVectors"], body=body) - - async def get( - self, *keys: str, return_data: bool = True, return_metadata: bool = True - ) -> 
GetVectorsResponse: - body = self.with_metadata( - keys=keys, returnData=return_data, returnMetadata=return_metadata - ) - data = await self._request.send( - http_method="POST", path=["GetVectors"], body=body - ) - return GetVectorsResponse.model_validate_json(data.content) - - async def list( - self, - max_results: Optional[int] = None, - next_token: Optional[str] = None, - return_data: bool = True, - return_metadata: bool = True, - segment_count: Optional[int] = None, - segment_index: Optional[int] = None, - ) -> ListVectorsResponse: - body = self.with_metadata( - maxResults=max_results, - nextToken=next_token, - returnData=return_data, - returnMetadata=return_metadata, - segmentCount=segment_count, - segmentIndex=segment_index, - ) - data = await self._request.send( - http_method="POST", path=["ListVectors"], body=body - ) - return ListVectorsResponse.model_validate_json(data.content) - - async def query( - self, - query_vector: VectorData, - topK: Optional[int] = None, - filter: Optional[VectorFilter] = None, - return_distance: bool = True, - return_metadata: bool = True, - ) -> QueryVectorsResponse: - body = self.with_metadata( - queryVector=dict(query_vector), - topK=topK, - filter=filter, - returnDistance=return_distance, - returnMetadata=return_metadata, - ) - data = await self._request.send( - http_method="POST", path=["QueryVectors"], body=body - ) - return QueryVectorsResponse.model_validate_json(data.content) - - async def delete(self, keys: List[str]) -> None: - if len(keys) < 1 or len(keys) > 500: - raise VectorBucketException("Keys batch size must be between 1 and 500.") - body = self.with_metadata(keys=keys) - await self._request.send(http_method="POST", path=["DeleteVectors"], body=body) - - -class AsyncStorageVectorsClient: - def __init__(self, url: URL, headers: Headers, session: AsyncClient) -> None: - self._request = AsyncRequestBuilder(session, base_url=URL(url), headers=headers) - - def from_(self, bucket_name: str) -> AsyncVectorBucketScope: 
- return AsyncVectorBucketScope(self._request, bucket_name) - - async def create_bucket(self, bucket_name: str) -> None: - body = {"vectorBucketName": bucket_name} - await self._request.send( - http_method="POST", path=["CreateVectorBucket"], body=body - ) - - async def get_bucket(self, bucket_name: str) -> Optional[GetVectorBucketResponse]: - body = {"vectorBucketName": bucket_name} - try: - data = await self._request.send( - http_method="POST", path=["GetVectorBucket"], body=body - ) - return GetVectorBucketResponse.model_validate_json(data.content) - except StorageApiError: - return None - - async def list_buckets( - self, - prefix: Optional[str] = None, - max_results: Optional[int] = None, - next_token: Optional[str] = None, - ) -> ListVectorBucketsResponse: - body = remove_none(prefix=prefix, maxResults=max_results, nextToken=next_token) - data = await self._request.send( - http_method="POST", path=["ListVectorBuckets"], body=body - ) - return ListVectorBucketsResponse.model_validate_json(data.content) - - async def delete_bucket(self, bucket_name: str) -> None: - body = {"vectorBucketName": bucket_name} - await self._request.send( - http_method="POST", path=["DeleteVectorBucket"], body=body - ) diff --git a/src/storage/src/storage3/_sync/__init__.py b/src/storage/src/storage3/_sync/__init__.py deleted file mode 100644 index 9eedb131..00000000 --- a/src/storage/src/storage3/_sync/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .client import SyncStorageClient as SyncStorageClient diff --git a/src/storage/src/storage3/_sync/analytics.py b/src/storage/src/storage3/_sync/analytics.py deleted file mode 100644 index b467e566..00000000 --- a/src/storage/src/storage3/_sync/analytics.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List, Optional - -from httpx import QueryParams -from pyiceberg.catalog.rest import RestCatalog - -from ..types import ( - AnalyticsBucket, - AnalyticsBucketDeleteResponse, - AnalyticsBucketsParser, - SortColumn, - SortOrder, -) -from 
.request import SyncRequestBuilder - - -class SyncStorageAnalyticsClient: - def __init__(self, request: SyncRequestBuilder) -> None: - self._request = request - - def create(self, bucket_name: str) -> AnalyticsBucket: - body = {"name": bucket_name} - data = self._request.send(http_method="POST", path=["bucket"], body=body) - return AnalyticsBucket.model_validate_json(data.content) - - def list( - self, - limit: Optional[int] = None, - offset: Optional[int] = None, - sort_column: Optional[SortColumn] = None, - sort_order: Optional[SortOrder] = None, - search: Optional[str] = None, - ) -> List[AnalyticsBucket]: - params = dict( - limit=limit, - offset=offset, - sort_column=sort_column, - sort_order=sort_order, - search=search, - ) - filtered_params = QueryParams( - **{k: v for k, v in params.items() if v is not None} - ) - data = self._request.send( - http_method="GET", path=["bucket"], query_params=filtered_params - ) - return AnalyticsBucketsParser.validate_json(data.content) - - def delete(self, bucket_name: str) -> AnalyticsBucketDeleteResponse: - data = self._request.send(http_method="DELETE", path=["bucket", bucket_name]) - return AnalyticsBucketDeleteResponse.model_validate_json(data.content) - - def catalog( - self, catalog_name: str, access_key_id: str, secret_access_key: str - ) -> RestCatalog: - catalog_uri = self._request._base_url - s3_endpoint = self._request._base_url.parent.joinpath("s3") - service_key = self._request.headers.get("apiKey") - assert service_key, "apiKey must be passed in the headers." 
- return RestCatalog( - catalog_name, - warehouse=catalog_name, - uri=str(catalog_uri), - token=service_key, - **{ - "py-io-impl": "pyiceberg.io.pyarrow.PyArrowFileIO", - "s3.endpoint": str(s3_endpoint), - "s3.access-key-id": access_key_id, - "s3.secret-access-key": secret_access_key, - "s3.force-virtual-addressing": "False", - }, - ) diff --git a/src/storage/src/storage3/_sync/bucket.py b/src/storage/src/storage3/_sync/bucket.py deleted file mode 100644 index e0db56a4..00000000 --- a/src/storage/src/storage3/_sync/bucket.py +++ /dev/null @@ -1,136 +0,0 @@ -from __future__ import annotations - -import warnings -from typing import Any, Optional - -from httpx import Client, Headers, HTTPStatusError, Response -from yarl import URL - -from ..exceptions import StorageApiError -from ..types import CreateOrUpdateBucketOptions, RequestMethod -from .file_api import SyncBucket - -__all__ = ["SyncStorageBucketAPI"] - - -class SyncStorageBucketAPI: - """This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket""" - - def __init__(self, session: Client, url: str, headers: Headers) -> None: - if url and url[-1] != "/": - warnings.warn( - "Storage endpoint URL should have a trailing slash. 
" - "The URL has been automatically corrected.", - UserWarning, - stacklevel=2, - ) - url += "/" - self._base_url = URL(url) - self._client = session - self._headers = headers - - def _request( - self, - method: RequestMethod, - path: list[str], - json: Optional[dict[Any, Any]] = None, - ) -> Response: - try: - url_path = self._base_url.joinpath(*path) - response = self._client.request( - method, str(url_path), json=json, headers=self._headers - ) - response.raise_for_status() - except HTTPStatusError as exc: - resp = exc.response.json() - raise StorageApiError( - resp["message"], resp["error"], resp["statusCode"] - ) from exc - - return response - - def list_buckets(self) -> list[SyncBucket]: - """Retrieves the details of all storage buckets within an existing product.""" - # if the request doesn't error, it is assured to return a list - res = self._request("GET", ["bucket"]) - return [SyncBucket(**bucket) for bucket in res.json()] - - def get_bucket(self, id: str) -> SyncBucket: - """Retrieves the details of an existing storage bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to retrieve. - """ - res = self._request("GET", ["bucket", id]) - json = res.json() - return SyncBucket(**json) - - def create_bucket( - self, - id: str, - name: Optional[str] = None, - options: Optional[CreateOrUpdateBucketOptions] = None, - ) -> dict[str, str]: - """Creates a new storage bucket. - - Parameters - ---------- - id - A unique identifier for the bucket you are creating. - name - A name for the bucket you are creating. If not passed, the id is used as the name as well. - options - Extra options to send while creating the bucket. Valid options are `public`, `file_size_limit` and - `allowed_mime_types`. 
- """ - json: dict[str, Any] = {"id": id, "name": name or id} - if options: - json.update(**options) - res = self._request( - "POST", - ["bucket"], - json=json, - ) - return res.json() - - def update_bucket( - self, id: str, options: CreateOrUpdateBucketOptions - ) -> dict[str, str]: - """Update a storage bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to update. - options - The properties you want to update. Valid options are `public`, `file_size_limit` and - `allowed_mime_types`. - """ - json = {"id": id, "name": id, **options} - res = self._request("PUT", ["bucket", id], json=json) - return res.json() - - def empty_bucket(self, id: str) -> dict[str, str]: - """Removes all objects inside a single bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to empty. - """ - res = self._request("POST", ["bucket", id, "empty"], json={}) - return res.json() - - def delete_bucket(self, id: str) -> dict[str, str]: - """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first - `empty()` the bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to delete. 
- """ - res = self._request("DELETE", ["bucket", id], json={}) - return res.json() diff --git a/src/storage/src/storage3/_sync/client.py b/src/storage/src/storage3/_sync/client.py deleted file mode 100644 index 956ede79..00000000 --- a/src/storage/src/storage3/_sync/client.py +++ /dev/null @@ -1,113 +0,0 @@ -from __future__ import annotations - -import platform -import sys -from typing import Optional -from warnings import warn - -from httpx import Client, Headers - -from storage3.constants import DEFAULT_TIMEOUT - -from ..version import __version__ -from .analytics import SyncStorageAnalyticsClient -from .bucket import SyncStorageBucketAPI -from .file_api import SyncBucketProxy -from .request import SyncRequestBuilder -from .vectors import SyncStorageVectorsClient - -__all__ = [ - "SyncStorageClient", -] - - -class SyncStorageClient(SyncStorageBucketAPI): - """Manage storage buckets and files.""" - - def __init__( - self, - url: str, - headers: dict[str, str], - timeout: Optional[int] = None, - verify: Optional[bool] = None, - proxy: Optional[str] = None, - http_client: Optional[Client] = None, - ) -> None: - headers = { - "X-Client-Info": f"supabase-py/storage3 v{__version__}", - "X-Supabase-Client-Platform": platform.system(), - "X-Supabase-Client-Platform-Version": platform.release(), - "X-Supabase-Client-Runtime": "python", - "X-Supabase-Client-Runtime-Version": platform.python_version(), - **headers, - } - - if sys.version_info < (3, 10): - warn( - "Python versions below 3.10 are deprecated and will not be supported in future versions. Please upgrade to Python 3.10 or newer.", - DeprecationWarning, - stacklevel=2, - ) - - if timeout is not None: - warn( - "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if verify is not None: - warn( - "The 'verify' parameter is deprecated. 
Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - if proxy is not None: - warn( - "The 'proxy' parameter is deprecated. Please configure it in the http client instead.", - DeprecationWarning, - stacklevel=2, - ) - - self.verify = bool(verify) if verify is not None else True - self.timeout = int(abs(timeout)) if timeout is not None else DEFAULT_TIMEOUT - - self.session = http_client or Client( - headers=headers, - timeout=self.timeout, - proxy=proxy, - verify=self.verify, - follow_redirects=True, - http2=True, - ) - super().__init__(self.session, url, Headers(headers)) - - def __enter__(self) -> SyncStorageClient: - return self - - def __exit__(self, exc_type, exc, tb) -> None: - self.session.close() - - def from_(self, id: str) -> SyncBucketProxy: - """Run a storage file operation. - - Parameters - ---------- - id - The unique identifier of the bucket - """ - return SyncBucketProxy(id, self._base_url, self._headers, self._client) - - def vectors(self) -> SyncStorageVectorsClient: - return SyncStorageVectorsClient( - url=self._base_url.joinpath("vector"), - headers=self._headers, - session=self.session, - ) - - def analytics(self) -> SyncStorageAnalyticsClient: - request = SyncRequestBuilder( - session=self.session, - headers=self._headers, - base_url=self._base_url.joinpath("iceberg"), - ) - return SyncStorageAnalyticsClient(request=request) diff --git a/src/storage/src/storage3/_sync/file_api.py b/src/storage/src/storage3/_sync/file_api.py deleted file mode 100644 index 831fb6a6..00000000 --- a/src/storage/src/storage3/_sync/file_api.py +++ /dev/null @@ -1,615 +0,0 @@ -from __future__ import annotations - -import base64 -import json -import urllib.parse -from dataclasses import dataclass, field -from io import BufferedReader, FileIO -from pathlib import Path -from typing import Any, Dict, List, Literal, Optional, Union, cast - -from httpx import Client, Headers, HTTPStatusError, Response -from yarl import URL - -from 
..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..exceptions import StorageApiError -from ..types import ( - BaseBucket, - CreateSignedUploadUrlOptions, - CreateSignedUrlResponse, - CreateSignedURLsOptions, - DownloadOptions, - FileOptions, - ListBucketFilesOptions, - RequestMethod, - SearchV2Options, - SearchV2Result, - SignedUploadURL, - SignedUrlJsonResponse, - SignedUrlResponse, - SignedUrlsJsonResponse, - TransformOptions, - UploadData, - UploadResponse, - UploadSignedUrlFileOptions, - URLOptions, - transform_to_dict, -) -from ..utils import StorageException - -__all__ = ["SyncBucket"] - - -def relative_path_to_parts(path: str) -> tuple[str, ...]: - url = URL(path) - if url.absolute or url.parts[0] == "/": - return url.parts[1:] - return url.parts - - -class SyncBucketActionsMixin: - """Functions needed to access the file API.""" - - id: str - _base_url: URL - _client: Client - _headers: Headers - - def _request( - self, - method: RequestMethod, - path: list[str], - headers: Optional[dict[str, Any]] = None, - json: Optional[dict[Any, Any]] = None, - files: Optional[Any] = None, - query_params: Optional[dict[str, str]] = None, - **kwargs: Any, - ) -> Response: - try: - url_path = self._base_url.joinpath(*path).with_query(query_params) - headers = headers or dict() - headers.update(self._headers) - response = self._client.request( - method, - str(url_path), - headers=headers, - json=json, - files=files, - **kwargs, - ) - response.raise_for_status() - except HTTPStatusError as exc: - try: - resp = exc.response.json() - raise StorageApiError( - resp["message"], resp["error"], resp["statusCode"] - ) from exc - except KeyError as err: - message = f"Unable to parse error message: {resp.text}" - raise StorageApiError(message, "InternalError", 400) from err - - # close the resource before returning the response - if files and "file" in files and isinstance(files["file"][1], BufferedReader): - files["file"][1].close() - - return response - - def 
create_signed_upload_url( - self, - path: str, - options: Optional[CreateSignedUploadUrlOptions] = None, - ) -> SignedUploadURL: - """ - Creates a signed upload URL. - - Parameters - ---------- - path - The file path, including the file name. For example `folder/image.png`. - options - Additional options for the upload url creation. - """ - headers: dict[str, str] = dict() - if options is not None and options.upsert: - headers.update({"x-upsert": options.upsert}) - - path_parts = relative_path_to_parts(path) - response = self._request( - "POST", ["object", "upload", "sign", self.id, *path_parts], headers=headers - ) - data = response.json() - full_url: urllib.parse.ParseResult = urllib.parse.urlparse( - str(self._base_url) + cast(str, data["url"]).lstrip("/") - ) - query_params = urllib.parse.parse_qs(full_url.query) - if not query_params.get("token"): - raise StorageException("No token sent by the API") - return { - "signed_url": full_url.geturl(), - "signedUrl": full_url.geturl(), - "token": query_params["token"][0], - "path": path, - } - - def upload_to_signed_url( - self, - path: str, - token: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[UploadSignedUrlFileOptions] = None, - ) -> UploadResponse: - """ - Upload a file with a token generated from :meth:`.create_signed_url` - - Parameters - ---------- - path - The file path, including the file name - token - The token generated from :meth:`.create_signed_url` - file - The file contents or a file-like object to upload - file_options - Additional options for the uploaded file - """ - path_parts = relative_path_to_parts(path) - query_params = {"token": token} - - final_url = ["object", "upload", "sign", self.id, *path_parts] - - options: UploadSignedUrlFileOptions = file_options or {} - cache_control = options.get("cache-control") - # cacheControl is also passed as form data - # 
https://github.com/supabase/storage-js/blob/fa44be8156295ba6320ffeff96bdf91016536a46/src/packages/StorageFileApi.ts#L89 - _data = {} - if cache_control: - options["cache-control"] = f"max-age={cache_control}" - _data = {"cacheControl": cache_control} - headers = { - **self._client.headers, - **DEFAULT_FILE_OPTIONS, - **options, - } - filename = path_parts[-1] - - if ( - isinstance(file, BufferedReader) - or isinstance(file, bytes) - or isinstance(file, FileIO) - ): - # bytes or byte-stream-like object received - _file = {"file": (filename, file, headers.pop("content-type"))} - else: - # str or pathlib.path received - _file = { - "file": ( - filename, - open(file, "rb"), - headers.pop("content-type"), - ) - } - response = self._request( - "PUT", - final_url, - files=_file, - headers=headers, - data=_data, - query_params=query_params, - ) - data: UploadData = response.json() - - return UploadResponse(path=path, Key=data["Key"]) - - def _make_signed_url( - self, signed_url: str, download_query: dict[str, str] - ) -> SignedUrlResponse: - url = URL(signed_url[1:]) # ignore starting slash - signedURL = self._base_url.join(url).extend_query(download_query) - return {"signedURL": str(signedURL), "signedUrl": str(signedURL)} - - def create_signed_url( - self, path: str, expires_in: int, options: Optional[URLOptions] = None - ) -> SignedUrlResponse: - """ - Parameters - ---------- - path - file path to be downloaded, including the current file name. - expires_in - number of seconds until the signed URL expires. - options - options to be passed for downloading or transforming the file. 
- """ - json: dict[str, str | bool | TransformOptions] = {"expiresIn": str(expires_in)} - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - json.update({"download": download}) - download_query = {"download": "" if download is True else download} - if transform := url_options.get("transform"): - json.update({"transform": transform}) - - path_parts = relative_path_to_parts(path) - response = self._request( - "POST", - ["object", "sign", self.id, *path_parts], - json=json, - ) - - data = SignedUrlJsonResponse.model_validate_json(response.content) - return self._make_signed_url(data.signedURL, download_query) - - def create_signed_urls( - self, - paths: List[str], - expires_in: int, - options: Optional[CreateSignedURLsOptions] = None, - ) -> List[CreateSignedUrlResponse]: - """ - Parameters - ---------- - path - file path to be downloaded, including the current file name. - expires_in - number of seconds until the signed URL expires. - options - options to be passed for downloading the file. 
- """ - json: dict[str, str | bool | None | list[str]] = { - "paths": paths, - "expiresIn": str(expires_in), - } - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - json.update({"download": download}) - download_query = {"download": "" if download is True else download} - - response = self._request( - "POST", - ["object", "sign", self.id], - json=json, - ) - data = SignedUrlsJsonResponse.validate_json(response.content) - signed_urls = [] - for item in data: - # Prepare URL - url = self._make_signed_url(item.signedURL, download_query) - signed_item: CreateSignedUrlResponse = { - "error": item.error, - "path": item.path, - "signedURL": url["signedURL"], - "signedUrl": url["signedURL"], - } - signed_urls.append(signed_item) - return signed_urls - - def get_public_url(self, path: str, options: Optional[URLOptions] = None) -> str: - """ - Parameters - ---------- - path - file path, including the path and file name. For example `folder/image.png`. - """ - download_query = {} - url_options = options or {} - if download := url_options.get("download"): - download_query = {"download": "" if download is True else download} - - render_path = ( - ["render", "image"] if url_options.get("transform") else ["object"] - ) - transformation = ( - transform_to_dict(t) if (t := url_options.get("transform")) else dict() - ) - - path_parts = relative_path_to_parts(path) - url = ( - self._base_url.joinpath(*render_path, "public", self.id, *path_parts) - .with_query(download_query) - .extend_query(transformation) - ) - return str(url) - - def move(self, from_path: str, to_path: str) -> dict[str, str]: - """ - Moves an existing file, optionally renaming it at the same time. - - Parameters - ---------- - from_path - The original file path, including the current file name. For example `folder/image.png`. - to_path - The new file path, including the new file name. For example `folder/image-copy.png`. 
- """ - res = self._request( - "POST", - ["object", "move"], - json={ - "bucketId": self.id, - "sourceKey": from_path, - "destinationKey": to_path, - }, - ) - return res.json() - - def copy(self, from_path: str, to_path: str) -> dict[str, str]: - """ - Copies an existing file to a new path in the same bucket. - - Parameters - ---------- - from_path - The original file path, including the current file name. For example `folder/image.png`. - to_path - The new file path, including the new file name. For example `folder/image-copy.png`. - """ - res = self._request( - "POST", - ["object", "copy"], - json={ - "bucketId": self.id, - "sourceKey": from_path, - "destinationKey": to_path, - }, - ) - return res.json() - - def remove(self, paths: list[str]) -> list[dict[str, Any]]: - """ - Deletes files within the same bucket - - Parameters - ---------- - paths - An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. - """ - response = self._request( - "DELETE", - ["object", self.id], - json={"prefixes": paths}, - ) - return response.json() - - def info( - self, - path: str, - ) -> dict[str, Any]: - """ - Lists info for a particular file. - - Parameters - ---------- - path - The path to the file. - """ - path_parts = relative_path_to_parts(path) # split paths by / - response = self._request( - "GET", - ["object", "info", self.id, *path_parts], - ) - return response.json() - - def exists( - self, - path: str, - ) -> bool: - """ - Returns True if the file exists, False otherwise. - - Parameters - ---------- - path - The path to the file. 
- """ - try: - path_parts = relative_path_to_parts(path) # split paths by / - response = self._request( - "HEAD", - ["object", self.id, *path_parts], - ) - return response.status_code == 200 - except json.JSONDecodeError: - return False - - def list( - self, - path: Optional[str] = None, - options: Optional[ListBucketFilesOptions] = None, - ) -> list[dict[str, Any]]: - """ - Lists all the files within a bucket. - - Parameters - ---------- - path - The folder path. - options - Search options, including `limit`, `offset`, `sortBy` and `search`. - """ - extra_options = options or {} - extra_headers = {"Content-Type": "application/json"} - body = { - **DEFAULT_SEARCH_OPTIONS, - **extra_options, - "prefix": path or "", - } - response = self._request( - "POST", - ["object", "list", self.id], - json=body, - headers=extra_headers, - ) - return response.json() - - def list_v2( - self, - options: Optional[SearchV2Options] = None, - ) -> SearchV2Result: - body = {**options} if options else {} - response = self._request( - "POST", - ["object", "list-v2", self.id], - json=body, - ) - return SearchV2Result.model_validate_json(response.content) - - def download( - self, - path: str, - options: Optional[DownloadOptions] = None, - query_params: Optional[Dict[str, str]] = None, - ) -> bytes: - """ - Downloads a file. - - Parameters - ---------- - path - The file path to be downloaded, including the path and file name. For example `folder/image.png`. 
- """ - url_options = options or DownloadOptions() - render_path = ( - ["render", "image", "authenticated"] - if url_options.get("transform") - else ["object"] - ) - - transform_options = url_options.get("transform") or TransformOptions() - - path_parts = relative_path_to_parts(path) - response = self._request( - "GET", - [*render_path, self.id, *path_parts], - query_params={ - **transform_to_dict(transform_options), - **(query_params or {}), - }, - ) - return response.content - - def _upload_or_update( - self, - method: Literal["POST", "PUT"], - path: tuple[str, ...], - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - """ - Uploads a file to an existing bucket. - - Parameters - ---------- - path - The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. - The bucket must already exist before attempting to upload. - file - The File object to be stored in the bucket. or a async generator of chunks - file_options - HTTP headers. 
- """ - if file_options is None: - file_options = {} - cache_control = file_options.pop("cache-control", None) - _data = {} - - upsert = file_options.pop("upsert", None) - if upsert: - file_options.update({"x-upsert": upsert}) - - metadata = file_options.pop("metadata", None) - file_opts_headers = file_options.pop("headers", None) - - headers = { - **self._client.headers, - **DEFAULT_FILE_OPTIONS, - **file_options, - } - - if metadata: - metadata_str = json.dumps(metadata) - headers["x-metadata"] = base64.b64encode(metadata_str.encode()) - _data.update({"metadata": metadata_str}) - - if file_opts_headers: - headers.update({**file_opts_headers}) - - # Only include x-upsert on a POST method - if method != "POST": - del headers["x-upsert"] - - filename = path[-1] - - if cache_control: - headers["cache-control"] = f"max-age={cache_control}" - _data.update({"cacheControl": cache_control}) - - if ( - isinstance(file, BufferedReader) - or isinstance(file, bytes) - or isinstance(file, FileIO) - ): - # bytes or byte-stream-like object received - files = {"file": (filename, file, headers.pop("content-type"))} - else: - # str or pathlib.path received - files = { - "file": ( - filename, - open(file, "rb"), - headers.pop("content-type"), - ) - } - - response = self._request( - method, ["object", self.id, *path], files=files, headers=headers, data=_data - ) - - data: UploadData = response.json() - - return UploadResponse(path="/".join(path), Key=data["Key"]) - - def upload( - self, - path: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - """ - Uploads a file to an existing bucket. - - Parameters - ---------- - path - The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. - The bucket must already exist before attempting to upload. - file - The File object to be stored in the bucket. 
or a async generator of chunks - file_options - HTTP headers. - """ - path_parts = relative_path_to_parts(path) - return self._upload_or_update("POST", path_parts, file, file_options) - - def update( - self, - path: str, - file: Union[BufferedReader, bytes, FileIO, str, Path], - file_options: Optional[FileOptions] = None, - ) -> UploadResponse: - path_parts = relative_path_to_parts(path) - return self._upload_or_update("PUT", path_parts, file, file_options) - - -class SyncBucket(BaseBucket): - """Represents a storage bucket.""" - - -@dataclass -class SyncBucketProxy(SyncBucketActionsMixin): - """A bucket proxy, this contains the minimum required fields to query the File API.""" - - id: str - _base_url: URL - _headers: Headers - _client: Client = field(repr=False) diff --git a/src/storage/src/storage3/_sync/request.py b/src/storage/src/storage3/_sync/request.py deleted file mode 100644 index e2221c56..00000000 --- a/src/storage/src/storage3/_sync/request.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Optional - -from httpx import Client, Headers, HTTPStatusError, QueryParams, Response -from pydantic import ValidationError -from yarl import URL - -from ..exceptions import StorageApiError, VectorBucketErrorMessage -from ..types import JSON, RequestMethod - - -class SyncRequestBuilder: - def __init__(self, session: Client, base_url: URL, headers: Headers) -> None: - self._session = session - self._base_url = base_url - self.headers = headers - - def send( - self, - http_method: RequestMethod, - path: list[str], - body: JSON = None, - query_params: Optional[QueryParams] = None, - ) -> Response: - response = self._session.request( - method=http_method, - json=body, - url=str(self._base_url.joinpath(*path)), - headers=self.headers, - params=query_params or QueryParams(), - ) - try: - response.raise_for_status() - return response - except HTTPStatusError as exc: - try: - error = VectorBucketErrorMessage.model_validate_json(response.content) - raise StorageApiError( 
- message=error.message, - code=error.code or "400", - status=error.statusCode, - ) from exc - except ValidationError as exc: - raise StorageApiError( - message=f"The request failed, but could not parse error message response:'{response.text}'", - code="LibraryError", - status=response.status_code, - ) from exc diff --git a/src/storage/src/storage3/_sync/vectors.py b/src/storage/src/storage3/_sync/vectors.py deleted file mode 100644 index 0673bd23..00000000 --- a/src/storage/src/storage3/_sync/vectors.py +++ /dev/null @@ -1,198 +0,0 @@ -from __future__ import annotations - -from typing import List, Optional - -from httpx import Client, Headers -from yarl import URL - -from ..exceptions import StorageApiError, VectorBucketException -from ..types import ( - JSON, - DistanceMetric, - GetVectorBucketResponse, - GetVectorIndexResponse, - GetVectorsResponse, - ListVectorBucketsResponse, - ListVectorIndexesResponse, - ListVectorsResponse, - MetadataConfiguration, - QueryVectorsResponse, - VectorData, - VectorFilter, - VectorObject, -) -from .request import SyncRequestBuilder - - -# used to not send non-required values as `null` -# for they cannot be null -def remove_none(**kwargs: JSON) -> JSON: - return {key: val for key, val in kwargs.items() if val is not None} - - -class SyncVectorBucketScope: - def __init__(self, request: SyncRequestBuilder, bucket_name: str) -> None: - self._request = request - self._bucket_name = bucket_name - - def with_metadata(self, **data: JSON) -> JSON: - return remove_none(vectorBucketName=self._bucket_name, **data) - - def create_index( - self, - index_name: str, - dimension: int, - distance_metric: DistanceMetric, - data_type: str, - metadata: Optional[MetadataConfiguration] = None, - ) -> None: - body = self.with_metadata( - indexName=index_name, - dimension=dimension, - distanceMetric=distance_metric, - dataType=data_type, - metadataConfiguration=metadata.model_dump(by_alias=True) - if metadata - else None, - ) - 
self._request.send(http_method="POST", path=["CreateIndex"], body=body) - - def get_index(self, index_name: str) -> Optional[GetVectorIndexResponse]: - body = self.with_metadata(indexName=index_name) - try: - data = self._request.send(http_method="POST", path=["GetIndex"], body=body) - return GetVectorIndexResponse.model_validate_json(data.content) - except StorageApiError: - return None - - def list_indexes( - self, - next_token: Optional[str] = None, - max_results: Optional[int] = None, - prefix: Optional[str] = None, - ) -> ListVectorIndexesResponse: - body = self.with_metadata( - next_token=next_token, max_results=max_results, prefix=prefix - ) - data = self._request.send(http_method="POST", path=["ListIndexes"], body=body) - return ListVectorIndexesResponse.model_validate_json(data.content) - - def delete_index(self, index_name: str) -> None: - body = self.with_metadata(indexName=index_name) - self._request.send(http_method="POST", path=["DeleteIndex"], body=body) - - def index(self, index_name: str) -> SyncVectorIndexScope: - return SyncVectorIndexScope(self._request, self._bucket_name, index_name) - - -class SyncVectorIndexScope: - def __init__( - self, request: SyncRequestBuilder, bucket_name: str, index_name: str - ) -> None: - self._request = request - self._bucket_name = bucket_name - self._index_name = index_name - - def with_metadata(self, **data: JSON) -> JSON: - return remove_none( - vectorBucketName=self._bucket_name, - indexName=self._index_name, - **data, - ) - - def put(self, vectors: List[VectorObject]) -> None: - body = self.with_metadata( - vectors=[v.model_dump(exclude_none=True) for v in vectors] - ) - self._request.send(http_method="POST", path=["PutVectors"], body=body) - - def get( - self, *keys: str, return_data: bool = True, return_metadata: bool = True - ) -> GetVectorsResponse: - body = self.with_metadata( - keys=keys, returnData=return_data, returnMetadata=return_metadata - ) - data = self._request.send(http_method="POST", 
path=["GetVectors"], body=body) - return GetVectorsResponse.model_validate_json(data.content) - - def list( - self, - max_results: Optional[int] = None, - next_token: Optional[str] = None, - return_data: bool = True, - return_metadata: bool = True, - segment_count: Optional[int] = None, - segment_index: Optional[int] = None, - ) -> ListVectorsResponse: - body = self.with_metadata( - maxResults=max_results, - nextToken=next_token, - returnData=return_data, - returnMetadata=return_metadata, - segmentCount=segment_count, - segmentIndex=segment_index, - ) - data = self._request.send(http_method="POST", path=["ListVectors"], body=body) - return ListVectorsResponse.model_validate_json(data.content) - - def query( - self, - query_vector: VectorData, - topK: Optional[int] = None, - filter: Optional[VectorFilter] = None, - return_distance: bool = True, - return_metadata: bool = True, - ) -> QueryVectorsResponse: - body = self.with_metadata( - queryVector=dict(query_vector), - topK=topK, - filter=filter, - returnDistance=return_distance, - returnMetadata=return_metadata, - ) - data = self._request.send(http_method="POST", path=["QueryVectors"], body=body) - return QueryVectorsResponse.model_validate_json(data.content) - - def delete(self, keys: List[str]) -> None: - if len(keys) < 1 or len(keys) > 500: - raise VectorBucketException("Keys batch size must be between 1 and 500.") - body = self.with_metadata(keys=keys) - self._request.send(http_method="POST", path=["DeleteVectors"], body=body) - - -class SyncStorageVectorsClient: - def __init__(self, url: URL, headers: Headers, session: Client) -> None: - self._request = SyncRequestBuilder(session, base_url=URL(url), headers=headers) - - def from_(self, bucket_name: str) -> SyncVectorBucketScope: - return SyncVectorBucketScope(self._request, bucket_name) - - def create_bucket(self, bucket_name: str) -> None: - body = {"vectorBucketName": bucket_name} - self._request.send(http_method="POST", path=["CreateVectorBucket"], 
body=body) - - def get_bucket(self, bucket_name: str) -> Optional[GetVectorBucketResponse]: - body = {"vectorBucketName": bucket_name} - try: - data = self._request.send( - http_method="POST", path=["GetVectorBucket"], body=body - ) - return GetVectorBucketResponse.model_validate_json(data.content) - except StorageApiError: - return None - - def list_buckets( - self, - prefix: Optional[str] = None, - max_results: Optional[int] = None, - next_token: Optional[str] = None, - ) -> ListVectorBucketsResponse: - body = remove_none(prefix=prefix, maxResults=max_results, nextToken=next_token) - data = self._request.send( - http_method="POST", path=["ListVectorBuckets"], body=body - ) - return ListVectorBucketsResponse.model_validate_json(data.content) - - def delete_bucket(self, bucket_name: str) -> None: - body = {"vectorBucketName": bucket_name} - self._request.send(http_method="POST", path=["DeleteVectorBucket"], body=body) diff --git a/src/storage/src/storage3/analytics.py b/src/storage/src/storage3/analytics.py new file mode 100644 index 00000000..1dc614a8 --- /dev/null +++ b/src/storage/src/storage3/analytics.py @@ -0,0 +1,97 @@ +from dataclasses import dataclass +from typing import Generic, List + +from pyiceberg.catalog.rest import RestCatalog +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + HttpIO, + HttpMethod, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import ( + EmptyRequest, + JSONRequest, +) +from yarl import URL + +from .exceptions import validate_adapter, validate_model +from .types import ( + AnalyticsBucket, + AnalyticsBucketDeleteResponse, + AnalyticsBucketsParser, + SortColumn, + SortOrder, +) + + +@dataclass +class StorageAnalyticsClient(Generic[HttpIO]): + default_headers: Headers + base_url: URL + executor: HttpIO + + @handle_http_io + def create(self, bucket_name: str) -> HttpMethod[AnalyticsBucket]: + body = {"name": bucket_name} + response = yield 
JSONRequest( + method="POST", + path=["bucket"], + body=body, + ) + return validate_model(response, AnalyticsBucket) + + @handle_http_io + def list( + self, + limit: int | None = None, + offset: int | None = None, + sort_column: SortColumn | None = None, + sort_order: SortOrder | None = None, + search: str | None = None, + ) -> HttpMethod[List[AnalyticsBucket]]: + params = dict( + limit=limit, + offset=offset, + sort_column=sort_column, + sort_order=sort_order, + search=search, + ) + filtered_params = URLQuery.from_mapping( + {k: v for k, v in params.items() if v is not None} + ) + response = yield EmptyRequest( + method="GET", + path=["bucket"], + query=filtered_params, + ) + return validate_adapter(response, AnalyticsBucketsParser) + + @handle_http_io + def delete(self, bucket_name: str) -> HttpMethod[AnalyticsBucketDeleteResponse]: + response = yield EmptyRequest( + method="DELETE", + path=["bucket", bucket_name], + ) + return validate_model(response, AnalyticsBucketDeleteResponse) + + def catalog( + self, catalog_name: str, access_key_id: str, secret_access_key: str + ) -> RestCatalog: + catalog_uri = self.base_url + s3_endpoint = self.base_url.parent.joinpath("s3") + service_key = self.default_headers.get("apiKey") + assert service_key, "apiKey must be passed in the headers." 
+ return RestCatalog( + catalog_name, + warehouse=catalog_name, + uri=str(catalog_uri), + token=service_key, + **{ + "py-io-impl": "pyiceberg.io.pyarrow.PyArrowFileIO", + "s3.endpoint": str(s3_endpoint), + "s3.access-key-id": access_key_id, + "s3.secret-access-key": secret_access_key, + "s3.force-virtual-addressing": "False", + }, + ) diff --git a/src/storage/src/storage3/client.py b/src/storage/src/storage3/client.py new file mode 100644 index 00000000..d6c1b996 --- /dev/null +++ b/src/storage/src/storage3/client.py @@ -0,0 +1,270 @@ +from __future__ import annotations + +import platform +import warnings +from types import TracebackType +from typing import Generic + +from pydantic import TypeAdapter +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + AsyncHttpIO, + AsyncHttpSession, + HttpIO, + HttpMethod, + HttpSession, + SyncHttpIO, + handle_http_io, +) +from supabase_utils.http.request import EmptyRequest, JSONRequest +from yarl import URL + +from .analytics import StorageAnalyticsClient +from .exceptions import validate_adapter, validate_model +from .file_api import StorageFileApiClient +from .types import Bucket, BucketName, CreateOrUpdateBucketBody, MessageResponse +from .vectors import StorageVectorsClient +from .version import __version__ + +DEFAULT_TIMEOUT = 20 + +__all__ = [ + "StorageClient", +] + +ListBucketAdapter = TypeAdapter(list[Bucket]) + + +class StorageClient(Generic[HttpIO]): + """Manage storage buckets and files.""" + + def __init__( + self, + url: str, + executor: HttpIO, + headers: dict[str, str], + ) -> None: + headers = { + "X-Client-Info": f"supabase-py/storage3 v{__version__}", + "X-Supabase-Client-Platform": platform.system(), + "X-Supabase-Client-Platform-Version": platform.release(), + "X-Supabase-Client-Runtime": "python", + "X-Supabase-Client-Runtime-Version": platform.python_version(), + **headers, + } + + self.executor: HttpIO = executor + if url and url[-1] != "/": + warnings.warn("Storage 
endpoint URL should have a trailing slash.") + url += "/" + self.base_url = URL(url) + self.default_headers = Headers.from_mapping(headers) + + def from_(self, id: str) -> StorageFileApiClient[HttpIO]: + """Run a storage file operation. + + Parameters + ---------- + id + The unique identifier of the bucket + """ + return StorageFileApiClient( + id, self.base_url, self.executor, self.default_headers + ) + + def vectors(self) -> StorageVectorsClient[HttpIO]: + return StorageVectorsClient( + base_url=self.base_url.joinpath("vector"), + default_headers=self.default_headers, + executor=self.executor, + ) + + def analytics(self) -> StorageAnalyticsClient[HttpIO]: + return StorageAnalyticsClient( + default_headers=self.default_headers, + base_url=self.base_url.joinpath("iceberg"), + executor=self.executor, + ) + + @handle_http_io + def list_buckets(self) -> HttpMethod[list[Bucket]]: + """Retrieves the details of all storage buckets within an existing product.""" + # if the request doesn't error, it is assured to return a list + response = yield EmptyRequest( + method="GET", + path=["bucket"], + ) + return validate_adapter(response, ListBucketAdapter) + + @handle_http_io + def get_bucket(self, id: str) -> HttpMethod[Bucket]: + """Retrieves the details of an existing storage bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to retrieve. + """ + response = yield EmptyRequest( + method="GET", + path=["bucket", id], + ) + return validate_model(response, Bucket) + + @handle_http_io + def create_bucket( + self, + id: str, + name: str | None = None, + public: bool | None = None, + file_size_limit: int | None = None, + allowed_mime_types: list[str] | None = None, + ) -> HttpMethod[BucketName]: + """Creates a new storage bucket. + + Parameters + ---------- + id + A unique identifier for the bucket you are creating. + name + A name for the bucket you are creating. If not passed, the id is used as the name as well. 
+ options + Extra options to send while creating the bucket. Valid options are `public`, `file_size_limit` and + `allowed_mime_types`. + """ + body = CreateOrUpdateBucketBody( + id=id, + name=name or id, + public=public, + file_size_limit=file_size_limit, + allowed_mime_types=allowed_mime_types, + ) + response = yield JSONRequest( + method="POST", + path=["bucket"], + body=body, + exclude_none=True, + ) + + return validate_model(response, BucketName) + + @handle_http_io + def update_bucket( + self, + id: str, + public: bool | None = None, + file_size_limit: int | None = None, + allowed_mime_types: list[str] | None = None, + ) -> HttpMethod[MessageResponse]: + """Update a storage bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to update. + options + The properties you want to update. Valid options are `public`, `file_size_limit` and + `allowed_mime_types`. + """ + body = CreateOrUpdateBucketBody( + id=id, + name=id, + public=public, + file_size_limit=file_size_limit, + allowed_mime_types=allowed_mime_types, + ) + response = yield JSONRequest( + method="PUT", + path=["bucket", id], + body=body, + exclude_none=True, + ) + return validate_model(response, MessageResponse) + + @handle_http_io + def empty_bucket(self, id: str) -> HttpMethod[MessageResponse]: + """Removes all objects inside a single bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to empty. + """ + response = yield EmptyRequest( + method="POST", + path=["bucket", id, "empty"], + ) + + return validate_model(response, MessageResponse) + + @handle_http_io + def delete_bucket(self, id: str) -> HttpMethod[MessageResponse]: + """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first + `empty()` the bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to delete. 
+ """ + response = yield EmptyRequest( + method="DELETE", + path=["bucket", id], + ) + return validate_model(response, MessageResponse) + + +class AsyncStorageClient(StorageClient[AsyncHttpIO]): + def __init__( + self, + url: str, + headers: dict[str, str], + http_session: AsyncHttpSession, + timeout: int | None = None, + ) -> None: + StorageClient.__init__( + self, + url=url, + headers=headers, + executor=AsyncHttpIO(session=http_session), + ) + + async def __aenter__(self) -> AsyncStorageClient: + await self.executor.session.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.executor.session.__aexit__(exc_type, exc, tb) + + +class SyncStorageClient(StorageClient[SyncHttpIO]): + def __init__( + self, + url: str, + headers: dict[str, str], + http_session: HttpSession, + timeout: int | None = None, + ) -> None: + StorageClient.__init__( + self, + url=url, + headers=headers, + executor=SyncHttpIO(session=http_session), + ) + + def __enter__(self) -> SyncStorageClient: + self.executor.session.__enter__() + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + self.executor.session.__exit__(exc_type, exc, tb) diff --git a/src/storage/src/storage3/constants.py b/src/storage/src/storage3/constants.py deleted file mode 100644 index 7bfc2f32..00000000 --- a/src/storage/src/storage3/constants.py +++ /dev/null @@ -1,15 +0,0 @@ -DEFAULT_SEARCH_OPTIONS = { - "limit": 100, - "offset": 0, - "sortBy": { - "column": "name", - "order": "asc", - }, -} -DEFAULT_FILE_OPTIONS = { - "cache-control": "3600", - "content-type": "text/plain;charset=UTF-8", - "x-upsert": "false", -} - -DEFAULT_TIMEOUT = 20 diff --git a/src/storage/src/storage3/exceptions.py b/src/storage/src/storage3/exceptions.py index d5be790a..34cb7316 100644 --- a/src/storage/src/storage3/exceptions.py +++ 
b/src/storage/src/storage3/exceptions.py @@ -1,46 +1,63 @@ -from typing import Optional, TypedDict, Union +from typing import TypeVar -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter, ValidationError +from pydantic.dataclasses import dataclass +from supabase_utils.http.request import Response -from .utils import StorageException + +class StorageException(Exception): + """Error raised when an operation on the storage API fails.""" -class VectorBucketException(Exception): +class VectorBucketException(StorageException): def __init__(self, msg: str) -> None: self.msg = msg class VectorBucketErrorMessage(BaseModel): - statusCode: Union[str, int] + statusCode: str | int error: str message: str - code: Optional[str] = None + code: str | None = None -class StorageApiErrorDict(TypedDict): - name: str +@dataclass +class StorageApiError(StorageException): message: str code: str - status: Union[int, str] + status: int | str + def __repr__(self) -> str: + return str(self) + + def __str__(self) -> str: + return f"StorageApiError(message='{self.message}', code={self.code}, status='{self.status}')" + + +StorageApiErrorParser = TypeAdapter(StorageApiError) + + +def parse_api_error(response: Response) -> StorageApiError: + try: + return StorageApiErrorParser.validate_json(response.content) + except ValidationError: + message = f"Unable to parse error message: {response.content.decode('utf-8')}" + return StorageApiError(message=message, code="InternalError", status=400) + + +Inner = TypeVar("Inner") + + +def validate_adapter(response: Response, type_adapter: TypeAdapter[Inner]) -> Inner: + if response.is_success: + return type_adapter.validate_json(response.content) + raise parse_api_error(response) + + +Model = TypeVar("Model", bound=BaseModel) -class StorageApiError(StorageException): - """Error raised when an operation on the storage API fails.""" - def __init__(self, message: str, code: str, status: Union[int, str]) -> None: - error_message = ( - 
f"{{'statusCode': {status}, 'error': {code}, 'message': {message}}}" - ) - super().__init__(error_message) - self.name = "StorageApiError" - self.message = message - self.code = code - self.status = status - - def to_dict(self) -> StorageApiErrorDict: - return { - "name": self.name, - "code": self.code, - "message": self.message, - "status": self.status, - } +def validate_model(response: Response, model: type[Model]) -> Model: + if response.is_success: + return model.model_validate_json(response.content) + raise parse_api_error(response) diff --git a/src/storage/src/storage3/file_api.py b/src/storage/src/storage3/file_api.py new file mode 100644 index 00000000..37f4af6b --- /dev/null +++ b/src/storage/src/storage3/file_api.py @@ -0,0 +1,647 @@ +from __future__ import annotations + +import base64 +from dataclasses import dataclass +from io import BufferedReader, FileIO +from pathlib import Path +from typing import Any, Dict, Generic, List, Literal, Tuple + +from pydantic import TypeAdapter +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + HttpIO, + HttpMethod, + handle_http_io, +) +from supabase_utils.http.query import URLQuery +from supabase_utils.http.request import ( + DataField, + EmptyRequest, + FileField, + JSONRequest, + MultipartFormDataRequest, + PartField, + Response, +) +from supabase_utils.types import JSONParser +from yarl import URL + +from .exceptions import parse_api_error, validate_adapter, validate_model +from .types import ( + CreateSignedUrlBody, + CreateSignedUrlResponse, + CreateSignedUrlsBody, + FileObject, + ListBody, + ListFileObject, + MessageResponse, + SearchV2Body, + SearchV2Result, + SignedUploadURL, + SignedUploadUrlResponse, + SignedUrlJsonResponse, + SignedUrlsJsonResponse, + SortByType, + SortByV2, + TransformOptions, + UploadResponse, + transform_to_dict, +) + +__all__ = ["StorageFileApiClient"] + + +def relative_path_to_parts(path: str) -> Tuple[str, ...]: + url = URL(path) + if url.absolute 
or url.parts[0] == "/": + return url.parts[1:] + return url.parts + + +def maybe_read_file(file: BufferedReader | bytes | FileIO | str | Path) -> bytes: + if isinstance(file, (BufferedReader, FileIO)): + # bytes or byte-stream-like object received + return file.read() + elif isinstance(file, bytes): + return file + else: + # str or pathlib.path received + with open(file, "rb") as f: + return f.read() + + +FileObjectsAdapter = TypeAdapter(list[FileObject]) +ListFileObjectsAdapter = TypeAdapter(list[ListFileObject]) + + +@dataclass +class StorageFileApiClient(Generic[HttpIO]): + """Functions needed to access the file API.""" + + id: str + base_url: URL + executor: HttpIO + default_headers: Headers + + def _parse_signed_url_response(self, response: Response) -> SignedUploadURL: + if not response.is_success: + raise parse_api_error(response) + signed_url_upload = SignedUploadUrlResponse.model_validate_json( + response.content + ) + path_parts = URL(signed_url_upload.url.lstrip("/")) + url = self.base_url.join(path_parts) + + return SignedUploadURL( + signed_url=str(url), + token=signed_url_upload.token, + ) + + @handle_http_io + def create_signed_upload_url( + self, + path: str, + upsert: str | None = None, + ) -> HttpMethod[SignedUploadURL]: + """ + Creates a signed upload URL. + + Parameters + ---------- + path + The file path, including the file name. For example `folder/image.png`. + options + Additional options for the upload url creation. + """ + headers = Headers.empty() + if upsert: + headers = headers.set("x-upsert", upsert) + + path_parts: Tuple[str, ...] 
= relative_path_to_parts(path) + response = yield EmptyRequest( + method="POST", + path=["object", "upload", "sign", self.id, *path_parts], + headers=headers, + ) + + return self._parse_signed_url_response(response) + + @handle_http_io + def upload_to_signed_url( + self, + path: str, + token: str, + file: BufferedReader | bytes | FileIO | str | Path, + content_type: str = "text/plain;charset=UTF-8", + cache_control: str = "3600", + metadata: Dict[str, Any] | None = None, + headers: Dict[str, str] | None = None, + ) -> HttpMethod[UploadResponse]: + """ + Upload a file with a token generated from :meth:`.create_signed_url` + + Parameters + ---------- + path + The file path, including the file name + token + The token generated from :meth:`.create_signed_url` + file + The file contents or a file-like object to upload + """ + path_parts: Tuple[str, ...] = relative_path_to_parts(path) + query_params = URLQuery.from_mapping({"token": token}) + + http_headers = Headers.from_mapping(headers) if headers else Headers.empty() + http_headers = http_headers.set("x-upsert", "false").set( + "cache-control", f"max-age={cache_control}" + ) + + cache_control_field = DataField( + name="cacheControl", + data=cache_control.encode("utf-8"), + ) + fields: list[PartField] = [cache_control_field] + if metadata is not None: + metadata_bytes = JSONParser.dump_json(metadata) + metadata_b64_encoded = base64.b64encode(metadata_bytes) + http_headers = http_headers.set( + "x-metadata", metadata_b64_encoded.decode("utf-8") + ) + fields.append( + DataField( + name="metadata", + data=metadata_bytes, + ) + ) + file_field = FileField( + name="file", + filename=path_parts[-1], + data=maybe_read_file(file), + content_type=content_type, + ) + fields.append(file_field) + response = yield MultipartFormDataRequest( + method="PUT", + path=["object", "upload", "sign", self.id, *path_parts], + fields=fields, + headers=http_headers, + query=query_params, + ) + + return validate_model(response, UploadResponse) + 
+ def _make_signed_url(self, signed_url: str, download_query: URLQuery) -> str: + url = URL(signed_url[1:]) # ignore starting slash + signed = self.base_url.join(url).extend_query(download_query.as_query()) + return str(signed) + + def _parse_signed_url(self, response: Response, download_query: URLQuery) -> str: + if not response.is_success: + raise parse_api_error(response) + signed_url_obj = SignedUrlJsonResponse.model_validate_json(response.content) + return self._make_signed_url(signed_url_obj.signedURL, download_query) + + @handle_http_io + def create_signed_url( + self, + path: str, + expires_in: int, + download: str | bool | None = None, + transform: TransformOptions | None = None, + ) -> HttpMethod[str]: + """ + Parameters + ---------- + path + file path to be downloaded, including the current file name. + expires_in + number of seconds until the signed URL expires. + options + options to be passed for downloading or transforming the file. + """ + download_query = URLQuery.empty() + if download: + download_query = download_query.set( + "download", "" if download is True else download + ) + + path_parts: Tuple[str, ...] 
= relative_path_to_parts(path) + body = CreateSignedUrlBody( + expiresIn=expires_in, + download=download, + transform=transform, + ) + response = yield JSONRequest( + method="POST", + path=["object", "sign", self.id, *path_parts], + body=body, + exclude_none=True, + ) + + return self._parse_signed_url(response, download_query) + + def _parse_signed_urls( + self, response: Response, download_query: URLQuery + ) -> List[CreateSignedUrlResponse]: + if not response.is_success: + raise parse_api_error(response) + data = SignedUrlsJsonResponse.validate_json(response.content) + signed_urls = [] + for item in data: + # Prepare URL + url = self._make_signed_url(item.signedURL, download_query) + signed_item = CreateSignedUrlResponse( + error=item.error, + path=item.path, + signed_url=url, + ) + signed_urls.append(signed_item) + return signed_urls + + @handle_http_io + def create_signed_urls( + self, + paths: List[str], + expires_in: int, + download: bool | str | None = None, + ) -> HttpMethod[List[CreateSignedUrlResponse]]: + """ + Parameters + ---------- + path + file path to be downloaded, including the current file name. + expires_in + number of seconds until the signed URL expires. + options + options to be passed for downloading the file. + """ + download_query = URLQuery.empty() + if download: + download_query = download_query.set( + "download", "" if download is True else download + ) + + body = CreateSignedUrlsBody( + download=download, + expiresIn=expires_in, + paths=paths, + ) + response = yield JSONRequest( + method="POST", + path=["object", "sign", self.id], + body=body, + ) + return self._parse_signed_urls(response, download_query) + + def get_public_url( + self, + path: str, + download: bool | str | None = None, + transform: TransformOptions | None = None, + ) -> str: + """ + Parameters + ---------- + path + file path, including the path and file name. For example `folder/image.png`. 
+ """ + download_query = URLQuery.empty() + if download: + download_query = download_query.set( + "download", "" if download is True else download + ) + + render_path = ["render", "image"] if transform else ["object"] + transformation = transform_to_dict(transform) if transform else dict() + + path_parts = relative_path_to_parts(path) + url = ( + self.base_url.joinpath(*render_path, "public", self.id, *path_parts) + .with_query(download_query.as_query()) + .extend_query(transformation) + ) + return str(url) + + @handle_http_io + def move(self, from_path: str, to_path: str) -> HttpMethod[MessageResponse]: + """ + Moves an existing file, optionally renaming it at the same time. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. + """ + response = yield JSONRequest( + method="POST", + path=["object", "move"], + body={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path, + }, + ) + return validate_model(response, MessageResponse) + + @handle_http_io + def copy(self, from_path: str, to_path: str) -> HttpMethod[UploadResponse]: + """ + Copies an existing file to a new path in the same bucket. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. 
+ """ + response = yield JSONRequest( + method="POST", + path=["object", "copy"], + body={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path, + }, + ) + + return validate_model(response, UploadResponse) + + @handle_http_io + def remove(self, paths: list[str]) -> HttpMethod[list[FileObject]]: + """ + Deletes files within the same bucket + + Parameters + ---------- + paths + An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. + """ + response = yield JSONRequest( + method="DELETE", + path=["object", self.id], + body={"prefixes": paths}, + ) + return validate_adapter(response, FileObjectsAdapter) + + @handle_http_io + def info( + self, + path: str, + ) -> HttpMethod[FileObject]: + """ + Lists info for a particular file. + + Parameters + ---------- + path + The path to the file. + """ + path_parts: Tuple[str, ...] = relative_path_to_parts(path) # split paths by / + response = yield EmptyRequest( + method="GET", + path=["object", "info", self.id, *path_parts], + ) + return validate_model(response, FileObject) + + @handle_http_io + def exists( + self, + path: str, + ) -> HttpMethod[bool]: + """ + Returns True if the file exists, False otherwise. + + Parameters + ---------- + path + The path to the file. + """ + path_parts: Tuple[str, ...] = relative_path_to_parts(path) # split paths by / + response = yield EmptyRequest( + method="HEAD", + path=["object", self.id, *path_parts], + ) + if response.is_success: + return True + elif 400 <= response.status <= 401: + return False + else: + raise parse_api_error(response) + + @handle_http_io + def list( + self, + path: str | None = None, + limit: int = 100, + offset: int = 0, + search: str | None = None, + sortBy: SortByType | None = None, + ) -> HttpMethod[List[ListFileObject]]: + """ + Lists all the files within a bucket. + + Parameters + ---------- + path + The folder path. 
+ options + Search options, including `limit`, `offset`, `sortBy` and `search`. + """ + body = ListBody( + prefix=path or "", + limit=limit, + offset=offset, + sortBy=sortBy or SortByType(), + search=search, + ) + response = yield JSONRequest( + method="POST", + path=["object", "list", self.id], + body=body, + ) + return validate_adapter(response, ListFileObjectsAdapter) + + @handle_http_io + def list_v2( + self, + limit: int | None = None, + prefix: str | None = None, + cursor: str | None = None, + with_delimiter: bool | None = None, + sort_by: SortByV2 | None = None, + ) -> HttpMethod[SearchV2Result]: + body = SearchV2Body( + limit=limit, + prefix=prefix, + cursor=cursor, + with_delimiter=with_delimiter, + sortBy=sort_by, + ) + response = yield JSONRequest( + method="POST", + path=["object", "list-v2", self.id], + body=body, + exclude_none=True, + ) + return validate_model(response, SearchV2Result) + + @handle_http_io + def download( + self, + path: str, + transform: TransformOptions | None = None, + query_params: Dict[str, str] | None = None, + ) -> HttpMethod[bytes]: + """ + Downloads a file. + + Parameters + ---------- + path + The file path to be downloaded, including the path and file name. For example `folder/image.png`. + """ + render_path: List[str] = ["object"] + params = ( + URLQuery.from_mapping(query_params) if query_params else URLQuery.empty() + ) + if transform: + params = params.merge(URLQuery.from_mapping(transform_to_dict(transform))) + render_path = ["render", "image", "authenticated"] + path_parts: Tuple[str, ...] 
= relative_path_to_parts(path) + response = yield EmptyRequest( + method="GET", + path=[*render_path, self.id, *path_parts], + query=params, + ) + if not response.is_success: + raise parse_api_error(response) + return response.content + + def _upload_or_update( + self, + method: Literal["POST", "PUT"], + path: tuple[str, ...], + file: BufferedReader | bytes | FileIO | str | Path, + cache_control: str, + content_type: str, + upsert: str, + metadata: Dict[str, Any] | None, + headers: Dict[str, str] | None, + ) -> HttpMethod[UploadResponse]: + """ + Uploads a file to an existing bucket. + + Parameters + ---------- + path + The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. + The bucket must already exist before attempting to upload. + file + The File object to be stored in the bucket. or a async generator of chunks + file_options + HTTP headers. + """ + + http_headers = Headers.from_mapping(headers) if headers else Headers.empty() + + http_headers = http_headers.set("cache-control", f"max-age={cache_control}") + + # Only include x-upsert on a POST method + if method == "POST": + http_headers = http_headers.set("x-upsert", upsert) + + cache_control_field = DataField( + name="cacheControl", + data=cache_control.encode("utf-8"), + ) + fields: list[PartField] = [cache_control_field] + if metadata is not None: + metadata_bytes = JSONParser.dump_json(metadata) + metadata_b64_encoded = base64.b64encode(metadata_bytes) + http_headers = http_headers.set( + "x-metadata", metadata_b64_encoded.decode("utf-8") + ) + fields.append( + DataField( + name="metadata", + data=metadata_bytes, + ) + ) + file_field = FileField( + name="file", + filename=path[-1], + data=maybe_read_file(file), + content_type=content_type, + ) + fields.append(file_field) + response = yield MultipartFormDataRequest( + method=method, + path=["object", self.id, *path], + fields=fields, + headers=http_headers, + ) + + return validate_model(response, 
UploadResponse) + + @handle_http_io + def upload( + self, + path: str, + file: BufferedReader | bytes | FileIO | str | Path, + cache_control: str = "3600", + content_type: str = "text/plain;charset=UTF-8", + upsert: str = "false", + metadata: Dict[str, Any] | None = None, + headers: Dict[str, str] | None = None, + ) -> HttpMethod[UploadResponse]: + """ + Uploads a file to an existing bucket. + + Parameters + ---------- + path + The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. + The bucket must already exist before attempting to upload. + file + The File object to be stored in the bucket. or a async generator of chunks + file_options + HTTP headers. + """ + path_parts = relative_path_to_parts(path) + return self._upload_or_update( + method="POST", + path=path_parts, + file=file, + cache_control=cache_control, + content_type=content_type, + upsert=upsert, + metadata=metadata, + headers=headers, + ) + + @handle_http_io + def update( + self, + path: str, + file: BufferedReader | bytes | FileIO | str | Path, + cache_control: str = "3600", + content_type: str = "text/plain;charset=UTF-8", + upsert: str = "false", + metadata: Dict[str, Any] | None = None, + headers: Dict[str, str] | None = None, + ) -> HttpMethod[UploadResponse]: + path_parts = relative_path_to_parts(path) + return self._upload_or_update( + method="PUT", + path=path_parts, + file=file, + cache_control=cache_control, + content_type=content_type, + upsert=upsert, + metadata=metadata, + headers=headers, + ) diff --git a/src/storage/src/storage3/types.py b/src/storage/src/storage3/types.py index 886f50b7..8758b5b7 100644 --- a/src/storage/src/storage3/types.py +++ b/src/storage/src/storage3/types.py @@ -1,23 +1,16 @@ from __future__ import annotations -from collections.abc import Mapping, Sequence -from dataclasses import asdict, dataclass from datetime import datetime -from typing import Any, Dict, List, Literal, Optional, TypedDict, Union +from 
typing import Any, Dict, List, Literal from pydantic import BaseModel, Field, TypeAdapter -from typing_extensions import ReadOnly, TypeAlias, TypeAliasType +from pydantic.dataclasses import dataclass +from typing_extensions import TypeAlias, TypedDict RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"] -# https://docs.pydantic.dev/2.11/concepts/types/#named-recursive-types -JSON = TypeAliasType( - "JSON", "Union[None, bool, str, int, float, Sequence[JSON], Mapping[str, JSON]]" -) -JSONAdapter: TypeAdapter = TypeAdapter(JSON) - -class BaseBucket(BaseModel, extra="ignore"): +class Bucket(BaseModel, extra="ignore"): """Represents a file storage bucket.""" id: str @@ -26,56 +19,90 @@ class BaseBucket(BaseModel, extra="ignore"): public: bool created_at: datetime updated_at: datetime - file_size_limit: Optional[int] - allowed_mime_types: Optional[list[str]] - type: Optional[str] = None + file_size_limit: int | None + allowed_mime_types: list[str] | None + type: Literal["STANDARD", "ANALYTICS"] | None = None + + +class BucketName(BaseModel, extra="ignore"): + name: str # used in bucket.list method's option parameter -class _sortByType(TypedDict, total=False): - column: str - order: Literal["asc", "desc"] +@dataclass +class SortByType: + column: str = "name" + order: Literal["asc", "desc"] = "asc" + + +class ListBody(BaseModel): + prefix: str + limit: int + offset: int + search: str | None + sortBy: SortByType + + +class SignedUploadUrlResponse(BaseModel): + url: str + token: str -class SignedUploadURL(TypedDict): +@dataclass +class SignedUploadURL: signed_url: str - signedUrl: str token: str - path: str -class CreateOrUpdateBucketOptions(TypedDict, total=False): - public: bool - file_size_limit: int - allowed_mime_types: list[str] +@dataclass +class TransformOptions: + height: int | None = None + width: int | None = None + resize: Literal["cover", "contain", "fill"] | None = None + format: Literal["origin", "avif"] | None = None + quality: int | None = None 
-class ListBucketFilesOptions(TypedDict, total=False): - limit: int - offset: int - sortBy: _sortByType - search: str +class CreateSignedUrlBody(BaseModel): + expiresIn: int + download: str | bool | None + transform: TransformOptions | None -class TransformOptions(TypedDict, total=False): - height: ReadOnly[int] - width: ReadOnly[int] - resize: ReadOnly[Literal["cover", "contain", "fill"]] - format: ReadOnly[Literal["origin", "avif"]] - quality: ReadOnly[int] +class CreateSignedUrlsBody(BaseModel): + paths: List[str] + expiresIn: int + download: str | bool | None def transform_to_dict(t: TransformOptions) -> dict[str, str]: - return {key: str(val) for key, val in t.items()} + return {key: str(val) for key, val in t.__dict__.items() if val} -class URLOptions(TypedDict, total=False): - download: Union[str, bool] - transform: TransformOptions +class CreateOrUpdateBucketBody(BaseModel): + id: str + name: str | None + public: bool | None + file_size_limit: int | None + allowed_mime_types: list[str] | None + + +class MessageResponse(BaseModel): + message: str -class CreateSignedURLsOptions(TypedDict, total=False): - download: Union[str, bool] +class FileObject(BaseModel): + id: str + version: str + name: str + bucket_id: str + created_at: datetime + metadata: Dict[str, Any] + last_modified: datetime | None = None + size: int | None = None + cache_control: str | None = None + content_type: str | None = None + etag: str | None = None class SortByV2(TypedDict, total=False): @@ -97,39 +124,40 @@ class SearchV2Object(BaseModel): updated_at: datetime created_at: datetime metadata: Dict[str, Any] - key: Optional[str] = None + key: str | None = None class SearchV2Folder(BaseModel): key: str name: str - created_at: Optional[datetime] = None - updated_at: Optional[datetime] = None + created_at: datetime | None = None + updated_at: datetime | None = None + + +class SearchV2Body(BaseModel): + limit: int | None = None + prefix: str | None = None + cursor: str | None = None + 
with_delimiter: bool | None = None + sortBy: SortByV2 | None = None class SearchV2Result(BaseModel): hasNext: bool folders: List[SearchV2Folder] objects: List[SearchV2Object] - nextCursor: Optional[str] = None + nextCursor: str | None = None -class DownloadOptions(TypedDict, total=False): - transform: TransformOptions - - -FileOptions = TypedDict( - "FileOptions", - { - "cache-control": str, - "content-type": str, - "x-upsert": str, - "upsert": str, - "metadata": Dict[str, Any], - "headers": Dict[str, str], - }, - total=False, -) +class ListFileObject(BaseModel): + id: str + name: str + owner: str | None = None + bucket_id: str | None = None + updated_at: datetime + created_at: datetime + metadata: Dict[str, Any] + buckets: Bucket | None = None class UploadData(TypedDict, total=False): @@ -137,30 +165,15 @@ class UploadData(TypedDict, total=False): Key: str -@dataclass -class UploadResponse: - path: str - full_path: str - fullPath: str - - def __init__(self, path: str, Key: str) -> None: - self.path = path - self.full_path = Key - self.fullPath = Key - - dict = asdict - - -class SignedUrlResponse(TypedDict): - signedURL: str - signedUrl: str +class UploadResponse(BaseModel): + Key: str -class CreateSignedUrlResponse(TypedDict): - error: Optional[str] +@dataclass +class CreateSignedUrlResponse: + error: str | None path: str - signedURL: str - signedUrl: str + signed_url: str class SignedUrlJsonResponse(BaseModel, extra="ignore"): @@ -168,42 +181,26 @@ class SignedUrlJsonResponse(BaseModel, extra="ignore"): class SignedUrlsJsonItem(BaseModel, extra="ignore"): - error: Optional[str] + error: str | None path: str signedURL: str SignedUrlsJsonResponse = TypeAdapter(list[SignedUrlsJsonItem]) - -class CreateSignedUploadUrlOptions(BaseModel, extra="ignore"): - upsert: str - - -UploadSignedUrlFileOptions = TypedDict( - "UploadSignedUrlFileOptions", - { - "cache-control": str, - "content-type": str, - "metadata": Dict[str, Any], - "headers": Dict[str, str], - }, - 
total=False, -) - DistanceMetric: TypeAlias = Literal["cosine", "euclidean"] class MetadataConfiguration(BaseModel, extra="ignore"): - non_filterable_metadata_keys: Optional[List[str]] = Field( + non_filterable_metadata_keys: List[str] | None = Field( alias="nonFilterableMetadataKeys" ) class ListIndexesOptions(BaseModel, extra="ignore"): - nextToken: Optional[str] = None - maxResults: Optional[int] = None - prefix: Optional[str] = None + nextToken: str | None = None + maxResults: int | None = None + prefix: str | None = None class ListIndexesResponseItem(BaseModel, extra="ignore"): @@ -212,7 +209,7 @@ class ListIndexesResponseItem(BaseModel, extra="ignore"): class ListVectorIndexesResponse(BaseModel, extra="ignore"): indexes: List[ListIndexesResponseItem] - nextToken: Optional[str] = None + nextToken: str | None = None class VectorIndex(BaseModel, extra="ignore"): @@ -221,10 +218,10 @@ class VectorIndex(BaseModel, extra="ignore"): data_type: str = Field(alias="dataType") dimension: int distance_metric: DistanceMetric = Field(alias="distanceMetric") - metadata: Optional[MetadataConfiguration] = Field( + metadata: MetadataConfiguration | None = Field( alias="metadataConfiguration", default=None ) - creation_time: Optional[datetime] = None + creation_time: datetime | None = None class GetVectorIndexResponse(BaseModel, extra="ignore"): @@ -241,14 +238,14 @@ class VectorData(BaseModel, extra="ignore"): class VectorObject(BaseModel, extra="ignore"): key: str data: VectorData - metadata: Optional[dict[str, Union[str, bool, float]]] = None + metadata: dict[str, str | bool | float] | None = None class VectorMatch(BaseModel, extra="ignore"): key: str - data: Optional[VectorData] = None - distance: Optional[float] = None - metadata: Optional[dict[str, Any]] = None + data: VectorData | None = None + distance: float | None = None + metadata: dict[str, Any] | None = None class GetVectorsResponse(BaseModel, extra="ignore"): @@ -257,7 +254,7 @@ class GetVectorsResponse(BaseModel, 
extra="ignore"): class ListVectorsResponse(BaseModel, extra="ignore"): vectors: List[VectorMatch] - nextToken: Optional[str] = None + nextToken: str | None = None class QueryVectorsResponse(BaseModel, extra="ignore"): @@ -266,8 +263,8 @@ class QueryVectorsResponse(BaseModel, extra="ignore"): class AnalyticsBucket(BaseModel, extra="ignore"): name: str - type: Optional[Literal["ANALYTICS"]] = None - format: Optional[str] = None + type: Literal["ANALYTICS"] | None = None + format: str | None = None created_at: datetime updated_at: datetime @@ -283,14 +280,14 @@ class AnalyticsBucketDeleteResponse(BaseModel, extra="ignore"): class VectorBucketEncryptionConfiguration(BaseModel, extra="ignore"): - kmsKeyArn: Optional[str] = None - sseType: Optional[str] = None + kmsKeyArn: str | None = None + sseType: str | None = None class VectorBucket(BaseModel, extra="ignore"): vectorBucketName: str - creationTime: Optional[datetime] = None - encryptionConfiguration: Optional[VectorBucketEncryptionConfiguration] = None + creationTime: datetime | None = None + encryptionConfiguration: VectorBucketEncryptionConfiguration | None = None class GetVectorBucketResponse(BaseModel, extra="ignore"): @@ -303,4 +300,4 @@ class ListVectorBucketsItem(BaseModel, extra="ignore"): class ListVectorBucketsResponse(BaseModel, extra="ignore"): vectorBuckets: List[ListVectorBucketsItem] - nextToken: Optional[str] = None + nextToken: str | None = None diff --git a/src/storage/src/storage3/utils.py b/src/storage/src/storage3/utils.py deleted file mode 100644 index 0b323ec2..00000000 --- a/src/storage/src/storage3/utils.py +++ /dev/null @@ -1,26 +0,0 @@ -from deprecation import deprecated -from httpx import AsyncClient as AsyncClient # noqa: F401 -from httpx import Client - -from .version import __version__ - - -class SyncClient(Client): - @deprecated( - "0.11.3", "3.0.0", __version__, "Use `Client` from the httpx package instead" - ) - def __init__(self, *args, **kwargs) -> None: - super().__init__(*args, 
**kwargs) - - @deprecated( - "0.11.3", - "3.0.0", - __version__, - "Use `close` method from `Client` in the httpx package instead", - ) - def aclose(self) -> None: - self.close() - - -class StorageException(Exception): - """Error raised when an operation on the storage API fails.""" diff --git a/src/storage/src/storage3/vectors.py b/src/storage/src/storage3/vectors.py new file mode 100644 index 00000000..e78a8b6f --- /dev/null +++ b/src/storage/src/storage3/vectors.py @@ -0,0 +1,297 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Generic, List + +from supabase_utils.http.headers import Headers +from supabase_utils.http.io import ( + HttpIO, + HttpMethod, + handle_http_io, +) +from supabase_utils.http.request import JSONRequest +from supabase_utils.types import JSON +from yarl import URL + +from .exceptions import VectorBucketException, parse_api_error, validate_model +from .types import ( + DistanceMetric, + GetVectorBucketResponse, + GetVectorIndexResponse, + GetVectorsResponse, + ListVectorBucketsResponse, + ListVectorIndexesResponse, + ListVectorsResponse, + MetadataConfiguration, + QueryVectorsResponse, + VectorData, + VectorFilter, + VectorObject, +) + + +# used to not send non-required values as `null` +# for they cannot be null +def remove_none(**kwargs: JSON) -> JSON: + return {key: val for key, val in kwargs.items() if val is not None} + + +@dataclass +class VectorBucketScope(Generic[HttpIO]): + base_url: URL + default_headers: Headers + bucket_name: str + executor: HttpIO + + def with_metadata(self, **data: JSON) -> JSON: + return remove_none(vectorBucketName=self.bucket_name, **data) + + @handle_http_io + def create_index( + self, + index_name: str, + dimension: int, + distance_metric: DistanceMetric, + data_type: str, + metadata: MetadataConfiguration | None = None, + ) -> HttpMethod[None]: + body = self.with_metadata( + indexName=index_name, + dimension=dimension, + distanceMetric=distance_metric, + 
dataType=data_type, + metadataConfiguration=metadata.model_dump(by_alias=True) + if metadata + else None, + ) + response = yield JSONRequest( + method="POST", + path=["CreateIndex"], + body=body, + ) + if not response.is_success: + raise parse_api_error(response) + + @handle_http_io + def get_index(self, index_name: str) -> HttpMethod[GetVectorIndexResponse | None]: + body = self.with_metadata(indexName=index_name) + response = yield JSONRequest( + method="POST", + path=["GetIndex"], + body=body, + ) + if response.is_success: + return GetVectorIndexResponse.model_validate_json(response.content) + elif 400 <= response.status <= 401: + return None + else: + raise parse_api_error(response) + + @handle_http_io + def list_indexes( + self, + next_token: str | None = None, + max_results: int | None = None, + prefix: str | None = None, + ) -> HttpMethod[ListVectorIndexesResponse]: + body = self.with_metadata( + next_token=next_token, max_results=max_results, prefix=prefix + ) + response = yield JSONRequest( + method="POST", + path=["ListIndexes"], + body=body, + ) + return validate_model(response, ListVectorIndexesResponse) + + @handle_http_io + def delete_index(self, index_name: str) -> HttpMethod[None]: + body = self.with_metadata(indexName=index_name) + response = yield JSONRequest(method="POST", path=["DeleteIndex"], body=body) + if not response.is_success: + raise parse_api_error(response) + + def index(self, index_name: str) -> VectorIndexScope[HttpIO]: + return VectorIndexScope( + bucket_name=self.bucket_name, + index_name=index_name, + base_url=self.base_url, + executor=self.executor, + default_headers=self.default_headers, + ) + + +@dataclass +class VectorIndexScope(Generic[HttpIO]): + executor: HttpIO + bucket_name: str + index_name: str + default_headers: Headers + base_url: URL + + def with_metadata(self, **data: JSON) -> JSON: + return remove_none( + vectorBucketName=self.bucket_name, + indexName=self.index_name, + **data, + ) + + @handle_http_io + def 
put(self, vectors: List[VectorObject]) -> HttpMethod[None]: + body = self.with_metadata( + vectors=[v.model_dump(exclude_none=True) for v in vectors] + ) + response = yield JSONRequest( + method="POST", + path=["PutVectors"], + body=body, + ) + if not response.is_success: + raise parse_api_error(response) + + @handle_http_io + def get( + self, *keys: str, return_data: bool = True, return_metadata: bool = True + ) -> HttpMethod[GetVectorsResponse]: + body = self.with_metadata( + keys=keys, + returnData=return_data, + returnMetadata=return_metadata, + ) + response = yield JSONRequest( + method="POST", + path=["GetVectors"], + body=body, + ) + return validate_model(response, GetVectorsResponse) + + @handle_http_io + def list( + self, + max_results: int | None = None, + next_token: str | None = None, + return_data: bool = True, + return_metadata: bool = True, + segment_count: int | None = None, + segment_index: int | None = None, + ) -> HttpMethod[ListVectorsResponse]: + body = self.with_metadata( + maxResults=max_results, + nextToken=next_token, + returnData=return_data, + returnMetadata=return_metadata, + segmentCount=segment_count, + segmentIndex=segment_index, + ) + response = yield JSONRequest( + method="POST", + path=["ListVectors"], + body=body, + ) + return validate_model(response, ListVectorsResponse) + + @handle_http_io + def query( + self, + query_vector: VectorData, + topK: int | None = None, + filter: VectorFilter | None = None, + return_distance: bool = True, + return_metadata: bool = True, + ) -> HttpMethod[QueryVectorsResponse]: + body = self.with_metadata( + queryVector=dict(query_vector), + topK=topK, + filter=filter, + returnDistance=return_distance, + returnMetadata=return_metadata, + ) + response = yield JSONRequest( + method="POST", + path=["QueryVectors"], + body=body, + ) + return validate_model(response, QueryVectorsResponse) + + @handle_http_io + def delete(self, keys: List[str]) -> HttpMethod[None]: + if len(keys) < 1 or len(keys) > 500: + 
raise VectorBucketException("Keys batch size must be between 1 and 500.") + body = self.with_metadata(keys=keys) + response = yield JSONRequest( + method="POST", + path=["DeleteVectors"], + body=body, + ) + if not response.is_success: + raise parse_api_error(response) + + +@dataclass +class StorageVectorsClient(Generic[HttpIO]): + base_url: URL + default_headers: Headers + executor: HttpIO + + def from_(self, bucket_name: str) -> VectorBucketScope[HttpIO]: + return VectorBucketScope( + bucket_name=bucket_name, + base_url=self.base_url, + executor=self.executor, + default_headers=self.default_headers, + ) + + @handle_http_io + def create_bucket(self, bucket_name: str) -> HttpMethod[None]: + body = {"vectorBucketName": bucket_name} + response = yield JSONRequest( + method="POST", + path=["CreateVectorBucket"], + body=body, + ) + if not response.is_success: + raise parse_api_error(response) + + @handle_http_io + def get_bucket( + self, bucket_name: str + ) -> HttpMethod[GetVectorBucketResponse | None]: + body = {"vectorBucketName": bucket_name} + response = yield JSONRequest( + method="POST", + path=["GetVectorBucket"], + body=body, + ) + if response.is_success: + return GetVectorBucketResponse.model_validate_json(response.content) + elif 400 <= response.status <= 401: + return None + else: + raise parse_api_error(response) + + @handle_http_io + def list_buckets( + self, + prefix: str | None = None, + max_results: int | None = None, + next_token: str | None = None, + ) -> HttpMethod[ListVectorBucketsResponse]: + body = {"prefix": prefix, "maxResults": max_results, "nextToken": next_token} + response = yield JSONRequest( + method="POST", + path=["ListVectorBuckets"], + body=body, + exclude_none=True, + ) + return validate_model(response, ListVectorBucketsResponse) + + @handle_http_io + def delete_bucket(self, bucket_name: str) -> HttpMethod[None]: + body = {"vectorBucketName": bucket_name} + response = yield JSONRequest( + method="POST", + path=["DeleteVectorBucket"], 
+ body=body, + ) + if not response.is_success: + raise parse_api_error(response) diff --git a/src/storage/tests/__init__.py b/src/storage/tests/__init__.py index bb6bf309..e69de29b 100644 --- a/src/storage/tests/__init__.py +++ b/src/storage/tests/__init__.py @@ -1,2 +0,0 @@ -from storage3._async.file_api import AsyncBucketProxy as AsyncBucketProxy -from storage3._sync.file_api import SyncBucketProxy as SyncBucketProxy diff --git a/src/storage/tests/_async/conftest.py b/src/storage/tests/_async/conftest.py index 1b8b6bc6..149c9c01 100644 --- a/src/storage/tests/_async/conftest.py +++ b/src/storage/tests/_async/conftest.py @@ -4,26 +4,39 @@ from collections.abc import AsyncGenerator import pytest +from aiohttp import ClientSession from dotenv import load_dotenv +from httpx import AsyncClient +from supabase_utils.http.adapters.aiohttp import AsyncAiohttpSession +from supabase_utils.http.adapters.httpx import AsyncHttpxSession + from storage3 import AsyncStorageClient -def pytest_configure(config) -> None: +def pytest_configure(config: pytest.Config) -> None: load_dotenv(dotenv_path="tests/tests.env") -@pytest.fixture -async def storage() -> AsyncGenerator[AsyncStorageClient]: +def httpx() -> AsyncHttpxSession: + return AsyncHttpxSession(client=AsyncClient(http2=True, verify=True)) + + +def aiohttp() -> AsyncAiohttpSession: + return AsyncAiohttpSession(client=ClientSession()) + + +@pytest.fixture(params=[httpx, aiohttp]) +async def storage(request: pytest.FixtureRequest) -> AsyncGenerator[AsyncStorageClient]: url = os.environ.get("SUPABASE_TEST_URL") assert url is not None, "Must provide SUPABASE_TEST_URL environment variable" key = os.environ.get("SUPABASE_TEST_KEY") assert key is not None, "Must provide SUPABASE_TEST_KEY environment variable" async with AsyncStorageClient( url, - { + headers={ "apiKey": key, "Authorization": f"Bearer {key}", }, + http_session=request.param(), ) as client: - client.session.timeout = None yield client diff --git 
a/src/storage/tests/_async/test_bucket.py b/src/storage/tests/_async/test_bucket.py deleted file mode 100644 index 295c938e..00000000 --- a/src/storage/tests/_async/test_bucket.py +++ /dev/null @@ -1,217 +0,0 @@ -from unittest.mock import AsyncMock, Mock - -import pytest -from httpx import AsyncClient, Headers, HTTPStatusError, Response -from storage3 import AsyncBucket, AsyncStorageBucketAPI -from storage3.exceptions import StorageApiError -from storage3.types import CreateOrUpdateBucketOptions - - -@pytest.fixture -def mock_client() -> AsyncMock: - return AsyncMock() - - -@pytest.fixture -def headers() -> Headers: - return Headers() - - -@pytest.fixture -def storage_api(mock_client: AsyncClient, headers: Headers) -> AsyncStorageBucketAPI: - return AsyncStorageBucketAPI(mock_client, "", headers) - - -@pytest.fixture -def mock_response() -> Mock: - response = Mock(spec=Response) - response.raise_for_status = Mock() - return response - - -async def test_list_buckets(storage_api, mock_client, mock_response) -> None: - # Mock response data - mock_response.json.return_value = [ - { - "id": "bucket1", - "name": "Bucket 1", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - { - "id": "bucket2", - "name": "Bucket 2", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - ] - mock_client.request.return_value = mock_response - - buckets = await storage_api.list_buckets() - - assert len(buckets) == 2 - assert all(isinstance(bucket, AsyncBucket) for bucket in buckets) - assert buckets[0].id == "bucket1" - assert buckets[1].id == "bucket2" - - mock_client.request.assert_called_once_with("GET", "bucket", json=None, headers={}) - - -async def test_get_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "test-bucket" 
- mock_response.json.return_value = { - "id": bucket_id, - "name": "Test Bucket", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - } - mock_client.request.return_value = mock_response - - bucket = await storage_api.get_bucket(bucket_id) - - assert isinstance(bucket, AsyncBucket) - assert bucket.id == bucket_id - assert bucket.name == "Test Bucket" - assert bucket.public is True - assert bucket.owner == "test-owner" - - mock_client.request.assert_called_once_with( - "GET", f"bucket/{bucket_id}", json=None, headers={} - ) - - -async def test_create_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "new-bucket" - bucket_name = "New Bucket" - options = CreateOrUpdateBucketOptions( - public=True, file_size_limit=1000000, allowed_mime_types=["image/*"] - ) - - mock_response.json.return_value = {"message": "Bucket created successfully"} - mock_client.request.return_value = mock_response - - result = await storage_api.create_bucket(bucket_id, bucket_name, options) - - assert result == {"message": "Bucket created successfully"} - mock_client.request.assert_called_once_with( - "POST", - "bucket", - json={ - "id": bucket_id, - "name": bucket_name, - "public": True, - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - headers={}, - ) - - -async def test_create_bucket_minimal(storage_api, mock_client, mock_response) -> None: - bucket_id = "minimal-bucket" - mock_response.json.return_value = {"message": "Bucket created successfully"} - mock_client.request.return_value = mock_response - - result = await storage_api.create_bucket(bucket_id) - - assert result == {"message": "Bucket created successfully"} - mock_client.request.assert_called_once_with( - "POST", "bucket", json={"id": bucket_id, "name": bucket_id}, headers={} - ) - - -async def test_update_bucket(storage_api, mock_client, mock_response) -> None: - 
bucket_id = "update-bucket" - options = CreateOrUpdateBucketOptions(public=False, file_size_limit=2000000) - - mock_response.json.return_value = {"message": "Bucket updated successfully"} - mock_client.request.return_value = mock_response - - result = await storage_api.update_bucket(bucket_id, options) - - assert result == {"message": "Bucket updated successfully"} - mock_client.request.assert_called_once_with( - "PUT", - f"bucket/{bucket_id}", - json={ - "id": bucket_id, - "name": bucket_id, - "public": False, - "file_size_limit": 2000000, - }, - headers={}, - ) - - -async def test_empty_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "empty-bucket" - mock_response.json.return_value = {"message": "Bucket emptied successfully"} - mock_client.request.return_value = mock_response - - result = await storage_api.empty_bucket(bucket_id) - - assert result == {"message": "Bucket emptied successfully"} - mock_client.request.assert_called_once_with( - "POST", f"bucket/{bucket_id}/empty", json={}, headers={} - ) - - -async def test_delete_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "delete-bucket" - mock_response.json.return_value = {"message": "Bucket deleted successfully"} - mock_client.request.return_value = mock_response - - result = await storage_api.delete_bucket(bucket_id) - - assert result == {"message": "Bucket deleted successfully"} - mock_client.request.assert_called_once_with( - "DELETE", f"bucket/{bucket_id}", json={}, headers={} - ) - - -async def test_request_error_handling(storage_api, mock_client) -> None: - error_response = Mock(spec=Response) - error_response.json.return_value = { - "message": "Test error message", - "error": "Test error", - "statusCode": 400, - } - - exc = HTTPStatusError("HTTP Error", request=Mock(), response=error_response) - mock_client.request.side_effect = exc - - with pytest.raises(StorageApiError) as exc_info: - await storage_api._request("GET", ["test"]) - - assert 
exc_info.value.message == "Test error message" - - -@pytest.mark.parametrize( - "method,path,json_data", - [ - ("GET", "test", None), - ("POST", "test", {"key": "value"}), - ("PUT", "test", {"id": "123"}), - ("DELETE", "test", {}), - ], -) -async def test_request_methods( - storage_api, mock_client, mock_response, method, path, json_data -) -> None: - mock_client.request.return_value = mock_response - await storage_api._request(method, [path], json_data) - mock_client.request.assert_called_once_with( - method, path, json=json_data, headers={} - ) diff --git a/src/storage/tests/_async/test_client.py b/src/storage/tests/_async/test_client.py index b1d7d069..e99ec611 100644 --- a/src/storage/tests/_async/test_client.py +++ b/src/storage/tests/_async/test_client.py @@ -8,12 +8,13 @@ import pytest from httpx import AsyncClient as HttpxClient -from httpx import HTTPStatusError, Response -from storage3 import AsyncStorageClient -from storage3.exceptions import StorageApiError -from storage3.utils import StorageException +from supabase_utils.http.io import AsyncHttpIO +from supabase_utils.http.request import Response + +from storage3 import AsyncStorageClient, StorageFileApiClient +from storage3.exceptions import StorageApiError, StorageException +from storage3.types import TransformOptions -from .. 
import AsyncBucketProxy from ..utils import AsyncFinalizerFactory if TYPE_CHECKING: @@ -35,7 +36,7 @@ def method() -> str: @pytest.fixture -async def delete_left_buckets( +def delete_left_buckets( request: pytest.FixtureRequest, storage: AsyncStorageClient, ) -> None: @@ -88,7 +89,7 @@ async def public_bucket( global temp_test_buckets_ids temp_test_buckets_ids.append(bucket_id) - await storage.create_bucket(id=bucket_id, options={"public": True}) + await storage.create_bucket(id=bucket_id, public=True) yield bucket_id @@ -101,7 +102,7 @@ async def public_bucket( @pytest.fixture def storage_file_client( storage: AsyncStorageClient, bucket: str -) -> Generator[AsyncBucketProxy]: +) -> Generator[StorageFileApiClient[AsyncHttpIO]]: """Creates the storage file client for the whole storage tests run""" yield storage.from_(bucket) @@ -109,7 +110,7 @@ def storage_file_client( @pytest.fixture def storage_file_client_public( storage: AsyncStorageClient, public_bucket: str -) -> Generator[AsyncBucketProxy]: +) -> Generator[StorageFileApiClient[AsyncHttpIO]]: """Creates the storage file client for the whole storage tests run""" yield storage.from_(public_bucket) @@ -267,126 +268,129 @@ def multi_file(tmp_path: Path, uuid_factory: Callable[[], str]) -> list[FileForT async def test_client_upload( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + path=file.bucket_path, file=file.local_path, content_type=file.mime_type ) image = await storage_file_client.download(file.bucket_path) files = await storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info 
is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type async def test_client_upload_with_query( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket, even with query parameters""" await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) image = await storage_file_client.download( file.bucket_path, query_params={"my-param": "test"} ) files = await storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type async def test_client_download_with_query_doesnt_lose_params( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure query params aren't lost""" from yarl import URL params = {"my-param": "test"} mock_response = Mock() - with patch.object(HttpxClient, "request") as mock_request: + mock_response.headers = {} + mock_response.status_code = 200 + with patch.object(storage_file_client.executor.session, "send") as mock_request: mock_request.return_value = mock_response await storage_file_client.download(file.bucket_path, query_params=params) - expected_url = storage_file_client._base_url.joinpath( + expected_url = storage_file_client.base_url.joinpath( "object", storage_file_client.id, *URL(file.bucket_path).parts ).with_query(params) - actual_url = mock_request.call_args[0][1] + + 
(actual_request,) = mock_request.call_args[0] + actual_url = str(actual_request.url) assert URL(actual_url).query == params assert str(expected_url) == actual_url async def test_client_update( - storage_file_client: AsyncBucketProxy, + storage_file_client: StorageFileApiClient[AsyncHttpIO], two_files: list[FileForTesting], ) -> None: """Ensure we can upload files to a bucket""" await storage_file_client.upload( two_files[0].bucket_path, two_files[0].local_path, - {"content-type": two_files[0].mime_type}, + content_type=two_files[0].mime_type, ) await storage_file_client.update( two_files[0].bucket_path, two_files[1].local_path, - {"content-type": two_files[1].mime_type}, + content_type=two_files[1].mime_type, ) image = await storage_file_client.download(two_files[0].bucket_path) file_list = await storage_file_client.list(two_files[0].bucket_folder) - image_info = next( - (f for f in file_list if f.get("name") == two_files[0].name), None - ) + image_info = next((f for f in file_list if f.name == two_files[0].name), None) assert image == two_files[1].file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == two_files[1].mime_type + assert image_info.metadata.get("mimetype") == two_files[1].mime_type @pytest.mark.parametrize( "path", ["foobar.txt", "example/nested.jpg", "/leading/slash.png"] ) async def test_client_create_signed_upload_url( - storage_file_client: AsyncBucketProxy, path: str + storage_file_client: StorageFileApiClient[AsyncHttpIO], path: str ) -> None: """Ensure we can create signed URLs to upload files to a bucket""" data = await storage_file_client.create_signed_upload_url(path) - assert data["path"] == path - assert data["token"] - expected_url = f"{storage_file_client._base_url}object/upload/sign/{storage_file_client.id}/{path.lstrip('/')}" - assert data["signed_url"].startswith(expected_url) + expected_url = storage_file_client.base_url.joinpath( + "object", "upload", "sign", storage_file_client.id, 
*path.lstrip("/").split("/") + ) + assert data.signed_url.startswith(str(expected_url)) async def test_client_upload_to_signed_url( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload to a signed URL with various options""" # Test with content-type data = await storage_file_client.create_signed_upload_url(file.bucket_path) + await storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content, {"content-type": file.mime_type} + file.bucket_path, data.token, file.file_content, content_type=file.mime_type ) image = await storage_file_client.download(file.bucket_path) files = await storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type # Test with file_options=None data = await storage_file_client.create_signed_upload_url( f"no_options_{file.bucket_path}" ) await storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content + f"no_options_{file.bucket_path}", token=data.token, file=file.file_content ) image = await storage_file_client.download(f"no_options_{file.bucket_path}") assert image == file.file_content @@ -396,33 +400,36 @@ async def test_client_upload_to_signed_url( f"cached_{file.bucket_path}" ) await storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content, {"cache-control": "3600"} + f"cached_{file.bucket_path}", + token=data.token, + file=file.file_content, + cache_control="7200", ) cached_info = await storage_file_client.info(f"cached_{file.bucket_path}") - assert cached_info.get("cache_control") == "max-age=3600" 
+ assert cached_info.cache_control == "max-age=7200" async def test_client_create_signed_url( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can create and use signed URLs with various options""" await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Test basic signed URL signed_url = await storage_file_client.create_signed_url(file.bucket_path, 60) async with HttpxClient(timeout=None) as client: - response = await client.get(signed_url["signedURL"]) + response = await client.get(signed_url) response.raise_for_status() assert response.content == file.file_content # Test with download option download_signed_url = await storage_file_client.create_signed_url( - file.bucket_path, 60, options={"download": "custom_download.svg"} + file.bucket_path, 60, download="custom_download.svg" ) async with HttpxClient(timeout=None) as client: - response = await client.get(download_signed_url["signedURL"]) + response = await client.get(download_signed_url) response.raise_for_status() assert ( @@ -435,55 +442,57 @@ async def test_client_create_signed_url( transform_signed_url = await storage_file_client.create_signed_url( file.bucket_path, 60, - options={"transform": {"width": 200, "height": 200, "resize": "cover"}}, + transform=TransformOptions(width=200, height=200, resize="cover"), ) # assert "width=200" in transform_signed_url["signedURL"] # assert "height=200" in transform_signed_url["signedURL"] # assert "resize=cover" in transform_signed_url["signedURL"] # assert "format=png" in transform_signed_url["signedURL"] async with HttpxClient(timeout=None) as client: - response = await client.get(transform_signed_url["signedURL"]) + response = await client.get(transform_signed_url) response.raise_for_status() async def test_client_create_signed_urls( - 
storage_file_client: AsyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[AsyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can create signed urls for files in a bucket""" paths = [] for file in multi_file: paths.append(file.bucket_path) await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) signed_urls = await storage_file_client.create_signed_urls(paths, 10) async with HttpxClient() as client: for url in signed_urls: - response = await client.get(url["signedURL"]) + response = await client.get(url.signed_url) response.raise_for_status() assert response.content == multi_file[0].file_content async def test_client_get_public_url( - storage_file_client_public: AsyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[AsyncHttpIO], + file: FileForTesting, ) -> None: """Ensure we can get the public url of a file in a bucket with various options""" await storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Test basic public URL - public_url = await storage_file_client_public.get_public_url(file.bucket_path) + public_url = storage_file_client_public.get_public_url(file.bucket_path) async with HttpxClient(timeout=None) as client: response = await client.get(public_url) response.raise_for_status() assert response.content == file.file_content # Test with download option - download_url = await storage_file_client_public.get_public_url( - file.bucket_path, options={"download": "custom_name.svg"} + download_url = storage_file_client_public.get_public_url( + file.bucket_path, download="custom_name.svg" ) async with HttpxClient(timeout=None) as client: response = await client.get(download_url) @@ -495,9 +504,9 @@ async def test_client_get_public_url( 
assert response.content == file.file_content # Test with transform options - transform_url = await storage_file_client_public.get_public_url( + transform_url = storage_file_client_public.get_public_url( file.bucket_path, - options={"transform": {"width": 100, "height": 100, "resize": "contain"}}, + transform=TransformOptions(width=100, height=100, resize="contain"), ) assert "width=100" in transform_url assert "height=100" in transform_url @@ -505,22 +514,20 @@ async def test_client_get_public_url( async def test_client_upload_with_custom_metadata( - storage_file_client_public: AsyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[AsyncHttpIO], + file: FileForTesting, ) -> None: """Ensure we can get the public url of a file in a bucket""" await storage_file_client_public.upload( file.bucket_path, file.local_path, - { - "content-type": file.mime_type, - "metadata": {"custom": "metadata", "second": "second", "third": "third"}, - }, + content_type=file.mime_type, + metadata={"custom": "metadata", "second": "second", "third": "third"}, ) info = await storage_file_client_public.info(file.bucket_path) - assert "metadata" in info.keys() - assert info["name"] == file.bucket_path - assert info["metadata"] == { + assert info.name == file.bucket_path + assert info.metadata == { "custom": "metadata", "second": "second", "third": "third", @@ -528,60 +535,49 @@ async def test_client_upload_with_custom_metadata( async def test_client_info( - storage_file_client_public: AsyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[AsyncHttpIO], + file: FileForTesting, ) -> None: """Ensure we can get the public url of a file in a bucket""" await storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) info = await storage_file_client_public.info(file.bucket_path) - assert "metadata" in info.keys() - assert 
info["name"] == file.bucket_path - assert info["content_type"] == file.mime_type + assert info.name == file.bucket_path + assert info.content_type == file.mime_type async def test_client_info_with_error( - storage_file_client_public: AsyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[AsyncHttpIO], + file: FileForTesting, ) -> None: """Ensure we can get the public url of a file in a bucket""" await storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) """Ensure StorageException is raised when signed URL creation fails""" mock_error_response = Mock(spec=Response) - mock_error_response.status_code = 404 - mock_error_response.json.return_value = { - "error": "Custom error message", - "statusCode": 404, - "message": "File not found", - } - - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"error": "Custom error message"} - mock_response.raise_for_status.side_effect = HTTPStatusError( - "HTTP Error", request=Mock(), response=mock_error_response - ) + mock_error_response.status = 404 + mock_error_response.is_success = False + mock_error_response.content = b'{"error": "Custom error message", "statusCode": 404, "message": "File not found"}' with patch.object( - storage_file_client_public._client, "request", new_callable=AsyncMock + storage_file_client_public.executor.session, "send", new_callable=AsyncMock ) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises( - StorageApiError, - match="{'statusCode': 404, 'error': Custom error message, 'message': File not found}", - ): + mock_request.return_value = mock_error_response + with pytest.raises(StorageApiError): await storage_file_client_public.info(file.bucket_path) async def test_client_exists( - storage_file_client_public: AsyncBucketProxy, file: FileForTesting + storage_file_client_public: 
StorageFileApiClient[AsyncHttpIO], + file: FileForTesting, ) -> None: """Ensure we can get the public url of a file in a bucket""" await storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) exists = await storage_file_client_public.exists(file.bucket_path) @@ -589,28 +585,13 @@ async def test_client_exists( assert exists -async def test_client_exists_json_decode_error( - storage_file_client_public: AsyncBucketProxy, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test exists method handling of json.JSONDecodeError""" - from json import JSONDecodeError - - async def mock_head(*args, **kwargs) -> None: - raise JSONDecodeError("Expecting value", "", 0) - - monkeypatch.setattr(storage_file_client_public._client, "head", mock_head) - exists = await storage_file_client_public.exists("some/path") - assert exists is False - - async def test_client_copy( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can copy files within a bucket""" # Upload original file await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Copy to new path @@ -624,20 +605,18 @@ async def test_client_copy( # Verify metadata was copied files = await storage_file_client.list(file.bucket_folder) - copied_info = next( - (f for f in files if f.get("name") == f"copied_{file.name}"), None - ) + copied_info = next((f for f in files if f.name == f"copied_{file.name}"), None) assert copied_info is not None - assert copied_info.get("metadata", {}).get("mimetype") == file.mime_type + assert copied_info.metadata.get("mimetype") == file.mime_type async def test_client_move( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: 
StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can move files within a bucket""" # Upload original file await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Move to new path @@ -653,18 +632,18 @@ async def test_client_move( # Verify metadata was preserved files = await storage_file_client.list(file.bucket_folder) - moved_info = next((f for f in files if f.get("name") == f"moved_{file.name}"), None) + moved_info = next((f for f in files if f.name == f"moved_{file.name}"), None) assert moved_info is not None - assert moved_info.get("metadata", {}).get("mimetype") == file.mime_type + assert moved_info.metadata.get("mimetype") == file.mime_type async def test_client_remove( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can remove files from a bucket""" # Upload file await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Verify file exists @@ -678,14 +657,15 @@ async def test_client_remove( async def test_client_remove_multiple( - storage_file_client: AsyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[AsyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can remove multiple files from a bucket""" # Upload files paths = [] for file in multi_file: await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) paths.append(file.bucket_path) @@ -701,50 +681,33 @@ async def test_client_remove_multiple( assert not await storage_file_client.exists(path) -async def test_client_create_signed_upload_url_error( - 
storage_file_client: AsyncBucketProxy, -) -> None: - """Ensure StorageException is raised when signed URL creation fails""" - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"url": "https://example.com/test.txt"} - - with patch.object( - storage_file_client._client, "request", new_callable=AsyncMock - ) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(StorageException, match="No token sent by the API"): - await storage_file_client.create_signed_upload_url("test.txt") - - async def test_client_create_signed_urls_with_download( - storage_file_client: AsyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[AsyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can create signed urls with download options for files in a bucket""" paths = [] for file in multi_file: paths.append(file.bucket_path) await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) - signed_urls = await storage_file_client.create_signed_urls( - paths, 10, options={"download": True} - ) + signed_urls = await storage_file_client.create_signed_urls(paths, 10, download=True) async with HttpxClient() as client: for i, url in enumerate(signed_urls): - response = await client.get(url["signedURL"]) + response = await client.get(url.signed_url) response.raise_for_status() assert response.content == multi_file[i].file_content async def test_client_list_v2( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) result = await storage_file_client.list_v2() @@ -758,14 +721,14 @@ 
async def test_client_list_v2( async def test_client_list_v2_folder( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" await storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) - result = await storage_file_client.list_v2({"with_delimiter": True}) + result = await storage_file_client.list_v2(with_delimiter=True) assert not result.hasNext assert len(result.objects) == 0 @@ -775,13 +738,13 @@ async def test_client_list_v2_folder( async def test_client_list_v2_paginated( - storage_file_client: AsyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[AsyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" suffixes = ["zz", "bb", "xx", "ww", "cc", "aa", "yy", "oo"] for suffix in suffixes: await storage_file_client.upload( - file.bucket_path + suffix, file.local_path, {"content-type": file.mime_type} + file.bucket_path + suffix, file.local_path, content_type=file.mime_type ) has_next = True @@ -789,12 +752,10 @@ async def test_client_list_v2_paginated( pages = 0 while has_next: result = await storage_file_client.list_v2( - { - "with_delimiter": True, - "prefix": f"{file.bucket_folder}/", - "limit": 2, - "cursor": cursor, - } + with_delimiter=True, + prefix=f"{file.bucket_folder}/", + limit=2, + cursor=cursor, ) has_next = result.hasNext cursor = result.nextCursor or "" diff --git a/src/storage/tests/_sync/conftest.py b/src/storage/tests/_sync/conftest.py index 598159ef..f3077fde 100644 --- a/src/storage/tests/_sync/conftest.py +++ b/src/storage/tests/_sync/conftest.py @@ -5,15 +5,22 @@ import pytest from dotenv import load_dotenv +from httpx import Client +from supabase_utils.http.adapters.httpx import HttpxSession + from storage3 import SyncStorageClient 
-def pytest_configure(config) -> None: +def pytest_configure(config: pytest.Config) -> None: load_dotenv(dotenv_path="tests/tests.env") -@pytest.fixture -def storage() -> Generator[SyncStorageClient]: +def httpx() -> HttpxSession: + return HttpxSession(client=Client(http2=True, verify=True)) + + +@pytest.fixture(params=[httpx]) +def storage(request: pytest.FixtureRequest) -> Generator[SyncStorageClient]: url = os.environ.get("SUPABASE_TEST_URL") assert url is not None, "Must provide SUPABASE_TEST_URL environment variable" key = os.environ.get("SUPABASE_TEST_KEY") @@ -24,6 +31,6 @@ def storage() -> Generator[SyncStorageClient]: "apiKey": key, "Authorization": f"Bearer {key}", }, + http_session=request.param(), ) as client: - client.session.timeout = None yield client diff --git a/src/storage/tests/_sync/test_bucket.py b/src/storage/tests/_sync/test_bucket.py deleted file mode 100644 index 45a81724..00000000 --- a/src/storage/tests/_sync/test_bucket.py +++ /dev/null @@ -1,217 +0,0 @@ -from unittest.mock import Mock - -import pytest -from httpx import Client, Headers, HTTPStatusError, Response -from storage3 import SyncBucket, SyncStorageBucketAPI -from storage3.exceptions import StorageApiError -from storage3.types import CreateOrUpdateBucketOptions - - -@pytest.fixture -def mock_client() -> Mock: - return Mock() - - -@pytest.fixture -def headers() -> Headers: - return Headers() - - -@pytest.fixture -def storage_api(mock_client: Client, headers: Headers) -> SyncStorageBucketAPI: - return SyncStorageBucketAPI(mock_client, "", headers) - - -@pytest.fixture -def mock_response() -> Mock: - response = Mock(spec=Response) - response.raise_for_status = Mock() - return response - - -def test_list_buckets(storage_api, mock_client, mock_response) -> None: - # Mock response data - mock_response.json.return_value = [ - { - "id": "bucket1", - "name": "Bucket 1", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - 
"file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - { - "id": "bucket2", - "name": "Bucket 2", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - ] - mock_client.request.return_value = mock_response - - buckets = storage_api.list_buckets() - - assert len(buckets) == 2 - assert all(isinstance(bucket, SyncBucket) for bucket in buckets) - assert buckets[0].id == "bucket1" - assert buckets[1].id == "bucket2" - - mock_client.request.assert_called_once_with("GET", "bucket", json=None, headers={}) - - -def test_get_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "test-bucket" - mock_response.json.return_value = { - "id": bucket_id, - "name": "Test Bucket", - "public": True, - "owner": "test-owner", - "created_at": "2024-01-01", - "updated_at": "2024-01-01", - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - } - mock_client.request.return_value = mock_response - - bucket = storage_api.get_bucket(bucket_id) - - assert isinstance(bucket, SyncBucket) - assert bucket.id == bucket_id - assert bucket.name == "Test Bucket" - assert bucket.public is True - assert bucket.owner == "test-owner" - - mock_client.request.assert_called_once_with( - "GET", f"bucket/{bucket_id}", json=None, headers={} - ) - - -def test_create_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "new-bucket" - bucket_name = "New Bucket" - options = CreateOrUpdateBucketOptions( - public=True, file_size_limit=1000000, allowed_mime_types=["image/*"] - ) - - mock_response.json.return_value = {"message": "Bucket created successfully"} - mock_client.request.return_value = mock_response - - result = storage_api.create_bucket(bucket_id, bucket_name, options) - - assert result == {"message": "Bucket created successfully"} - mock_client.request.assert_called_once_with( - "POST", - "bucket", - json={ - "id": 
bucket_id, - "name": bucket_name, - "public": True, - "file_size_limit": 1000000, - "allowed_mime_types": ["image/*"], - }, - headers={}, - ) - - -def test_create_bucket_minimal(storage_api, mock_client, mock_response) -> None: - bucket_id = "minimal-bucket" - mock_response.json.return_value = {"message": "Bucket created successfully"} - mock_client.request.return_value = mock_response - - result = storage_api.create_bucket(bucket_id) - - assert result == {"message": "Bucket created successfully"} - mock_client.request.assert_called_once_with( - "POST", "bucket", json={"id": bucket_id, "name": bucket_id}, headers={} - ) - - -def test_update_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "update-bucket" - options = CreateOrUpdateBucketOptions(public=False, file_size_limit=2000000) - - mock_response.json.return_value = {"message": "Bucket updated successfully"} - mock_client.request.return_value = mock_response - - result = storage_api.update_bucket(bucket_id, options) - - assert result == {"message": "Bucket updated successfully"} - mock_client.request.assert_called_once_with( - "PUT", - f"bucket/{bucket_id}", - json={ - "id": bucket_id, - "name": bucket_id, - "public": False, - "file_size_limit": 2000000, - }, - headers={}, - ) - - -def test_empty_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "empty-bucket" - mock_response.json.return_value = {"message": "Bucket emptied successfully"} - mock_client.request.return_value = mock_response - - result = storage_api.empty_bucket(bucket_id) - - assert result == {"message": "Bucket emptied successfully"} - mock_client.request.assert_called_once_with( - "POST", f"bucket/{bucket_id}/empty", json={}, headers={} - ) - - -def test_delete_bucket(storage_api, mock_client, mock_response) -> None: - bucket_id = "delete-bucket" - mock_response.json.return_value = {"message": "Bucket deleted successfully"} - mock_client.request.return_value = mock_response - - result = 
storage_api.delete_bucket(bucket_id) - - assert result == {"message": "Bucket deleted successfully"} - mock_client.request.assert_called_once_with( - "DELETE", f"bucket/{bucket_id}", json={}, headers={} - ) - - -def test_request_error_handling(storage_api, mock_client) -> None: - error_response = Mock(spec=Response) - error_response.json.return_value = { - "message": "Test error message", - "error": "Test error", - "statusCode": 400, - } - - exc = HTTPStatusError("HTTP Error", request=Mock(), response=error_response) - mock_client.request.side_effect = exc - - with pytest.raises(StorageApiError) as exc_info: - storage_api._request("GET", ["test"]) - - assert exc_info.value.message == "Test error message" - - -@pytest.mark.parametrize( - "method,path,json_data", - [ - ("GET", "test", None), - ("POST", "test", {"key": "value"}), - ("PUT", "test", {"id": "123"}), - ("DELETE", "test", {}), - ], -) -def test_request_methods( - storage_api, mock_client, mock_response, method, path, json_data -) -> None: - mock_client.request.return_value = mock_response - storage_api._request(method, [path], json_data) - mock_client.request.assert_called_once_with( - method, path, json=json_data, headers={} - ) diff --git a/src/storage/tests/_sync/test_client.py b/src/storage/tests/_sync/test_client.py index 2e701857..800ec19d 100644 --- a/src/storage/tests/_sync/test_client.py +++ b/src/storage/tests/_sync/test_client.py @@ -8,12 +8,13 @@ import pytest from httpx import Client as HttpxClient -from httpx import HTTPStatusError, Response -from storage3 import SyncStorageClient -from storage3.exceptions import StorageApiError -from storage3.utils import StorageException +from httpx import Response +from supabase_utils.http.io import SyncHttpIO + +from storage3 import StorageFileApiClient, SyncStorageClient +from storage3.exceptions import StorageApiError, StorageException +from storage3.types import TransformOptions -from .. 
import SyncBucketProxy from ..utils import SyncFinalizerFactory if TYPE_CHECKING: @@ -88,7 +89,7 @@ def public_bucket( global temp_test_buckets_ids temp_test_buckets_ids.append(bucket_id) - storage.create_bucket(id=bucket_id, options={"public": True}) + storage.create_bucket(id=bucket_id, public=True) yield bucket_id @@ -101,7 +102,7 @@ def public_bucket( @pytest.fixture def storage_file_client( storage: SyncStorageClient, bucket: str -) -> Generator[SyncBucketProxy]: +) -> Generator[StorageFileApiClient[SyncHttpIO]]: """Creates the storage file client for the whole storage tests run""" yield storage.from_(bucket) @@ -109,7 +110,7 @@ def storage_file_client( @pytest.fixture def storage_file_client_public( storage: SyncStorageClient, public_bucket: str -) -> Generator[SyncBucketProxy]: +) -> Generator[StorageFileApiClient[SyncHttpIO]]: """Creates the storage file client for the whole storage tests run""" yield storage.from_(public_bucket) @@ -267,126 +268,129 @@ def multi_file(tmp_path: Path, uuid_factory: Callable[[], str]) -> list[FileForT def test_client_upload( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + path=file.bucket_path, file=file.local_path, content_type=file.mime_type ) image = storage_file_client.download(file.bucket_path) files = storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type def test_client_upload_with_query( - storage_file_client: SyncBucketProxy, file: 
FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket, even with query parameters""" storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) image = storage_file_client.download( file.bucket_path, query_params={"my-param": "test"} ) files = storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type def test_client_download_with_query_doesnt_lose_params( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure query params aren't lost""" from yarl import URL params = {"my-param": "test"} mock_response = Mock() - with patch.object(HttpxClient, "request") as mock_request: + mock_response.headers = {} + mock_response.status_code = 200 + with patch.object(HttpxClient, "send") as mock_request: mock_request.return_value = mock_response storage_file_client.download(file.bucket_path, query_params=params) - expected_url = storage_file_client._base_url.joinpath( + expected_url = storage_file_client.base_url.joinpath( "object", storage_file_client.id, *URL(file.bucket_path).parts ).with_query(params) - actual_url = mock_request.call_args[0][1] + + (actual_request,) = mock_request.call_args[0] + actual_url = str(actual_request.url) assert URL(actual_url).query == params assert str(expected_url) == actual_url def test_client_update( - storage_file_client: SyncBucketProxy, + storage_file_client: StorageFileApiClient[SyncHttpIO], two_files: 
list[FileForTesting], ) -> None: """Ensure we can upload files to a bucket""" storage_file_client.upload( two_files[0].bucket_path, two_files[0].local_path, - {"content-type": two_files[0].mime_type}, + content_type=two_files[0].mime_type, ) storage_file_client.update( two_files[0].bucket_path, two_files[1].local_path, - {"content-type": two_files[1].mime_type}, + content_type=two_files[1].mime_type, ) image = storage_file_client.download(two_files[0].bucket_path) file_list = storage_file_client.list(two_files[0].bucket_folder) - image_info = next( - (f for f in file_list if f.get("name") == two_files[0].name), None - ) + image_info = next((f for f in file_list if f.name == two_files[0].name), None) assert image == two_files[1].file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == two_files[1].mime_type + assert image_info.metadata.get("mimetype") == two_files[1].mime_type @pytest.mark.parametrize( "path", ["foobar.txt", "example/nested.jpg", "/leading/slash.png"] ) def test_client_create_signed_upload_url( - storage_file_client: SyncBucketProxy, path: str + storage_file_client: StorageFileApiClient[SyncHttpIO], path: str ) -> None: """Ensure we can create signed URLs to upload files to a bucket""" data = storage_file_client.create_signed_upload_url(path) - assert data["path"] == path - assert data["token"] - expected_url = f"{storage_file_client._base_url}object/upload/sign/{storage_file_client.id}/{path.lstrip('/')}" - assert data["signed_url"].startswith(expected_url) + expected_url = storage_file_client.base_url.joinpath( + "object", "upload", "sign", storage_file_client.id, *path.lstrip("/").split("/") + ) + assert data.signed_url.startswith(str(expected_url)) def test_client_upload_to_signed_url( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload to a signed URL with various options""" # 
Test with content-type data = storage_file_client.create_signed_upload_url(file.bucket_path) + storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content, {"content-type": file.mime_type} + file.bucket_path, data.token, file.file_content, content_type=file.mime_type ) image = storage_file_client.download(file.bucket_path) files = storage_file_client.list(file.bucket_folder) - image_info = next((f for f in files if f.get("name") == file.name), None) + image_info = next((f for f in files if f.name == file.name), None) assert image == file.file_content assert image_info is not None - assert image_info.get("metadata", {}).get("mimetype") == file.mime_type + assert image_info.metadata.get("mimetype") == file.mime_type # Test with file_options=None data = storage_file_client.create_signed_upload_url( f"no_options_{file.bucket_path}" ) storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content + f"no_options_{file.bucket_path}", data.token, file.file_content ) image = storage_file_client.download(f"no_options_{file.bucket_path}") assert image == file.file_content @@ -394,33 +398,36 @@ def test_client_upload_to_signed_url( # Test with cache-control data = storage_file_client.create_signed_upload_url(f"cached_{file.bucket_path}") storage_file_client.upload_to_signed_url( - data["path"], data["token"], file.file_content, {"cache-control": "3600"} + f"cached_{file.bucket_path}", + data.token, + file.file_content, + cache_control="3600", ) cached_info = storage_file_client.info(f"cached_{file.bucket_path}") - assert cached_info.get("cache_control") == "max-age=3600" + assert cached_info.cache_control == "max-age=3600" def test_client_create_signed_url( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can create and use signed URLs with various options""" storage_file_client.upload( - file.bucket_path, 
file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Test basic signed URL signed_url = storage_file_client.create_signed_url(file.bucket_path, 60) with HttpxClient(timeout=None) as client: - response = client.get(signed_url["signedURL"]) + response = client.get(signed_url) response.raise_for_status() assert response.content == file.file_content # Test with download option download_signed_url = storage_file_client.create_signed_url( - file.bucket_path, 60, options={"download": "custom_download.svg"} + file.bucket_path, 60, download="custom_download.svg" ) with HttpxClient(timeout=None) as client: - response = client.get(download_signed_url["signedURL"]) + response = client.get(download_signed_url) response.raise_for_status() assert ( @@ -433,43 +440,44 @@ def test_client_create_signed_url( transform_signed_url = storage_file_client.create_signed_url( file.bucket_path, 60, - options={"transform": {"width": 200, "height": 200, "resize": "cover"}}, + transform=TransformOptions(width=200, height=200, resize="cover"), ) # assert "width=200" in transform_signed_url["signedURL"] # assert "height=200" in transform_signed_url["signedURL"] # assert "resize=cover" in transform_signed_url["signedURL"] # assert "format=png" in transform_signed_url["signedURL"] with HttpxClient(timeout=None) as client: - response = client.get(transform_signed_url["signedURL"]) + response = client.get(transform_signed_url) response.raise_for_status() def test_client_create_signed_urls( - storage_file_client: SyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[SyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can create signed urls for files in a bucket""" paths = [] for file in multi_file: paths.append(file.bucket_path) storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, 
content_type=file.mime_type ) signed_urls = storage_file_client.create_signed_urls(paths, 10) with HttpxClient() as client: for url in signed_urls: - response = client.get(url["signedURL"]) + response = client.get(url.signed_url) response.raise_for_status() assert response.content == multi_file[0].file_content def test_client_get_public_url( - storage_file_client_public: SyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can get the public url of a file in a bucket with various options""" storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Test basic public URL @@ -481,7 +489,7 @@ def test_client_get_public_url( # Test with download option download_url = storage_file_client_public.get_public_url( - file.bucket_path, options={"download": "custom_name.svg"} + file.bucket_path, download="custom_name.svg" ) with HttpxClient(timeout=None) as client: response = client.get(download_url) @@ -495,7 +503,7 @@ def test_client_get_public_url( # Test with transform options transform_url = storage_file_client_public.get_public_url( file.bucket_path, - options={"transform": {"width": 100, "height": 100, "resize": "contain"}}, + transform=TransformOptions(width=100, height=100, resize="contain"), ) assert "width=100" in transform_url assert "height=100" in transform_url @@ -503,22 +511,19 @@ def test_client_get_public_url( def test_client_upload_with_custom_metadata( - storage_file_client_public: SyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can get the public url of a file in a bucket""" storage_file_client_public.upload( file.bucket_path, file.local_path, - { - "content-type": file.mime_type, - "metadata": {"custom": "metadata", "second": "second", "third": 
"third"}, - }, + content_type=file.mime_type, + metadata={"custom": "metadata", "second": "second", "third": "third"}, ) info = storage_file_client_public.info(file.bucket_path) - assert "metadata" in info.keys() - assert info["name"] == file.bucket_path - assert info["metadata"] == { + assert info.name == file.bucket_path + assert info.metadata == { "custom": "metadata", "second": "second", "third": "third", @@ -526,60 +531,47 @@ def test_client_upload_with_custom_metadata( def test_client_info( - storage_file_client_public: SyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can get the public url of a file in a bucket""" storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) info = storage_file_client_public.info(file.bucket_path) - assert "metadata" in info.keys() - assert info["name"] == file.bucket_path - assert info["content_type"] == file.mime_type + assert info.name == file.bucket_path + assert info.content_type == file.mime_type def test_client_info_with_error( - storage_file_client_public: SyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can get the public url of a file in a bucket""" storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) """Ensure StorageException is raised when signed URL creation fails""" mock_error_response = Mock(spec=Response) mock_error_response.status_code = 404 - mock_error_response.json.return_value = { - "error": "Custom error message", - "statusCode": 404, - "message": "File not found", - } - - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"error": "Custom error message"} - 
mock_response.raise_for_status.side_effect = HTTPStatusError( - "HTTP Error", request=Mock(), response=mock_error_response - ) + mock_error_response.is_success = False + mock_error_response.content = b'{"error": "Custom error message", "statusCode": 404, "message": "File not found"}' with patch.object( - storage_file_client_public._client, "request", new_callable=Mock + storage_file_client_public.executor.session, "send", new_callable=Mock ) as mock_request: - mock_request.return_value = mock_response + mock_request.return_value = mock_error_response - with pytest.raises( - StorageApiError, - match="{'statusCode': 404, 'error': Custom error message, 'message': File not found}", - ): + with pytest.raises(StorageApiError): storage_file_client_public.info(file.bucket_path) def test_client_exists( - storage_file_client_public: SyncBucketProxy, file: FileForTesting + storage_file_client_public: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can get the public url of a file in a bucket""" storage_file_client_public.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) exists = storage_file_client_public.exists(file.bucket_path) @@ -587,28 +579,13 @@ def test_client_exists( assert exists -def test_client_exists_json_decode_error( - storage_file_client_public: SyncBucketProxy, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test exists method handling of json.JSONDecodeError""" - from json import JSONDecodeError - - def mock_head(*args, **kwargs) -> None: - raise JSONDecodeError("Expecting value", "", 0) - - monkeypatch.setattr(storage_file_client_public._client, "head", mock_head) - exists = storage_file_client_public.exists("some/path") - assert exists is False - - def test_client_copy( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we 
can copy files within a bucket""" # Upload original file storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Copy to new path @@ -622,20 +599,18 @@ def test_client_copy( # Verify metadata was copied files = storage_file_client.list(file.bucket_folder) - copied_info = next( - (f for f in files if f.get("name") == f"copied_{file.name}"), None - ) + copied_info = next((f for f in files if f.name == f"copied_{file.name}"), None) assert copied_info is not None - assert copied_info.get("metadata", {}).get("mimetype") == file.mime_type + assert copied_info.metadata.get("mimetype") == file.mime_type def test_client_move( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can move files within a bucket""" # Upload original file storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) # Move to new path @@ -651,18 +626,18 @@ def test_client_move( # Verify metadata was preserved files = storage_file_client.list(file.bucket_folder) - moved_info = next((f for f in files if f.get("name") == f"moved_{file.name}"), None) + moved_info = next((f for f in files if f.name == f"moved_{file.name}"), None) assert moved_info is not None - assert moved_info.get("metadata", {}).get("mimetype") == file.mime_type + assert moved_info.metadata.get("mimetype") == file.mime_type def test_client_remove( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can remove files from a bucket""" # Upload file storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, 
content_type=file.mime_type ) # Verify file exists @@ -676,14 +651,15 @@ def test_client_remove( def test_client_remove_multiple( - storage_file_client: SyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[SyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can remove multiple files from a bucket""" # Upload files paths = [] for file in multi_file: storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) paths.append(file.bucket_path) @@ -699,50 +675,33 @@ def test_client_remove_multiple( assert not storage_file_client.exists(path) -def test_client_create_signed_upload_url_error( - storage_file_client: SyncBucketProxy, -) -> None: - """Ensure StorageException is raised when signed URL creation fails""" - mock_response = Mock(spec=Response) - mock_response.json.return_value = {"url": "https://example.com/test.txt"} - - with patch.object( - storage_file_client._client, "request", new_callable=Mock - ) as mock_request: - mock_request.return_value = mock_response - - with pytest.raises(StorageException, match="No token sent by the API"): - storage_file_client.create_signed_upload_url("test.txt") - - def test_client_create_signed_urls_with_download( - storage_file_client: SyncBucketProxy, multi_file: list[FileForTesting] + storage_file_client: StorageFileApiClient[SyncHttpIO], + multi_file: list[FileForTesting], ) -> None: """Ensure we can create signed urls with download options for files in a bucket""" paths = [] for file in multi_file: paths.append(file.bucket_path) storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) - signed_urls = storage_file_client.create_signed_urls( - paths, 10, options={"download": True} - ) + signed_urls = storage_file_client.create_signed_urls(paths, 10, 
download=True) with HttpxClient() as client: for i, url in enumerate(signed_urls): - response = client.get(url["signedURL"]) + response = client.get(url.signed_url) response.raise_for_status() assert response.content == multi_file[i].file_content def test_client_list_v2( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) result = storage_file_client.list_v2() @@ -756,17 +715,46 @@ def test_client_list_v2( def test_client_list_v2_folder( - storage_file_client: SyncBucketProxy, file: FileForTesting + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting ) -> None: """Ensure we can upload files to a bucket""" storage_file_client.upload( - file.bucket_path, file.local_path, {"content-type": file.mime_type} + file.bucket_path, file.local_path, content_type=file.mime_type ) - result = storage_file_client.list_v2({"with_delimiter": True}) + result = storage_file_client.list_v2(with_delimiter=True) assert not result.hasNext assert len(result.objects) == 0 assert len(result.folders) == 1 folder = result.folders[0] assert folder.key == file.bucket_folder + + +def test_client_list_v2_paginated( + storage_file_client: StorageFileApiClient[SyncHttpIO], file: FileForTesting +) -> None: + """Ensure we can upload files to a bucket""" + suffixes = ["zz", "bb", "xx", "ww", "cc", "aa", "yy", "oo"] + for suffix in suffixes: + storage_file_client.upload( + file.bucket_path + suffix, file.local_path, content_type=file.mime_type + ) + + has_next = True + cursor = "" + pages = 0 + while has_next: + result = storage_file_client.list_v2( + with_delimiter=True, + prefix=f"{file.bucket_folder}/", + limit=2, + cursor=cursor, + ) + has_next = result.hasNext + cursor = 
result.nextCursor or "" + + assert len(result.objects) == 2 + assert all(f.name.startswith(file.bucket_path) for f in result.objects) + pages += 1 + assert pages == 4 diff --git a/src/storage/tests/test_client.py b/src/storage/tests/test_client.py deleted file mode 100644 index 4d926cc6..00000000 --- a/src/storage/tests/test_client.py +++ /dev/null @@ -1,96 +0,0 @@ -from typing import Dict - -import pytest -from httpx import AsyncClient, Client, Timeout -from storage3 import AsyncStorageClient, SyncStorageClient -from storage3.constants import DEFAULT_TIMEOUT - - -@pytest.fixture -def valid_url() -> str: - return "https://example.com/storage/v1" - - -@pytest.fixture -def valid_headers() -> Dict[str, str]: - return {"Authorization": "Bearer test_token", "apikey": "test_api_key"} - - -def test_create_async_client(valid_url, valid_headers) -> None: - client = AsyncStorageClient(url=valid_url, headers=valid_headers) - - assert isinstance(client, AsyncStorageClient) - assert all( - client._client.headers[key] == value for key, value in valid_headers.items() - ) - assert client._client.timeout == Timeout(DEFAULT_TIMEOUT) - - -def test_create_sync_client(valid_url, valid_headers) -> None: - client = SyncStorageClient(url=valid_url, headers=valid_headers) - - assert isinstance(client, SyncStorageClient) - assert all( - client._client.headers[key] == value for key, value in valid_headers.items() - ) - assert client._client.timeout == Timeout(DEFAULT_TIMEOUT) - - -def test_async_storage_client(valid_url, valid_headers) -> None: - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = AsyncClient(headers=headers) - client = AsyncStorageClient( - url=valid_url, headers=valid_headers, http_client=http_client - ) - - assert isinstance(client, AsyncStorageClient) - assert all(client._headers[key] == value for key, value in valid_headers.items()) - assert client._client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client._client.timeout == Timeout(5.0) - - -def 
test_sync_storage_client(valid_url, valid_headers) -> None: - headers = {"x-user-agent": "my-app/0.0.1"} - http_client = Client(headers=headers) - client = SyncStorageClient( - url=valid_url, headers=valid_headers, http_client=http_client - ) - - assert isinstance(client, SyncStorageClient) - assert all(client._headers[key] == value for key, value in valid_headers.items()) - assert client._client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client._client.timeout == Timeout(5.0) - - -def test_async_storage_client_with_httpx(valid_url, valid_headers) -> None: - client = AsyncStorageClient(url=valid_url, headers=valid_headers) - - assert isinstance(client, AsyncStorageClient) - assert all( - client._client.headers[key] == value for key, value in valid_headers.items() - ) - assert client._client.timeout == Timeout(DEFAULT_TIMEOUT) - - -def test_sync_storage_client_with_httpx(valid_url, valid_headers) -> None: - client = SyncStorageClient(url=valid_url, headers=valid_headers) - - assert isinstance(client, SyncStorageClient) - assert all( - client._client.headers[key] == value for key, value in valid_headers.items() - ) - assert client._client.timeout == Timeout(DEFAULT_TIMEOUT) - - -def test_custom_timeout(valid_url, valid_headers) -> None: - custom_timeout = 30 - - async_client = AsyncStorageClient( - url=valid_url, headers=valid_headers, timeout=custom_timeout - ) - assert async_client._client.timeout == Timeout(custom_timeout) - - sync_client = SyncStorageClient( - url=valid_url, headers=valid_headers, timeout=custom_timeout - ) - assert sync_client._client.timeout == Timeout(custom_timeout) diff --git a/src/storage/tests/test_exceptions.py b/src/storage/tests/test_exceptions.py deleted file mode 100644 index b59ca79e..00000000 --- a/src/storage/tests/test_exceptions.py +++ /dev/null @@ -1,47 +0,0 @@ -from storage3.exceptions import StorageApiError - - -def test_storage_api_error_initialization() -> None: - # Arrange - message = "Test error message" - code 
= "TEST_ERROR" - status = 400 - - # Act - error = StorageApiError(message, code, status) - - # Assert - assert error.message == message - assert error.code == code - assert error.status == status - assert error.name == "StorageApiError" - assert ( - str(error) - == "{'statusCode': 400, 'error': TEST_ERROR, 'message': Test error message}" - ) - - -def test_storage_api_error_to_dict() -> None: - # Arrange - error = StorageApiError("Test message", "TEST_CODE", 404) - - # Act - error_dict = error.to_dict() - - # Assert - assert error_dict == { - "name": "StorageApiError", - "code": "TEST_CODE", - "message": "Test message", - "status": 404, - } - - -def test_storage_api_error_inheritance() -> None: - # Arrange & Act - error = StorageApiError("Test message", "TEST_CODE", 500) - - # Assert - from storage3.utils import StorageException - - assert isinstance(error, StorageException) diff --git a/src/storage/tests/test_utils.py b/src/storage/tests/test_utils.py deleted file mode 100644 index a63709ac..00000000 --- a/src/storage/tests/test_utils.py +++ /dev/null @@ -1,11 +0,0 @@ -from deprecation import fail_if_not_removed -from storage3.utils import SyncClient - - -@fail_if_not_removed -def test_sync_client() -> None: - client = SyncClient() - # Verify that aclose method exists and calls close - assert hasattr(client, "aclose") - assert callable(client.aclose) - client.aclose() # Should not raise any exception diff --git a/src/supabase/pyproject.toml b/src/supabase/pyproject.toml index 8ebcd236..7464cb33 100644 --- a/src/supabase/pyproject.toml +++ b/src/supabase/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "supabase" -version = "2.28.3" # {x-release-please-version} +version = "3.0.0a1" # {x-release-please-version} description = "Supabase client for Python." 
authors = [ { name = "Joel Lee", email = "joel@joellee.org" }, @@ -20,14 +20,15 @@ classifiers = [ "Programming Language :: Python :: 3", "Operating System :: OS Independent" ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "realtime == 2.28.3", # x-release-please-version - "supabase_functions == 2.28.3", # x-release-please-version - "storage3 == 2.28.3", # x-release-please-version - "supabase_auth == 2.28.3", # x-release-please-version - "postgrest == 2.28.3", # x-release-please-version - "httpx >=0.26,<0.29", + "realtime == 3.0.0a1", # x-release-please-version + "supabase_functions == 3.0.0a1", # x-release-please-version + "storage3 == 3.0.0a1", # x-release-please-version + "supabase_auth == 3.0.0a1", # x-release-please-version + "supabase_utils == 3.0.0a1", # x-release-please-version + "postgrest == 3.0.0a1", # x-release-please-version + "supabase_utils==3.0.0a1", # x-release-please-version "yarl>=1.22.0", ] @@ -37,6 +38,10 @@ repository = "https://github.com/supabase/supabase-py" documentation = "https://github.com/supabase/supabase-py/src/supabase" changelog = "https://github.com/supabase/supabase-py/tree/main/CHANGELOG.md" +[project.optional-dependencies] +httpx = ["supabase_utils[httpx]"] +aiohttp = ["supabase_utils[aiohttp]"] + [dependency-groups] dev = [ { include-group = "tests" }, @@ -48,6 +53,7 @@ tests = [ "pytest-asyncio >=0.24,<1.1", "python-dotenv >= 1.1.1", "mypy>=1.18.2", + "supabase_utils[all]", ] lints = [ "unasync >=0.6.0", diff --git a/src/supabase/src/supabase/__init__.py b/src/supabase/src/supabase/__init__.py index 2abfed2b..18174ca1 100644 --- a/src/supabase/src/supabase/__init__.py +++ b/src/supabase/src/supabase/__init__.py @@ -1,7 +1,7 @@ from postgrest import APIError as PostgrestAPIError from postgrest import APIResponse as PostgrestAPIResponse from realtime import AuthorizationError, NotConnectedError -from storage3.utils import StorageException +from storage3.exceptions import StorageException from 
supabase_auth.errors import ( AuthApiError, AuthError, @@ -19,18 +19,12 @@ ) # Async Client -from ._async.auth_client import AsyncSupabaseAuthClient as ASupabaseAuthClient from ._async.client import AsyncClient from ._async.client import AsyncClient as AClient from ._async.client import AsyncStorageClient as ASupabaseStorageClient from ._async.client import SupabaseException as ASupabaseException from ._async.client import SupabaseException as AsyncSupabaseException -from ._async.client import create_client as acreate_client -from ._async.client import create_client as create_async_client - -# Sync Client -from ._sync.auth_client import SyncSupabaseAuthClient as SupabaseAuthClient -from ._sync.client import Client, SupabaseException, create_client +from ._sync.client import Client from ._sync.client import SupabaseException as SyncSupabaseException from ._sync.client import SyncStorageClient as SupabaseStorageClient @@ -43,15 +37,12 @@ from .version import __version__ __all__ = ( - "acreate_client", - "create_async_client", "AClient", "ASupabaseAuthClient", "ASupabaseStorageClient", "AClientOptions", "AsyncClient", "AsyncClientOptions", - "create_client", "Client", "SupabaseAuthClient", "SupabaseStorageClient", diff --git a/src/supabase/src/supabase/_async/auth_client.py b/src/supabase/src/supabase/_async/auth_client.py deleted file mode 100644 index 6f7ff5ea..00000000 --- a/src/supabase/src/supabase/_async/auth_client.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import Dict, Optional - -from httpx import AsyncClient -from supabase_auth import ( - AsyncGoTrueClient, - AsyncSupportedStorage, - AuthFlowType, -) - - -class AsyncSupabaseAuthClient(AsyncGoTrueClient): - """Supabase Auth Client for asynchronous operations.""" - - def __init__( - self, - *, - url: str, - headers: Optional[Dict[str, str]] = None, - storage_key: Optional[str] = None, - auto_refresh_token: bool = True, - persist_session: bool = True, - storage: Optional[AsyncSupportedStorage] = None, - 
http_client: Optional[AsyncClient] = None, - flow_type: AuthFlowType = "implicit", - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - """ - Instantiate a SupabaseAuthClient instance. - - Args: - url (str): The URL of the Supabase instance. - headers (Optional[Dict[str, str]]): Optional headers to include in requests. - storage_key (Optional[str]): Key to store session information. - auto_refresh_token (bool): Whether to automatically refresh the token. Defaults to True. - persist_session (bool): Whether to persist the session. Defaults to True. - storage (AsyncSupportedStorage): Storage mechanism. Defaults to AsyncMemoryStorage(). - http_client (Optional[AsyncClient]): HTTP client for making requests. Defaults to None. - flow_type (AuthFlowType): Type of authentication flow. Defaults to "implicit". - verify (bool): Whether to verify SSL certificates. Defaults to True. - proxy (Optional[str]): Proxy URL. Defaults to None. - """ - if headers is None: - headers = {} - - super().__init__( - url=url, - headers=headers, - storage_key=storage_key, - auto_refresh_token=auto_refresh_token, - persist_session=persist_session, - storage=storage, - http_client=http_client, - flow_type=flow_type, - verify=verify, - proxy=proxy, - ) diff --git a/src/supabase/src/supabase/_async/client.py b/src/supabase/src/supabase/_async/client.py index 8b81e6ed..029a8c84 100644 --- a/src/supabase/src/supabase/_async/client.py +++ b/src/supabase/src/supabase/_async/client.py @@ -1,28 +1,26 @@ import asyncio import copy import re -from typing import Any, Dict, List, Optional, Union - -from httpx import Timeout -from postgrest import ( - AsyncPostgrestClient, - AsyncRequestBuilder, - AsyncRPCFilterRequestBuilder, +from types import TracebackType +from typing import Dict, List, Literal, overload + +from postgrest import AsyncPostgrestClient +from postgrest.request_builder import ( + RequestBuilder, + RPCCountRequestBuilder, + RPCFilterRequestBuilder, ) -from postgrest.constants 
import DEFAULT_POSTGREST_CLIENT_TIMEOUT from postgrest.types import CountMethod from realtime import AsyncRealtimeChannel, AsyncRealtimeClient, RealtimeChannelOptions from storage3 import AsyncStorageClient -from storage3.constants import DEFAULT_TIMEOUT as DEFAULT_STORAGE_CLIENT_TIMEOUT -from supabase_auth import AsyncMemoryStorage +from supabase_auth import AsyncMemoryStorage, AsyncSupabaseAuthClient from supabase_auth.types import AuthChangeEvent, Session from supabase_functions import AsyncFunctionsClient +from supabase_utils.http.io import AsyncHttpIO, AsyncHttpSession from yarl import URL from ..lib.client_options import AsyncClientOptions as ClientOptions -from ..lib.client_options import AsyncHttpxClient from ..types import RealtimeClientOptions -from .auth_client import AsyncSupabaseAuthClient # Create an exception class when user does not provide a valid url or key. @@ -39,7 +37,8 @@ def __init__( self, supabase_url: str, supabase_key: str, - options: Optional[ClientOptions] = None, + http_session: AsyncHttpSession, + options: ClientOptions | None = None, ) -> None: """Instantiate the client. 
@@ -66,6 +65,8 @@ def __init__( if options is None: options = ClientOptions(storage=AsyncMemoryStorage()) + self.http_session: AsyncHttpSession = http_session + self.supabase_url = ( URL(supabase_url) if supabase_url.endswith("/") else URL(supabase_url + "/") ) @@ -88,26 +89,39 @@ def __init__( self.auth = self._init_supabase_auth_client( auth_url=str(self.auth_url), client_options=self.options, + http_session=self.http_session, ) self.realtime = self._init_realtime_client( realtime_url=self.realtime_url, supabase_key=self.supabase_key, options=self.options.realtime if self.options else None, ) - self._postgrest: Optional[AsyncPostgrestClient] = None - self._storage: Optional[AsyncStorageClient] = None - self._functions: Optional[AsyncFunctionsClient] = None + self._postgrest: AsyncPostgrestClient | None = None + self._storage: AsyncStorageClient | None = None + self._functions: AsyncFunctionsClient | None = None self.auth.on_auth_state_change(self._listen_to_auth_events) + async def __aenter__(self) -> "AsyncClient": + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.http_session.__aexit__(exc_type, exc, tb) + @classmethod async def create( cls, supabase_url: str, supabase_key: str, - options: Optional[ClientOptions] = None, + http_session: AsyncHttpSession, + options: ClientOptions | None = None, ) -> "AsyncClient": auth_header = options.headers.get("Authorization") if options else None - client = cls(supabase_url, supabase_key, options) + client = cls(supabase_url, supabase_key, http_session, options) if auth_header is None: try: @@ -126,7 +140,7 @@ async def create( return client - def table(self, table_name: str) -> AsyncRequestBuilder: + def table(self, table_name: str) -> RequestBuilder[AsyncHttpIO]: """Perform a table operation. 
Note that the supabase client uses the `from` method, but in Python, @@ -142,21 +156,47 @@ def schema(self, schema: str) -> AsyncPostgrestClient: """ return self.postgrest.schema(schema) - def from_(self, table_name: str) -> AsyncRequestBuilder: + def from_(self, table_name: str) -> RequestBuilder[AsyncHttpIO]: """Perform a table operation. See the `table` method. """ return self.postgrest.from_(table_name) + @overload + def rpc( + self, + fn: str, + head: Literal[False], + params: Dict[str, str] | None = None, + count: CountMethod | None = None, + get: bool = False, + ) -> RPCFilterRequestBuilder[AsyncHttpIO]: ... + + @overload + def rpc( + self, + fn: str, + head: Literal[True], + params: Dict[str, str] | None = None, + count: CountMethod | None = None, + get: bool = False, + ) -> RPCCountRequestBuilder[AsyncHttpIO]: ... + + @overload + def rpc( + self, + fn: str, + ) -> RPCFilterRequestBuilder[AsyncHttpIO]: ... + def rpc( self, fn: str, - params: Optional[Dict[Any, Any]] = None, - count: Optional[CountMethod] = None, head: bool = False, + params: Dict[str, str] | None = None, + count: CountMethod | None = None, get: bool = False, - ) -> AsyncRPCFilterRequestBuilder: + ) -> RPCFilterRequestBuilder[AsyncHttpIO] | RPCCountRequestBuilder[AsyncHttpIO]: """Performs a stored procedure call. 
Parameters @@ -177,7 +217,7 @@ def rpc( """ if params is None: params = {} - return self.postgrest.rpc(fn, params, count, head, get) + return self.postgrest.rpc(fn, params, count=count, head=head, get=get) @property def postgrest(self) -> AsyncPostgrestClient: @@ -186,8 +226,7 @@ def postgrest(self) -> AsyncPostgrestClient: rest_url=str(self.rest_url), headers=self.options.headers, schema=self.options.schema, - timeout=self.options.postgrest_client_timeout, - http_client=self.options.httpx_client, + http_session=self.http_session, ) return self._postgrest @@ -198,8 +237,7 @@ def storage(self) -> AsyncStorageClient: self._storage = self._init_storage_client( storage_url=str(self.storage_url), headers=self.options.headers, - storage_client_timeout=self.options.storage_client_timeout, - http_client=self.options.httpx_client, + http_session=self.http_session, ) return self._storage @@ -209,17 +247,11 @@ def functions(self) -> AsyncFunctionsClient: self._functions = AsyncFunctionsClient( url=str(self.functions_url), headers=self.options.headers, - timeout=( - self.options.function_client_timeout - if self.options.httpx_client is None - else None - ), - http_client=self.options.httpx_client, ) return self._functions def channel( - self, topic: str, params: Optional[RealtimeChannelOptions] = None + self, topic: str, params: RealtimeChannelOptions | None = None ) -> AsyncRealtimeChannel: """Creates a Realtime channel with Broadcast, Presence, and Postgres Changes.""" return self.realtime.channel(topic, params or {}) @@ -240,7 +272,7 @@ async def remove_all_channels(self) -> None: def _init_realtime_client( realtime_url: URL, supabase_key: str, - options: Optional[RealtimeClientOptions] = None, + options: RealtimeClientOptions | None = None, ) -> AsyncRealtimeClient: realtime_options = options or {} """Private method for creating an instance of the realtime-py client.""" @@ -252,31 +284,19 @@ def _init_realtime_client( def _init_storage_client( storage_url: str, headers: 
Dict[str, str], - storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT, - verify: bool = True, - proxy: Optional[str] = None, - http_client: Union[AsyncHttpxClient, None] = None, + http_session: AsyncHttpSession, ) -> AsyncStorageClient: - if http_client is not None: - # If an http client is provided, use it - return AsyncStorageClient( - url=storage_url, headers=headers, http_client=http_client - ) return AsyncStorageClient( url=storage_url, headers=headers, - timeout=storage_client_timeout, - verify=verify, - proxy=proxy, - http_client=None, + http_session=http_session, ) @staticmethod def _init_supabase_auth_client( auth_url: str, client_options: ClientOptions, - verify: bool = True, - proxy: Optional[str] = None, + http_session: AsyncHttpSession, ) -> AsyncSupabaseAuthClient: """Creates a wrapped instance of the GoTrue Client.""" return AsyncSupabaseAuthClient( @@ -286,9 +306,7 @@ def _init_supabase_auth_client( storage=client_options.storage, headers=client_options.headers, flow_type=client_options.flow_type, - verify=verify, - proxy=proxy, - http_client=client_options.httpx_client, + http_session=http_session, ) @staticmethod @@ -296,31 +314,20 @@ def _init_postgrest_client( rest_url: str, headers: Dict[str, str], schema: str, - timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT, - verify: bool = True, - proxy: Optional[str] = None, - http_client: Union[AsyncHttpxClient, None] = None, + http_session: AsyncHttpSession, ) -> AsyncPostgrestClient: """Private helper for creating an instance of the Postgrest client.""" - if http_client is not None: - # If an http client is provided, use it - return AsyncPostgrestClient( - rest_url, headers=headers, schema=schema, http_client=http_client - ) return AsyncPostgrestClient( rest_url, headers=headers, schema=schema, - timeout=timeout, - verify=verify, - proxy=proxy, - http_client=None, + http_session=http_session, ) def _create_auth_header(self, token: str) -> str: return f"Bearer {token}" - 
def _get_auth_headers(self, authorization: Optional[str] = None) -> Dict[str, str]: + def _get_auth_headers(self, authorization: str | None = None) -> Dict[str, str]: if authorization is None: authorization = self.options.headers.get( "Authorization", self._create_auth_header(self.supabase_key) @@ -333,7 +340,7 @@ def _get_auth_headers(self, authorization: Optional[str] = None) -> Dict[str, st } def _listen_to_auth_events( - self, event: AuthChangeEvent, session: Optional[Session] + self, event: AuthChangeEvent, session: Session | None ) -> None: access_token = self.supabase_key if event in ["SIGNED_IN", "TOKEN_REFRESHED", "SIGNED_OUT"]: @@ -344,41 +351,7 @@ def _listen_to_auth_events( access_token = session.access_token if session else self.supabase_key auth_header = self._create_auth_header(access_token) self.options.headers["Authorization"] = auth_header - self.auth._headers["Authorization"] = auth_header + self.auth.default_headers = self.auth.default_headers.override( + "Authorization", auth_header + ) asyncio.create_task(self.realtime.set_auth(access_token)) - - -async def create_client( - supabase_url: str, - supabase_key: str, - options: Optional[ClientOptions] = None, -) -> AsyncClient: - """Create client function to instantiate supabase client like JS runtime. - - Parameters - ---------- - supabase_url: str - The URL to the Supabase instance that should be connected to. - supabase_key: str - The API key to the Supabase instance that should be connected to. - **options - Any extra settings to be optionally specified - also see the - `DEFAULT_OPTIONS` dict. - - Examples - -------- - Instantiating the client. 
- >>> import os - >>> from supabase import create_client, Client - >>> - >>> url: str = os.environ.get("SUPABASE_TEST_URL") - >>> key: str = os.environ.get("SUPABASE_TEST_KEY") - >>> supabase: Client = create_client(url, key) - - Returns - ------- - Client - """ - return await AsyncClient.create( - supabase_url=supabase_url, supabase_key=supabase_key, options=options - ) diff --git a/src/supabase/src/supabase/_sync/auth_client.py b/src/supabase/src/supabase/_sync/auth_client.py deleted file mode 100644 index 9b701241..00000000 --- a/src/supabase/src/supabase/_sync/auth_client.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import Dict, Optional - -from httpx import Client -from supabase_auth import ( - AuthFlowType, - SyncGoTrueClient, - SyncSupportedStorage, -) - - -class SyncSupabaseAuthClient(SyncGoTrueClient): - """Supabase Auth Client for synchronous operations.""" - - def __init__( - self, - *, - url: str, - headers: Optional[Dict[str, str]] = None, - storage_key: Optional[str] = None, - auto_refresh_token: bool = True, - persist_session: bool = True, - storage: Optional[SyncSupportedStorage] = None, - http_client: Optional[Client] = None, - flow_type: AuthFlowType = "implicit", - verify: bool = True, - proxy: Optional[str] = None, - ) -> None: - """ - Instantiate a SupabaseAuthClient instance. - - Args: - url (str): The URL of the Supabase instance. - headers (Optional[Dict[str, str]]): Optional headers to include in requests. - storage_key (Optional[str]): Key to store session information. - auto_refresh_token (bool): Whether to automatically refresh the token. Defaults to True. - persist_session (bool): Whether to persist the session. Defaults to True. - storage (SyncSupportedStorage): Storage mechanism. Defaults to SyncMemoryStorage(). - http_client (Optional[SyncClient]): HTTP client for making requests. Defaults to None. - flow_type (AuthFlowType): Type of authentication flow. Defaults to "implicit". - verify (bool): Whether to verify SSL certificates. 
Defaults to True. - proxy (Optional[str]): Proxy URL. Defaults to None. - """ - if headers is None: - headers = {} - - super().__init__( - url=url, - headers=headers, - storage_key=storage_key, - auto_refresh_token=auto_refresh_token, - persist_session=persist_session, - storage=storage, - http_client=http_client, - flow_type=flow_type, - verify=verify, - proxy=proxy, - ) diff --git a/src/supabase/src/supabase/_sync/client.py b/src/supabase/src/supabase/_sync/client.py index 29c0246b..867fcbd0 100644 --- a/src/supabase/src/supabase/_sync/client.py +++ b/src/supabase/src/supabase/_sync/client.py @@ -1,27 +1,23 @@ import copy import re -from typing import Any, Dict, List, Optional, Union - -from httpx import Timeout -from postgrest import ( - SyncPostgrestClient, - SyncRequestBuilder, - SyncRPCFilterRequestBuilder, +from types import TracebackType +from typing import Dict, Literal, overload + +from postgrest import SyncPostgrestClient +from postgrest.request_builder import ( + RequestBuilder, + RPCCountRequestBuilder, + RPCFilterRequestBuilder, ) -from postgrest.constants import DEFAULT_POSTGREST_CLIENT_TIMEOUT from postgrest.types import CountMethod -from realtime import RealtimeChannelOptions, SyncRealtimeChannel, SyncRealtimeClient from storage3 import SyncStorageClient -from storage3.constants import DEFAULT_TIMEOUT as DEFAULT_STORAGE_CLIENT_TIMEOUT -from supabase_auth import SyncMemoryStorage +from supabase_auth import SyncMemoryStorage, SyncSupabaseAuthClient from supabase_auth.types import AuthChangeEvent, Session from supabase_functions import SyncFunctionsClient +from supabase_utils.http.io import HttpSession, SyncHttpIO from yarl import URL from ..lib.client_options import SyncClientOptions as ClientOptions -from ..lib.client_options import SyncHttpxClient -from ..types import RealtimeClientOptions -from .auth_client import SyncSupabaseAuthClient # Create an exception class when user does not provide a valid url or key. 
@@ -38,7 +34,8 @@ def __init__( self, supabase_url: str, supabase_key: str, - options: Optional[ClientOptions] = None, + http_session: HttpSession, + options: ClientOptions | None = None, ) -> None: """Instantiate the client. @@ -65,6 +62,8 @@ def __init__( if options is None: options = ClientOptions(storage=SyncMemoryStorage()) + self.http_session: HttpSession = http_session + self.supabase_url = ( URL(supabase_url) if supabase_url.endswith("/") else URL(supabase_url + "/") ) @@ -76,9 +75,6 @@ def __init__( } self.rest_url = self.supabase_url.joinpath("rest", "v1") - self.realtime_url = self.supabase_url.joinpath("realtime", "v1").with_scheme( - "wss" if self.supabase_url.scheme == "https" else "ws" - ) self.auth_url = self.supabase_url.joinpath("auth", "v1") self.storage_url = self.supabase_url.joinpath("storage", "v1", "") self.functions_url = self.supabase_url.joinpath("functions", "v1") @@ -87,15 +83,11 @@ def __init__( self.auth = self._init_supabase_auth_client( auth_url=str(self.auth_url), client_options=self.options, + http_session=self.http_session, ) - self.realtime = self._init_realtime_client( - realtime_url=self.realtime_url, - supabase_key=self.supabase_key, - options=self.options.realtime if self.options else None, - ) - self._postgrest: Optional[SyncPostgrestClient] = None - self._storage: Optional[SyncStorageClient] = None - self._functions: Optional[SyncFunctionsClient] = None + self._postgrest: SyncPostgrestClient | None = None + self._storage: SyncStorageClient | None = None + self._functions: SyncFunctionsClient | None = None self.auth.on_auth_state_change(self._listen_to_auth_events) @classmethod @@ -103,10 +95,11 @@ def create( cls, supabase_url: str, supabase_key: str, - options: Optional[ClientOptions] = None, + http_session: HttpSession, + options: ClientOptions | None = None, ) -> "Client": auth_header = options.headers.get("Authorization") if options else None - client = cls(supabase_url, supabase_key, options) + client = 
cls(supabase_url, supabase_key, http_session, options) if auth_header is None: try: @@ -125,7 +118,18 @@ def create( return client - def table(self, table_name: str) -> SyncRequestBuilder: + def __enter__(self) -> "Client": + return self + + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + self.http_session.__exit__(exc_type, exc, tb) + + def table(self, table_name: str) -> RequestBuilder[SyncHttpIO]: """Perform a table operation. Note that the supabase client uses the `from` method, but in Python, @@ -141,42 +145,50 @@ def schema(self, schema: str) -> SyncPostgrestClient: """ return self.postgrest.schema(schema) - def from_(self, table_name: str) -> SyncRequestBuilder: + def from_(self, table_name: str) -> RequestBuilder[SyncHttpIO]: """Perform a table operation. See the `table` method. """ return self.postgrest.from_(table_name) + @overload def rpc( self, fn: str, - params: Optional[Dict[Any, Any]] = None, - count: Optional[CountMethod] = None, - head: bool = False, + head: Literal[False], + params: Dict[str, str] | None = None, + count: CountMethod | None = None, get: bool = False, - ) -> SyncRPCFilterRequestBuilder: - """Performs a stored procedure call. + ) -> RPCFilterRequestBuilder[SyncHttpIO]: ... - Parameters - ---------- - fn : callable - The stored procedure call to be executed. - params : dict of any - Parameters passed into the stored procedure call. - count: The method to use to get the count of rows returned. - head: When set to `true`, `data` will not be returned. Useful if you only need the count. - get: When set to `true`, the function will be called with read-only access mode. - - Returns - ------- - SyncFilterRequestBuilder - Returns a filter builder. This lets you apply filters on the response - of an RPC. 
- """ + @overload + def rpc( + self, + fn: str, + head: Literal[True], + params: Dict[str, str] | None = None, + count: CountMethod | None = None, + get: bool = False, + ) -> RPCCountRequestBuilder[SyncHttpIO]: ... + + @overload + def rpc( + self, + fn: str, + ) -> RPCCountRequestBuilder[SyncHttpIO]: ... + + def rpc( + self, + fn: str, + head: bool = False, + params: dict[str, str] | None = None, + count: CountMethod | None = None, + get: bool = False, + ) -> RPCFilterRequestBuilder[SyncHttpIO] | RPCCountRequestBuilder[SyncHttpIO]: if params is None: params = {} - return self.postgrest.rpc(fn, params, count, head, get) + return self.postgrest.rpc(fn, params, head=head, count=count, get=get) @property def postgrest(self) -> SyncPostgrestClient: @@ -185,8 +197,7 @@ def postgrest(self) -> SyncPostgrestClient: rest_url=str(self.rest_url), headers=self.options.headers, schema=self.options.schema, - timeout=self.options.postgrest_client_timeout, - http_client=self.options.httpx_client, + http_session=self.http_session, ) return self._postgrest @@ -197,8 +208,7 @@ def storage(self) -> SyncStorageClient: self._storage = self._init_storage_client( storage_url=str(self.storage_url), headers=self.options.headers, - storage_client_timeout=self.options.storage_client_timeout, - http_client=self.options.httpx_client, + http_session=self.http_session, ) return self._storage @@ -208,74 +218,26 @@ def functions(self) -> SyncFunctionsClient: self._functions = SyncFunctionsClient( url=str(self.functions_url), headers=self.options.headers, - timeout=( - self.options.function_client_timeout - if self.options.httpx_client is None - else None - ), - http_client=self.options.httpx_client, ) return self._functions - def channel( - self, topic: str, params: Optional[RealtimeChannelOptions] = None - ) -> SyncRealtimeChannel: - """Creates a Realtime channel with Broadcast, Presence, and Postgres Changes.""" - return self.realtime.channel(topic, params or {}) - - def get_channels(self) -> 
List[SyncRealtimeChannel]: - """Returns all realtime channels.""" - return self.realtime.get_channels() - - def remove_channel(self, channel: SyncRealtimeChannel) -> None: - """Unsubscribes and removes Realtime channel from Realtime client.""" - self.realtime.remove_channel(channel) - - def remove_all_channels(self) -> None: - """Unsubscribes and removes all Realtime channels from Realtime client.""" - self.realtime.remove_all_channels() - - @staticmethod - def _init_realtime_client( - realtime_url: URL, - supabase_key: str, - options: Optional[RealtimeClientOptions] = None, - ) -> SyncRealtimeClient: - realtime_options = options or {} - """Private method for creating an instance of the realtime-py client.""" - return SyncRealtimeClient( - str(realtime_url), token=supabase_key, **realtime_options - ) - @staticmethod def _init_storage_client( storage_url: str, headers: Dict[str, str], - storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT, - verify: bool = True, - proxy: Optional[str] = None, - http_client: Union[SyncHttpxClient, None] = None, + http_session: HttpSession, ) -> SyncStorageClient: - if http_client is not None: - # If an http client is provided, use it - return SyncStorageClient( - url=storage_url, headers=headers, http_client=http_client - ) return SyncStorageClient( url=storage_url, headers=headers, - timeout=storage_client_timeout, - verify=verify, - proxy=proxy, - http_client=None, + http_session=http_session, ) @staticmethod def _init_supabase_auth_client( auth_url: str, client_options: ClientOptions, - verify: bool = True, - proxy: Optional[str] = None, + http_session: HttpSession, ) -> SyncSupabaseAuthClient: """Creates a wrapped instance of the GoTrue Client.""" return SyncSupabaseAuthClient( @@ -285,9 +247,7 @@ def _init_supabase_auth_client( storage=client_options.storage, headers=client_options.headers, flow_type=client_options.flow_type, - verify=verify, - proxy=proxy, - http_client=client_options.httpx_client, + 
http_session=http_session, ) @staticmethod @@ -295,31 +255,20 @@ def _init_postgrest_client( rest_url: str, headers: Dict[str, str], schema: str, - timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT, - verify: bool = True, - proxy: Optional[str] = None, - http_client: Union[SyncHttpxClient, None] = None, + http_session: HttpSession, ) -> SyncPostgrestClient: """Private helper for creating an instance of the Postgrest client.""" - if http_client is not None: - # If an http client is provided, use it - return SyncPostgrestClient( - rest_url, headers=headers, schema=schema, http_client=http_client - ) return SyncPostgrestClient( rest_url, headers=headers, schema=schema, - timeout=timeout, - verify=verify, - proxy=proxy, - http_client=None, + http_session=http_session, ) def _create_auth_header(self, token: str) -> str: return f"Bearer {token}" - def _get_auth_headers(self, authorization: Optional[str] = None) -> Dict[str, str]: + def _get_auth_headers(self, authorization: str | None = None) -> Dict[str, str]: if authorization is None: authorization = self.options.headers.get( "Authorization", self._create_auth_header(self.supabase_key) @@ -332,7 +281,7 @@ def _get_auth_headers(self, authorization: Optional[str] = None) -> Dict[str, st } def _listen_to_auth_events( - self, event: AuthChangeEvent, session: Optional[Session] + self, event: AuthChangeEvent, session: Session | None ) -> None: access_token = self.supabase_key if event in ["SIGNED_IN", "TOKEN_REFRESHED", "SIGNED_OUT"]: @@ -343,40 +292,6 @@ def _listen_to_auth_events( access_token = session.access_token if session else self.supabase_key auth_header = self._create_auth_header(access_token) self.options.headers["Authorization"] = auth_header - self.auth._headers["Authorization"] = auth_header - - -def create_client( - supabase_url: str, - supabase_key: str, - options: Optional[ClientOptions] = None, -) -> Client: - """Create client function to instantiate supabase client like JS runtime. 
- - Parameters - ---------- - supabase_url: str - The URL to the Supabase instance that should be connected to. - supabase_key: str - The API key to the Supabase instance that should be connected to. - **options - Any extra settings to be optionally specified - also see the - `DEFAULT_OPTIONS` dict. - - Examples - -------- - Instantiating the client. - >>> import os - >>> from supabase import create_client, Client - >>> - >>> url: str = os.environ.get("SUPABASE_TEST_URL") - >>> key: str = os.environ.get("SUPABASE_TEST_KEY") - >>> supabase: Client = create_client(url, key) - - Returns - ------- - Client - """ - return Client.create( - supabase_url=supabase_url, supabase_key=supabase_key, options=options - ) + self.auth.default_headers = self.auth.default_headers.override( + "Authorization", auth_header + ) diff --git a/src/supabase/src/supabase/aiohttp.py b/src/supabase/src/supabase/aiohttp.py new file mode 100644 index 00000000..321b77a3 --- /dev/null +++ b/src/supabase/src/supabase/aiohttp.py @@ -0,0 +1,20 @@ +from aiohttp import ClientSession +from supabase_utils.http.adapters.aiohttp import AsyncAiohttpSession + +from ._async.client import AsyncClient +from .lib.client_options import AsyncClientOptions + + +def create_aclient( + supabase_url: str, + supabase_key: str, + http_client: ClientSession | None = None, + options: AsyncClientOptions | None = None, +) -> AsyncClient: + client = http_client or ClientSession() + return AsyncClient( + supabase_url=supabase_url, + supabase_key=supabase_key, + http_session=AsyncAiohttpSession(client=client), + options=options, + ) diff --git a/src/supabase/src/supabase/client.py b/src/supabase/src/supabase/client.py index edd00387..50e7eedc 100644 --- a/src/supabase/src/supabase/client.py +++ b/src/supabase/src/supabase/client.py @@ -1,7 +1,7 @@ from postgrest import APIError as PostgrestAPIError from postgrest import APIResponse as PostgrestAPIResponse from realtime import AuthorizationError, NotConnectedError -from 
storage3.utils import StorageException +from storage3.exceptions import StorageException from supabase_auth.errors import ( AuthApiError, AuthError, @@ -19,15 +19,10 @@ ) # Async Client -from ._async.auth_client import AsyncSupabaseAuthClient from ._async.client import AsyncClient from ._async.client import AsyncStorageClient as AsyncSupabaseStorageClient -from ._async.client import create_client as acreate_client -from ._async.client import create_client as create_async_client # Sync Client -from ._sync.auth_client import SyncSupabaseAuthClient as SupabaseAuthClient -from ._sync.client import Client, create_client from ._sync.client import SyncStorageClient as SupabaseStorageClient # Lib @@ -39,16 +34,10 @@ from .version import __version__ __all__ = [ - "AsyncSupabaseAuthClient", - "acreate_client", - "create_async_client", "AClientOptions", "AsyncClient", "AsyncClientOptions", "AsyncSupabaseStorageClient", - "SupabaseAuthClient", - "create_client", - "Client", "ClientOptions", "SupabaseStorageClient", "PostgrestAPIError", diff --git a/src/supabase/src/supabase/httpx.py b/src/supabase/src/supabase/httpx.py new file mode 100644 index 00000000..02cbbef1 --- /dev/null +++ b/src/supabase/src/supabase/httpx.py @@ -0,0 +1,43 @@ +from httpx import AsyncClient as AsyncHttpxClient +from httpx import Client as SyncHttpxClient +from supabase_utils.http.adapters.httpx import AsyncHttpxSession, HttpxSession + +from ._async.client import AsyncClient +from ._sync.client import Client +from .lib.client_options import AsyncClientOptions, SyncClientOptions + + +def create_client( + supabase_url: str, + supabase_key: str, + http_client: SyncHttpxClient | None = None, + options: SyncClientOptions | None = None, +) -> Client: + client = http_client or SyncHttpxClient( + http2=True, + verify=True, + ) + return Client( + supabase_url=supabase_url, + supabase_key=supabase_key, + http_session=HttpxSession(client=client), + options=options, + ) + + +def create_aclient( + supabase_url: str, 
+ supabase_key: str, + http_client: AsyncHttpxClient | None = None, + options: AsyncClientOptions | None = None, +) -> AsyncClient: + client = http_client or AsyncHttpxClient( + http2=True, + verify=True, + ) + return AsyncClient( + supabase_url=supabase_url, + supabase_key=supabase_key, + http_session=AsyncHttpxSession(client=client), + options=options, + ) diff --git a/src/supabase/src/supabase/lib/__init__.py b/src/supabase/src/supabase/lib/__init__.py index 9c3bbcfd..8b137891 100644 --- a/src/supabase/src/supabase/lib/__init__.py +++ b/src/supabase/src/supabase/lib/__init__.py @@ -1,3 +1 @@ -from supabase._async import auth_client -__all__ = ["auth_client"] diff --git a/src/supabase/src/supabase/lib/client_options.py b/src/supabase/src/supabase/lib/client_options.py index 44450c0e..62a32e32 100644 --- a/src/supabase/src/supabase/lib/client_options.py +++ b/src/supabase/src/supabase/lib/client_options.py @@ -1,11 +1,6 @@ from dataclasses import dataclass, field -from typing import Dict, Optional, Union +from typing import Dict -from httpx import AsyncClient as AsyncHttpxClient -from httpx import Client as SyncHttpxClient -from httpx import Timeout -from postgrest.constants import DEFAULT_POSTGREST_CLIENT_TIMEOUT -from storage3.constants import DEFAULT_TIMEOUT as DEFAULT_STORAGE_CLIENT_TIMEOUT from supabase_auth import ( AsyncMemoryStorage, AsyncSupportedStorage, @@ -13,7 +8,6 @@ SyncMemoryStorage, SyncSupportedStorage, ) -from supabase_functions.utils import DEFAULT_FUNCTION_CLIENT_TIMEOUT from supabase.types import RealtimeClientOptions @@ -39,20 +33,9 @@ class ClientOptions: persist_session: bool = True """Whether to persist a logged in session to storage.""" - realtime: Optional[RealtimeClientOptions] = None + realtime: RealtimeClientOptions | None = None """Options passed to the realtime-py instance""" - postgrest_client_timeout: Union[int, float, Timeout] = ( - DEFAULT_POSTGREST_CLIENT_TIMEOUT - ) - """Timeout passed to the SyncPostgrestClient instance.""" 
- - storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT - """Timeout passed to the SyncStorageClient instance""" - - function_client_timeout: int = DEFAULT_FUNCTION_CLIENT_TIMEOUT - """Timeout passed to the SyncFunctionsClient instance.""" - flow_type: AuthFlowType = "pkce" """flow type to use for authentication""" @@ -62,23 +45,16 @@ class AsyncClientOptions(ClientOptions): storage: AsyncSupportedStorage = field(default_factory=AsyncMemoryStorage) """A storage provider. Used to store the logged in session.""" - httpx_client: Optional[AsyncHttpxClient] = None - """httpx client instance to be used by the PostgREST, functions, auth and storage clients.""" - def replace( self, - schema: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - auto_refresh_token: Optional[bool] = None, - persist_session: Optional[bool] = None, - storage: Optional[AsyncSupportedStorage] = None, - realtime: Optional[RealtimeClientOptions] = None, - httpx_client: Optional[AsyncHttpxClient] = None, - postgrest_client_timeout: Union[ - int, float, Timeout - ] = DEFAULT_POSTGREST_CLIENT_TIMEOUT, - storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT, - flow_type: Optional[AuthFlowType] = None, + schema: str | None = None, + headers: Dict[str, str] | None = None, + auto_refresh_token: bool | None = None, + persist_session: bool | None = None, + storage: AsyncSupportedStorage | None = None, + realtime: RealtimeClientOptions | None = None, + storage_client_timeout: int | None = None, + flow_type: AuthFlowType | None = None, ) -> "AsyncClientOptions": """Create a new SupabaseClientOptions with changes""" client_options = AsyncClientOptions() @@ -90,13 +66,6 @@ def replace( client_options.persist_session = persist_session or self.persist_session client_options.storage = storage or self.storage client_options.realtime = realtime or self.realtime - client_options.httpx_client = httpx_client or self.httpx_client - client_options.postgrest_client_timeout = ( - 
postgrest_client_timeout or self.postgrest_client_timeout - ) - client_options.storage_client_timeout = ( - storage_client_timeout or self.storage_client_timeout - ) client_options.flow_type = flow_type or self.flow_type return client_options @@ -105,23 +74,16 @@ def replace( class SyncClientOptions(ClientOptions): storage: SyncSupportedStorage = field(default_factory=SyncMemoryStorage) """A storage provider. Used to store the logged in session.""" - httpx_client: Optional[SyncHttpxClient] = None - """httpx client instance to be used by the PostgREST, functions, auth and storage clients.""" def replace( self, - schema: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, - auto_refresh_token: Optional[bool] = None, - persist_session: Optional[bool] = None, - storage: Optional[SyncSupportedStorage] = None, - realtime: Optional[RealtimeClientOptions] = None, - httpx_client: Optional[SyncHttpxClient] = None, - postgrest_client_timeout: Union[ - int, float, Timeout - ] = DEFAULT_POSTGREST_CLIENT_TIMEOUT, - storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT, - flow_type: Optional[AuthFlowType] = None, + schema: str | None = None, + headers: Dict[str, str] | None = None, + auto_refresh_token: bool | None = None, + persist_session: bool | None = None, + storage: SyncSupportedStorage | None = None, + storage_client_timeout: int | None = None, + flow_type: AuthFlowType | None = None, ) -> "SyncClientOptions": """Create a new SupabaseClientOptions with changes""" client_options = SyncClientOptions() @@ -132,13 +94,5 @@ def replace( ) client_options.persist_session = persist_session or self.persist_session client_options.storage = storage or self.storage - client_options.realtime = realtime or self.realtime - client_options.httpx_client = httpx_client or self.httpx_client - client_options.postgrest_client_timeout = ( - postgrest_client_timeout or self.postgrest_client_timeout - ) - client_options.storage_client_timeout = ( - storage_client_timeout or 
self.storage_client_timeout - ) client_options.flow_type = flow_type or self.flow_type return client_options diff --git a/src/supabase/tests/_async/conftest.py b/src/supabase/tests/_async/conftest.py new file mode 100644 index 00000000..71cc6a7c --- /dev/null +++ b/src/supabase/tests/_async/conftest.py @@ -0,0 +1,45 @@ +from typing import Iterable, Protocol + +import pytest + +from supabase import AsyncClient as AsyncSupabaseClient +from supabase import AsyncClientOptions +from supabase.aiohttp import create_aclient as create_asyncio_client +from supabase.httpx import create_aclient as create_httpx_client + + +def pytest_configure(config) -> None: + from dotenv import load_dotenv + + load_dotenv(dotenv_path="tests/tests.env") + + +REST_URL = "http://127.0.0.1:3000" + + +def httpx( + supabase_key: str, supabase_url: str, options: AsyncClientOptions | None = None +) -> AsyncSupabaseClient: + return create_httpx_client(supabase_key, supabase_url, options=options) + + +def aiohttp( + supabase_key: str, supabase_url: str, options: AsyncClientOptions | None = None +) -> AsyncSupabaseClient: + return create_asyncio_client(supabase_key, supabase_url, options=options) + + +class AsyncClientCallable(Protocol): + def __call__( + self, + supabase_key: str, + supabase_url: str, + options: AsyncClientOptions | None = None, + ) -> AsyncSupabaseClient: ... 
+ + +@pytest.fixture(params=[httpx, aiohttp]) +def create_async_client( + request: pytest.FixtureRequest, +) -> Iterable[AsyncClientCallable]: + yield request.param # just immediatly yield the `create_client` function diff --git a/src/supabase/tests/_async/test_client.py b/src/supabase/tests/_async/test_client.py index f3423ee6..b5c19dbe 100644 --- a/src/supabase/tests/_async/test_client.py +++ b/src/supabase/tests/_async/test_client.py @@ -1,212 +1,138 @@ import os -from typing import Any from unittest.mock import AsyncMock, MagicMock -import pytest -from httpx import AsyncClient as AsyncHttpxClient -from httpx import AsyncHTTPTransport, Limits, Timeout from supabase_auth import AsyncMemoryStorage from supabase import ( - AsyncClient, AsyncClientOptions, - AsyncSupabaseException, - create_async_client, ) +from .conftest import AsyncClientCallable -@pytest.mark.xfail( - reason="None of these values should be able to instantiate a client object" -) -@pytest.mark.parametrize("url", ["", None, "valeefgpoqwjgpj", 139, -1, {}, []]) -@pytest.mark.parametrize("key", ["", None, "valeefgpoqwjgpj", 139, -1, {}, []]) -async def test_incorrect_values_dont_instantiate_client(url: Any, key: Any) -> None: - """Ensure we can't instantiate client with invalid values.""" - try: - _: AsyncClient = await create_async_client(url, key) - except AsyncSupabaseException: - pass - - -async def test_supabase_exception() -> None: - try: - raise AsyncSupabaseException("err") - except AsyncSupabaseException: - pass - - -async def test_postgrest_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - client = await create_async_client(url, key) - assert client.table("sample") - assert client.postgrest.schema("new_schema") - - -async def test_rpc_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - client = await create_async_client(url, key) - assert client.rpc("test_fn") - +url = 
os.environ["SUPABASE_TEST_URL"] +key = os.environ["SUPABASE_TEST_KEY"] -async def test_function_initialization() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - client = await create_async_client(url, key) - assert client.functions +async def test_postgrest_client(create_async_client: AsyncClientCallable) -> None: + async with create_async_client(url, key) as client: + assert client.table("sample") + assert client.postgrest.schema("new_schema") -async def test_uses_key_as_authorization_header_by_default() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +async def test_rpc_client(create_async_client: AsyncClientCallable) -> None: + async with create_async_client(url, key) as client: + assert client.rpc("test_fn") - client = await create_async_client(url, key) - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == f"Bearer {key}" +async def test_function_initialization( + create_async_client: AsyncClientCallable, +) -> None: + async with create_async_client(url, key) as client: + assert client.functions - assert client.postgrest.session.headers.get("apiKey") == key - assert client.postgrest.session.headers.get("Authorization") == f"Bearer {key}" - assert client.auth._headers.get("apiKey") == key - assert client.auth._headers.get("Authorization") == f"Bearer {key}" +async def test_uses_key_as_authorization_header_by_default( + create_async_client: AsyncClientCallable, +) -> None: + async with create_async_client(url, key) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == f"Bearer {key}" - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == f"Bearer {key}" + assert client.postgrest.default_headers.get("apiKey") == key + assert client.postgrest.default_headers.get("Authorization") == f"Bearer 
{key}" + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == f"Bearer {key}" -async def test_schema_update() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] + assert client.storage.default_headers.get("apiKey") == key + assert client.storage.default_headers.get("Authorization") == f"Bearer {key}" - client = await create_async_client(url, key) - assert client.postgrest - assert client.schema("new_schema") +async def test_schema_update(create_async_client: AsyncClientCallable) -> None: + async with create_async_client(url, key) as client: + assert client.postgrest + assert client.schema("new_schema") -async def test_updates_the_authorization_header_on_auth_events() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - client = await create_async_client(url, key) +async def test_updates_the_authorization_header_on_auth_events( + create_async_client: AsyncClientCallable, +) -> None: + async with create_async_client(url, key) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == f"Bearer {key}" - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == f"Bearer {key}" + mock_session = MagicMock(access_token="secretuserjwt") + realtime_mock = AsyncMock() + client.realtime = realtime_mock - mock_session = MagicMock(access_token="secretuserjwt") - realtime_mock = AsyncMock() - client.realtime = realtime_mock + client._listen_to_auth_events("SIGNED_IN", mock_session) - client._listen_to_auth_events("SIGNED_IN", mock_session) + updated_authorization = f"Bearer {mock_session.access_token}" - updated_authorization = f"Bearer {mock_session.access_token}" + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == updated_authorization - assert 
client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == updated_authorization + assert client.postgrest.default_headers.get("apiKey") == key + assert ( + client.postgrest.default_headers.get("Authorization") + == updated_authorization + ) - assert client.postgrest.session.headers.get("apiKey") == key - assert ( - client.postgrest.session.headers.get("Authorization") == updated_authorization - ) - - assert client.auth._headers.get("apiKey") == key - assert client.auth._headers.get("Authorization") == updated_authorization - - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == updated_authorization + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == updated_authorization + assert client.storage.default_headers.get("apiKey") == key + assert ( + client.storage.default_headers.get("Authorization") == updated_authorization + ) -async def test_supports_setting_a_global_authorization_header() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +async def test_supports_setting_a_global_authorization_header( + create_async_client: AsyncClientCallable, +) -> None: authorization = "Bearer secretuserjwt" options = AsyncClientOptions(headers={"Authorization": authorization}) - client = await create_async_client(url, key, options) - - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == authorization - - assert client.postgrest.session.headers.get("apiKey") == key - assert client.postgrest.session.headers.get("Authorization") == authorization + async with create_async_client(url, key, options) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == authorization - assert client.auth._headers.get("apiKey") == key - assert 
client.auth._headers.get("Authorization") == authorization + assert client.postgrest.default_headers.get("apiKey") == key + assert client.postgrest.default_headers.get("Authorization") == authorization - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == authorization + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == authorization + assert client.storage.default_headers.get("apiKey") == key + assert client.storage.default_headers.get("Authorization") == authorization -async def test_mutable_headers_issue() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +async def test_mutable_headers_issue(create_async_client: AsyncClientCallable) -> None: shared_options = AsyncClientOptions( storage=AsyncMemoryStorage(), headers={"Authorization": "Bearer initial-token"} ) - client1 = await create_async_client(url, key, shared_options) - client2 = await create_async_client(url, key, shared_options) + async with ( + create_async_client(url, key, shared_options) as client1, + create_async_client(url, key, shared_options) as client2, + ): + client1.options.headers["Authorization"] = "Bearer modified-token" + assert client2.options.headers["Authorization"] == "Bearer initial-token" + assert client1.options.headers["Authorization"] == "Bearer modified-token" - client1.options.headers["Authorization"] = "Bearer modified-token" - - assert client2.options.headers["Authorization"] == "Bearer initial-token" - assert client1.options.headers["Authorization"] == "Bearer modified-token" - - -async def test_global_authorization_header_issue() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +async def test_global_authorization_header_issue( + create_async_client: AsyncClientCallable, +) -> None: authorization = "Bearer secretuserjwt" options = 
AsyncClientOptions(headers={"Authorization": authorization}) - client = await create_async_client(url, key, options) - - assert client.options.headers.get("apiKey") == key - + async with create_async_client(url, key, options) as client: + assert client.options.headers.get("apiKey") == key -async def test_httpx_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - transport = AsyncHTTPTransport( - retries=10, - verify=False, - limits=Limits( - max_connections=1, - ), - ) - - headers = {"x-user-agent": "my-app/0.0.1"} - async with AsyncHttpxClient( - transport=transport, headers=headers, timeout=Timeout(2.0) - ) as http_client: - # Create a client with the custom httpx client - options = AsyncClientOptions(httpx_client=http_client) - - client = await create_async_client(url, key, options) - - assert client.postgrest.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.auth._http_client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.storage.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.functions._client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.postgrest.session.timeout == Timeout(2.0) - assert client.auth._http_client.timeout == Timeout(2.0) - assert client.storage.session.timeout == Timeout(2.0) - assert client.functions._client.timeout == Timeout(2.0) - - -async def test_custom_headers() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +async def test_custom_headers(create_async_client: AsyncClientCallable) -> None: options = AsyncClientOptions( headers={ "x-app-name": "apple", @@ -214,16 +140,14 @@ async def test_custom_headers() -> None: } ) - client = await create_async_client(url, key, options) - - assert client.options.headers.get("x-app-name") == "apple" - assert client.options.headers.get("x-version") == "1.0" + async with create_async_client(url, key, options) as client: + assert 
client.options.headers.get("x-app-name") == "apple" + assert client.options.headers.get("x-version") == "1.0" -async def test_custom_headers_immutable() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - +async def test_custom_headers_immutable( + create_async_client: AsyncClientCallable, +) -> None: options = AsyncClientOptions( headers={ "x-app-name": "apple", @@ -231,54 +155,51 @@ async def test_custom_headers_immutable() -> None: } ) - client1 = await create_async_client(url, key, options) - client2 = await create_async_client(url, key, options) - - client1.options.headers["x-app-name"] = "grapes" + async with ( + create_async_client(url, key, options) as client1, + create_async_client(url, key, options) as client2, + ): + client1.options.headers["x-app-name"] = "grapes" - assert client1.options.headers.get("x-app-name") == "grapes" - assert client1.options.headers.get("x-version") == "1.0" - assert client2.options.headers.get("x-app-name") == "apple" + assert client1.options.headers.get("x-app-name") == "grapes" + assert client1.options.headers.get("x-version") == "1.0" + assert client2.options.headers.get("x-app-name") == "apple" -async def test_httpx_client_base_url_isolation() -> None: +async def test_httpx_client_base_url_isolation( + create_async_client: AsyncClientCallable, +) -> None: """Test that shared httpx_client doesn't cause base_url mutation between services. This test reproduces the issue where accessing PostgREST after Storage causes Storage requests to hit the wrong endpoint (404 errors). 
See: https://github.com/supabase/supabase-py/issues/1244 """ - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - # Create client with shared httpx instance - timeout = Timeout(10.0, read=60.0) - httpx_client = AsyncHttpxClient(timeout=timeout) - options = AsyncClientOptions(httpx_client=httpx_client) - client = await create_async_client(url, key, options) - - # Access storage and capture its base_url - storage = client.storage - storage_base_url = str(storage._base_url).rstrip("/") - assert storage_base_url.endswith("/storage/v1"), ( - f"Expected storage base_url to end with '/storage/v1', got {storage_base_url}" - ) - - # Access postgrest (this should NOT mutate storage's base_url) - postgrest = client.postgrest - postgrest_base_url = str(postgrest.base_url).rstrip("/") - assert postgrest_base_url.endswith("/rest/v1"), ( - f"Expected postgrest base_url to end with '/rest/v1', got {postgrest_base_url}" - ) - - # Verify storage still has the correct base_url - storage_base_url_after = str(storage._base_url).rstrip("/") - assert storage_base_url_after.endswith("/storage/v1"), ( - f"Storage base_url was mutated! 
Expected '/storage/v1', got {storage_base_url_after}" - ) - - assert str(storage._base_url).rstrip("/").endswith("/storage/v1"), ( - "Storage base_url was mutated after accessing functions" - ) - assert str(postgrest.base_url).rstrip("/").endswith("/rest/v1"), ( - "PostgREST base_url was mutated after accessing functions" - ) + options = AsyncClientOptions() + async with create_async_client(url, key, options) as client: + # Access storage and capture its base_url + storage = client.storage + storage_base_url = str(storage.base_url).rstrip("/") + assert storage_base_url.endswith("/storage/v1"), ( + f"Expected storage base_url to end with '/storage/v1', got {storage_base_url}" + ) + + # Access postgrest (this should NOT mutate storage's base_url) + postgrest = client.postgrest + postgrest_base_url = str(postgrest.base_url).rstrip("/") + assert postgrest_base_url.endswith("/rest/v1"), ( + f"Expected postgrest base_url to end with '/rest/v1', got {postgrest_base_url}" + ) + + # Verify storage still has the correct base_url + storage_base_url_after = str(storage.base_url).rstrip("/") + assert storage_base_url_after.endswith("/storage/v1"), ( + f"Storage base_url was mutated! 
Expected '/storage/v1', got {storage_base_url_after}" + ) + + assert str(storage.base_url).rstrip("/").endswith("/storage/v1"), ( + "Storage base_url was mutated after accessing functions" + ) + assert str(postgrest.base_url).rstrip("/").endswith("/rest/v1"), ( + "PostgREST base_url was mutated after accessing functions" + ) diff --git a/src/supabase/tests/_sync/conftest.py b/src/supabase/tests/_sync/conftest.py new file mode 100644 index 00000000..6a2b7973 --- /dev/null +++ b/src/supabase/tests/_sync/conftest.py @@ -0,0 +1,34 @@ +from typing import Iterable, Protocol + +import pytest + +from supabase import Client, ClientOptions +from supabase.httpx import create_client as create_httpx_client + + +def pytest_configure(config) -> None: + from dotenv import load_dotenv + + load_dotenv(dotenv_path="tests/tests.env") + + +REST_URL = "http://127.0.0.1:3000" + + +def httpx( + supabase_key: str, supabase_url: str, options: ClientOptions | None = None +) -> Client: + return create_httpx_client(supabase_key, supabase_url, options=options) + + +class SyncClientCallable(Protocol): + def __call__( + self, supabase_key: str, supabase_url: str, options: ClientOptions | None = None + ) -> Client: ... 
+ + +@pytest.fixture(params=[httpx]) +def create_client( + request: pytest.FixtureRequest, +) -> Iterable[SyncClientCallable]: + yield request.param # just immediatly yield the `create_client` function diff --git a/src/supabase/tests/_sync/test_client.py b/src/supabase/tests/_sync/test_client.py index a490d67d..f7e111a1 100644 --- a/src/supabase/tests/_sync/test_client.py +++ b/src/supabase/tests/_sync/test_client.py @@ -1,212 +1,132 @@ import os -from typing import Any -from unittest.mock import MagicMock, Mock +from unittest.mock import MagicMock -import pytest -from httpx import Client as SyncHttpxClient -from httpx import HTTPTransport, Limits, Timeout from supabase_auth import SyncMemoryStorage from supabase import ( - Client, ClientOptions, - SyncSupabaseException, - create_client, ) +from .conftest import SyncClientCallable -@pytest.mark.xfail( - reason="None of these values should be able to instantiate a client object" -) -@pytest.mark.parametrize("url", ["", None, "valeefgpoqwjgpj", 139, -1, {}, []]) -@pytest.mark.parametrize("key", ["", None, "valeefgpoqwjgpj", 139, -1, {}, []]) -def test_incorrect_values_dont_instantiate_client(url: Any, key: Any) -> None: - """Ensure we can't instantiate client with invalid values.""" - try: - _: Client = create_client(url, key) - except SyncSupabaseException: - pass - - -def test_supabase_exception() -> None: - try: - raise SyncSupabaseException("err") - except SyncSupabaseException: - pass - - -def test_postgrest_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - client = create_client(url, key) - assert client.table("sample") - assert client.postgrest.schema("new_schema") - - -def test_rpc_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - client = create_client(url, key) - assert client.rpc("test_fn") - +url = os.environ["SUPABASE_TEST_URL"] +key = os.environ["SUPABASE_TEST_KEY"] -def test_function_initialization() 
-> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - client = create_client(url, key) - assert client.functions +def test_postgrest_client(create_client: SyncClientCallable) -> None: + with create_client(url, key) as client: + assert client.table("sample") + assert client.postgrest.schema("new_schema") -def test_uses_key_as_authorization_header_by_default() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +def test_rpc_client(create_client: SyncClientCallable) -> None: + with create_client(url, key) as client: + assert client.rpc("test_fn") - client = create_client(url, key) - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == f"Bearer {key}" +def test_function_initialization(create_client: SyncClientCallable) -> None: + with create_client(url, key) as client: + assert client.functions - assert client.postgrest.session.headers.get("apiKey") == key - assert client.postgrest.session.headers.get("Authorization") == f"Bearer {key}" - assert client.auth._headers.get("apiKey") == key - assert client.auth._headers.get("Authorization") == f"Bearer {key}" +def test_uses_key_as_authorization_header_by_default( + create_client: SyncClientCallable, +) -> None: + with create_client(url, key) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == f"Bearer {key}" - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == f"Bearer {key}" + assert client.postgrest.default_headers.get("apiKey") == key + assert client.postgrest.default_headers.get("Authorization") == f"Bearer {key}" + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == f"Bearer {key}" -def test_schema_update() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = 
os.environ["SUPABASE_TEST_KEY"] + assert client.storage.default_headers.get("apiKey") == key + assert client.storage.default_headers.get("Authorization") == f"Bearer {key}" - client = create_client(url, key) - assert client.postgrest - assert client.schema("new_schema") +def test_schema_update(create_client: SyncClientCallable) -> None: + with create_client(url, key) as client: + assert client.postgrest + assert client.schema("new_schema") -def test_updates_the_authorization_header_on_auth_events() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - client = create_client(url, key) +def test_updates_the_authorization_header_on_auth_events( + create_client: SyncClientCallable, +) -> None: + with create_client(url, key) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == f"Bearer {key}" - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == f"Bearer {key}" + mock_session = MagicMock(access_token="secretuserjwt") - mock_session = MagicMock(access_token="secretuserjwt") - realtime_mock = Mock() - client.realtime = realtime_mock + client._listen_to_auth_events("SIGNED_IN", mock_session) - client._listen_to_auth_events("SIGNED_IN", mock_session) + updated_authorization = f"Bearer {mock_session.access_token}" - updated_authorization = f"Bearer {mock_session.access_token}" + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == updated_authorization - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == updated_authorization + assert client.postgrest.default_headers.get("apiKey") == key + assert ( + client.postgrest.default_headers.get("Authorization") + == updated_authorization + ) - assert client.postgrest.session.headers.get("apiKey") == key - assert ( - client.postgrest.session.headers.get("Authorization") 
== updated_authorization - ) - - assert client.auth._headers.get("apiKey") == key - assert client.auth._headers.get("Authorization") == updated_authorization - - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == updated_authorization + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == updated_authorization + assert client.storage.default_headers.get("apiKey") == key + assert ( + client.storage.default_headers.get("Authorization") == updated_authorization + ) -def test_supports_setting_a_global_authorization_header() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +def test_supports_setting_a_global_authorization_header( + create_client: SyncClientCallable, +) -> None: authorization = "Bearer secretuserjwt" options = ClientOptions(headers={"Authorization": authorization}) - client = create_client(url, key, options) - - assert client.options.headers.get("apiKey") == key - assert client.options.headers.get("Authorization") == authorization - - assert client.postgrest.session.headers.get("apiKey") == key - assert client.postgrest.session.headers.get("Authorization") == authorization + with create_client(url, key, options) as client: + assert client.options.headers.get("apiKey") == key + assert client.options.headers.get("Authorization") == authorization - assert client.auth._headers.get("apiKey") == key - assert client.auth._headers.get("Authorization") == authorization + assert client.postgrest.default_headers.get("apiKey") == key + assert client.postgrest.default_headers.get("Authorization") == authorization - assert client.storage.session.headers.get("apiKey") == key - assert client.storage.session.headers.get("Authorization") == authorization + assert client.auth.default_headers.get("apiKey") == key + assert client.auth.default_headers.get("Authorization") == authorization + assert 
client.storage.default_headers.get("apiKey") == key + assert client.storage.default_headers.get("Authorization") == authorization -def test_mutable_headers_issue() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +def test_mutable_headers_issue(create_client: SyncClientCallable) -> None: shared_options = ClientOptions( storage=SyncMemoryStorage(), headers={"Authorization": "Bearer initial-token"} ) - client1 = create_client(url, key, shared_options) - client2 = create_client(url, key, shared_options) + with ( + create_client(url, key, shared_options) as client1, + create_client(url, key, shared_options) as client2, + ): + client1.options.headers["Authorization"] = "Bearer modified-token" + assert client2.options.headers["Authorization"] == "Bearer initial-token" + assert client1.options.headers["Authorization"] == "Bearer modified-token" - client1.options.headers["Authorization"] = "Bearer modified-token" - - assert client2.options.headers["Authorization"] == "Bearer initial-token" - assert client1.options.headers["Authorization"] == "Bearer modified-token" - - -def test_global_authorization_header_issue() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +def test_global_authorization_header_issue(create_client: SyncClientCallable) -> None: authorization = "Bearer secretuserjwt" options = ClientOptions(headers={"Authorization": authorization}) - client = create_client(url, key, options) - - assert client.options.headers.get("apiKey") == key - + with create_client(url, key, options) as client: + assert client.options.headers.get("apiKey") == key -def test_httpx_client() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - - transport = HTTPTransport( - retries=10, - verify=False, - limits=Limits( - max_connections=1, - ), - ) - - headers = {"x-user-agent": "my-app/0.0.1"} - with SyncHttpxClient( - transport=transport, headers=headers, 
timeout=Timeout(2.0) - ) as http_client: - # Create a client with the custom httpx client - options = ClientOptions(httpx_client=http_client) - - client = create_client(url, key, options) - - assert client.postgrest.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.auth._http_client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.storage.session.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.functions._client.headers.get("x-user-agent") == "my-app/0.0.1" - assert client.postgrest.session.timeout == Timeout(2.0) - assert client.auth._http_client.timeout == Timeout(2.0) - assert client.storage.session.timeout == Timeout(2.0) - assert client.functions._client.timeout == Timeout(2.0) - - -def test_custom_headers() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] +def test_custom_headers(create_client: SyncClientCallable) -> None: options = ClientOptions( headers={ "x-app-name": "apple", @@ -214,16 +134,12 @@ def test_custom_headers() -> None: } ) - client = create_client(url, key, options) - - assert client.options.headers.get("x-app-name") == "apple" - assert client.options.headers.get("x-version") == "1.0" + with create_client(url, key, options) as client: + assert client.options.headers.get("x-app-name") == "apple" + assert client.options.headers.get("x-version") == "1.0" -def test_custom_headers_immutable() -> None: - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - +def test_custom_headers_immutable(create_client: SyncClientCallable) -> None: options = ClientOptions( headers={ "x-app-name": "apple", @@ -231,54 +147,49 @@ def test_custom_headers_immutable() -> None: } ) - client1 = create_client(url, key, options) - client2 = create_client(url, key, options) - - client1.options.headers["x-app-name"] = "grapes" + with ( + create_client(url, key, options) as client1, + create_client(url, key, options) as client2, + ): + 
client1.options.headers["x-app-name"] = "grapes" - assert client1.options.headers.get("x-app-name") == "grapes" - assert client1.options.headers.get("x-version") == "1.0" - assert client2.options.headers.get("x-app-name") == "apple" + assert client1.options.headers.get("x-app-name") == "grapes" + assert client1.options.headers.get("x-version") == "1.0" + assert client2.options.headers.get("x-app-name") == "apple" -def test_httpx_client_base_url_isolation() -> None: +def test_httpx_client_base_url_isolation(create_client: SyncClientCallable) -> None: """Test that shared httpx_client doesn't cause base_url mutation between services. This test reproduces the issue where accessing PostgREST after Storage causes Storage requests to hit the wrong endpoint (404 errors). See: https://github.com/supabase/supabase-py/issues/1244 """ - url = os.environ["SUPABASE_TEST_URL"] - key = os.environ["SUPABASE_TEST_KEY"] - # Create client with shared httpx instance - timeout = Timeout(10.0, read=60.0) - httpx_client = SyncHttpxClient(timeout=timeout) - options = ClientOptions(httpx_client=httpx_client) - client = create_client(url, key, options) - - # Access storage and capture its base_url - storage = client.storage - storage_base_url = str(storage._base_url).rstrip("/") - assert storage_base_url.endswith("/storage/v1"), ( - f"Expected storage base_url to end with '/storage/v1', got {storage_base_url}" - ) - - # Access postgrest (this should NOT mutate storage's base_url) - postgrest = client.postgrest - postgrest_base_url = str(postgrest.base_url).rstrip("/") - assert postgrest_base_url.endswith("/rest/v1"), ( - f"Expected postgrest base_url to end with '/rest/v1', got {postgrest_base_url}" - ) - - # Verify storage still has the correct base_url - storage_base_url_after = str(storage._base_url).rstrip("/") - assert storage_base_url_after.endswith("/storage/v1"), ( - f"Storage base_url was mutated! 
Expected '/storage/v1', got {storage_base_url_after}" - ) - - assert str(storage._base_url).rstrip("/").endswith("/storage/v1"), ( - "Storage base_url was mutated after accessing functions" - ) - assert str(postgrest.base_url).rstrip("/").endswith("/rest/v1"), ( - "PostgREST base_url was mutated after accessing functions" - ) + options = ClientOptions() + with create_client(url, key, options) as client: + # Access storage and capture its base_url + storage = client.storage + storage_base_url = str(storage.base_url).rstrip("/") + assert storage_base_url.endswith("/storage/v1"), ( + f"Expected storage base_url to end with '/storage/v1', got {storage_base_url}" + ) + + # Access postgrest (this should NOT mutate storage's base_url) + postgrest = client.postgrest + postgrest_base_url = str(postgrest.base_url).rstrip("/") + assert postgrest_base_url.endswith("/rest/v1"), ( + f"Expected postgrest base_url to end with '/rest/v1', got {postgrest_base_url}" + ) + + # Verify storage still has the correct base_url + storage_base_url_after = str(storage.base_url).rstrip("/") + assert storage_base_url_after.endswith("/storage/v1"), ( + f"Storage base_url was mutated! 
Expected '/storage/v1', got {storage_base_url_after}" + ) + + assert str(storage.base_url).rstrip("/").endswith("/storage/v1"), ( + "Storage base_url was mutated after accessing functions" + ) + assert str(postgrest.base_url).rstrip("/").endswith("/rest/v1"), ( + "PostgREST base_url was mutated after accessing functions" + ) diff --git a/src/supabase/tests/conftest.py b/src/supabase/tests/conftest.py deleted file mode 100644 index 0f616ade..00000000 --- a/src/supabase/tests/conftest.py +++ /dev/null @@ -1,21 +0,0 @@ -from __future__ import annotations - -import os - -import pytest -from dotenv import load_dotenv - -from supabase import Client, create_client - - -def pytest_configure(config) -> None: - load_dotenv(dotenv_path="tests/tests.env") - - -@pytest.fixture(scope="session") -def supabase() -> Client: - url = os.environ.get("SUPABASE_TEST_URL") - assert url is not None, "Must provide SUPABASE_TEST_URL environment variable" - key = os.environ.get("SUPABASE_TEST_KEY") - assert key is not None, "Must provide SUPABASE_TEST_KEY environment variable" - return create_client(url, key) diff --git a/src/supabase/tests/test_function_configuration.py b/src/supabase/tests/test_function_configuration.py deleted file mode 100644 index 856ebd97..00000000 --- a/src/supabase/tests/test_function_configuration.py +++ /dev/null @@ -1,14 +0,0 @@ -import supabase - - -def test_functions_client_initialization() -> None: - ref = "ooqqmozurnggtljmjkii" - url = f"https://{ref}.supabase.co" - # Sample JWT Key - key = "xxxxxxxxxxxxxx.xxxxxxxxxxxxxxx.xxxxxxxxxxxxxxx" - sp = supabase.Client(url, key) - assert str(sp.functions_url) == f"https://{ref}.supabase.co/functions/v1" - - url = "https://localhost:54322" - sp_local = supabase.Client(url, key) - assert str(sp_local.functions_url) == f"{url}/functions/v1" diff --git a/src/supabase/tests/test_realtime.py b/src/supabase/tests/test_realtime.py deleted file mode 100644 index c377189f..00000000 --- a/src/supabase/tests/test_realtime.py +++ 
/dev/null @@ -1,14 +0,0 @@ -import supabase - - -def test_realtime_client_initialization() -> None: - ref = "ooqqmozurnggtljmjkii" - url = f"https://{ref}.supabase.co" - # Sample JWT Key - key = "xxxxxxxxxxxxxx.xxxxxxxxxxxxxxx.xxxxxxxxxxxxxxx" - sp = supabase.Client(url, key) - assert str(sp.realtime_url) == f"wss://{ref}.supabase.co/realtime/v1" - - url = "http://localhost:54322" - sp_local = supabase.Client(url, key) - assert str(sp_local.realtime_url) == "ws://localhost:54322/realtime/v1" diff --git a/src/utils/Makefile b/src/utils/Makefile new file mode 100644 index 00000000..6782e7a3 --- /dev/null +++ b/src/utils/Makefile @@ -0,0 +1,23 @@ +help:: + @echo "Available commands" + @echo " help -- (default) print this message" + +tests: mypy +help:: + @echo " tests -- run all tests for supabase_utils package" + +mypy: + uv run --package supabase_utils mypy src/supabase_utils +help:: + @echo " mypy -- run mypy on supabase_utils package" + +clean: + rm -rf htmlcov .pytest_cache .mypy_cache .ruff_cache + rm -f .coverage coverage.xml +help:: + @echo " clean -- clean intermediary files" + +build: + uv build --package supabase_utils +help:: + @echo " build -- invoke uv build on supabase_utils package" diff --git a/src/utils/README.md b/src/utils/README.md new file mode 100644 index 00000000..e69de29b diff --git a/src/utils/pyproject.toml b/src/utils/pyproject.toml new file mode 100644 index 00000000..16950497 --- /dev/null +++ b/src/utils/pyproject.toml @@ -0,0 +1,52 @@ +[project] +name = "supabase_utils" +version = "3.0.0a1" # {x-release-please-version} +authors = [ + { name = "Leonardo Santiago", email="leonardo.santiago@supabase.io" } +] +maintainers = [ + { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" } +] +classifiers = [ + "Programming Language :: Python :: 3", + "Operating System :: OS Independent" +] +requires-python = ">=3.10" +description = "Common collection of methods used in the supabase python libraries." 
+readme = "README.md" +dependencies = [ + "pydantic>=2.12.2", + "pyrsistent>=0.20.0", + "typing-extensions>=4.15.0", + "yarl>=1.22.0", +] + +[project.optional-dependencies] +httpx = ["httpx[http2] >=0.26,<0.29"] +aiohttp = ["aiohttp >= 3"] +all = [ + "supabase_utils[httpx]", + "supabase_utils[aiohttp]" +] + +[project.urls] +homepage = "https://github.com/supabase/supabase-py" +repository = "https://github.com/supabase/supabase-py" +documentation = "https://github.com/supabase/supabase-py/tree/main/src/utils" +changelog = "https://github.com/supabase/supabase-py/tree/main/CHANGELOG.md" + +[tool.mypy] +check_untyped_defs = true +allow_redefinition = true +follow_untyped_imports = true # for deprecation module that does not have stubs + +no_warn_no_return = true +warn_return_any = true +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true +strict = true + +[build-system] +requires = ["uv_build>=0.8.3,<0.9.0"] +build-backend = "uv_build" diff --git a/src/utils/src/supabase_utils/__init__.py b/src/utils/src/supabase_utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/utils/src/supabase_utils/http/__init__.py b/src/utils/src/supabase_utils/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/utils/src/supabase_utils/http/adapters/__init__.py b/src/utils/src/supabase_utils/http/adapters/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/utils/src/supabase_utils/http/adapters/aiohttp.py b/src/utils/src/supabase_utils/http/adapters/aiohttp.py new file mode 100644 index 00000000..7979448f --- /dev/null +++ b/src/utils/src/supabase_utils/http/adapters/aiohttp.py @@ -0,0 +1,41 @@ +from types import TracebackType + +from aiohttp import ClientResponse as AioResponse +from aiohttp import ClientSession + +from ..headers import Headers +from ..request import Request, Response + + +async def to_supabase_response(req: Request, resp: AioResponse) -> Response: + return 
Response( + status=resp.status, + content=await resp.read(), + headers=Headers.from_mapping(resp.headers), + request=req, + ) + + +class AsyncAiohttpSession: + def __init__(self, client: ClientSession) -> None: + self.client = client + + async def send(self, request: Request) -> Response: + response = await self.client.request( + method=request.method, + url=str(request.url), + data=request.content, + headers=dict(request.headers), + ) + return await to_supabase_response(request, response) + + async def __aenter__(self) -> "AsyncAiohttpSession": + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.client.__aexit__(exc_type, exc, tb) diff --git a/src/utils/src/supabase_utils/http/adapters/httpx.py b/src/utils/src/supabase_utils/http/adapters/httpx.py new file mode 100644 index 00000000..f4b8a0da --- /dev/null +++ b/src/utils/src/supabase_utils/http/adapters/httpx.py @@ -0,0 +1,66 @@ +from types import TracebackType + +from httpx import AsyncClient, Client +from httpx import Request as HttpxRequest +from httpx import Response as HttpxResponse + +from ..headers import Headers +from ..request import Request, Response + + +def to_httpx_request(req: Request) -> HttpxRequest: + return HttpxRequest( + method=req.method, + url=str(req.url), + headers=req.headers.iter_items(), + content=req.content, + ) + + +def to_supabase_response(req: Request, resp: HttpxResponse) -> Response: + return Response( + status=resp.status_code, + content=resp.content, + headers=Headers.from_mapping(resp.headers), + request=req, + ) + + +class HttpxSession: + def __init__(self, client: Client) -> None: + self.client = client + + def send(self, request: Request) -> Response: + response = self.client.send(to_httpx_request(request)) + return to_supabase_response(request, response) + + def __enter__(self) -> "HttpxSession": + return self + + def __exit__( + self, + exc_type: type[Exception] | 
None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + self.client.close() + + +class AsyncHttpxSession: + def __init__(self, client: AsyncClient) -> None: + self.client = client + + async def send(self, request: Request) -> Response: + response = await self.client.send(to_httpx_request(request)) + return to_supabase_response(request, response) + + async def __aenter__(self) -> "AsyncHttpxSession": + return self + + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: + await self.client.aclose() diff --git a/src/utils/src/supabase_utils/http/headers.py b/src/utils/src/supabase_utils/http/headers.py new file mode 100644 index 00000000..7818689a --- /dev/null +++ b/src/utils/src/supabase_utils/http/headers.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from typing import Iterator, KeysView, Mapping + +from pyrsistent import PMap, PVector +from pyrsistent import m as Map +from pyrsistent import v as Vec + + +class Headers: + def __init__(self, pmap: PMap[str, PVector[str]]) -> None: + self._map = pmap + + @staticmethod + def empty() -> Headers: + return Headers(pmap=Map()) + + @staticmethod + def from_mapping(mapping: Mapping[str, str]) -> Headers: + map: PMap[str, PVector[str]] = Map() + for key, val in mapping.items(): + map = map.set(key.lower(), Vec(val)) + return Headers(pmap=map) + + def set(self, key: str, val: str) -> Headers: + key = key.lower() + existing: PVector[str] = self._map.get(key, Vec()) + new_val = existing.append(val) + return Headers(pmap=self._map.set(key, new_val)) + + def override(self, key: str, val: str) -> Headers: + return Headers(pmap=self._map.set(key.lower(), Vec(val))) + + def get(self, key: str) -> str | None: + if vals := self._map.get(key.lower(), None): + return ", ".join(vals) + return None + + def get_list(self, key: str) -> list[str] | None: + if val := self._map.get(key, None): + return list(val) + return None + + def 
__getitem__(self, key: str) -> str: + if val := self.get(key): + return val + raise KeyError(f"'{key}' not found.") + + def __len__(self) -> int: + return len(self._map) + + def __contains__(self, key: str) -> bool: + return key.lower() in self._map + + def keys(self) -> KeysView[str]: + return self._map.keys() + + def __iter__(self) -> Iterator[str]: + return iter(self.keys()) + + def update(self, other: Headers) -> Headers: + new = self._map.update(other._map) + return Headers(new) + + def iter_items(self) -> list[tuple[str, str]]: + return [(k, v) for k, vals in self._map.items() for v in vals] + + def __str__(self) -> str: + fields = ", ".join(f'"{k}"="{self.get(k)}"' for k in self._map) + return f"Headers({fields})" + + def __repr__(self) -> str: + return str(self) diff --git a/src/utils/src/supabase_utils/http/io.py b/src/utils/src/supabase_utils/http/io.py new file mode 100644 index 00000000..93f53f25 --- /dev/null +++ b/src/utils/src/supabase_utils/http/io.py @@ -0,0 +1,139 @@ +from dataclasses import dataclass +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Generator, + Generic, + Protocol, + TypeAlias, + TypeVar, + overload, +) + +from typing_extensions import Concatenate, ParamSpec, Self +from yarl import URL + +from .headers import Headers +from .request import Request, Response, ToRequest + +T = TypeVar("T", covariant=True) + +Success = TypeVar("Success", covariant=True) + + +HttpMethod: TypeAlias = Generator[ToRequest, Response, Success] + + +@dataclass +class LoopReturnValue(Generic[Success]): + iterable: HttpMethod[Success] + + def __iter__(self) -> HttpMethod[Success]: + self.return_value: Success = yield from self.iterable + return self.return_value + + +class HttpSession(Protocol): + def send(self, request: Request) -> Response: ... + def __enter__(self) -> Self: ... + def __exit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: ... 
+ + +class AsyncHttpSession(Protocol): + async def send(self, request: Request) -> Response: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, + exc_type: type[Exception] | None, + exc: Exception | None, + tb: TracebackType | None, + ) -> None: ... + + +class SyncHttpIO: + def __init__(self, session: HttpSession) -> None: + self.session = session + + def communicate( + self, + base_url: URL, + default_headers: Headers, + http_iterator: HttpMethod[Success], + ) -> Success: + return_value_iterator = LoopReturnValue(http_iterator) + iterator = iter(return_value_iterator) + try: + http_request = next(iterator) + while True: + response = self.session.send( + http_request.finalize(base_url, default_headers) + ) + http_request = iterator.send(response) + except StopIteration: + return return_value_iterator.return_value + + +class AsyncHttpIO: + def __init__(self, session: AsyncHttpSession) -> None: + self.session = session + + async def communicate( + self, + base_url: URL, + default_headers: Headers, + http_iterator: HttpMethod[Success], + ) -> Success: + return_value_iterator = LoopReturnValue(http_iterator) + iterator = iter(return_value_iterator) + try: + http_request = next(iterator) + while True: + response = await self.session.send( + http_request.finalize(base_url, default_headers) + ) + http_request = iterator.send(response) + except StopIteration: + return return_value_iterator.return_value + + +Params = ParamSpec("Params") +HttpIO = TypeVar("HttpIO", SyncHttpIO, AsyncHttpIO) + + +class HasExecutor(Protocol[HttpIO]): + executor: HttpIO + base_url: URL + default_headers: Headers + + +@dataclass +class handle_http_io(Generic[Params, Success]): + method: Callable[Concatenate[Any, Params], HttpMethod[Success]] + + @overload + def __get__( + self, obj: HasExecutor[SyncHttpIO], objtype: type | None = None + ) -> Callable[Params, Success]: ... 
+ + @overload + def __get__( + self, obj: HasExecutor[AsyncHttpIO], objtype: type | None = None + ) -> Callable[Params, Awaitable[Success]]: ... + + def __get__( + self, obj: HasExecutor[HttpIO], objtype: type | None = None + ) -> Callable[Params, Success | Awaitable[Success]]: + def bound_method( + *args: Params.args, **kwargs: Params.kwargs + ) -> Success | Awaitable[Success]: + iterator = self.method(obj, *args, **kwargs) + return obj.executor.communicate(obj.base_url, obj.default_headers, iterator) + + return bound_method diff --git a/src/utils/src/supabase_utils/http/query.py b/src/utils/src/supabase_utils/http/query.py new file mode 100644 index 00000000..5bcbb060 --- /dev/null +++ b/src/utils/src/supabase_utils/http/query.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from typing import Mapping + +from pyrsistent import PMap, PVector +from pyrsistent import m as Map +from pyrsistent import v as Vec +from yarl import Query + +QueryValue = str | int | float | bool + + +class URLQuery: + def __init__(self, pmap: PMap[str, PVector[QueryValue]]) -> None: + self._map = pmap + + @staticmethod + def empty() -> URLQuery: + return URLQuery(pmap=Map()) + + @staticmethod + def from_mapping(mapping: Mapping[str, QueryValue]) -> URLQuery: + map: PMap[str, PVector[QueryValue]] = Map() + for key, val in mapping.items(): + map = map.set(key, Vec(val)) + return URLQuery(pmap=map) + + def set(self, key: str, val: QueryValue) -> URLQuery: + existing: PVector[QueryValue] = self._map.get(key, Vec()) + new_val = existing.append(val) + return URLQuery(pmap=self._map.set(key, new_val)) + + def get(self, key: str) -> str | None: + if val := self._map.get(key, None): + return "&".join(str(v) for v in val) + return None + + def get_list(self, key: str) -> list[QueryValue] | None: + if val := self._map.get(key, None): + return list(val) + return None + + def __contains__(self, key: str) -> bool: + return key in self._map + + def __getitem__(self, key: str) -> str: + if val 
:= self.get(key): + return val + raise KeyError(f"'{key}' not found.") + + def as_query(self) -> Query: + return {key: list(vals) for key, vals in self._map.items()} + + def merge(self, other: URLQuery) -> URLQuery: + new = self._map.update(other._map) + return URLQuery(new) + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + fields = ", ".join(f'"{k}"="{self.get_list(k)}"' for k in self._map) + return f"URLQuery({fields})" diff --git a/src/utils/src/supabase_utils/http/request.py b/src/utils/src/supabase_utils/http/request.py new file mode 100644 index 00000000..b6a27658 --- /dev/null +++ b/src/utils/src/supabase_utils/http/request.py @@ -0,0 +1,201 @@ +import os +from dataclasses import dataclass, field +from io import BytesIO +from typing import ( + List, + Literal, + Protocol, +) + +from pydantic import BaseModel +from yarl import URL + +from ..types import JSON, JSONParser +from .headers import Headers +from .query import URLQuery + +HTTPRequestMethod = Literal["GET", "POST", "PATCH", "PUT", "DELETE", "HEAD"] + + +@dataclass +class Request: + url: URL + method: HTTPRequestMethod + headers: Headers + content: bytes | None + + +@dataclass +class Response: + headers: Headers + content: bytes + status: int + request: Request + + @property + def is_success(self) -> bool: + return 200 <= self.status <= 300 + + @property + def is_error(self) -> bool: + return 400 <= self.status <= 599 + + +class ToRequest(Protocol): + def finalize(self, base_url: URL, default_headers: Headers) -> Request: ... 
+ + +@dataclass +class EmptyRequest: + path: List[str] + method: HTTPRequestMethod + headers: Headers = field(default_factory=Headers.empty, kw_only=True) + query: URLQuery = field(default_factory=URLQuery.empty, kw_only=True) + + def finalize(self, base_url: URL, default_headers: Headers) -> Request: + return Request( + method=self.method, + url=base_url.joinpath(*self.path).with_query(self.query.as_query()), + headers=default_headers.update(self.headers), + content=None, + ) + + +@dataclass +class BytesRequest(EmptyRequest): + body: bytes + + def finalize(self, base_url: URL, default_headers: Headers) -> Request: + headers = default_headers.update(self.headers).set( + "Content-Type", "application/octet-stream" + ) + return Request( + method=self.method, + url=base_url.joinpath(*self.path).with_query(self.query.as_query()), + headers=headers, + content=self.body, + ) + + +@dataclass +class JSONRequest(EmptyRequest): + body: JSON | BaseModel + exclude_none: bool = True + + def finalize(self, base_url: URL, default_headers: Headers) -> Request: + headers = default_headers.update(self.headers).set( + "Content-Type", "application/json" + ) + if isinstance(self.body, BaseModel): + content = self.body.__pydantic_serializer__.to_json( + self.body, exclude_none=self.exclude_none + ) + else: + content = JSONParser.dump_json(self.body) + return Request( + method=self.method, + url=base_url.joinpath(*self.path).with_query(self.query.as_query()), + headers=headers, + content=content, + ) + + +@dataclass +class TextRequest(EmptyRequest): + text: str + + def finalize(self, base_url: URL, default_headers: Headers) -> Request: + headers = default_headers.update(self.headers).set( + "Content-Type", "text/plain; charset=utf-8" + ) + return Request( + method=self.method, + url=base_url.joinpath(*self.path).with_query(self.query.as_query()), + headers=headers, + content=self.text.encode("utf-8"), + ) + + +@dataclass +class FileField: + name: str + data: bytes + filename: str + 
content_type: str + headers: dict[str, str] = field(default_factory=dict) + content_disposition: str | None = None + content_location: str | None = None + + def render_headers(self) -> bytes: + """ + Renders the headers for this request field. + """ + lines = [] + lines.append(f"Content-Type: {self.content_type}") + content_disposition = self.content_disposition or "form-data" + lines.append( + f'Content-Disposition: {content_disposition}; name="{self.name}"; filename="{self.filename}"' + ) + if self.content_location: + lines.append(f"Content-Location: {self.content_location}") + for header_name, header_value in self.headers.items(): + if header_value: + lines.append(f"{header_name}: {header_value}") + lines.append("\r\n") + return "\r\n".join(lines).encode("utf-8") + + +@dataclass +class DataField: + name: str + data: bytes + headers: dict[str, str] = field(default_factory=dict) + content_disposition: str | None = None + + def render_headers(self) -> bytes: + """ + Renders the headers for this request field. + """ + lines = [] + content_disposition = self.content_disposition or "form-data" + lines.append(f'Content-Disposition: {content_disposition}; name="{self.name}"') + for header_name, header_value in self.headers.items(): + if header_value: + lines.append(f"{header_name}: {header_value}") + lines.append("\r\n") + return "\r\n".join(lines).encode("utf-8") + + +class PartField(Protocol): + data: bytes + + def render_headers(self) -> bytes: ... 
+ + +def encode_multipart_formdata(fields: list[PartField]) -> tuple[bytes, str]: + body = BytesIO() + boundary = os.urandom(16).hex() + bin_boundary = boundary.encode("ascii") + for form_field in fields: + body.write(b"--%s\r\n" % (bin_boundary)) + body.write(form_field.render_headers()) + body.write(form_field.data) + body.write(b"\r\n") + body.write(b"--%s--\r\n" % (bin_boundary)) + content_type = f"multipart/form-data; boundary={boundary}" + return body.getvalue(), content_type + + +@dataclass +class MultipartFormDataRequest(EmptyRequest): + fields: list[PartField] + + def finalize(self, base_url: URL, default_headers: Headers) -> Request: + content, content_type = encode_multipart_formdata(fields=self.fields) + headers = default_headers.update(self.headers).set("Content-Type", content_type) + return Request( + method=self.method, + url=base_url.joinpath(*self.path).with_query(self.query.as_query()), + headers=headers, + content=content, + ) diff --git a/src/utils/src/supabase_utils/py.typed b/src/utils/src/supabase_utils/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/utils/src/supabase_utils/types.py b/src/utils/src/supabase_utils/types.py new file mode 100644 index 00000000..5d0de1c9 --- /dev/null +++ b/src/utils/src/supabase_utils/types.py @@ -0,0 +1,9 @@ +from typing import Mapping, Sequence + +from pydantic import TypeAdapter +from typing_extensions import TypeAliasType + +JSONSimple = None | bool | str | int | float +JSON = TypeAliasType("JSON", "JSONSimple | Sequence[JSON] | Mapping[str, JSON]") + +JSONParser: TypeAdapter[JSON] = TypeAdapter(JSON) diff --git a/uv.lock b/uv.lock index d1587758..23fbd3a0 100644 --- a/uv.lock +++ b/uv.lock @@ -1,11 +1,10 @@ version = 1 revision = 3 -requires-python = ">=3.9" +requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", + 
"python_full_version < '3.11'", ] [manifest] @@ -16,6 +15,7 @@ members = [ "supabase", "supabase-auth", "supabase-functions", + "supabase-utils", ] [[package]] @@ -157,23 +157,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" }, { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, - { url = "https://files.pythonhosted.org/packages/bf/79/446655656861d3e7e2c32bfcf160c7aa9e9dc63776a691b124dba65cdd77/aiohttp-3.13.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e", size = 741433, upload-time = "2026-01-03T17:32:26.453Z" }, - { url = "https://files.pythonhosted.org/packages/cb/49/773c4b310b5140d2fb5e79bb0bf40b7b41dad80a288ca1a8759f5f72bda9/aiohttp-3.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7", size = 497332, upload-time = "2026-01-03T17:32:28.37Z" }, - { url = "https://files.pythonhosted.org/packages/bc/31/1dcbc4b83a4e6f76a0ad883f07f21ffbfe29750c89db97381701508c9f45/aiohttp-3.13.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02", size = 492365, upload-time 
= "2026-01-03T17:32:30.234Z" }, - { url = "https://files.pythonhosted.org/packages/5a/b5/b50657496c8754482cd7964e50aaf3aa84b3db61ed45daec4c1aec5b94b4/aiohttp-3.13.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43", size = 1660440, upload-time = "2026-01-03T17:32:32.586Z" }, - { url = "https://files.pythonhosted.org/packages/2a/73/9b69e5139d89d75127569298931444ad78ea86a5befd5599780b1e9a6880/aiohttp-3.13.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6", size = 1632740, upload-time = "2026-01-03T17:32:34.793Z" }, - { url = "https://files.pythonhosted.org/packages/ef/fe/3ea9b5af694b4e3aec0d0613a806132ca744747146fca68e96bf056f61a7/aiohttp-3.13.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce", size = 1719782, upload-time = "2026-01-03T17:32:37.737Z" }, - { url = "https://files.pythonhosted.org/packages/fb/c2/46b3b06e60851cbb71efb0f79a3267279cbef7b12c58e68a1e897f269cca/aiohttp-3.13.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80", size = 1813527, upload-time = "2026-01-03T17:32:39.973Z" }, - { url = "https://files.pythonhosted.org/packages/36/23/71ceb78c769ed65fe4c697692de232b63dab399210678d2b00961ccb0619/aiohttp-3.13.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a", size = 1661268, upload-time = "2026-01-03T17:32:42.082Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/8d/86e929523d955e85ebab7c0e2b9e0cb63604cfc27dc3280e10d0063cf682/aiohttp-3.13.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6", size = 1552742, upload-time = "2026-01-03T17:32:44.622Z" }, - { url = "https://files.pythonhosted.org/packages/3a/ea/3f5987cba1bab6bd151f0d97aa60f0ce04d3c83316692a6bb6ba2fb69f92/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558", size = 1632918, upload-time = "2026-01-03T17:32:46.749Z" }, - { url = "https://files.pythonhosted.org/packages/be/2c/7e1e85121f2e31ee938cb83a8f32dfafd4908530c10fabd6d46761c12ac7/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7", size = 1644446, upload-time = "2026-01-03T17:32:49.063Z" }, - { url = "https://files.pythonhosted.org/packages/5d/35/ce6133d423ad0e8ca976a7c848f7146bca3520eea4ccf6b95e2d077c9d20/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877", size = 1689487, upload-time = "2026-01-03T17:32:51.113Z" }, - { url = "https://files.pythonhosted.org/packages/50/f7/ff7a27c15603d460fd1366b3c22054f7ae4fa9310aca40b43bde35867fcd/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3", size = 1540715, upload-time = "2026-01-03T17:32:53.38Z" }, - { url = "https://files.pythonhosted.org/packages/17/02/053f11346e5b962e6d8a1c4f8c70c29d5970a1b4b8e7894c68e12c27a57f/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704", size = 1711835, upload-time = "2026-01-03T17:32:56.088Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/71/9b9761ddf276fd6708d13720197cbac19b8d67ecfa9116777924056cfcaa/aiohttp-3.13.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f", size = 1649593, upload-time = "2026-01-03T17:32:58.181Z" }, - { url = "https://files.pythonhosted.org/packages/ae/72/5d817e9ea218acae12a5e3b9ad1178cf0c12fc3570c0b47eea2daf95f9ea/aiohttp-3.13.3-cp39-cp39-win32.whl", hash = "sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1", size = 434831, upload-time = "2026-01-03T17:33:00.577Z" }, - { url = "https://files.pythonhosted.org/packages/39/cb/22659d9bf3149b7a2927bc2769cc9c8f8f5a80eba098398e03c199a43a85/aiohttp-3.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538", size = 457697, upload-time = "2026-01-03T17:33:03.167Z" }, ] [[package]] @@ -189,27 +172,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "alabaster" -version = "0.7.16" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, 
-] - [[package]] name = "alabaster" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, @@ -246,8 +212,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "apeye-core" }, { name = "domdf-python-tools" }, - { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs" }, { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4f/6b/cc65e31843d7bfda8313a9dc0c77a21e8580b782adca53c7cb3e511fe023/apeye-1.4.1.tar.gz", hash = "sha256:14ea542fad689e3bfdbda2189a354a4908e90aee4bf84c15ab75d68453d76a36", size = 99219, upload-time = "2023-08-14T15:32:41.381Z" } @@ -291,8 +256,7 @@ name = "autodocsumm" version = "0.2.14" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/03/96/92afe8a7912b327c01f0a8b6408c9556ee13b1aba5b98d587ac7327ff32d/autodocsumm-0.2.14.tar.gz", hash = "sha256:2839a9d4facc3c4eccd306c08695540911042b46eeafcdc3203e6d0bab40bc77", size = 46357, upload-time = "2024-10-23T18:51:47.369Z" } @@ -327,13 +291,11 @@ name = "black" version = "25.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "mypy-extensions" }, { name = "packaging" }, { name = "pathspec" }, - { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs" }, { name = "pytokens" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, @@ -356,46 +318,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" }, { url = 
"https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" }, { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/0f724eb152bc9fc03029a9c903ddd77a288285042222a381050d27e64ac1/black-25.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef69351df3c84485a8beb6f7b8f9721e2009e20ef80a8d619e2d1788b7816d47", size = 1715243, upload-time = "2025-09-19T00:34:14.216Z" }, - { url = "https://files.pythonhosted.org/packages/fb/be/cb986ea2f0fabd0ee58668367724ba16c3a042842e9ebe009c139f8221c9/black-25.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c1f4cd5e93842774d9ee4ef6cd8d17790e65f44f7cdbaab5f2cf8ccf22a823", size = 1571246, upload-time = "2025-09-19T00:31:39.624Z" }, - { url = "https://files.pythonhosted.org/packages/82/ce/74cf4d66963fca33ab710e4c5817ceeff843c45649f61f41d88694c2e5db/black-25.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:154b06d618233fe468236ba1f0e40823d4eb08b26f5e9261526fde34916b9140", size = 1631265, upload-time = "2025-09-19T00:31:05.341Z" }, - { url = "https://files.pythonhosted.org/packages/ff/f3/9b11e001e84b4d1721f75e20b3c058854a748407e6fc1abe6da0aa22014f/black-25.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e593466de7b998374ea2585a471ba90553283fb9beefcfa430d84a2651ed5933", size = 1326615, upload-time = "2025-09-19T00:31:25.347Z" }, { url = 
"https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" }, ] -[[package]] -name = "cachecontrol" -version = "0.12.14" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "msgpack", marker = "python_full_version < '3.10'" }, - { name = "requests", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/43/bfdc1888b1889bdb6eaadc179f6d18a0aa34d53eb89fb5954e09bc37b7e1/CacheControl-0.12.14.tar.gz", hash = "sha256:d1087f45781c0e00616479bfd282c78504371ca71da017b49df9f5365a95feba", size = 115447, upload-time = "2023-06-06T11:02:26.088Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/72/a2/28e0ef082f7d78253aded97933e1d7b94bab3c5be366e8afd6513de4028e/CacheControl-0.12.14-py2.py3-none-any.whl", hash = "sha256:1c2939be362a70c4e5f02c6249462b3b7a24441e4f1ced5e9ef028172edf356a", size = 21252, upload-time = "2023-06-06T11:02:24.125Z" }, -] - -[package.optional-dependencies] -filecache = [ - { name = "lockfile", marker = "python_full_version < '3.10'" }, -] - [[package]] name = "cachecontrol" version = "0.14.3" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "msgpack", marker = "python_full_version >= '3.10'" }, - { name = "requests", marker = "python_full_version >= '3.10'" }, + { name = "msgpack" }, + { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/3a/0cbeb04ea57d2493f3ec5a069a117ab467f85e4a10017c6d854ddcbff104/cachecontrol-0.14.3.tar.gz", hash = 
"sha256:73e7efec4b06b20d9267b441c1f733664f989fb8688391b670ca812d70795d11", size = 28985, upload-time = "2025-04-30T16:45:06.135Z" } wheels = [ @@ -404,7 +336,7 @@ wheels = [ [package.optional-dependencies] filecache = [ - { name = "filelock", marker = "python_full_version >= '3.10'" }, + { name = "filelock" }, ] [[package]] @@ -519,18 +451,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, - { url = "https://files.pythonhosted.org/packages/c0/cc/08ed5a43f2996a16b462f64a7055c6e962803534924b9b2f1371d8c00b7b/cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf", size = 184288, upload-time = "2025-09-08T23:23:48.404Z" }, - { url = "https://files.pythonhosted.org/packages/3d/de/38d9726324e127f727b4ecc376bc85e505bfe61ef130eaf3f290c6847dd4/cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7", size = 180509, upload-time = "2025-09-08T23:23:49.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/13/c92e36358fbcc39cf0962e83223c9522154ee8630e1df7c0b3a39a8124e2/cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c", size = 208813, upload-time = "2025-09-08T23:23:51.263Z" }, - { url = "https://files.pythonhosted.org/packages/15/12/a7a79bd0df4c3bff744b2d7e52cc1b68d5e7e427b384252c42366dc1ecbc/cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165", size = 216498, upload-time = "2025-09-08T23:23:52.494Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/5c51c1c7600bdd7ed9a24a203ec255dccdd0ebf4527f7b922a0bde2fb6ed/cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534", size = 203243, upload-time = "2025-09-08T23:23:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/32/f2/81b63e288295928739d715d00952c8c6034cb6c6a516b17d37e0c8be5600/cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f", size = 203158, upload-time = "2025-09-08T23:23:55.169Z" }, - { url = "https://files.pythonhosted.org/packages/1f/74/cc4096ce66f5939042ae094e2e96f53426a979864aa1f96a621ad128be27/cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63", size = 216548, upload-time = "2025-09-08T23:23:56.506Z" }, - { url = "https://files.pythonhosted.org/packages/e8/be/f6424d1dc46b1091ffcc8964fa7c0ab0cd36839dd2761b49c90481a6ba1b/cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2", size = 218897, upload-time = "2025-09-08T23:23:57.825Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/e0/dda537c2309817edf60109e39265f24f24aa7f050767e22c98c53fe7f48b/cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65", size = 211249, upload-time = "2025-09-08T23:23:59.139Z" }, - { url = "https://files.pythonhosted.org/packages/2b/e7/7c769804eb75e4c4b35e658dba01de1640a351a9653c3d49ca89d16ccc91/cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322", size = 218041, upload-time = "2025-09-08T23:24:00.496Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d9/6218d78f920dcd7507fc16a766b5ef8f3b913cc7aa938e7fc80b9978d089/cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a", size = 172138, upload-time = "2025-09-08T23:24:01.7Z" }, - { url = "https://files.pythonhosted.org/packages/54/8f/a1e836f82d8e32a97e6b29cc8f641779181ac7363734f12df27db803ebda/cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9", size = 182794, upload-time = "2025-09-08T23:24:02.943Z" }, ] [[package]] @@ -619,51 +539,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, { url = 
"https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = "https://files.pythonhosted.org/packages/46/7c/0c4760bccf082737ca7ab84a4c2034fcc06b1f21cf3032ea98bd6feb1725/charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9", size = 209609, upload-time = "2025-10-14T04:42:10.922Z" }, - { url = "https://files.pythonhosted.org/packages/bb/a4/69719daef2f3d7f1819de60c9a6be981b8eeead7542d5ec4440f3c80e111/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d", size = 149029, upload-time = "2025-10-14T04:42:12.38Z" }, - { url = "https://files.pythonhosted.org/packages/e6/21/8d4e1d6c1e6070d3672908b8e4533a71b5b53e71d16828cc24d0efec564c/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608", size = 144580, upload-time = "2025-10-14T04:42:13.549Z" }, - { url = "https://files.pythonhosted.org/packages/a7/0a/a616d001b3f25647a9068e0b9199f697ce507ec898cacb06a0d5a1617c99/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc", size = 162340, upload-time = "2025-10-14T04:42:14.892Z" }, - { url = "https://files.pythonhosted.org/packages/85/93/060b52deb249a5450460e0585c88a904a83aec474ab8e7aba787f45e79f2/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e", size = 159619, upload-time = "2025-10-14T04:42:16.676Z" }, - { url = "https://files.pythonhosted.org/packages/dd/21/0274deb1cc0632cd587a9a0ec6b4674d9108e461cb4cd40d457adaeb0564/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1", size = 153980, upload-time = "2025-10-14T04:42:17.917Z" }, - { url = "https://files.pythonhosted.org/packages/28/2b/e3d7d982858dccc11b31906976323d790dded2017a0572f093ff982d692f/charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3", size = 152174, upload-time = "2025-10-14T04:42:19.018Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ff/4a269f8e35f1e58b2df52c131a1fa019acb7ef3f8697b7d464b07e9b492d/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6", size = 151666, upload-time = "2025-10-14T04:42:20.171Z" }, - { url = "https://files.pythonhosted.org/packages/da/c9/ec39870f0b330d58486001dd8e532c6b9a905f5765f58a6f8204926b4a93/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88", size = 145550, upload-time = "2025-10-14T04:42:21.324Z" }, - { url = "https://files.pythonhosted.org/packages/75/8f/d186ab99e40e0ed9f82f033d6e49001701c81244d01905dd4a6924191a30/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1", size = 163721, upload-time = "2025-10-14T04:42:22.46Z" }, - { url = "https://files.pythonhosted.org/packages/96/b1/6047663b9744df26a7e479ac1e77af7134b1fcf9026243bb48ee2d18810f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = 
"sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf", size = 152127, upload-time = "2025-10-14T04:42:23.712Z" }, - { url = "https://files.pythonhosted.org/packages/59/78/e5a6eac9179f24f704d1be67d08704c3c6ab9f00963963524be27c18ed87/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318", size = 161175, upload-time = "2025-10-14T04:42:24.87Z" }, - { url = "https://files.pythonhosted.org/packages/e5/43/0e626e42d54dd2f8dd6fc5e1c5ff00f05fbca17cb699bedead2cae69c62f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c", size = 155375, upload-time = "2025-10-14T04:42:27.246Z" }, - { url = "https://files.pythonhosted.org/packages/e9/91/d9615bf2e06f35e4997616ff31248c3657ed649c5ab9d35ea12fce54e380/charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505", size = 99692, upload-time = "2025-10-14T04:42:28.425Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a9/6c040053909d9d1ef4fcab45fddec083aedc9052c10078339b47c8573ea8/charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966", size = 107192, upload-time = "2025-10-14T04:42:29.482Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c6/4fa536b2c0cd3edfb7ccf8469fa0f363ea67b7213a842b90909ca33dd851/charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50", size = 100220, upload-time = "2025-10-14T04:42:30.632Z" }, { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] 
-[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, -] - [[package]] name = "click" version = "8.3.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ @@ -775,18 +659,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, { url = 
"https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/d1c25053764b4c42eb294aae92ab617d2e4f803397f9c7c8295caa77a260/coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3", size = 217978, upload-time = "2025-09-21T20:03:30.362Z" }, - { url = "https://files.pythonhosted.org/packages/52/2f/b9f9daa39b80ece0b9548bbb723381e29bc664822d9a12c2135f8922c22b/coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c", size = 218370, upload-time = "2025-09-21T20:03:32.147Z" }, - { url = "https://files.pythonhosted.org/packages/dd/6e/30d006c3b469e58449650642383dddf1c8fb63d44fdf92994bfd46570695/coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396", size = 244802, upload-time = "2025-09-21T20:03:33.919Z" }, - { url = "https://files.pythonhosted.org/packages/b0/49/8a070782ce7e6b94ff6a0b6d7c65ba6bc3091d92a92cef4cd4eb0767965c/coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40", size = 246625, upload-time = "2025-09-21T20:03:36.09Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/92/1c1c5a9e8677ce56d42b97bdaca337b2d4d9ebe703d8c174ede52dbabd5f/coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594", size = 248399, upload-time = "2025-09-21T20:03:38.342Z" }, - { url = "https://files.pythonhosted.org/packages/c0/54/b140edee7257e815de7426d5d9846b58505dffc29795fff2dfb7f8a1c5a0/coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a", size = 245142, upload-time = "2025-09-21T20:03:40.591Z" }, - { url = "https://files.pythonhosted.org/packages/e4/9e/6d6b8295940b118e8b7083b29226c71f6154f7ff41e9ca431f03de2eac0d/coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b", size = 246284, upload-time = "2025-09-21T20:03:42.355Z" }, - { url = "https://files.pythonhosted.org/packages/db/e5/5e957ca747d43dbe4d9714358375c7546cb3cb533007b6813fc20fce37ad/coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3", size = 244353, upload-time = "2025-09-21T20:03:44.218Z" }, - { url = "https://files.pythonhosted.org/packages/9a/45/540fc5cc92536a1b783b7ef99450bd55a4b3af234aae35a18a339973ce30/coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0", size = 244430, upload-time = "2025-09-21T20:03:46.065Z" }, - { url = "https://files.pythonhosted.org/packages/75/0b/8287b2e5b38c8fe15d7e3398849bb58d382aedc0864ea0fa1820e8630491/coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f", size = 245311, upload-time = "2025-09-21T20:03:48.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/1d/29724999984740f0c86d03e6420b942439bf5bd7f54d4382cae386a9d1e9/coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431", size = 220500, upload-time = "2025-09-21T20:03:50.024Z" }, - { url = "https://files.pythonhosted.org/packages/43/11/4b1e6b129943f905ca54c339f343877b55b365ae2558806c1be4f7476ed5/coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07", size = 221408, upload-time = "2025-09-21T20:03:51.803Z" }, { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, ] @@ -905,27 +777,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/56/7b/af3d0da15bed3a8665419bb3a630585756920f4ad67abfdfef26240ebcc0/docstring_to_markdown-0.17-py3-none-any.whl", hash = "sha256:fd7d5094aa83943bf5f9e1a13701866b7c452eac19765380dead666e36d3711c", size = 23479, upload-time = "2025-05-02T15:09:06.676Z" }, ] -[[package]] -name = "docutils" -version = "0.18.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/57/b1/b880503681ea1b64df05106fc7e3c4e3801736cf63deffc6fa7fc5404cf5/docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06", size = 2043249, upload-time = "2021-11-23T17:49:42.043Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/14/69b4bad34e3f250afe29a854da03acb6747711f3df06c359fa053fae4e76/docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c", size = 570050, upload-time = "2021-11-23T17:49:38.556Z" }, -] - 
[[package]] name = "docutils" version = "0.21.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, @@ -946,14 +801,14 @@ wheels = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" 
}, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] [[package]] @@ -1095,46 +950,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, - { url = "https://files.pythonhosted.org/packages/c2/59/ae5cdac87a00962122ea37bb346d41b66aec05f9ce328fa2b9e216f8967b/frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47", size = 86967, upload-time = "2025-10-06T05:37:55.607Z" }, - { url = "https://files.pythonhosted.org/packages/8a/10/17059b2db5a032fd9323c41c39e9d1f5f9d0c8f04d1e4e3e788573086e61/frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca", size = 49984, upload-time = "2025-10-06T05:37:57.049Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/de/ad9d82ca8e5fa8f0c636e64606553c79e2b859ad253030b62a21fe9986f5/frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068", size = 50240, upload-time = "2025-10-06T05:37:58.145Z" }, - { url = "https://files.pythonhosted.org/packages/4e/45/3dfb7767c2a67d123650122b62ce13c731b6c745bc14424eea67678b508c/frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95", size = 219472, upload-time = "2025-10-06T05:37:59.239Z" }, - { url = "https://files.pythonhosted.org/packages/0b/bf/5bf23d913a741b960d5c1dac7c1985d8a2a1d015772b2d18ea168b08e7ff/frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459", size = 221531, upload-time = "2025-10-06T05:38:00.521Z" }, - { url = "https://files.pythonhosted.org/packages/d0/03/27ec393f3b55860859f4b74cdc8c2a4af3dbf3533305e8eacf48a4fd9a54/frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675", size = 219211, upload-time = "2025-10-06T05:38:01.842Z" }, - { url = "https://files.pythonhosted.org/packages/3a/ad/0fd00c404fa73fe9b169429e9a972d5ed807973c40ab6b3cf9365a33d360/frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61", size = 231775, upload-time = "2025-10-06T05:38:03.384Z" }, - { url = "https://files.pythonhosted.org/packages/8a/c3/86962566154cb4d2995358bc8331bfc4ea19d07db1a96f64935a1607f2b6/frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6", size = 236631, upload-time = "2025-10-06T05:38:04.609Z" }, - { url = "https://files.pythonhosted.org/packages/ea/9e/6ffad161dbd83782d2c66dc4d378a9103b31770cb1e67febf43aea42d202/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5", size = 218632, upload-time = "2025-10-06T05:38:05.917Z" }, - { url = "https://files.pythonhosted.org/packages/58/b2/4677eee46e0a97f9b30735e6ad0bf6aba3e497986066eb68807ac85cf60f/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3", size = 235967, upload-time = "2025-10-06T05:38:07.614Z" }, - { url = "https://files.pythonhosted.org/packages/05/f3/86e75f8639c5a93745ca7addbbc9de6af56aebb930d233512b17e46f6493/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1", size = 228799, upload-time = "2025-10-06T05:38:08.845Z" }, - { url = "https://files.pythonhosted.org/packages/30/00/39aad3a7f0d98f5eb1d99a3c311215674ed87061aecee7851974b335c050/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178", size = 230566, upload-time = "2025-10-06T05:38:10.52Z" }, - { url = "https://files.pythonhosted.org/packages/0d/4d/aa144cac44568d137846ddc4d5210fb5d9719eb1d7ec6fa2728a54b5b94a/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda", size = 217715, upload-time = "2025-10-06T05:38:11.832Z" }, - { url = "https://files.pythonhosted.org/packages/64/4c/8f665921667509d25a0dd72540513bc86b356c95541686f6442a3283019f/frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087", size = 39933, upload-time = "2025-10-06T05:38:13.061Z" }, - { 
url = "https://files.pythonhosted.org/packages/79/bd/bcc926f87027fad5e59926ff12d136e1082a115025d33c032d1cd69ab377/frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a", size = 44121, upload-time = "2025-10-06T05:38:14.572Z" }, - { url = "https://files.pythonhosted.org/packages/4c/07/9c2e4eb7584af4b705237b971b89a4155a8e57599c4483a131a39256a9a0/frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103", size = 40312, upload-time = "2025-10-06T05:38:15.699Z" }, { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] -[[package]] -name = "fsspec" -version = "2025.10.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, -] - [[package]] name = "fsspec" version = "2025.12.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] sdist = { url = 
"https://files.pythonhosted.org/packages/b6/27/954057b0d1f53f086f681755207dda6de6c660ce133c829158e8e8fe7895/fsspec-2025.12.0.tar.gz", hash = "sha256:c505de011584597b1060ff778bb664c1bc022e87921b0e4f10cc9c44f9635973", size = 309748, upload-time = "2025-12-03T15:23:42.687Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/51/c7/b64cae5dba3a1b138d7123ec36bb5ccd39d39939f18454407e5468f4763f/fsspec-2025.12.0-py3-none-any.whl", hash = "sha256:8bf1fe301b7d8acfa6e8571e3b1c3d158f909666642431cc78a1b7b4dbc5ec5b", size = 201422, upload-time = "2025-12-03T15:23:41.434Z" }, @@ -1148,8 +970,7 @@ dependencies = [ { name = "accessible-pygments" }, { name = "beautifulsoup4" }, { name = "pygments" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-basic-ng" }, ] @@ -1316,15 +1137,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] -[[package]] -name = "lockfile" -version = "0.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/47/72cb04a58a35ec495f96984dddb48232b551aafb95bde614605b754fe6f7/lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799", size = 20874, upload-time = "2015-11-25T18:29:58.279Z" } -wheels = 
[ - { url = "https://files.pythonhosted.org/packages/c8/22/9460e311f340cb62d26a38c419b1381b8593b0bb6b5d1f056938b086d362/lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa", size = 13564, upload-time = "2015-11-25T18:29:51.462Z" }, -] - [[package]] name = "lsprotocol" version = "2025.0.0" @@ -1338,32 +1150,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/f0/92f2d609d6642b5f30cb50a885d2bf1483301c69d5786286500d15651ef2/lsprotocol-2025.0.0-py3-none-any.whl", hash = "sha256:f9d78f25221f2a60eaa4a96d3b4ffae011b107537facee61d3da3313880995c7", size = 76250, upload-time = "2025-06-17T21:30:19.455Z" }, ] -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "mdurl", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, -] - [[package]] name = "markdown-it-py" version = "4.0.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "mdurl", marker = "python_full_version >= '3.10'" }, + { name = "mdurl" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ @@ -1453,17 +1245,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, - { url = "https://files.pythonhosted.org/packages/56/23/0d8c13a44bde9154821586520840643467aee574d8ce79a17da539ee7fed/markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26", size = 11623, upload-time = "2025-09-27T18:37:29.296Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/07a2cb9a8045d5f3f0890a8c3bc0859d7a47bfd9a560b563899bec7b72ed/markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc", size = 12049, upload-time = "2025-09-27T18:37:30.234Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/e4/6be85eb81503f8e11b61c0b6369b6e077dcf0a74adbd9ebf6b349937b4e9/markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c", size = 21923, upload-time = "2025-09-27T18:37:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bc/4dc914ead3fe6ddaef035341fee0fc956949bbd27335b611829292b89ee2/markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42", size = 20543, upload-time = "2025-09-27T18:37:32.168Z" }, - { url = "https://files.pythonhosted.org/packages/89/6e/5fe81fbcfba4aef4093d5f856e5c774ec2057946052d18d168219b7bd9f9/markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b", size = 20585, upload-time = "2025-09-27T18:37:33.166Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/e0e5a3d3ae9c4020f696cd055f940ef86b64fe88de26f3a0308b9d3d048c/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758", size = 21387, upload-time = "2025-09-27T18:37:34.185Z" }, - { url = "https://files.pythonhosted.org/packages/c8/25/651753ef4dea08ea790f4fbb65146a9a44a014986996ca40102e237aa49a/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2", size = 20133, upload-time = "2025-09-27T18:37:35.138Z" }, - { url = "https://files.pythonhosted.org/packages/dc/0a/c3cf2b4fef5f0426e8a6d7fce3cb966a17817c568ce59d76b92a233fdbec/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d", size = 20588, upload-time = "2025-09-27T18:37:36.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/1b/a7782984844bd519ad4ffdbebbba2671ec5d0ebbeac34736c15fb86399e8/markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7", size = 14566, upload-time = "2025-09-27T18:37:37.09Z" }, - { url = "https://files.pythonhosted.org/packages/18/1f/8d9c20e1c9440e215a44be5ab64359e207fcb4f675543f1cf9a2a7f648d0/markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e", size = 15053, upload-time = "2025-09-27T18:37:38.054Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d3/fe08482b5cd995033556d45041a4f4e76e7f0521112a9c9991d40d39825f/markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8", size = 13928, upload-time = "2025-09-27T18:37:39.037Z" }, ] [[package]] @@ -1585,22 +1366,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/44/1c/ccf35892684d3a408202e296e56843743e0b4fb1629e59432ea88cdb3909/mmh3-5.2.0-cp314-cp314t-win32.whl", hash = "sha256:6d541038b3fc360ec538fc116de87462627944765a6750308118f8b509a8eec7", size = 41970, upload-time = "2025-07-29T07:43:27.666Z" }, { url = "https://files.pythonhosted.org/packages/75/b2/b9e4f1e5adb5e21eb104588fcee2cd1eaa8308255173481427d5ecc4284e/mmh3-5.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e912b19cf2378f2967d0c08e86ff4c6c360129887f678e27e4dde970d21b3f4d", size = 43063, upload-time = "2025-07-29T07:43:28.582Z" }, { url = "https://files.pythonhosted.org/packages/6a/fc/0e61d9a4e29c8679356795a40e48f647b4aad58d71bfc969f0f8f56fb912/mmh3-5.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9", size = 40455, upload-time = "2025-07-29T07:43:29.563Z" }, - { url = "https://files.pythonhosted.org/packages/f2/11/4bad09e880b648eeb55393a644c08efbd7da302fc405c8d2f6555521bb98/mmh3-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:3c6041fd9d5fb5fcac57d5c80f521a36b74aea06b8566431c63e4ffc49aced51", size = 56117, upload-time = "2025-07-29T07:43:30.955Z" }, - { url = "https://files.pythonhosted.org/packages/b2/43/97cacd1fa2994b4ec110334388e126fe000ddf041829721e2e59e46b0a7c/mmh3-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:58477cf9ef16664d1ce2b038f87d2dc96d70fe50733a34a7f07da6c9a5e3538c", size = 40634, upload-time = "2025-07-29T07:43:31.917Z" }, - { url = "https://files.pythonhosted.org/packages/e9/03/2a52e464b0e23f9838267adf75f942c5addc2c1f009a48d1ef5c331084fb/mmh3-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be7d3dca9358e01dab1bad881fb2b4e8730cec58d36dd44482bc068bfcd3bc65", size = 40075, upload-time = "2025-07-29T07:43:32.9Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/c0c00f7eb436a0adf64d8a877673ac76096bf86aca57b6a2c80786d69242/mmh3-5.2.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:931d47e08c9c8a67bf75d82f0ada8399eac18b03388818b62bfa42882d571d72", size = 95112, upload-time = "2025-07-29T07:43:33.815Z" }, - { url = "https://files.pythonhosted.org/packages/9b/f3/116cc1171bcb41a9cec10c46ee1d8bb5185d70c15848ff66d15ab7afb6fd/mmh3-5.2.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dd966df3489ec13848d6c6303429bbace94a153f43d1ae2a55115fd36fd5ca5d", size = 101006, upload-time = "2025-07-29T07:43:34.876Z" }, - { url = "https://files.pythonhosted.org/packages/41/34/b38a0c5c323666e632cc07d4fd337c4af0b300619c7b8b7a1d9a2db1ac1a/mmh3-5.2.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c677d78887244bf3095020b73c42b505b700f801c690f8eaa90ad12d3179612f", size = 103782, upload-time = "2025-07-29T07:43:35.987Z" }, - { url = "https://files.pythonhosted.org/packages/25/d6/42b5ae7219ec87f756ffafcf7471b7fd3386e352653522d155f4897e06d0/mmh3-5.2.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:63830f846797187c5d3e2dae50f0848fdc86032f5bfdc58ae352f02f857e9025", size = 110660, upload-time = "2025-07-29T07:43:37.103Z" }, - { url = "https://files.pythonhosted.org/packages/8f/55/daea1ee478328f7ed3b5422f080a3f892e02bc1542f0bc5a1be083a05758/mmh3-5.2.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c3f563e8901960e2eaa64c8e8821895818acabeb41c96f2efbb936f65dbe486c", size = 118107, upload-time = "2025-07-29T07:43:38.173Z" }, - { url = "https://files.pythonhosted.org/packages/46/f1/930d3395a0aaef49db41019e94a7b46ac35b9a64c213a620eacac34078c0/mmh3-5.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96f1e1ac44cbb42bcc406e509f70c9af42c594e72ccc7b1257f97554204445f0", size = 101448, upload-time = "2025-07-29T07:43:39.199Z" }, - { url = "https://files.pythonhosted.org/packages/cc/e4/543bf2622a1645fa560c26fe5dc2919c8c9eb2f9ac129778ce6acc9848fc/mmh3-5.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7bbb0df897944b5ec830f3ad883e32c5a7375370a521565f5fe24443bfb2c4f7", size = 96474, upload-time = "2025-07-29T07:43:41.025Z" }, - { url = "https://files.pythonhosted.org/packages/16/d8/9c552bd64c86bb03fba08d4b702efd65b09ed54c6969df0d1ec7fa8c0ae4/mmh3-5.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1fae471339ae1b9c641f19cf46dfe6ffd7f64b1fba7c4333b99fa3dd7f21ae0a", size = 110049, upload-time = "2025-07-29T07:43:42.106Z" }, - { url = "https://files.pythonhosted.org/packages/6b/47/8a012b9c4d9c9b704ffcd71cad861ef120b2bd417d081bdb3aaa9e396fe6/mmh3-5.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aa6e5d31fdc5ed9e3e95f9873508615a778fe9b523d52c17fc770a3eb39ab6e4", size = 111683, upload-time = "2025-07-29T07:43:43.228Z" }, - { url = "https://files.pythonhosted.org/packages/2c/fc/4ad1bd01976484d0568a7d18d5a8597da1e65e76ac763114573dcd09d225/mmh3-5.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:746a5ee71c6d1103d9b560fa147881b5e68fd35da56e54e03d5acefad0e7c055", size = 99883, upload-time = 
"2025-07-29T07:43:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/ed/1d/4fbd0f74c7e9c35f5f70eb77509b7a706ef76ee86957a79e228f47cf037f/mmh3-5.2.0-cp39-cp39-win32.whl", hash = "sha256:10983c10f5c77683bd845751905ba535ec47409874acc759d5ce3ff7ef34398a", size = 40790, upload-time = "2025-07-29T07:43:45.296Z" }, - { url = "https://files.pythonhosted.org/packages/a0/61/0f593606dbd3a4259301ffb61678433656dc4a2c6da022fa7a122de7ffb4/mmh3-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fdfd3fb739f4e22746e13ad7ba0c6eedf5f454b18d11249724a388868e308ee4", size = 41563, upload-time = "2025-07-29T07:43:46.599Z" }, - { url = "https://files.pythonhosted.org/packages/07/e6/ff066b72d86f0a19d3e4b6f3af073a9a328cb3cb4b068e25972866fcd517/mmh3-5.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:33576136c06b46a7046b6d83a3d75fbca7d25f84cec743f1ae156362608dc6d2", size = 39340, upload-time = "2025-07-29T07:43:47.512Z" }, ] [[package]] @@ -1671,14 +1436,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/73/85469b4aa71d25e5949fee50d3c2cf46f69cea619fe97cfe309058080f75/msgpack-1.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e", size = 81529, upload-time = "2025-10-08T09:15:46.069Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3a/7d4077e8ae720b29d2b299a9591969f0d105146960681ea6f4121e6d0f8d/msgpack-1.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844", size = 84106, upload-time = "2025-10-08T09:15:47.064Z" }, - { url = "https://files.pythonhosted.org/packages/df/c0/da451c74746ed9388dca1b4ec647c82945f4e2f8ce242c25fb7c0e12181f/msgpack-1.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23", size = 396656, upload-time = "2025-10-08T09:15:48.118Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a1/20486c29a31ec9f0f88377fdf7eb7a67f30bcb5e0f89b7550f6f16d9373b/msgpack-1.1.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7", size = 404722, upload-time = "2025-10-08T09:15:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/ad/ae/e613b0a526d54ce85447d9665c2ff8c3210a784378d50573321d43d324b8/msgpack-1.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8", size = 391838, upload-time = "2025-10-08T09:15:50.517Z" }, - { url = "https://files.pythonhosted.org/packages/49/6a/07f3e10ed4503045b882ef7bf8512d01d8a9e25056950a977bd5f50df1c2/msgpack-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833", size = 397516, upload-time = "2025-10-08T09:15:51.646Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/9b/a86828e75986c12a3809c1e5062f5eba8e0cae3dfa2bf724ed2b1bb72b4c/msgpack-1.1.2-cp39-cp39-win32.whl", hash = "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c", size = 64863, upload-time = "2025-10-08T09:15:53.118Z" }, - { url = "https://files.pythonhosted.org/packages/14/a7/b1992b4fb3da3b413f5fb78a63bad42f256c3be2352eb69273c3789c2c96/msgpack-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030", size = 71540, upload-time = "2025-10-08T09:15:55.573Z" }, ] [[package]] @@ -1816,24 +1573,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, - { url = "https://files.pythonhosted.org/packages/90/d7/4cf84257902265c4250769ac49f4eaab81c182ee9aff8bf59d2714dbb174/multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c", size = 77073, upload-time = "2025-10-06T14:51:57.386Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/51/194e999630a656e76c2965a1590d12faa5cd528170f2abaa04423e09fe8d/multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40", size = 44928, upload-time = "2025-10-06T14:51:58.791Z" }, - { url = "https://files.pythonhosted.org/packages/e5/6b/2a195373c33068c9158e0941d0b46cfcc9c1d894ca2eb137d1128081dff0/multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851", size = 44581, upload-time = "2025-10-06T14:52:00.174Z" }, - { url = "https://files.pythonhosted.org/packages/69/7b/7f4f2e644b6978bf011a5fd9a5ebb7c21de3f38523b1f7897d36a1ac1311/multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687", size = 239901, upload-time = "2025-10-06T14:52:02.416Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b5/952c72786710a031aa204a9adf7db66d7f97a2c6573889d58b9e60fe6702/multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5", size = 240534, upload-time = "2025-10-06T14:52:04.105Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ef/109fe1f2471e4c458c74242c7e4a833f2d9fc8a6813cd7ee345b0bad18f9/multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb", size = 219545, upload-time = "2025-10-06T14:52:06.208Z" }, - { url = "https://files.pythonhosted.org/packages/42/bd/327d91288114967f9fe90dc53de70aa3fec1b9073e46aa32c4828f771a87/multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6", size = 251187, upload-time = 
"2025-10-06T14:52:08.049Z" }, - { url = "https://files.pythonhosted.org/packages/f4/13/a8b078ebbaceb7819fd28cd004413c33b98f1b70d542a62e6a00b74fb09f/multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e", size = 249379, upload-time = "2025-10-06T14:52:09.831Z" }, - { url = "https://files.pythonhosted.org/packages/e3/6d/ab12e1246be4d65d1f55de1e6f6aaa9b8120eddcfdd1d290439c7833d5ce/multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e", size = 239241, upload-time = "2025-10-06T14:52:11.561Z" }, - { url = "https://files.pythonhosted.org/packages/bb/d7/079a93625208c173b8fa756396814397c0fd9fee61ef87b75a748820b86e/multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32", size = 237418, upload-time = "2025-10-06T14:52:13.671Z" }, - { url = "https://files.pythonhosted.org/packages/c9/29/03777c2212274aa9440918d604dc9d6af0e6b4558c611c32c3dcf1a13870/multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c", size = 232987, upload-time = "2025-10-06T14:52:15.708Z" }, - { url = "https://files.pythonhosted.org/packages/d9/00/11188b68d85a84e8050ee34724d6ded19ad03975caebe0c8dcb2829b37bf/multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84", size = 240985, upload-time = "2025-10-06T14:52:17.317Z" }, - { url = "https://files.pythonhosted.org/packages/df/0c/12eef6aeda21859c6cdf7d75bd5516d83be3efe3d8cc45fd1a3037f5b9dc/multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329", size = 246855, upload-time = "2025-10-06T14:52:19.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/f6/076120fd8bb3975f09228e288e08bff6b9f1bfd5166397c7ba284f622ab2/multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e", size = 241804, upload-time = "2025-10-06T14:52:21.166Z" }, - { url = "https://files.pythonhosted.org/packages/5f/51/41bb950c81437b88a93e6ddfca1d8763569ae861e638442838c4375f7497/multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4", size = 235321, upload-time = "2025-10-06T14:52:23.208Z" }, - { url = "https://files.pythonhosted.org/packages/5a/cf/5bbd31f055199d56c1f6b04bbadad3ccb24e6d5d4db75db774fc6d6674b8/multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91", size = 41435, upload-time = "2025-10-06T14:52:24.735Z" }, - { url = "https://files.pythonhosted.org/packages/af/01/547ffe9c2faec91c26965c152f3fea6cff068b6037401f61d310cc861ff4/multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f", size = 46193, upload-time = "2025-10-06T14:52:26.101Z" }, - { url = "https://files.pythonhosted.org/packages/27/77/cfa5461d1d2651d6fc24216c92b4a21d4e385a41c46e0d9f3b070675167b/multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546", size = 43118, upload-time = "2025-10-06T14:52:27.876Z" }, { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] @@ -1879,12 +1618,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, - { url = "https://files.pythonhosted.org/packages/3f/a6/490ff491d8ecddf8ab91762d4f67635040202f76a44171420bcbe38ceee5/mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b", size = 12807230, upload-time = "2025-09-19T00:09:49.471Z" }, - { url = "https://files.pythonhosted.org/packages/eb/2e/60076fc829645d167ece9e80db9e8375648d210dab44cc98beb5b322a826/mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133", size = 11895666, upload-time = "2025-09-19T00:10:53.678Z" }, - { url = "https://files.pythonhosted.org/packages/97/4a/1e2880a2a5dda4dc8d9ecd1a7e7606bc0b0e14813637eeda40c38624e037/mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6", size = 12499608, upload-time = "2025-09-19T00:09:36.204Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/81/a117f1b73a3015b076b20246b1f341c34a578ebd9662848c6b80ad5c4138/mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac", size = 13244551, upload-time = "2025-09-19T00:10:17.531Z" }, - { url = "https://files.pythonhosted.org/packages/9b/61/b9f48e1714ce87c7bf0358eb93f60663740ebb08f9ea886ffc670cea7933/mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b", size = 13491552, upload-time = "2025-09-19T00:10:13.753Z" }, - { url = "https://files.pythonhosted.org/packages/c9/66/b2c0af3b684fa80d1b27501a8bdd3d2daa467ea3992a8aa612f5ca17c2db/mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0", size = 9765635, upload-time = "2025-09-19T00:10:30.993Z" }, { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, ] @@ -1906,24 +1639,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/82/7a9d0550484a62c6da82858ee9419f3dd1ccc9aa1c26a1e43da3ecd20b0d/natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c", size = 38268, upload-time = "2023-06-20T04:17:17.522Z" }, ] -[[package]] -name = "networkx" -version = "3.2.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/c4/80/a84676339aaae2f1cfdf9f418701dd634aef9cc76f708ef55c36ff39c3ca/networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6", size = 2073928, upload-time = 
"2023-10-28T08:41:39.364Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2", size = 1647772, upload-time = "2023-10-28T08:41:36.945Z" }, -] - [[package]] name = "networkx" version = "3.4.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version == '3.10.*'", + "python_full_version < '3.11'", ] sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } wheels = [ @@ -1970,27 +1691,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] -[[package]] -name = "platformdirs" -version = "4.4.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, -] - [[package]] name = "platformdirs" version = "4.5.0" source = { 
registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, @@ -2007,13 +1711,13 @@ wheels = [ [[package]] name = "postgrest" -version = "2.28.1" +version = "3.0.0a1" source = { editable = "src/postgrest" } dependencies = [ { name = "deprecation" }, - { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, { name = "strenum", marker = "python_full_version < '3.11'" }, + { name = "supabase-utils" }, { name = "yarl" }, ] @@ -2027,12 +1731,12 @@ dev = [ { name = "python-lsp-ruff" }, { name = "python-lsp-server" }, { name = "ruff" }, + { name = "supabase-utils", extra = ["all"] }, { name = "unasync" }, ] docs = [ { name = "furo" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] lints = [ @@ -2046,15 +1750,16 @@ test = [ { name = "pytest-asyncio" }, { 
name = "pytest-cov" }, { name = "pytest-depends" }, + { name = "supabase-utils", extra = ["all"] }, { name = "unasync" }, ] [package.metadata] requires-dist = [ { name = "deprecation", specifier = ">=2.1.0" }, - { name = "httpx", extras = ["http2"], specifier = ">=0.26,<0.29" }, { name = "pydantic", specifier = ">=1.9,<3.0" }, { name = "strenum", marker = "python_full_version < '3.11'", specifier = ">=0.4.9" }, + { name = "supabase-utils", editable = "src/utils" }, { name = "yarl", specifier = ">=1.20.1" }, ] @@ -2068,6 +1773,7 @@ dev = [ { name = "python-lsp-ruff", specifier = ">=2.2.2,<3.0.0" }, { name = "python-lsp-server", specifier = ">=1.12.2,<2.0.0" }, { name = "ruff", specifier = ">=0.12.1" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, { name = "unasync", specifier = ">=0.6.0" }, ] docs = [ @@ -2085,6 +1791,7 @@ test = [ { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.2.1" }, { name = "pytest-depends", specifier = ">=1.0.1" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, { name = "unasync", specifier = ">=0.6.0" }, ] @@ -2199,21 +1906,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = 
"sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/0ebaec9003f5d619a7475165961f8e3083cf8644d704b60395df3601632d/propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff", size = 80277, upload-time = "2025-10-08T19:48:36.647Z" }, - { url = "https://files.pythonhosted.org/packages/34/58/04af97ac586b4ef6b9026c3fd36ee7798b737a832f5d3440a4280dcebd3a/propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb", size = 45865, upload-time = "2025-10-08T19:48:37.859Z" }, - { url = "https://files.pythonhosted.org/packages/7c/19/b65d98ae21384518b291d9939e24a8aeac4fdb5101b732576f8f7540e834/propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac", size = 47636, upload-time = "2025-10-08T19:48:39.038Z" }, - { url = "https://files.pythonhosted.org/packages/b3/0f/317048c6d91c356c7154dca5af019e6effeb7ee15fa6a6db327cc19e12b4/propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888", size = 201126, upload-time = "2025-10-08T19:48:40.774Z" }, - { url = "https://files.pythonhosted.org/packages/71/69/0b2a7a5a6ee83292b4b997dbd80549d8ce7d40b6397c1646c0d9495f5a85/propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc", size = 209837, upload-time = "2025-10-08T19:48:42.167Z" }, - { url = "https://files.pythonhosted.org/packages/a5/92/c699ac495a6698df6e497fc2de27af4b6ace10d8e76528357ce153722e45/propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a", size = 215578, upload-time = "2025-10-08T19:48:43.56Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ee/14de81c5eb02c0ee4f500b4e39c4e1bd0677c06e72379e6ab18923c773fc/propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88", size = 197187, upload-time = "2025-10-08T19:48:45.309Z" }, - { url = "https://files.pythonhosted.org/packages/1d/94/48dce9aaa6d8dd5a0859bad75158ec522546d4ac23f8e2f05fac469477dd/propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00", size = 193478, upload-time = "2025-10-08T19:48:47.743Z" }, - { url = "https://files.pythonhosted.org/packages/60/b5/0516b563e801e1ace212afde869a0596a0d7115eec0b12d296d75633fb29/propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0", size = 190650, upload-time = "2025-10-08T19:48:49.373Z" }, - { url = "https://files.pythonhosted.org/packages/24/89/e0f7d4a5978cd56f8cd67735f74052f257dc471ec901694e430f0d1572fe/propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e", size = 200251, upload-time = "2025-10-08T19:48:51.4Z" }, - { url = "https://files.pythonhosted.org/packages/06/7d/a1fac863d473876ed4406c914f2e14aa82d2f10dd207c9e16fc383cc5a24/propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781", size = 200919, upload-time = "2025-10-08T19:48:53.227Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4e/f86a256ff24944cf5743e4e6c6994e3526f6acfcfb55e21694c2424f758c/propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183", size = 
193211, upload-time = "2025-10-08T19:48:55.027Z" }, - { url = "https://files.pythonhosted.org/packages/6e/3f/3fbad5f4356b068f1b047d300a6ff2c66614d7030f078cd50be3fec04228/propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19", size = 38314, upload-time = "2025-10-08T19:48:56.792Z" }, - { url = "https://files.pythonhosted.org/packages/a4/45/d78d136c3a3d215677abb886785aae744da2c3005bcb99e58640c56529b1/propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f", size = 41912, upload-time = "2025-10-08T19:48:57.995Z" }, - { url = "https://files.pythonhosted.org/packages/fc/2a/b0632941f25139f4e58450b307242951f7c2717a5704977c6d5323a800af/propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938", size = 38450, upload-time = "2025-10-08T19:48:59.349Z" }, { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] @@ -2329,19 +2021,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, { url = 
"https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, - { url = "https://files.pythonhosted.org/packages/2c/36/f86d582be5fb47d4014506cd9ddd10a3979b6d0f2d237aa6ad3e7033b3ea/pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062", size = 2112444, upload-time = "2025-10-14T10:22:16.165Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e5/63c521dc2dd106ba6b5941c080617ea9db252f8a7d5625231e9d761bc28c/pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338", size = 1938218, upload-time = "2025-10-14T10:22:19.443Z" }, - { url = "https://files.pythonhosted.org/packages/30/56/c84b638a3e6e9f5a612b9f5abdad73182520423de43669d639ed4f14b011/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d", size = 1971449, upload-time = "2025-10-14T10:22:21.567Z" }, - { url = "https://files.pythonhosted.org/packages/99/c6/e974aade34fc7a0248fdfd0a373d62693502a407c596ab3470165e38183c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7", size = 2054023, upload-time = "2025-10-14T10:22:24.229Z" }, - { url = "https://files.pythonhosted.org/packages/4f/91/2507dda801f50980a38d1353c313e8f51349a42b008e63a4e45bf4620562/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166", size = 2251614, upload-time = "2025-10-14T10:22:26.498Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/ad/05d886bc96938f4d31bed24e8d3fc3496d9aea7e77bcff6e4b93127c6de7/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e", size = 2378807, upload-time = "2025-10-14T10:22:28.733Z" }, - { url = "https://files.pythonhosted.org/packages/6a/0a/d26e1bb9a80b9fc12cc30d9288193fbc9e60a799e55843804ee37bd38a9c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891", size = 2076891, upload-time = "2025-10-14T10:22:30.853Z" }, - { url = "https://files.pythonhosted.org/packages/d9/66/af014e3a294d9933ebfecf11a5d858709014bd2315fa9616195374dd82f0/pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb", size = 2192179, upload-time = "2025-10-14T10:22:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3e/79783f97024037d0ea6e1b3ebcd761463a925199e04ce2625727e9f27d06/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514", size = 2153067, upload-time = "2025-10-14T10:22:35.792Z" }, - { url = "https://files.pythonhosted.org/packages/b3/97/ea83b0f87d9e742405fb687d5682e7a26334eef2c82a2de06bfbdc305fab/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005", size = 2319048, upload-time = "2025-10-14T10:22:38.144Z" }, - { url = "https://files.pythonhosted.org/packages/64/4a/36d8c966a0b086362ac10a7ee75978ed15c5f2dfdfc02a1578d19d3802fb/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8", size = 2321830, upload-time = "2025-10-14T10:22:40.337Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/6e/d80cc4909dde5f6842861288aa1a7181e7afbfc50940c862ed2848df15bd/pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb", size = 1976706, upload-time = "2025-10-14T10:22:42.61Z" }, - { url = "https://files.pythonhosted.org/packages/29/ee/5bda8d960d4a8b24a7eeb8a856efa9c865a7a6cab714ed387b29507dc278/pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332", size = 2027640, upload-time = "2025-10-14T10:22:44.907Z" }, { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, @@ -2383,10 +2062,8 @@ version = "0.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker 
= "python_full_version >= '3.10'" }, - { name = "fsspec", version = "2025.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "fsspec", version = "2025.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, + { name = "fsspec" }, { name = "mmh3" }, { name = "pydantic" }, { name = "pyparsing" }, @@ -2414,11 +2091,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f5/2b/756a74c80db6edd82c8d3f23c3ae13e7d6620300b87ef792c2a4d3935b30/pyiceberg-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6979dd741cee263c1235595f71888c73365f2725697411027c4bd81046db3294", size = 1377048, upload-time = "2025-09-11T14:59:20.541Z" }, { url = "https://files.pythonhosted.org/packages/bb/35/9c18cb4ddc7d371db63714abb2f5e8414bc7a4d63f474644a2aea2933fe6/pyiceberg-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:13fd03ec3da6eb4d3b55ff94b647946a7749bede5d743c75b39deaad26421200", size = 1369921, upload-time = "2025-09-11T14:59:22.134Z" }, { url = "https://files.pythonhosted.org/packages/7b/b3/c012dc6b5bc3d0a84821936789c753f5c44aec619b64fbcf7f90038d172e/pyiceberg-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:33367c84bcb0a2fbbe54cbbfe062691ab93b91a2e3d319bb546ec5b9b45b6057", size = 617722, upload-time = "2025-09-11T14:59:23.67Z" }, - { url = "https://files.pythonhosted.org/packages/e7/65/8a93fe2d72a99700da6372d68c78c551e1fdb8ce441b570d724506faaf93/pyiceberg-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14cb3a5186e64f2ab37bc69cd7d1b32b25f416c87f9dadbcaa4f8e21b6c4e7b1", size = 516206, upload-time = "2025-09-11T14:59:25.224Z" }, - { url = "https://files.pythonhosted.org/packages/b4/e3/554a2130a6e137a6621fd6c064937354a8bf3406cf3f405a23a5a356f4e2/pyiceberg-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9631d892f0977fbaef0498f088cb8535cd6b933606946dcce214a9a342d9c009", size 
= 514863, upload-time = "2025-09-11T14:59:27.522Z" }, - { url = "https://files.pythonhosted.org/packages/a9/87/bdf9f0751e7501ac247fe8a49a65d68bf84e671242a1c6ec15a3337bcfc3/pyiceberg-0.10.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:79016a97ec70a11e2920791fe2620b001bc5b657380d3d3ddf9f6a48af209615", size = 692516, upload-time = "2025-09-11T14:59:29.123Z" }, - { url = "https://files.pythonhosted.org/packages/34/de/c6a601fc18bce3d570393be3842dd2174a8f0e8facb4bee93e553e2e502e/pyiceberg-0.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912128d6b70313002b5418096c444afcde3d541e53b5e6a6b4df177531ac5686", size = 691178, upload-time = "2025-09-11T14:59:30.459Z" }, - { url = "https://files.pythonhosted.org/packages/31/df/10b7d7da19efbbbbbf2f5452d8529b83853b0b82b871303702e693ef6994/pyiceberg-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0d76efec0409536bf48146961abc2b94e4001c647d348da8ea0f8ccca6504d1f", size = 515431, upload-time = "2025-09-11T14:59:31.742Z" }, ] [[package]] @@ -2529,20 +2201,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/59/b1/d47c5ec2b2580d0b94f42575be8f49907a0f4aa396fdc18660f3b5060d54/pyroaring-1.0.3-cp313-cp313-win32.whl", hash = "sha256:f758c681e63ffe74b20423695e71f0410920f41b075cee679ffb5bc2bf38440b", size = 205153, upload-time = "2025-10-09T09:07:45.496Z" }, { url = "https://files.pythonhosted.org/packages/c4/92/3600486936eebab747ae1462d231d7f87d234da24a04e82e1915c00f4427/pyroaring-1.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:428c3bb384fe4c483feb5cf7aa3aef1621fb0a5c4f3d391da67b2c4a43f08a10", size = 260349, upload-time = "2025-10-09T09:07:46.524Z" }, { url = "https://files.pythonhosted.org/packages/77/96/8dde074f1ad2a1c3d2091b22de80d1b3007824e649e06eeeebded83f4d48/pyroaring-1.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:9c0c856e8aa5606e8aed5f30201286e404fdc9093f81fefe82d2e79e67472bb2", size = 218775, upload-time = "2025-10-09T09:07:47.558Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/6c/094ec30f0ef9564ec03785b9eb85026087cdcd77dc8d6d6613735fbe7c16/pyroaring-1.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d46eb5db78b673d8d8ca83651a1cce1e15eec5a922f2951b1f61014463b72af5", size = 671124, upload-time = "2025-10-09T09:08:05.443Z" }, - { url = "https://files.pythonhosted.org/packages/78/a8/ccc2110a02c18a68202b186fd4ad688bf279dd805f17b74b8f3f76855724/pyroaring-1.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce202452de2b58bffa3eb02e27c681eefcfb54e27f8ef85b5c93ebaada50f3f3", size = 367769, upload-time = "2025-10-09T09:08:06.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/82/73e4182cc620ee66802726997a07696c5c37f38604af9ec1a2170d7d74fe/pyroaring-1.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:531b6ae56989b61742dde1b64fedc5537acc046cf04a333548322366c1bf3922", size = 311928, upload-time = "2025-10-09T09:08:07.488Z" }, - { url = "https://files.pythonhosted.org/packages/2b/aa/b11c13da5b5c61487fcc1abadc4457a12de8e7125aec956ba71486e25b0e/pyroaring-1.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3035db9459bd8635a0145b4a9e3102869d621cb0b3648051115f06d31ffd1976", size = 1849914, upload-time = "2025-10-09T09:08:08.651Z" }, - { url = "https://files.pythonhosted.org/packages/24/e5/93e89d1b8d52c840bf0b10ef0adf4099f059e51eb6d3c7496ad827192a9a/pyroaring-1.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c8fb6b0ad0e8db1b9559b2da180b103b48adddf0e4f24404269e2a3b5db268d", size = 2036736, upload-time = "2025-10-09T09:08:09.87Z" }, - { url = "https://files.pythonhosted.org/packages/6d/72/73420fc84bb71239f8d6c1be583d0e178847bd673f873a88f4734633369d/pyroaring-1.0.3-cp39-cp39-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8d5df95d9511bc83048da9348c7ab1c20f97ff4d95faf27ee1fdf2e8a96e200e", size = 1783607, upload-time = "2025-10-09T09:08:11.033Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/c0/35479042a045e84331292a44edc2ae6b8cc974b51292e9e27aa072600c17/pyroaring-1.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65d2d81e5aed7698fab23058db70fb2b65fad221090be037a0af498569109915", size = 1781211, upload-time = "2025-10-09T09:08:12.475Z" }, - { url = "https://files.pythonhosted.org/packages/8e/aa/a348d22558cc419788bd4bf5e23d3a01951c87d4e0a71e50e8d818da3c59/pyroaring-1.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e195636034a0b62ec0e5325ed2f610f39cc8955ace3f47a5bc7f484159f02341", size = 2833677, upload-time = "2025-10-09T09:08:13.626Z" }, - { url = "https://files.pythonhosted.org/packages/a3/29/ed5be0d26cedc5e8c648a1c7ff687fa677699ce1c88bc79db8ed29ddbdc3/pyroaring-1.0.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:bb7f2561e3ec26c3c869458431cbcba6b83f7e925b024460c136dbb5fadf3b31", size = 2635835, upload-time = "2025-10-09T09:08:15.28Z" }, - { url = "https://files.pythonhosted.org/packages/6e/70/f7864e5c8e8aa71bec6f9f031a817a12ec1cbdb8f0d0e56b8b8f0a7ece12/pyroaring-1.0.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8e996939de01f448eb9448d91b47ab60bff0555c2a80d5c12a8405814072cd35", size = 2964711, upload-time = "2025-10-09T09:08:16.848Z" }, - { url = "https://files.pythonhosted.org/packages/d9/dd/4addaae811223d06886ccfd3565cbd0069ec4bf94aed31ecdc1cb4c45e12/pyroaring-1.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c656d62d0cf96ede0edc4e7d392889238777bdf88b32afd5d51c3cab016c29a0", size = 3079893, upload-time = "2025-10-09T09:08:18.123Z" }, - { url = "https://files.pythonhosted.org/packages/2e/3f/099832741b09d95bff732fdecf60d32379cf875c47c35ca9411d7e857d5b/pyroaring-1.0.3-cp39-cp39-win32.whl", hash = "sha256:a7a7d14822c64841ae64e98309697e1631ebadba55ded33daa7cd16d1b487d11", size = 205188, upload-time = "2025-10-09T09:08:19.312Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/8e/1b1a183e9caec7079343fd9b52cb3f3655e92f7e2383b5a713e11a236c19/pyroaring-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:a86b88adbe0531b75f94f87279a6d4ee68e63335e29bbdab4400a05704fc2587", size = 254149, upload-time = "2025-10-09T09:08:20.29Z" }, - { url = "https://files.pythonhosted.org/packages/39/f6/5a50162e3aabfca78b9ecfb8a5fd54efe0cdb8cae4364566c76270a11ad1/pyroaring-1.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:1ed2e9c7af46052466b5fa0392fe540331474718d97b9756cefa23233bfdb3ea", size = 219700, upload-time = "2025-10-09T09:08:21.377Z" }, +] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/3a/5031723c09068e9c8c2f0bc25c3a9245f2b1d1aea8396c787a408f2b95ca/pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4", size = 103642, upload-time = "2023-10-25T21:06:56.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/19/c343b14061907b629b765444b6436b160e2bd4184d17d4804bbe6381f6be/pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce", size = 83416, upload-time = "2023-10-25T21:06:04.579Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/8342079ea331031ef9ed57edd312a9ad283bcc8adfaf268931ae356a09a6/pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f", size = 118021, upload-time = "2023-10-25T21:06:06.953Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b7/64a125c488243965b7c5118352e47c6f89df95b4ac306d31cee409153d57/pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34", size = 117747, upload-time = "2023-10-25T21:06:08.5Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fe/a5/43c67bd5f80df9e7583042398d12113263ec57f27c0607abe9d78395d18f/pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b", size = 114524, upload-time = "2023-10-25T21:06:10.728Z" }, + { url = "https://files.pythonhosted.org/packages/8a/98/b382a87e89ca839106d874f7bf78d226b3eedb26735eb6f751f1a3375f21/pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f", size = 60780, upload-time = "2023-10-25T21:06:12.14Z" }, + { url = "https://files.pythonhosted.org/packages/37/8a/23e2193f7adea6901262e3cf39c7fe18ac0c446176c0ff0e19aeb2e9681e/pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7", size = 63310, upload-time = "2023-10-25T21:06:13.598Z" }, + { url = "https://files.pythonhosted.org/packages/df/63/7544dc7d0953294882a5c587fb1b10a26e0c23d9b92281a14c2514bac1f7/pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958", size = 83481, upload-time = "2023-10-25T21:06:15.238Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a0/49249bc14d71b1bf2ffe89703acfa86f2017c25cfdabcaea532b8c8a5810/pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8", size = 120222, upload-time = "2023-10-25T21:06:17.144Z" }, + { url = "https://files.pythonhosted.org/packages/a1/94/9808e8c9271424120289b9028a657da336ad7e43da0647f62e4f6011d19b/pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a", size = 120002, upload-time = "2023-10-25T21:06:18.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/f6/9ecfb78b2fc8e2540546db0fe19df1fae0f56664a5958c21ff8861b0f8da/pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224", size = 116850, upload-time = "2023-10-25T21:06:20.424Z" }, + { url = "https://files.pythonhosted.org/packages/83/c8/e6d28bc27a0719f8eaae660357df9757d6e9ca9be2691595721de9e8adfc/pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656", size = 60775, upload-time = "2023-10-25T21:06:21.815Z" }, + { url = "https://files.pythonhosted.org/packages/98/87/c6ef52ff30388f357922d08de012abdd3dc61e09311d88967bdae23ab657/pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee", size = 63306, upload-time = "2023-10-25T21:06:22.874Z" }, + { url = "https://files.pythonhosted.org/packages/15/ee/ff2ed52032ac1ce2e7ba19e79bd5b05d152ebfb77956cf08fcd6e8d760ea/pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e", size = 83537, upload-time = "2023-10-25T21:06:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/80/f1/338d0050b24c3132bcfc79b68c3a5f54bce3d213ecef74d37e988b971d8a/pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e", size = 122615, upload-time = "2023-10-25T21:06:25.815Z" }, + { url = "https://files.pythonhosted.org/packages/07/3a/e56d6431b713518094fae6ff833a04a6f49ad0fbe25fb7c0dc7408e19d20/pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3", size = 122335, upload-time = "2023-10-25T21:06:28.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/bb/5f40a4d5e985a43b43f607250e766cdec28904682c3505eb0bd343a4b7db/pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d", size = 118510, upload-time = "2023-10-25T21:06:30.718Z" }, + { url = "https://files.pythonhosted.org/packages/1c/13/e6a22f40f5800af116c02c28e29f15c06aa41cb2036f6a64ab124647f28b/pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174", size = 60865, upload-time = "2023-10-25T21:06:32.742Z" }, + { url = "https://files.pythonhosted.org/packages/75/ef/2fa3b55023ec07c22682c957808f9a41836da4cd006b5f55ec76bf0fbfa6/pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d", size = 63239, upload-time = "2023-10-25T21:06:34.035Z" }, + { url = "https://files.pythonhosted.org/packages/23/88/0acd180010aaed4987c85700b7cc17f9505f3edb4e5873e4dc67f613e338/pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b", size = 58106, upload-time = "2023-10-25T21:06:54.387Z" }, ] [[package]] @@ -2569,7 +2254,6 @@ version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } wheels = [ @@ -2597,8 +2281,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, { name = "future-fstrings" }, - { name = "networkx", version = "3.2.1", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pytest" }, ] @@ -2675,7 +2358,6 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "black" }, { name = "docstring-to-markdown" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jedi" }, { name = "pluggy" }, { name = "python-lsp-jsonrpc" }, @@ -2697,7 +2379,7 @@ wheels = [ [[package]] name = "realtime" -version = "2.28.1" +version = "2.28.3" source = { editable = "src/realtime" } dependencies = [ { name = "pydantic" }, @@ -2796,8 +2478,7 @@ name = "rich" version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "markdown-it-py" }, { name = "pygments" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } @@ -2876,16 +2557,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/fa/3234f913fe9a6525a7b97c6dad1f51e72b917e6872e051a5e2ffd8b16fbb/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = 
"sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83", size = 137970, upload-time = "2025-09-22T19:51:09.472Z" }, { url = "https://files.pythonhosted.org/packages/ef/ec/4edbf17ac2c87fa0845dd366ef8d5852b96eb58fcd65fc1ecf5fe27b4641/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27", size = 739639, upload-time = "2025-09-22T19:51:10.566Z" }, { url = "https://files.pythonhosted.org/packages/15/18/b0e1fafe59051de9e79cdd431863b03593ecfa8341c110affad7c8121efc/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640", size = 764456, upload-time = "2025-09-22T19:51:11.736Z" }, - { url = "https://files.pythonhosted.org/packages/2a/a0/e709dc2f58054049cd154319a7d37917689785b12ec43ea2df47ea5344ef/ruamel.yaml.clib-0.2.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:18c041b28f3456ddef1f1951d4492dbebe0f8114157c1b3c981a4611c2020792", size = 270636, upload-time = "2025-09-23T14:24:17.855Z" }, - { url = "https://files.pythonhosted.org/packages/18/81/491c9e394976e10682a596f2b785ba7066db525cc17f267005ae8ca33c73/ruamel.yaml.clib-0.2.14-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:d8354515ab62f95a07deaf7f845886cc50e2f345ceab240a3d2d09a9f7d77853", size = 137954, upload-time = "2025-09-22T19:51:12.851Z" }, - { url = "https://files.pythonhosted.org/packages/ad/a5/c6d1c767e051bbc00146a93132bf199b3e6ec2c219131b9d3e19eff428f3/ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:275f938692013a3883edbd848edde6d9f26825d65c9a2eb1db8baa1adc96a05d", size = 636162, upload-time = "2025-09-22T19:51:16.823Z" }, - { url = "https://files.pythonhosted.org/packages/e3/6f/4746e2e8f60b3489b6cd6fad96a8e2aaa0cf7dd6760de3daad1a6e9f5789/ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:16a60d69f4057ad9a92f3444e2367c08490daed6428291aa16cefb445c29b0e9", size = 723934, upload-time = "2025-09-22T19:51:13.948Z" }, - { url = "https://files.pythonhosted.org/packages/26/47/5446e8cea2f6b5391fba653196f38b3f14030c1c324bd9aa67f1773d24ec/ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac5ff9425d8acb8f59ac5b96bcb7fd3d272dc92d96a7c730025928ffcc88a7a", size = 686265, upload-time = "2025-09-22T19:51:15.142Z" }, - { url = "https://files.pythonhosted.org/packages/52/d7/344d7b3010b6a01af97431bdf89056abb2d8bd704d0f3430e7b50232cce4/ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e1d1735d97fd8a48473af048739379975651fab186f8a25a9f683534e6904179", size = 693042, upload-time = "2025-09-23T18:42:53.238Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d5/a0f2cce1b6cfa9bf1921b8a19ebceafc7a9b3c27882e5af5a07ae080b1bd/ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:83bbd8354f6abb3fdfb922d1ed47ad8d1db3ea72b0523dac8d07cdacfe1c0fcf", size = 706110, upload-time = "2025-09-22T19:51:18.467Z" }, - { url = "https://files.pythonhosted.org/packages/42/cd/85b422d24ee2096eaf6faa360c95ef9bdb59097d19b9624cebce4dd9bc2a/ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:808c7190a0fe7ae7014c42f73897cf8e9ef14ff3aa533450e51b1e72ec5239ad", size = 725028, upload-time = "2025-09-22T19:51:19.782Z" }, - { url = "https://files.pythonhosted.org/packages/4d/ac/99e6e0ea2584f84f447069d0187fe411e9b5deb7e3ddecda25001cfc7a95/ruamel.yaml.clib-0.2.14-cp39-cp39-win32.whl", hash = "sha256:6d5472f63a31b042aadf5ed28dd3ef0523da49ac17f0463e10fda9c4a2773352", size = 100915, upload-time = "2025-09-22T19:51:21.764Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8d/846e43369658958c99d959bb7774136fff9210f9017d91a4277818ceafbf/ruamel.yaml.clib-0.2.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:8dd3c2cc49caa7a8d64b67146462aed6723a0495e44bf0aa0a2e94beaa8432f6", size = 118706, upload-time = "2025-09-22T19:51:20.878Z" }, { url = "https://files.pythonhosted.org/packages/e7/cd/150fdb96b8fab27fe08d8a59fe67554568727981806e6bc2677a16081ec7/ruamel_yaml_clib-0.2.14-cp314-cp314-win32.whl", hash = "sha256:9b4104bf43ca0cd4e6f738cb86326a3b2f6eef00f417bd1e7efb7bdffe74c539", size = 102394, upload-time = "2025-11-14T21:57:36.703Z" }, { url = "https://files.pythonhosted.org/packages/bd/e6/a3fa40084558c7e1dc9546385f22a93949c890a8b2e445b2ba43935f51da/ruamel_yaml_clib-0.2.14-cp314-cp314-win_amd64.whl", hash = "sha256:13997d7d354a9890ea1ec5937a219817464e5cc344805b37671562a401ca3008", size = 122673, upload-time = "2025-11-14T21:57:38.177Z" }, ] @@ -2970,63 +2641,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, ] -[[package]] -name = "sphinx" -version = "7.3.7" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "babel", marker = "python_full_version < '3.10'" }, - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "docutils", version = "0.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "imagesize", marker = "python_full_version < '3.10'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jinja2", marker = "python_full_version < '3.10'" }, - { name = "packaging", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = 
"python_full_version < '3.10'" }, - { name = "requests", marker = "python_full_version < '3.10'" }, - { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, - { name = "tomli", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b7/0a/b88033900b1582f5ed8f880263363daef968d1cd064175e32abfd9714410/sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc", size = 7094808, upload-time = "2024-04-19T04:44:48.297Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/fa/130c32ed94cf270e3d0b9ded16fb7b2c8fea86fa7263c29a696a30c1dde7/sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3", size = 3335650, upload-time = "2024-04-19T04:44:43.839Z" }, -] - [[package]] name = "sphinx" version = "8.1.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "babel", marker = "python_full_version == '3.10.*'" }, - { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, - { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "imagesize", marker = "python_full_version == 
'3.10.*'" }, - { name = "jinja2", marker = "python_full_version == '3.10.*'" }, - { name = "packaging", marker = "python_full_version == '3.10.*'" }, - { name = "pygments", marker = "python_full_version == '3.10.*'" }, - { name = "requests", marker = "python_full_version == '3.10.*'" }, - { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-applehelp", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, - { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, - { name = "tomli", marker = "python_full_version == '3.10.*'" }, + "python_full_version < '3.11'", +] +dependencies = [ + { name = "alabaster", marker = "python_full_version < '3.11'" }, + { name = "babel", marker = "python_full_version < '3.11'" }, + { name = "colorama", marker = "python_full_version < '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.11'" }, + { name = "imagesize", marker = "python_full_version < '3.11'" }, + { name = "jinja2", marker = "python_full_version < '3.11'" }, + { name = "packaging", marker = "python_full_version < '3.11'" }, + { name = "pygments", marker = "python_full_version < '3.11'" }, + { name = "requests", marker = "python_full_version < '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.11'" }, + 
{ name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } wheels = [ @@ -3042,10 +2681,10 @@ resolution-markers = [ "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ - { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "alabaster", marker = "python_full_version >= '3.11'" }, { name = "babel", marker = "python_full_version >= '3.11'" }, { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, - { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, { name = "imagesize", marker = "python_full_version >= '3.11'" }, { name = "jinja2", marker = "python_full_version >= '3.11'" }, { name = "packaging", marker = "python_full_version >= '3.11'" }, @@ -3065,30 +2704,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, ] -[[package]] -name = "sphinx-autodoc-typehints" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "sphinx", version = "7.3.7", source = { 
registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/74/cd/03e7b917230dc057922130a79ba0240df1693bfd76727ea33fae84b39138/sphinx_autodoc_typehints-2.3.0.tar.gz", hash = "sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084", size = 40709, upload-time = "2024-08-29T16:25:48.343Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/f3/e0a4ce49da4b6f4e4ce84b3c39a0677831884cb9d8a87ccbf1e9e56e53ac/sphinx_autodoc_typehints-2.3.0-py3-none-any.whl", hash = "sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67", size = 19836, upload-time = "2024-08-29T16:25:46.707Z" }, -] - [[package]] name = "sphinx-autodoc-typehints" version = "3.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version == '3.10.*'", + "python_full_version < '3.11'", ] dependencies = [ - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/26/f0/43c6a5ff3e7b08a8c3b32f81b859f1b518ccc31e45f22e2b41ced38be7b9/sphinx_autodoc_typehints-3.0.1.tar.gz", hash = "sha256:b9b40dd15dee54f6f810c924f863f9cf1c54f9f3265c495140ea01be7f44fa55", size = 36282, upload-time = "2025-01-16T18:25:30.958Z" } wheels = [ @@ -3116,8 +2740,7 @@ name = "sphinx-basic-ng" version = "1.0.0b2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736, upload-time = "2023-07-08T18:40:54.166Z" } @@ -3144,8 +2767,7 @@ name = "sphinx-press-theme" version = "0.9.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e7/25/8769ef130d57ea449309a4ee2d76eed653063b5de27d34100822e34e7e93/sphinx_press_theme-0.9.1.tar.gz", hash = "sha256:1643dee7365f7831d1d3971b389b7c255641a7aced75f0681f71574e380046cf", size = 254696, upload-time = "2024-03-23T01:39:02.384Z" } @@ -3153,40 +2775,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cf/28/fbd928e5dcde4f8acdc7b2b730685dafb35d83437cc0b955332349e12b89/sphinx_press_theme-0.9.1-py3-none-any.whl", hash = "sha256:eed3fdd8df249b67136b507dfc6a84d1a2c5feca5376960c6d4d28ada4f6cdf7", size = 83558, upload-time = "2024-03-23T01:39:00.525Z" }, ] -[[package]] -name = "sphinx-prompt" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ 
- { name = "docutils", version = "0.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/fb/7a07b8df1ca2418147a6b13e3f6b445071f2565198b45efa631d0d6ef0cd/sphinx_prompt-1.8.0.tar.gz", hash = "sha256:47482f86fcec29662fdfd23e7c04ef03582714195d01f5d565403320084372ed", size = 5121, upload-time = "2023-09-14T12:46:13.449Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/39/49/f890a2668b7cbf375f5528b549c8d36dd2e801b0fbb7b2b5ef65663ecb6c/sphinx_prompt-1.8.0-py3-none-any.whl", hash = "sha256:369ecc633f0711886f9b3a078c83264245be1adf46abeeb9b88b5519e4b51007", size = 7298, upload-time = "2023-09-14T12:46:12.373Z" }, -] - [[package]] name = "sphinx-prompt" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "certifi", marker = "python_full_version >= '3.10'" }, - { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "idna", marker = "python_full_version >= '3.10'" }, - { name = "pygments", marker = "python_full_version >= '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "certifi" }, + { name = "docutils" }, + { name = "idna" }, + { name = "pygments" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "urllib3", marker = "python_full_version >= '3.10'" }, + { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/34/fe/ac4e24f35b5148b31ac717ae7dcc7a2f7ec56eb729e22c7252ed8ad2d9a5/sphinx_prompt-1.9.0.tar.gz", hash = "sha256:471b3c6d466dce780a9b167d9541865fd4e9a80ed46e31b06a52a0529ae995a1", size = 5340, upload-time = "2024-08-07T15:46:51.428Z" } wheels = [ @@ -3198,11 +2798,9 @@ name = "sphinx-tabs" version = "3.4.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "docutils", version = "0.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "docutils" }, { name = "pygments" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/27/32/ab475e252dc2b704e82a91141fa404cdd8901a5cf34958fd22afacebfccd/sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531", size = 16070, upload-time = "2024-01-21T12:13:39.392Z" } @@ -3210,66 +2808,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/9f/4ac7dbb9f23a2ff5a10903a4f9e9f43e0ff051f63a313e989c962526e305/sphinx_tabs-3.4.5-py3-none-any.whl", hash = 
"sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09", size = 9904, upload-time = "2024-01-21T12:13:37.67Z" }, ] -[[package]] -name = "sphinx-toolbox" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "apeye", marker = "python_full_version < '3.10'" }, - { name = "autodocsumm", marker = "python_full_version < '3.10'" }, - { name = "beautifulsoup4", marker = "python_full_version < '3.10'" }, - { name = "cachecontrol", version = "0.12.14", source = { registry = "https://pypi.org/simple" }, extra = ["filecache"], marker = "python_full_version < '3.10'" }, - { name = "dict2css", marker = "python_full_version < '3.10'" }, - { name = "docutils", version = "0.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "domdf-python-tools", marker = "python_full_version < '3.10'" }, - { name = "html5lib", marker = "python_full_version < '3.10'" }, - { name = "lockfile", marker = "python_full_version < '3.10'" }, - { name = "ruamel-yaml", marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx-autodoc-typehints", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx-jinja2-compat", marker = "python_full_version < '3.10'" }, - { name = "sphinx-prompt", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx-tabs", marker = "python_full_version < '3.10'" }, - { name = "tabulate", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/f1/9c/bbf8bf24ea33e805f62362d4021e109ec86fb2e23fb4baed958a61262422/sphinx_toolbox-3.4.0.tar.gz", hash = "sha256:e1cf2a3dea5ce80e175a6a9cee8b5b2792240ecf6c28993d87a63b6fcf606293", size = 112202, upload-time = "2023-01-23T23:33:10.621Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/ba/7eb6695cf42038545be89f839de5ffd06d1de7197f0f5a58544facdb87eb/sphinx_toolbox-3.4.0-py3-none-any.whl", hash = "sha256:cdf70facee515a2d9406d568a253fa3e89f930fde23c4e8095ba0c675f7c0a48", size = 525140, upload-time = "2023-01-23T23:33:08.937Z" }, -] - [[package]] name = "sphinx-toolbox" version = "4.0.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "apeye", marker = "python_full_version >= '3.10'" }, - { name = "autodocsumm", marker = "python_full_version >= '3.10'" }, - { name = "beautifulsoup4", marker = "python_full_version >= '3.10'" }, - { name = "cachecontrol", version = "0.14.3", source = { registry = "https://pypi.org/simple" }, extra = ["filecache"], marker = "python_full_version >= '3.10'" }, - { name = "dict2css", marker = "python_full_version >= '3.10'" }, - { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "domdf-python-tools", marker = "python_full_version >= '3.10'" }, - { name = "filelock", marker = "python_full_version >= '3.10'" }, - { name = "html5lib", marker = "python_full_version >= '3.10'" }, - { name = "ruamel-yaml", marker = "python_full_version >= '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "apeye" }, + { name = "autodocsumm" }, + { name = "beautifulsoup4" }, + { name = "cachecontrol", extra = 
["filecache"] }, + { name = "dict2css" }, + { name = "docutils" }, + { name = "domdf-python-tools" }, + { name = "filelock" }, + { name = "html5lib" }, + { name = "ruamel-yaml" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx-autodoc-typehints", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "sphinx-jinja2-compat", marker = "python_full_version >= '3.10'" }, - { name = "sphinx-prompt", version = "1.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "sphinx-tabs", marker = "python_full_version >= '3.10'" }, - { name = "tabulate", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, + { name = "sphinx-jinja2-compat" }, + { name = "sphinx-prompt" }, + { name = "sphinx-tabs" }, + { name = "tabulate" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/60/d2/fd68940102a02cbff392b91317618e0f87458e98a9684c0f74b1c58d4e49/sphinx_toolbox-4.0.0.tar.gz", hash = "sha256:48c31451db2e2d8c71c03939e72a19ef7bc92ca7850a62db63fc7bb8395b6785", size = 113819, upload-time = "2025-05-12T17:11:39.104Z" } wheels = [ @@ -3341,13 +2903,14 @@ wheels = [ [[package]] name = "storage3" -version = "2.28.1" +version = "3.0.0a1" source = { editable = "src/storage" } dependencies = [ { name = "deprecation" }, - { name = 
"httpx", extra = ["http2"] }, { name = "pydantic" }, { name = "pyiceberg" }, + { name = "supabase-utils" }, + { name = "typing-extensions" }, { name = "yarl" }, ] @@ -3361,21 +2924,18 @@ dev = [ { name = "python-lsp-ruff" }, { name = "python-lsp-server" }, { name = "ruff" }, - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-press-theme" }, - { name = "sphinx-toolbox", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx-toolbox", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx-toolbox" }, + { name = "supabase-utils", extra = ["all"] }, { name = "unasync" }, ] docs = [ - { name = "sphinx", version = "7.3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-press-theme" }, - { name = "sphinx-toolbox", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx-toolbox", version = 
"4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx-toolbox" }, ] lints = [ { name = "pylsp-mypy" }, @@ -3389,14 +2949,16 @@ tests = [ { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "python-dotenv" }, + { name = "supabase-utils", extra = ["all"] }, ] [package.metadata] requires-dist = [ { name = "deprecation", specifier = ">=2.1.0" }, - { name = "httpx", extras = ["http2"], specifier = ">=0.26,<0.29" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pyiceberg", specifier = ">=0.10.0" }, + { name = "supabase-utils", editable = "src/utils" }, + { name = "typing-extensions", specifier = ">=4.15.0" }, { name = "yarl", specifier = ">=1.20.1" }, ] @@ -3413,6 +2975,7 @@ dev = [ { name = "sphinx", specifier = ">=7.1.2" }, { name = "sphinx-press-theme", specifier = ">=0.9.1" }, { name = "sphinx-toolbox", specifier = ">=3.4.0" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, { name = "unasync", specifier = ">=0.6.0" }, ] docs = [ @@ -3432,6 +2995,7 @@ tests = [ { name = "pytest-asyncio", specifier = ">=0.21.0" }, { name = "pytest-cov", specifier = ">=6.1.0" }, { name = "python-dotenv", specifier = ">=1.1.0" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, ] [[package]] @@ -3457,18 +3021,26 @@ wheels = [ [[package]] name = "supabase" -version = "2.28.1" +version = "3.0.0a1" source = { editable = "src/supabase" } dependencies = [ - { name = "httpx" }, { name = "postgrest" }, { name = "realtime" }, { name = "storage3" }, { name = "supabase-auth" }, { name = "supabase-functions" }, + { name = "supabase-utils" }, { name = "yarl" }, ] +[package.optional-dependencies] +aiohttp = [ + { name = "supabase-utils", extra = ["aiohttp"] }, +] +httpx = [ + { name = "supabase-utils", extra = ["httpx"] }, +] + [package.dev-dependencies] dev = [ { name = "mypy" }, @@ -3477,6 +3049,7 @@ dev = [ { name = "pytest-cov" }, { name = "python-dotenv" }, 
{ name = "ruff" }, + { name = "supabase-utils", extra = ["all"] }, { name = "unasync" }, ] lints = [ @@ -3489,18 +3062,22 @@ tests = [ { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "python-dotenv" }, + { name = "supabase-utils", extra = ["all"] }, ] [package.metadata] requires-dist = [ - { name = "httpx", specifier = ">=0.26,<0.29" }, { name = "postgrest", editable = "src/postgrest" }, { name = "realtime", editable = "src/realtime" }, { name = "storage3", editable = "src/storage" }, { name = "supabase-auth", editable = "src/auth" }, { name = "supabase-functions", editable = "src/functions" }, + { name = "supabase-utils", editable = "src/utils" }, + { name = "supabase-utils", extras = ["aiohttp"], marker = "extra == 'aiohttp'", editable = "src/utils" }, + { name = "supabase-utils", extras = ["httpx"], marker = "extra == 'httpx'", editable = "src/utils" }, { name = "yarl", specifier = ">=1.22.0" }, ] +provides-extras = ["httpx", "aiohttp"] [package.metadata.requires-dev] dev = [ @@ -3510,6 +3087,7 @@ dev = [ { name = "pytest-cov", specifier = ">=6.2.1" }, { name = "python-dotenv", specifier = ">=1.1.1" }, { name = "ruff", specifier = ">=0.12.1" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, { name = "unasync", specifier = ">=0.6.0" }, ] lints = [ @@ -3522,16 +3100,17 @@ tests = [ { name = "pytest-asyncio", specifier = ">=0.24,<1.1" }, { name = "pytest-cov", specifier = ">=6.2.1" }, { name = "python-dotenv", specifier = ">=1.1.1" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, ] [[package]] name = "supabase-auth" -version = "2.28.1" +version = "3.0.0a1" source = { editable = "src/auth" } dependencies = [ - { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, { name = "pyjwt", extra = ["crypto"] }, + { name = "supabase-utils" }, ] [package.dev-dependencies] @@ -3548,6 +3127,7 @@ dev = [ { name = "python-lsp-server" }, { name = "respx" }, { name = "ruff" }, + { name = "supabase-utils", 
extra = ["all"] }, { name = "unasync" }, ] lints = [ @@ -3566,13 +3146,14 @@ tests = [ { name = "pytest-depends" }, { name = "pytest-mock" }, { name = "respx" }, + { name = "supabase-utils", extra = ["all"] }, ] [package.metadata] requires-dist = [ - { name = "httpx", extras = ["http2"], specifier = ">=0.26,<0.29" }, - { name = "pydantic", specifier = ">=1.10,<3" }, + { name = "pydantic", specifier = ">=2,<3" }, { name = "pyjwt", extras = ["crypto"], specifier = ">=2.10.1" }, + { name = "supabase-utils", editable = "src/utils" }, ] [package.metadata.requires-dev] @@ -3589,6 +3170,7 @@ dev = [ { name = "python-lsp-server", specifier = ">=1.12.2,<2.0.0" }, { name = "respx", specifier = ">=0.20.2,<0.23.0" }, { name = "ruff", specifier = ">=0.12.1" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, { name = "unasync", specifier = ">=0.6.0" }, ] lints = [ @@ -3607,15 +3189,17 @@ tests = [ { name = "pytest-depends", specifier = ">=1.0.1" }, { name = "pytest-mock", specifier = ">=3.14.0" }, { name = "respx", specifier = ">=0.20.2,<0.23.0" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, ] [[package]] name = "supabase-functions" -version = "2.28.1" +version = "3.0.0a1" source = { editable = "src/functions" } dependencies = [ - { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, { name = "strenum" }, + { name = "supabase-utils" }, { name = "yarl" }, ] @@ -3629,26 +3213,27 @@ dev = [ { name = "python-lsp-ruff" }, { name = "python-lsp-server" }, { name = "ruff" }, - { name = "unasync" }, + { name = "supabase-utils", extra = ["all"] }, ] lints = [ { name = "pylsp-mypy" }, { name = "python-lsp-ruff" }, { name = "python-lsp-server" }, { name = "ruff" }, - { name = "unasync" }, ] tests = [ { name = "pyjwt" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, + { name = "supabase-utils", extra = ["all"] }, ] [package.metadata] requires-dist = [ - { name = "httpx", extras = ["http2"], specifier 
= ">=0.26,<0.29" }, + { name = "pydantic", specifier = ">=2.12.2" }, { name = "strenum", specifier = ">=0.4.15" }, + { name = "supabase-utils", editable = "src/utils" }, { name = "yarl", specifier = ">=1.20.1" }, ] @@ -3662,21 +3247,57 @@ dev = [ { name = "python-lsp-ruff", specifier = ">=2.2.2,<3.0.0" }, { name = "python-lsp-server", specifier = ">=1.12.2,<2.0.0" }, { name = "ruff", specifier = ">=0.12.1" }, - { name = "unasync", specifier = ">=0.6.0" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, ] lints = [ { name = "pylsp-mypy", specifier = ">=0.7.0,<0.8.0" }, { name = "python-lsp-ruff", specifier = ">=2.2.2,<3.0.0" }, { name = "python-lsp-server", specifier = ">=1.12.2,<2.0.0" }, { name = "ruff", specifier = ">=0.12.1" }, - { name = "unasync", specifier = ">=0.6.0" }, ] tests = [ { name = "pyjwt", specifier = ">=2.8.0" }, { name = "pytest", specifier = ">=7.4.2,<9.0.0" }, { name = "pytest-asyncio", specifier = ">=0.21.1,<1.2.0" }, { name = "pytest-cov", specifier = ">=4,<7" }, + { name = "supabase-utils", extras = ["all"], editable = "src/utils" }, +] + +[[package]] +name = "supabase-utils" +version = "3.0.0a1" +source = { editable = "src/utils" } +dependencies = [ + { name = "pydantic" }, + { name = "pyrsistent" }, + { name = "typing-extensions" }, + { name = "yarl" }, +] + +[package.optional-dependencies] +aiohttp = [ + { name = "aiohttp" }, +] +all = [ + { name = "aiohttp" }, + { name = "httpx", extra = ["http2"] }, +] +httpx = [ + { name = "httpx", extra = ["http2"] }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", marker = "extra == 'aiohttp'", specifier = ">=3" }, + { name = "httpx", extras = ["http2"], marker = "extra == 'httpx'", specifier = ">=0.26,<0.29" }, + { name = "pydantic", specifier = ">=2.12.2" }, + { name = "pyrsistent", specifier = ">=0.20.0" }, + { name = "supabase-utils", extras = ["aiohttp"], marker = "extra == 'all'", editable = "src/utils" }, + { name = "supabase-utils", extras = 
["httpx"], marker = "extra == 'all'", editable = "src/utils" }, + { name = "typing-extensions", specifier = ">=4.15.0" }, + { name = "yarl", specifier = ">=1.22.0" }, ] +provides-extras = ["httpx", "aiohttp", "all"] [[package]] name = "tabulate" @@ -3856,17 +3477,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5", size = 41859, upload-time = "2025-08-20T11:56:30.495Z" }, { url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec", size = 46183, upload-time = "2025-08-20T11:56:31.574Z" }, { url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab", size = 40264, upload-time = "2025-08-20T11:56:32.773Z" }, - { url = "https://files.pythonhosted.org/packages/39/bf/c6f59cdf74ce70bd937b97c31c42fd04a5ed1a9222d0197e77e4bd899841/ujson-5.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65f3c279f4ed4bf9131b11972040200c66ae040368abdbb21596bf1564899694", size = 55283, upload-time = "2025-08-20T11:56:33.947Z" }, - { url = "https://files.pythonhosted.org/packages/8d/c1/a52d55638c0c644b8a63059f95ad5ffcb4ad8f60d8bc3e8680f78e77cc75/ujson-5.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99c49400572cd77050894e16864a335225191fd72a818ea6423ae1a06467beac", size = 53168, upload-time = "2025-08-20T11:56:35.141Z" }, - { url = "https://files.pythonhosted.org/packages/75/6c/e64e19a01d59c8187d01ffc752ee3792a09f5edaaac2a0402de004459dd7/ujson-5.11.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:0654a2691fc252c3c525e3d034bb27b8a7546c9d3eb33cd29ce6c9feda361a6a", size = 57809, upload-time = "2025-08-20T11:56:36.293Z" }, - { url = "https://files.pythonhosted.org/packages/9f/36/910117b7a8a1c188396f6194ca7bc8fd75e376d8f7e3cf5eb6219fc8b09d/ujson-5.11.0-cp39-cp39-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:6b6ec7e7321d7fc19abdda3ad809baef935f49673951a8bab486aea975007e02", size = 59797, upload-time = "2025-08-20T11:56:37.746Z" }, - { url = "https://files.pythonhosted.org/packages/c7/17/bcc85d282ee2f4cdef5f577e0a43533eedcae29cc6405edf8c62a7a50368/ujson-5.11.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f62b9976fabbcde3ab6e413f4ec2ff017749819a0786d84d7510171109f2d53c", size = 57378, upload-time = "2025-08-20T11:56:39.123Z" }, - { url = "https://files.pythonhosted.org/packages/ef/39/120bb76441bf835f3c3f42db9c206f31ba875711637a52a8209949ab04b0/ujson-5.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f1a27ab91083b4770e160d17f61b407f587548f2c2b5fbf19f94794c495594a", size = 1036515, upload-time = "2025-08-20T11:56:40.848Z" }, - { url = "https://files.pythonhosted.org/packages/b6/ae/fe1b4ff6388f681f6710e9494656957725b1e73ae50421ec04567df9fb75/ujson-5.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ecd6ff8a3b5a90c292c2396c2d63c687fd0ecdf17de390d852524393cd9ed052", size = 1195753, upload-time = "2025-08-20T11:56:42.341Z" }, - { url = "https://files.pythonhosted.org/packages/92/20/005b93f2cf846ae50b46812fcf24bbdd127521197e5f1e1a82e3b3e730a1/ujson-5.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9aacbeb23fdbc4b256a7d12e0beb9063a1ba5d9e0dbb2cfe16357c98b4334596", size = 1088844, upload-time = "2025-08-20T11:56:43.777Z" }, - { url = "https://files.pythonhosted.org/packages/41/9e/3142023c30008e2b24d7368a389b26d28d62fcd3f596d3d898a72dd09173/ujson-5.11.0-cp39-cp39-win32.whl", hash = "sha256:674f306e3e6089f92b126eb2fe41bcb65e42a15432c143365c729fdb50518547", size = 39652, upload-time = 
"2025-08-20T11:56:45.034Z" }, - { url = "https://files.pythonhosted.org/packages/ca/89/f4de0a3c485d0163f85f552886251876645fb62cbbe24fcdc0874b9fae03/ujson-5.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c6618f480f7c9ded05e78a1938873fde68baf96cdd74e6d23c7e0a8441175c4b", size = 43783, upload-time = "2025-08-20T11:56:46.156Z" }, - { url = "https://files.pythonhosted.org/packages/48/b1/2d50987a7b7cccb5c1fbe9ae7b184211106237b32c7039118c41d79632ea/ujson-5.11.0-cp39-cp39-win_arm64.whl", hash = "sha256:5600202a731af24a25e2d7b6eb3f648e4ecd4bb67c4d5cf12f8fab31677469c9", size = 38430, upload-time = "2025-08-20T11:56:47.653Z" }, { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, { url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, @@ -3956,29 +3566,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, { url = 
"https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/36/db/3fff0bcbe339a6fa6a3b9e3fbc2bfb321ec2f4cd233692272c5a8d6cf801/websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5", size = 175424, upload-time = "2025-03-05T20:02:56.505Z" }, - { url = "https://files.pythonhosted.org/packages/46/e6/519054c2f477def4165b0ec060ad664ed174e140b0d1cbb9fafa4a54f6db/websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a", size = 173077, upload-time = "2025-03-05T20:02:58.37Z" }, - { url = "https://files.pythonhosted.org/packages/1a/21/c0712e382df64c93a0d16449ecbf87b647163485ca1cc3f6cbadb36d2b03/websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b", size = 173324, upload-time = "2025-03-05T20:02:59.773Z" }, - { url = "https://files.pythonhosted.org/packages/1c/cb/51ba82e59b3a664df54beed8ad95517c1b4dc1a913730e7a7db778f21291/websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770", size = 182094, upload-time = "2025-03-05T20:03:01.827Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/0f/bf3788c03fec679bcdaef787518dbe60d12fe5615a544a6d4cf82f045193/websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb", size = 181094, upload-time = "2025-03-05T20:03:03.123Z" }, - { url = "https://files.pythonhosted.org/packages/5e/da/9fb8c21edbc719b66763a571afbaf206cb6d3736d28255a46fc2fe20f902/websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054", size = 181397, upload-time = "2025-03-05T20:03:04.443Z" }, - { url = "https://files.pythonhosted.org/packages/2e/65/65f379525a2719e91d9d90c38fe8b8bc62bd3c702ac651b7278609b696c4/websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee", size = 181794, upload-time = "2025-03-05T20:03:06.708Z" }, - { url = "https://files.pythonhosted.org/packages/d9/26/31ac2d08f8e9304d81a1a7ed2851c0300f636019a57cbaa91342015c72cc/websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed", size = 181194, upload-time = "2025-03-05T20:03:08.844Z" }, - { url = "https://files.pythonhosted.org/packages/98/72/1090de20d6c91994cd4b357c3f75a4f25ee231b63e03adea89671cc12a3f/websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880", size = 181164, upload-time = "2025-03-05T20:03:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/2d/37/098f2e1c103ae8ed79b0e77f08d83b0ec0b241cf4b7f2f10edd0126472e1/websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411", size = 176381, upload-time = "2025-03-05T20:03:12.77Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/8b/a32978a3ab42cebb2ebdd5b05df0696a09f4d436ce69def11893afa301f0/websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4", size = 176841, upload-time = "2025-03-05T20:03:14.367Z" }, { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, { url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, upload-time = 
"2025-03-05T20:03:25.321Z" }, { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, - { url = "https://files.pythonhosted.org/packages/b7/48/4b67623bac4d79beb3a6bb27b803ba75c1bdedc06bd827e465803690a4b2/websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940", size = 173106, upload-time = "2025-03-05T20:03:29.404Z" }, - { url = "https://files.pythonhosted.org/packages/ed/f0/adb07514a49fe5728192764e04295be78859e4a537ab8fcc518a3dbb3281/websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e", size = 173339, upload-time = "2025-03-05T20:03:30.755Z" }, - { url = "https://files.pythonhosted.org/packages/87/28/bd23c6344b18fb43df40d0700f6d3fffcd7cef14a6995b4f976978b52e62/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9", size = 174597, upload-time = "2025-03-05T20:03:32.247Z" }, - { url = "https://files.pythonhosted.org/packages/6d/79/ca288495863d0f23a60f546f0905ae8f3ed467ad87f8b6aceb65f4c013e4/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b", size = 174205, upload-time = "2025-03-05T20:03:33.731Z" }, - { url = "https://files.pythonhosted.org/packages/04/e4/120ff3180b0872b1fe6637f6f995bcb009fb5c87d597c1fc21456f50c848/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f", size = 174150, upload-time = "2025-03-05T20:03:35.757Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c3/30e2f9c539b8da8b1d76f64012f3b19253271a63413b2d3adb94b143407f/websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123", size = 176877, upload-time = "2025-03-05T20:03:37.199Z" }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] @@ -4105,22 +3698,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, - { url = "https://files.pythonhosted.org/packages/94/fd/6480106702a79bcceda5fd9c63cb19a04a6506bd5ce7fd8d9b63742f0021/yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748", size = 141301, upload-time = "2025-10-06T14:12:19.01Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/e1/6d95d21b17a93e793e4ec420a925fe1f6a9342338ca7a563ed21129c0990/yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859", size = 93864, upload-time = "2025-10-06T14:12:21.05Z" }, - { url = "https://files.pythonhosted.org/packages/32/58/b8055273c203968e89808413ea4c984988b6649baabf10f4522e67c22d2f/yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9", size = 94706, upload-time = "2025-10-06T14:12:23.287Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/d7bfbc28a88c2895ecd0da6a874def0c147de78afc52c773c28e1aa233a3/yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054", size = 347100, upload-time = "2025-10-06T14:12:28.527Z" }, - { url = "https://files.pythonhosted.org/packages/bd/e8/37a1e7b99721c0564b1fc7b0a4d1f595ef6fb8060d82ca61775b644185f7/yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b", size = 318902, upload-time = "2025-10-06T14:12:30.528Z" }, - { url = "https://files.pythonhosted.org/packages/1c/ef/34724449d7ef2db4f22df644f2dac0b8a275d20f585e526937b3ae47b02d/yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60", size = 363302, upload-time = "2025-10-06T14:12:32.295Z" }, - { url = "https://files.pythonhosted.org/packages/8a/04/88a39a5dad39889f192cce8d66cc4c58dbeca983e83f9b6bf23822a7ed91/yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890", size = 370816, upload-time = 
"2025-10-06T14:12:34.01Z" }, - { url = "https://files.pythonhosted.org/packages/6b/1f/5e895e547129413f56c76be2c3ce4b96c797d2d0ff3e16a817d9269b12e6/yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba", size = 346465, upload-time = "2025-10-06T14:12:35.977Z" }, - { url = "https://files.pythonhosted.org/packages/11/13/a750e9fd6f9cc9ed3a52a70fe58ffe505322f0efe0d48e1fd9ffe53281f5/yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca", size = 341506, upload-time = "2025-10-06T14:12:37.788Z" }, - { url = "https://files.pythonhosted.org/packages/3c/67/bb6024de76e7186611ebe626aec5b71a2d2ecf9453e795f2dbd80614784c/yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba", size = 335030, upload-time = "2025-10-06T14:12:39.775Z" }, - { url = "https://files.pythonhosted.org/packages/a2/be/50b38447fd94a7992996a62b8b463d0579323fcfc08c61bdba949eef8a5d/yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b", size = 358560, upload-time = "2025-10-06T14:12:41.547Z" }, - { url = "https://files.pythonhosted.org/packages/e2/89/c020b6f547578c4e3dbb6335bf918f26e2f34ad0d1e515d72fd33ac0c635/yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e", size = 357290, upload-time = "2025-10-06T14:12:43.861Z" }, - { url = "https://files.pythonhosted.org/packages/8c/52/c49a619ee35a402fa3a7019a4fa8d26878fec0d1243f6968bbf516789578/yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8", size = 350700, upload-time = "2025-10-06T14:12:46.868Z" }, - { url = 
"https://files.pythonhosted.org/packages/ab/c9/f5042d87777bf6968435f04a2bbb15466b2f142e6e47fa4f34d1a3f32f0c/yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b", size = 82323, upload-time = "2025-10-06T14:12:48.633Z" }, - { url = "https://files.pythonhosted.org/packages/fd/58/d00f7cad9eba20c4eefac2682f34661d1d1b3a942fc0092eb60e78cfb733/yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed", size = 87145, upload-time = "2025-10-06T14:12:50.241Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a3/70904f365080780d38b919edd42d224b8c4ce224a86950d2eaa2a24366ad/yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2", size = 82173, upload-time = "2025-10-06T14:12:51.869Z" }, { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ]