From 39177447543ae416b12404559ccf5c129ce8b062 Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Thu, 28 Aug 2025 11:47:43 -0300
Subject: [PATCH 01/25] chore: initial changes to include postgrest in monorepo

---
 Makefile | 10 +-
 pyproject.toml | 2 +
 src/postgrest/.devcontainer/Dockerfile | 21 +
 src/postgrest/.devcontainer/devcontainer.json | 87 +
 src/postgrest/CHANGELOG.md | 3064 +++++++++++++++++
 src/postgrest/LICENSE | 21 +
 src/postgrest/Makefile | 37 +
 src/postgrest/README.md | 109 +
 src/postgrest/docs/Makefile | 20 +
 src/postgrest/docs/api/client.rst | 14 +
 src/postgrest/docs/api/exceptions.rst | 5 +
 src/postgrest/docs/api/filters.rst | 31 +
 src/postgrest/docs/api/index.rst | 16 +
 src/postgrest/docs/api/request_builders.rst | 33 +
 src/postgrest/docs/api/responses.rst | 7 +
 src/postgrest/docs/api/types.rst | 16 +
 src/postgrest/docs/conf.py | 67 +
 src/postgrest/docs/examples/basic_queries.rst | 68 +
 src/postgrest/docs/examples/index.rst | 14 +
 src/postgrest/docs/examples/logging.rst | 25 +
 src/postgrest/docs/index.rst | 31 +
 src/postgrest/docs/make.bat | 35 +
 src/postgrest/docs/requirements.txt | 2 +
 src/postgrest/infra/docker-compose.yaml | 28 +
 src/postgrest/infra/init.sql | 105 +
 src/postgrest/pyproject.toml | 82 +
 src/postgrest/src/postgrest/__init__.py | 62 +
 .../src/postgrest/_async/__init__.py | 1 +
 src/postgrest/src/postgrest/_async/client.py | 189 +
 .../src/postgrest/_async/request_builder.py | 416 +++
 src/postgrest/src/postgrest/_sync/__init__.py | 1 +
 src/postgrest/src/postgrest/_sync/client.py | 189 +
 .../src/postgrest/_sync/request_builder.py | 416 +++
 src/postgrest/src/postgrest/base_client.py | 81 +
 .../src/postgrest/base_request_builder.py | 687 ++++
 src/postgrest/src/postgrest/constants.py | 6 +
 src/postgrest/src/postgrest/exceptions.py | 68 +
 src/postgrest/src/postgrest/py.typed | 0
 src/postgrest/src/postgrest/types.py | 58 +
 src/postgrest/src/postgrest/utils.py | 72 +
 src/postgrest/src/postgrest/version.py | 1 +
 src/postgrest/tests/__init__.py | 0
 src/postgrest/tests/_async/__init__.py | 0
 src/postgrest/tests/_async/client.py | 28 +
 src/postgrest/tests/_async/test_client.py | 189 +
 .../_async/test_filter_request_builder.py | 243 ++
 ...test_filter_request_builder_integration.py | 586 ++++
 .../_async/test_query_request_builder.py | 22 +
 .../tests/_async/test_request_builder.py | 492 +++
 src/postgrest/tests/_sync/__init__.py | 0
 src/postgrest/tests/_sync/client.py | 28 +
 src/postgrest/tests/_sync/test_client.py | 185 +
 .../_sync/test_filter_request_builder.py | 243 ++
 ...test_filter_request_builder_integration.py | 579 ++++
 .../tests/_sync/test_query_request_builder.py | 22 +
 .../tests/_sync/test_request_builder.py | 492 +++
 src/postgrest/tests/test_utils.py | 29 +
 src/supabase/pyproject.toml | 2 +-
 test.ps1 | 5 -
 uv.lock | 112 +-
 60 files changed, 9438 insertions(+), 16 deletions(-)
 create mode 100644 src/postgrest/.devcontainer/Dockerfile
 create mode 100644 src/postgrest/.devcontainer/devcontainer.json
 create mode 100644 src/postgrest/CHANGELOG.md
 create mode 100644 src/postgrest/LICENSE
 create mode 100644 src/postgrest/Makefile
 create mode 100644 src/postgrest/README.md
 create mode 100644 src/postgrest/docs/Makefile
 create mode 100644 src/postgrest/docs/api/client.rst
 create mode 100644 src/postgrest/docs/api/exceptions.rst
 create mode 100644 src/postgrest/docs/api/filters.rst
 create mode 100644 src/postgrest/docs/api/index.rst
 create mode 100644 src/postgrest/docs/api/request_builders.rst
 create mode 100644 src/postgrest/docs/api/responses.rst
 create mode 100644
src/postgrest/docs/api/types.rst create mode 100644 src/postgrest/docs/conf.py create mode 100644 src/postgrest/docs/examples/basic_queries.rst create mode 100644 src/postgrest/docs/examples/index.rst create mode 100644 src/postgrest/docs/examples/logging.rst create mode 100644 src/postgrest/docs/index.rst create mode 100644 src/postgrest/docs/make.bat create mode 100644 src/postgrest/docs/requirements.txt create mode 100644 src/postgrest/infra/docker-compose.yaml create mode 100644 src/postgrest/infra/init.sql create mode 100644 src/postgrest/pyproject.toml create mode 100644 src/postgrest/src/postgrest/__init__.py create mode 100644 src/postgrest/src/postgrest/_async/__init__.py create mode 100644 src/postgrest/src/postgrest/_async/client.py create mode 100644 src/postgrest/src/postgrest/_async/request_builder.py create mode 100644 src/postgrest/src/postgrest/_sync/__init__.py create mode 100644 src/postgrest/src/postgrest/_sync/client.py create mode 100644 src/postgrest/src/postgrest/_sync/request_builder.py create mode 100644 src/postgrest/src/postgrest/base_client.py create mode 100644 src/postgrest/src/postgrest/base_request_builder.py create mode 100644 src/postgrest/src/postgrest/constants.py create mode 100644 src/postgrest/src/postgrest/exceptions.py create mode 100644 src/postgrest/src/postgrest/py.typed create mode 100644 src/postgrest/src/postgrest/types.py create mode 100644 src/postgrest/src/postgrest/utils.py create mode 100644 src/postgrest/src/postgrest/version.py create mode 100644 src/postgrest/tests/__init__.py create mode 100644 src/postgrest/tests/_async/__init__.py create mode 100644 src/postgrest/tests/_async/client.py create mode 100644 src/postgrest/tests/_async/test_client.py create mode 100644 src/postgrest/tests/_async/test_filter_request_builder.py create mode 100644 src/postgrest/tests/_async/test_filter_request_builder_integration.py create mode 100644 src/postgrest/tests/_async/test_query_request_builder.py create mode 100644 src/postgrest/tests/_async/test_request_builder.py create mode 100644 src/postgrest/tests/_sync/__init__.py create mode 100644 src/postgrest/tests/_sync/client.py create mode 100644 src/postgrest/tests/_sync/test_client.py create mode 100644 src/postgrest/tests/_sync/test_filter_request_builder.py create mode 100644 src/postgrest/tests/_sync/test_filter_request_builder_integration.py create mode 100644 src/postgrest/tests/_sync/test_query_request_builder.py create mode 100644 src/postgrest/tests/_sync/test_request_builder.py create mode 100644 src/postgrest/tests/test_utils.py delete mode 100644 test.ps1 diff --git a/Makefile b/Makefile index 61f94945..c417286a 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ .PHONY: ci, default, pre-commit, clean, start-infra, stop-infra -PACKAGES := functions realtime storage auth supabase +PACKAGES := functions realtime storage auth postgrest supabase FORALL_PKGS = $(foreach pkg, $(PACKAGES), $(pkg).$(1)) help:: @@ -17,6 +17,7 @@ help:: @echo " pre-commit -- Run pre-commit on all files" clean: $(call FORALL_PKGS,clean) + rm -rf dist .ruff_cache .pytest_cache help:: @echo " clean -- Delete cache files and coverage reports from tests" @@ -24,10 +25,10 @@ publish: $(call FORALL_PKGS,build) uv publish # not all packages have infra, so just manually instantiate the ones that do for now -start-infra: realtime.start-infra storage.start-infra auth.start-infra +start-infra: realtime.start-infra storage.start-infra auth.start-infra postgrest.start-infra help:: @echo " start-infra -- Start all containers 
necessary for tests. NOTE: it is not necessary to this before running CI tests, they start the infra by themselves" -stop-infra: realtime.stop-infra storage.stop-infra auth.stop-infra +stop-infra: realtime.stop-infra storage.stop-infra auth.stop-infra postgrest.stop-infra help:: @echo " stop-infra -- Stop all infra used by tests. NOTE: tests do leave their infra running, so run this to ensure all containers are stopped" @@ -44,6 +45,9 @@ storage.%: auth.%: @$(MAKE) -C src/auth $* +postgrest.%: + @$(MAKE) -C src/postgrest $* + supabase.%: @$(MAKE) -C src/supabase $* diff --git a/pyproject.toml b/pyproject.toml index 8beb844c..ac60986e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,7 @@ members = [ "src/functions", "src/supabase", "src/storage", + "src/postgrest", "src/auth" ] @@ -12,6 +13,7 @@ realtime = { workspace = true } supabase_functions = { workspace = true } supabase_auth = { workspace = true } storage3 = { workspace = true } +postgrest = { workspace = true } supabase = { workspace = true } [tool.pytest.ini_options] diff --git a/src/postgrest/.devcontainer/Dockerfile b/src/postgrest/.devcontainer/Dockerfile new file mode 100644 index 00000000..6a9e8da9 --- /dev/null +++ b/src/postgrest/.devcontainer/Dockerfile @@ -0,0 +1,21 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.203.0/containers/python-3/.devcontainer/base.Dockerfile + +# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster +ARG VARIANT="3.10-bullseye" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 +ARG NODE_VERSION="none" +RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi + +# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image. +# COPY requirements.txt /tmp/pip-tmp/ +# RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ +# && rm -rf /tmp/pip-tmp + +# [Optional] Uncomment this section to install additional OS packages. +# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +# && apt-get -y install --no-install-recommends + +# [Optional] Uncomment this line to install global node packages. +# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 diff --git a/src/postgrest/.devcontainer/devcontainer.json b/src/postgrest/.devcontainer/devcontainer.json new file mode 100644 index 00000000..8ce5faab --- /dev/null +++ b/src/postgrest/.devcontainer/devcontainer.json @@ -0,0 +1,87 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: +// https://github.com/microsoft/vscode-dev-containers/tree/v0.203.0/containers/python-3 +{ + "name": "Python 3", + "runArgs": [ + "--init" + ], + "build": { + "dockerfile": "Dockerfile", + "context": "..", + "args": { + // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6 + // Append -bullseye or -buster to pin to an OS version. + // Use -bullseye variants on local on arm64/Apple Silicon. + "VARIANT": "3.10-bullseye", + // Options + "NODE_VERSION": "lts/*" + } + }, + // Set *default* container specific settings.json values on container create. 
+ "settings": { + "python.pythonPath": "/usr/local/bin/python", + "python.languageServer": "Pylance", + "python.linting.enabled": true, + "python.linting.flake8Enabled": true, + "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", + "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", + "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", + "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", + "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", + "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", + "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint", + "python.analysis.diagnosticMode": "workspace", + "files.exclude": { + "**/.ipynb_checkpoints": true, + "**/.pytest_cache": true, + "**/*pycache*": true + }, + "python.formatting.provider": "black", + "python.linting.flake8Args": [ + "--max-line-length=88", + "--extend-ignore=E203" + ], + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + }, + "python.sortImports.args": [ + "--multi-line=3", + "--trailing-comma", + "--force-grid-wrap=0", + "--use-parentheses", + "--line-width=88", + ], + "markdownlint.config": { + "MD022": false, + "MD024": false, + "MD032": false, + "MD033": false + } + }, + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "ms-azuretools.vscode-docker", + "donjayamanne.githistory", + "felipecaputo.git-project-manager", + "github.copilot-nightly", + "eamodio.gitlens", + "davidanson.vscode-markdownlint" + ], + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "pip3 install --user -r requirements.txt", + // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 
+ "remoteUser": "vscode", + "features": { + "docker-in-docker": "latest", + "git": "latest", + "git-lfs": "latest", + "github-cli": "latest" + } +} diff --git a/src/postgrest/CHANGELOG.md b/src/postgrest/CHANGELOG.md new file mode 100644 index 00000000..825b44e7 --- /dev/null +++ b/src/postgrest/CHANGELOG.md @@ -0,0 +1,3064 @@ +# CHANGELOG + +## [1.1.1](https://github.com/supabase/postgrest-py/compare/v1.1.0...v1.1.1) (2025-06-23) + + +### Bug Fixes + +* remove jwt key validation to allow new api keys ([#612](https://github.com/supabase/postgrest-py/issues/612)) ([af63482](https://github.com/supabase/postgrest-py/commit/af634822dac7b0a7f12973c01de2750de5723490)) + +## [1.1.0](https://github.com/supabase/postgrest-py/compare/v1.0.2...v1.1.0) (2025-06-19) + + +### Features + +* allow injection of httpx client ([#591](https://github.com/supabase/postgrest-py/issues/591)) ([635a4ba](https://github.com/supabase/postgrest-py/commit/635a4ba421457ce0967c3efc332ae883b693ef71)) + + +### Bug Fixes + +* **pydantic:** model_validate_json causing code break with pydantic v1 ([#609](https://github.com/supabase/postgrest-py/issues/609)) ([587dcc8](https://github.com/supabase/postgrest-py/commit/587dcc82835afd0290c0c83f3c38ff6b8de123a2)) +* remove reliance on SyncClient and use Client directly from httpx ([#607](https://github.com/supabase/postgrest-py/issues/607)) ([021f1b6](https://github.com/supabase/postgrest-py/commit/021f1b65fd728116c715b33504df7c37847e6bf2)) + +## [1.0.2](https://github.com/supabase/postgrest-py/compare/v1.0.1...v1.0.2) (2025-05-21) + + +### Bug Fixes + +* pass params as query params for get/head requests ([#593](https://github.com/supabase/postgrest-py/issues/593)) ([576a5b8](https://github.com/supabase/postgrest-py/commit/576a5b84e19c0a379ef10df24f3325c0519d406e)) +* validate JSON input for APIError ([#597](https://github.com/supabase/postgrest-py/issues/597)) ([3c8bdae](https://github.com/supabase/postgrest-py/commit/3c8bdae4135f79dcf903d0bd75f02c097db0b855)) + +## [1.0.1](https://github.com/supabase/postgrest-py/compare/v1.0.0...v1.0.1) (2025-03-24) + + +### Bug Fixes + +* order using foreign table ([#581](https://github.com/supabase/postgrest-py/issues/581)) ([66477dd](https://github.com/supabase/postgrest-py/commit/66477dd82580544c3ed238cc82080c7ca91ee226)) + +## [1.0.0](https://github.com/supabase/postgrest-py/compare/v0.19.3...v1.0.0) (2025-03-11) + + +### ⚠ BREAKING CHANGES + +* schema method persisting only on current query ([#575](https://github.com/supabase/postgrest-py/issues/575)) + +### Bug Fixes + +* schema method persisting only on current query ([#575](https://github.com/supabase/postgrest-py/issues/575)) ([b0dd496](https://github.com/supabase/postgrest-py/commit/b0dd496e1793c07ac1081fb59b3c2c8f9feb2984)) + +## [0.19.3](https://github.com/supabase/postgrest-py/compare/v0.19.2...v0.19.3) (2025-01-24) + + +### Bug Fixes + +* client is sending a body in a GET and HEAD requests ([#562](https://github.com/supabase/postgrest-py/issues/562)) ([6947a53](https://github.com/supabase/postgrest-py/commit/6947a5391b1b2178c4d4a2f13a9592e996f4fa6e)) + +## [0.19.2](https://github.com/supabase/postgrest-py/compare/v0.19.1...v0.19.2) (2025-01-08) + + +### Bug Fixes + +* _cleaned_columns function now works with python multiline and typings ([#556](https://github.com/supabase/postgrest-py/issues/556)) 
([4127576](https://github.com/supabase/postgrest-py/commit/412757633e9319a4e55e00bdc09464aa807db1b9)) + +## [0.19.1](https://github.com/supabase/postgrest-py/compare/v0.19.0...v0.19.1) (2024-12-30) + + +### Bug Fixes + +* head=True breaking count ([#545](https://github.com/supabase/postgrest-py/issues/545)) ([576987b](https://github.com/supabase/postgrest-py/commit/576987bb2512f6e18360008316377b8d4f2f255b)) + +## [0.19.0](https://github.com/supabase/postgrest-py/compare/v0.18.0...v0.19.0) (2024-11-22) + + +### Features + +* Check if token is a JWT ([#529](https://github.com/supabase/postgrest-py/issues/529)) ([ed892c4](https://github.com/supabase/postgrest-py/commit/ed892c45346b3f866df0fc0afb997f292c17cbf2)) + +## [0.18.0](https://github.com/supabase/postgrest-py/compare/v0.17.2...v0.18.0) (2024-10-31) + + +### Features + +* Check if url is an HTTP URL ([#526](https://github.com/supabase/postgrest-py/issues/526)) ([eb7f319](https://github.com/supabase/postgrest-py/commit/eb7f3193b35a8e727511b290c3f5bd7a8a19b9c8)) + +## [0.17.2](https://github.com/supabase/postgrest-py/compare/v0.17.1...v0.17.2) (2024-10-18) + + +### Bug Fixes + +* bump minimal version of Python to 3.9 ([#522](https://github.com/supabase/postgrest-py/issues/522)) ([11da550](https://github.com/supabase/postgrest-py/commit/11da55084fdd22d0e081aee5867b946337783d73)) +* **deps:** install strenum package only with Python 3.10 and older ([#519](https://github.com/supabase/postgrest-py/issues/519)) ([9dfefd0](https://github.com/supabase/postgrest-py/commit/9dfefd0bd2e31775e4ff423654797cf40b1940fe)) +* Types to use Option[T] ([#514](https://github.com/supabase/postgrest-py/issues/514)) ([645b677](https://github.com/supabase/postgrest-py/commit/645b677715b8ff338047240bf48dd19dd86b71b4)) + +## [0.17.1](https://github.com/supabase/postgrest-py/compare/v0.17.0...v0.17.1) (2024-10-02) + + +### Bug Fixes + +* httpx minimum version update ([#512](https://github.com/supabase/postgrest-py/issues/512)) ([5107584](https://github.com/supabase/postgrest-py/commit/5107584f4f49d46bf7df6567109a3edce820c726)) + +## [0.17.0](https://github.com/supabase/postgrest-py/compare/v0.16.11...v0.17.0) (2024-09-28) + + +### Features + +* Proxy support ([#508](https://github.com/supabase/postgrest-py/issues/508)) ([8629f6f](https://github.com/supabase/postgrest-py/commit/8629f6f8d194d54efb8944f9fe5811ee8190cbf1)) +* select all columns by default ([#509](https://github.com/supabase/postgrest-py/issues/509)) ([ffb304f](https://github.com/supabase/postgrest-py/commit/ffb304fbc102ed9efa431e5ccfd8027d4d5c3f54)) + + +### Bug Fixes + +* **deps:** bump pydantic from 2.8.2 to 2.9.2 ([#506](https://github.com/supabase/postgrest-py/issues/506)) ([ccf2885](https://github.com/supabase/postgrest-py/commit/ccf28850fe0a0888accc09d2dedf42f7d9242e2e)) + +## [0.16.11](https://github.com/supabase/postgrest-py/compare/v0.16.10...v0.16.11) (2024-08-22) + + +### Bug Fixes + +* fixed the 'order' method for 'BaseSelectRequestBuilder' ([#495](https://github.com/supabase/postgrest-py/issues/495)) ([97d520e](https://github.com/supabase/postgrest-py/commit/97d520ea339fcf7f706d679d06e8111f0cfdec19)) + +## [0.16.10](https://github.com/supabase/postgrest-py/compare/v0.16.9...v0.16.10) (2024-08-14) + + +### Bug Fixes + +* revert sanitize_pattern_param in like and ilike ([#481](https://github.com/supabase/postgrest-py/issues/481)) 
([18ed416](https://github.com/supabase/postgrest-py/commit/18ed4162cd8eeb6910b87ee7d06bcfba298bca72)) + +## v0.16.9 (2024-07-16) + +### Chore + +* chore(deps-dev): bump zipp from 3.18.1 to 3.19.1 (#470) ([`a0bbb4f`](https://github.com/supabase-community/postgrest-py/commit/a0bbb4f0a17a8e1b4aed0c3893517955d9dede63)) + +* chore(deps-dev): bump python-semantic-release from 9.8.3 to 9.8.5 (#469) ([`61be87c`](https://github.com/supabase-community/postgrest-py/commit/61be87cb8cea268b340c88b97bcbe7ffcbd02945)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.8.3 to 9.8.5 (#468) ([`7a62fde`](https://github.com/supabase-community/postgrest-py/commit/7a62fdef8ddaf0614149713a0584c2a715cd46f5)) + +* chore(deps): bump certifi from 2024.2.2 to 2024.7.4 (#467) ([`f7eb0f4`](https://github.com/supabase-community/postgrest-py/commit/f7eb0f43b0284bd5bf40f31492fe317f58429348)) + +* chore(deps): bump pydantic from 2.7.4 to 2.8.2 (#464) ([`b7c425d`](https://github.com/supabase-community/postgrest-py/commit/b7c425d3786bca9aa802dc72d0787e9e5c2982bf)) + +* chore(deps-dev): bump python-semantic-release from 9.8.0 to 9.8.3 (#460) ([`ce704b3`](https://github.com/supabase-community/postgrest-py/commit/ce704b32393c8e8be95b66ce9fb1ecf20aed9041)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.8.1 to 9.8.3 (#459) ([`87995dd`](https://github.com/supabase-community/postgrest-py/commit/87995dda60eb8f69da5cbd82e31cc2318741fb4f)) + +* chore(deps-dev): bump urllib3 from 2.2.1 to 2.2.2 (#458) ([`27b16fa`](https://github.com/supabase-community/postgrest-py/commit/27b16fabc8676f38acca9ac383cef13ed25806b7)) + +* chore(deps): bump codecov/codecov-action from 4.4.1 to 4.5.0 (#455) ([`9d6bf07`](https://github.com/supabase-community/postgrest-py/commit/9d6bf07fa6ef10db9c46799d0cdd2f425eda56ae)) + +* chore(deps): bump pydantic from 2.7.2 to 2.7.4 (#454) ([`6b1b003`](https://github.com/supabase-community/postgrest-py/commit/6b1b0034249230f3c0005eb3a527cc68d7dafda0)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.8.0 to 9.8.1 (#452) ([`73eb0cf`](https://github.com/supabase-community/postgrest-py/commit/73eb0cf8d91b32fe9a17193a487c9d4799844868)) + +* chore(deps-dev): bump pytest from 8.2.1 to 8.2.2 (#450) ([`f2f3d4f`](https://github.com/supabase-community/postgrest-py/commit/f2f3d4fa5955b69fbc108627298d18aebca4fe46)) + +### Fix + +* fix: version bump (#471) ([`b509b3a`](https://github.com/supabase-community/postgrest-py/commit/b509b3ad0d69f7ab34d3043350bbc7eb579ab029)) + +### Unknown + +* Fix 830 (#461) ([`ea791f4`](https://github.com/supabase-community/postgrest-py/commit/ea791f44406c57be3096ba44a14271e65cb2df30)) + +* Enable HTTP2 (#462) ([`0ea293d`](https://github.com/supabase-community/postgrest-py/commit/0ea293d5cd632ec8032ce7bf008cfa05f50b5685)) + +## v0.16.8 (2024-06-04) + +### Chore + +* chore(release): bump version to v0.16.8 ([`3871911`](https://github.com/supabase-community/postgrest-py/commit/38719110badf6dd985beb77b5e8a25f413b69abd)) + +* chore(deps): bump pydantic from 2.7.1 to 2.7.2 (#447) ([`3161d15`](https://github.com/supabase-community/postgrest-py/commit/3161d154cae451449e30c0bb7e7c7b53672d30b0)) + +### Fix + +* fix: add "verify" flag to the creation of client ([`ffe9e28`](https://github.com/supabase-community/postgrest-py/commit/ffe9e28aede84f5e3906c2429bfed945a18ca8f5)) + +### Unknown + +* Follow redirects (#449) 
([`bb851bf`](https://github.com/supabase-community/postgrest-py/commit/bb851bfae70d36f40d75a85da58274e75e19eadb)) + +## v0.16.7 (2024-06-01) + +### Chore + +* chore(release): bump version to v0.16.7 ([`bc2fc64`](https://github.com/supabase-community/postgrest-py/commit/bc2fc648c709dd2099740d75e5c2b9ade97b57d5)) + +### Fix + +* fix: add get, head and count parameters to the rpc method. (#444) ([`b1d48bc`](https://github.com/supabase-community/postgrest-py/commit/b1d48bca84e707c802448a851608d623c080d72a)) + +## v0.16.6 (2024-06-01) + +### Chore + +* chore(release): bump version to v0.16.6 ([`465232a`](https://github.com/supabase-community/postgrest-py/commit/465232aef70545484ca6c35ceacfaa3aa976b391)) + +### Fix + +* fix: convert None to a string null for the is method (#446) ([`9970ac3`](https://github.com/supabase-community/postgrest-py/commit/9970ac379f06ebb56d5532ab6730b9330fcccc40)) + +## v0.16.5 (2024-06-01) + +### Chore + +* chore(release): bump version to v0.16.5 ([`901108a`](https://github.com/supabase-community/postgrest-py/commit/901108a11dcba0549d63efa96b40a837b58c54ec)) + +* chore(deps-dev): bump python-semantic-release from 9.7.3 to 9.8.0 (#443) ([`d8ce53a`](https://github.com/supabase-community/postgrest-py/commit/d8ce53a2b3b66fa42af3c28594749029b0f70b2e)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.7.3 to 9.8.0 (#442) ([`42a4a32`](https://github.com/supabase-community/postgrest-py/commit/42a4a3226ee129ed8e51fe58347266181d8165bb)) + +* chore(deps-dev): bump requests from 2.31.0 to 2.32.0 (#440) ([`9fe1070`](https://github.com/supabase-community/postgrest-py/commit/9fe1070e0b446d8f1a0d5c4b312b5d4c8659926c)) + +* chore: code style fixes (#425) ([`11076da`](https://github.com/supabase-community/postgrest-py/commit/11076dae41d3bc6172799245f491f8a1e50dc9ca)) + +* chore(deps-dev): bump jinja2 from 3.1.3 to 3.1.4 (#428) ([`d786fc7`](https://github.com/supabase-community/postgrest-py/commit/d786fc7667e3074dc29a0513eccbc18edf850358)) + +* chore(deps): bump codecov/codecov-action from 4.3.0 to 4.4.1 (#437) ([`fe50d11`](https://github.com/supabase-community/postgrest-py/commit/fe50d111cd7ad4831fc9772113fff85cdc926bd9)) + +* chore(deps-dev): bump pytest-asyncio from 0.23.6 to 0.23.7 (#439) ([`f41e7af`](https://github.com/supabase-community/postgrest-py/commit/f41e7afd112b9f76c6f0fb3ec02676333c9d7513)) + +* chore(deps-dev): bump pytest from 8.1.1 to 8.2.1 (#438) ([`9722855`](https://github.com/supabase-community/postgrest-py/commit/9722855535dc1d85506b499b07c7adad060b12ae)) + +* chore(deps-dev): bump python-semantic-release from 9.7.1 to 9.7.3 (#434) ([`7cc3c38`](https://github.com/supabase-community/postgrest-py/commit/7cc3c380628e719c055879df63cec48bc31f86a9)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.5.0 to 9.7.3 (#436) ([`6115096`](https://github.com/supabase-community/postgrest-py/commit/6115096e9371be0fd0b7a4ff5b7a32d927f17813)) + +* chore(deps-dev): bump black from 24.3.0 to 24.4.2 (#416) ([`5dceb98`](https://github.com/supabase-community/postgrest-py/commit/5dceb982315773d72a5e46de6429157a6b1af3d8)) + +* chore(deps-dev): bump python-semantic-release from 9.5.0 to 9.7.1 (#430) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> 
([`56f7f5d`](https://github.com/supabase-community/postgrest-py/commit/56f7f5dea3649a9eb5e9e25beba562d81d32e562)) + +* chore(deps): bump furo from 2024.1.29 to 2024.5.6 (#431) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`4692500`](https://github.com/supabase-community/postgrest-py/commit/46925008794dd12de8823cc94116e99802d0a5a6)) + +### Fix + +* fix: update overlaps to work with timestamp range (#445) ([`b39f332`](https://github.com/supabase-community/postgrest-py/commit/b39f3326566c998dc517089a7053fb4d5d44d128)) + +### Unknown + +* Update .pre-commit-config.yaml (#424) ([`82a9e9b`](https://github.com/supabase-community/postgrest-py/commit/82a9e9ba6e503a1797ea565b84003de4f944de7d)) + +* Add stale bot (#422) ([`44f3672`](https://github.com/supabase-community/postgrest-py/commit/44f36724b6366aabee0177f795f8fe386df3d7e5)) + +## v0.16.4 (2024-04-29) + +### Chore + +* chore(release): bump version to v0.16.4 ([`b2bd803`](https://github.com/supabase-community/postgrest-py/commit/b2bd8032c17a32f3094a03b5d9414f4d5767d19e)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.4.1 to 9.5.0 (#414) ([`059aceb`](https://github.com/supabase-community/postgrest-py/commit/059aceb78f1efbed03014ddbc328bed65563d387)) + +* chore(deps-dev): bump python-semantic-release from 9.4.1 to 9.5.0 (#413) ([`5444cda`](https://github.com/supabase-community/postgrest-py/commit/5444cdaf76103ca3ab97b0603c033967c6432158)) + +* chore(deps): bump pydantic from 2.7.0 to 2.7.1 (#412) ([`0e08ecb`](https://github.com/supabase-community/postgrest-py/commit/0e08ecbc214515e9bccdbcbaf5f98599963666be)) + +* chore(deps): bump pydantic from 2.6.4 to 2.7.0 (#408) ([`4b3a664`](https://github.com/supabase-community/postgrest-py/commit/4b3a664abf2c35b7a08919ae7bf71c90f3fa9ef8)) + +* chore(deps): bump idna from 3.6 to 3.7 (#407) ([`9046f41`](https://github.com/supabase-community/postgrest-py/commit/9046f417b2c6dec2a450c538d0dd0193230c25ed)) + +* chore(deps): bump codecov/codecov-action from 4.1.0 to 4.3.0 (#406) ([`a496a78`](https://github.com/supabase-community/postgrest-py/commit/a496a78c90a4bd7981109d6ae67a0a11a1850e4c)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.3.1 to 9.4.1 (#405) ([`e153778`](https://github.com/supabase-community/postgrest-py/commit/e153778b0a5acfe7864e5cbfa8b69cd87dab99ee)) + +* chore(deps-dev): bump python-semantic-release from 9.3.0 to 9.4.1 (#404) ([`364a9a9`](https://github.com/supabase-community/postgrest-py/commit/364a9a951d0a52174a9e263a24f7032821c46f44)) + +* chore(deps-dev): bump pytest-cov from 4.1.0 to 5.0.0 (#394) ([`64e8819`](https://github.com/supabase-community/postgrest-py/commit/64e88199bc8abdab884c76618e6197f1cd4fe748)) + +### Fix + +* fix: increase timeout (#417) ([`a387471`](https://github.com/supabase-community/postgrest-py/commit/a3874712ab1440915a7ccd3788fe11289febcc00)) + +## v0.16.3 (2024-04-13) + +### Chore + +* chore(release): bump version to v0.16.3 ([`980d262`](https://github.com/supabase-community/postgrest-py/commit/980d2624fda078544ff0a9da1001cb3e00dca483)) + +* chore(deps): bump codecov/codecov-action from 4.1.0 to 4.1.1 (#396) ([`dcadb43`](https://github.com/supabase-community/postgrest-py/commit/dcadb436d48445fdea1d7870b483ea77bda0927a)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 9.3.0 to 9.3.1 (#393) 
([`eabf30c`](https://github.com/supabase-community/postgrest-py/commit/eabf30c390b259c285eeab28c6a6d1e64bb0ea9f)) + +* chore(deps): bump python-semantic-release/python-semantic-release from 8.0.0 to 9.3.0 (#390) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`47e262e`](https://github.com/supabase-community/postgrest-py/commit/47e262e8154d1989f327b4c944bf350c5657f0b5)) + +### Fix + +* fix: upsert and insert with default_to_null boolean argument (#398) ([`ae5f80a`](https://github.com/supabase-community/postgrest-py/commit/ae5f80a7dc350afc69808f36f62b732739685739)) + +### Unknown + +* Revert "chore(deps): bump codecov/codecov-action from 4.1.0 to 4.1.1" (#397) ([`b4c740d`](https://github.com/supabase-community/postgrest-py/commit/b4c740d3e6cbe1709b18cdd56d89b8e592785ec7)) + +## v0.16.2 (2024-03-23) + +### Chore + +* chore(release): bump version to v0.16.2 ([`90d6906`](https://github.com/supabase-community/postgrest-py/commit/90d690672324c00628adfcaa95350f0690666efc)) + +* chore(deps-dev): bump black from 23.12.1 to 24.3.0 (#385) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`b10e114`](https://github.com/supabase-community/postgrest-py/commit/b10e1146fe87087315c038309b531ef87d7c5ce1)) + +* chore(deps): bump pydantic from 2.6.2 to 2.6.4 (#384) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`9bd4fb1`](https://github.com/supabase-community/postgrest-py/commit/9bd4fb1d6eec03c0c006eb06ef37fb62e83f9e7c)) + +* chore(deps): bump furo from 2023.9.10 to 2024.1.29 (#383) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7eec5ce`](https://github.com/supabase-community/postgrest-py/commit/7eec5ced4c33fa604ab09aa1d64a708e52928a5d)) + +* chore(deps-dev): bump pytest-asyncio from 0.23.5 to 0.23.5.post1 (#382) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`ae09035`](https://github.com/supabase-community/postgrest-py/commit/ae090355aa07065e4ee37c4a39dd1b853b0de63d)) + +### Fix + +* fix: update dependencies and tests (#392) ([`d04d76c`](https://github.com/supabase-community/postgrest-py/commit/d04d76caa914afd5efffa9efd6481a38429742e8)) + +## v0.16.1 (2024-02-29) + +### Chore + +* chore(release): bump version to v0.16.1 ([`6d8b32a`](https://github.com/supabase-community/postgrest-py/commit/6d8b32a5210846081cb4d6f2f54dd8e8100129c4)) + +* chore(deps): bump pydantic from 2.5.3 to 2.6.2 (#374) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`b858685`](https://github.com/supabase-community/postgrest-py/commit/b858685c52291aba780d2db3626c6d618b826d1a)) + +### Fix + +* fix: explain functionality to show results (#371) + +Co-authored-by: Rodrigo Mansueli Nunes <rodrigo@mansueli.com> ([`3e0ea2e`](https://github.com/supabase-community/postgrest-py/commit/3e0ea2ef54fb2b50d4e5cb5619abc1c96471836f)) + +### Test + +* test: remove skip from rpc with range test (#376) 
([`a3fc560`](https://github.com/supabase-community/postgrest-py/commit/a3fc56044ed26eefdde3dea18353ad7cc2f03b2c)) + +### Unknown + +* Bump action versions (#377) ([`602d66e`](https://github.com/supabase-community/postgrest-py/commit/602d66e6e40402281aa388a3bb9e8ddef6d5c718)) + +## v0.16.0 (2024-02-27) + +### Chore + +* chore(release): bump version to v0.16.0 ([`3dc51d4`](https://github.com/supabase-community/postgrest-py/commit/3dc51d4859721c99a8c7c69d4fe144ff37d9e16f)) + +### Feature + +* feat: Add RPC request builder class for additional filters (#372) ([`0002e8f`](https://github.com/supabase-community/postgrest-py/commit/0002e8f7ec32b6787b44996079b4c2f43fc43717)) + +## v0.15.1 (2024-02-27) + +### Chore + +* chore(release): bump version to v0.15.1 ([`6d55e49`](https://github.com/supabase-community/postgrest-py/commit/6d55e49b461fd0b52e9267a6b1e47038756bfd7f)) + +### Fix + +* fix: update range to use query parameters instead of headers (#375) ([`eae612c`](https://github.com/supabase-community/postgrest-py/commit/eae612ce0548b392d574e9afc12c11f73e54cf8f)) + +## v0.15.0 (2024-01-15) + +### Chore + +* chore(release): bump version to v0.15.0 ([`0faa8c3`](https://github.com/supabase-community/postgrest-py/commit/0faa8c3f37cb1f360f65dcef075479c297029844)) + +* chore(deps-dev): bump pytest-asyncio from 0.18.3 to 0.23.3 (#344) + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`e9f49a5`](https://github.com/supabase-community/postgrest-py/commit/e9f49a57b13c7f8f53ca2a5fc2d41377e980959b)) + +### Feature + +* feat: add like_any_of, like_all_of, ilike_any_of and ilike_all_of filters (#358) ([`d4e3f57`](https://github.com/supabase-community/postgrest-py/commit/d4e3f57aafd75138272b558f4ce507b2bef70e37)) + +## v0.14.0 (2024-01-15) + +### Chore + +* chore(release): bump version to v0.14.0 ([`9f8a2a5`](https://github.com/supabase-community/postgrest-py/commit/9f8a2a54319795523efe5d41f5dcd327ba465a69)) + +* chore: add alias for range methods (#350) ([`83ca3cd`](https://github.com/supabase-community/postgrest-py/commit/83ca3cd0a791513ed4c1fe45d3ed125a3c3d96e3)) + +### Feature + +* feat: add or filter along with tests (#355) + +Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> ([`e302009`](https://github.com/supabase-community/postgrest-py/commit/e302009ac93ba3703a7b5f9e394e1d867704cea7)) + +### Unknown + +* update ci for publishing package (#349) ([`496d95a`](https://github.com/supabase-community/postgrest-py/commit/496d95a227c8412a064a8f31a365e758d8c7d844)) + +## v0.13.2 (2024-01-11) + +### Chore + +* chore(release): bump version to v0.13.2 ([`bef118f`](https://github.com/supabase-community/postgrest-py/commit/bef118f164fe31b2f372436339807867a7d4c648)) + +### Fix + +* fix: add missing RPCFilterRequestBuilder and MaybeSingleRequestBuilder exports ([`3ab20e4`](https://github.com/supabase-community/postgrest-py/commit/3ab20e4682a16b31b414fa7e5f2e1a565828f60e)) + +## v0.13.1 (2024-01-04) + +### Chore + +* chore(release): bump version to v0.13.1 ([`9b1b44e`](https://github.com/supabase-community/postgrest-py/commit/9b1b44e31fc0ed980f6fa335f03b4156b8b113de)) + +* chore(deps): bump pydantic from 2.4.2 to 2.5.0 (#332) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.4.2 to 2.5.0. 
+- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v2.4.2...v2.5.0) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`fb0b8c2`](https://github.com/supabase-community/postgrest-py/commit/fb0b8c2590a3d53f67c84e9f52917768a13d7153)) + +* chore(deps-dev): bump pytest from 7.4.2 to 7.4.3 (#329) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.2 to 7.4.3. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.4.2...7.4.3) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`85826ea`](https://github.com/supabase-community/postgrest-py/commit/85826ea3473cc6e4c7ebe3a0b8068b89ae917101)) + +* chore(deps): bump pydantic from 2.1.1 to 2.4.2 (#314) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.1.1 to 2.4.2. +- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v2.1.1...v2.4.2) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`9579d03`](https://github.com/supabase-community/postgrest-py/commit/9579d03480e2f28ad670d205e4e89ab2a768c4c6)) + +* chore(deps-dev): bump gitpython from 3.1.35 to 3.1.37 (#320) + +Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 3.1.35 to 3.1.37. +- [Release notes](https://github.com/gitpython-developers/GitPython/releases) +- [Changelog](https://github.com/gitpython-developers/GitPython/blob/main/CHANGES) +- [Commits](https://github.com/gitpython-developers/GitPython/compare/3.1.35...3.1.37) + +--- +updated-dependencies: +- dependency-name: gitpython + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`0e6e8b0`](https://github.com/supabase-community/postgrest-py/commit/0e6e8b0f0be6564a5a0a7fd4b86cd107c1f439d2)) + +* chore(deps-dev): bump urllib3 from 2.0.4 to 2.0.7 (#324) + +Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.0.4 to 2.0.7. +- [Release notes](https://github.com/urllib3/urllib3/releases) +- [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) +- [Commits](https://github.com/urllib3/urllib3/compare/2.0.4...2.0.7) + +--- +updated-dependencies: +- dependency-name: urllib3 + dependency-type: indirect +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`ffa552f`](https://github.com/supabase-community/postgrest-py/commit/ffa552fa18a87c21de5b1b4ec7b54f27e99179d5)) + +* chore(deps-dev): bump python-semantic-release from 8.1.1 to 8.3.0 (#327) + +Bumps [python-semantic-release](https://github.com/python-semantic-release/python-semantic-release) from 8.1.1 to 8.3.0. +- [Release notes](https://github.com/python-semantic-release/python-semantic-release/releases) +- [Changelog](https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/python-semantic-release/python-semantic-release/compare/v8.1.1...v8.3.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`3f2eebb`](https://github.com/supabase-community/postgrest-py/commit/3f2eebb729d65648b81676be4932a1635aaba70a)) + +* chore(deps-dev): bump black from 23.10.0 to 23.10.1 (#328) + +Bumps [black](https://github.com/psf/black) from 23.10.0 to 23.10.1. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/23.10.0...23.10.1) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`32d3abf`](https://github.com/supabase-community/postgrest-py/commit/32d3abfe34f3b895c9b88cc3b423101535cafe24)) + +### Fix + +* fix: update httpx and other dev dependencies ([`bfc6714`](https://github.com/supabase-community/postgrest-py/commit/bfc6714dc05a21374b67b0c84c0029e1143b3a99)) + +## v0.13.0 (2023-10-22) + +### Chore + +* chore(release): bump version to v0.13.0 ([`f7f786b`](https://github.com/supabase-community/postgrest-py/commit/f7f786bd19194c3878adbe899213dceb67ffb29d)) + +* chore(deps-dev): bump black from 23.9.1 to 23.10.0 (#325) + +Bumps [black](https://github.com/psf/black) from 23.9.1 to 23.10.0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/23.9.1...23.10.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`193c8df`](https://github.com/supabase-community/postgrest-py/commit/193c8df842616c12e897aceb342df9db64c55264)) + +### Feature + +* feat: add offset (#326) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`7cd6751`](https://github.com/supabase-community/postgrest-py/commit/7cd67512705853f6e4488cfa34491ae97c526041)) + +## v0.12.1 (2023-10-17) + +### Chore + +* chore(release): bump version to v0.12.1 ([`e2d2f0e`](https://github.com/supabase-community/postgrest-py/commit/e2d2f0eef49d0309d8af5091712a2ea10c3d51e8)) + +* chore(deps-dev): bump pre-commit from 3.3.3 to 3.5.0 (#323) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.3.3 to 3.5.0. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.3.3...v3.5.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1209139`](https://github.com/supabase-community/postgrest-py/commit/120913914ee958087c1f65d9292a23408fbe6227)) + +* chore: add python 3.12 to CI (#319) + +* chore: add python 3.12 to CI + +* chore: update autoflake hook + +* chore: add myself to codeowners + +* fix: make doc requirements optional ([`d1ee0bb`](https://github.com/supabase-community/postgrest-py/commit/d1ee0bbaf41f357322a31987cbdb016aee372b25)) + +### Fix + +* fix: make rpc function sync (#322) ([`04f4980`](https://github.com/supabase-community/postgrest-py/commit/04f49804db614427b2545414b934b93baef91a71)) + +## v0.12.0 (2023-10-06) + +### Chore + +* chore(release): bump version to v0.12.0 ([`89b370f`](https://github.com/supabase-community/postgrest-py/commit/89b370fb1089a06d7d85dee6da37defbbaaf5a02)) + +### Feature + +* feat: add csv() modifier (#316) + +* fix: cast to correct type + +* feat: add csv() modifier + +* chore: export SingleRequestBuilder + +* chore: write tests for csv() + +* 'Refactored by Sourcery' (#317) + +Co-authored-by: Sourcery AI <> + +--------- + +Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> ([`4f6e9d9`](https://github.com/supabase-community/postgrest-py/commit/4f6e9d9a8f340dd25d47f2399218873c7b9abc01)) + +## v0.11.0 (2023-09-28) + +### Chore + +* chore(release): bump version to v0.11.0 ([`5ae0f99`](https://github.com/supabase-community/postgrest-py/commit/5ae0f99732ee416cf1a3b59ebe28937344fefd1a)) + +* chore(deps-dev): bump python-semantic-release from 7.34.6 to 8.1.1 (#311) + +Bumps [python-semantic-release](https://github.com/python-semantic-release/python-semantic-release) from 7.34.6 to 8.1.1. +- [Release notes](https://github.com/python-semantic-release/python-semantic-release/releases) +- [Changelog](https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/python-semantic-release/python-semantic-release/compare/v7.34.6...v8.1.1) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-major +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`d5a32d4`](https://github.com/supabase-community/postgrest-py/commit/d5a32d42f768d096d16d2d834775889af373dd79)) + +* chore(deps-dev): bump pytest from 7.4.0 to 7.4.2 (#304) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.0 to 7.4.2. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.4.0...7.4.2) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`488721a`](https://github.com/supabase-community/postgrest-py/commit/488721a33435aab79d8527e35ccbb9740c470395)) + +* chore(deps-dev): bump black from 23.7.0 to 23.9.1 (#303) + +Bumps [black](https://github.com/psf/black) from 23.7.0 to 23.9.1. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/23.7.0...23.9.1) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`9e99b11`](https://github.com/supabase-community/postgrest-py/commit/9e99b110984599343599e737d53906db14b46edd)) + +* chore(deps): bump furo from 2023.7.26 to 2023.9.10 (#298) + +Bumps [furo](https://github.com/pradyunsg/furo) from 2023.7.26 to 2023.9.10. +- [Release notes](https://github.com/pradyunsg/furo/releases) +- [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) +- [Commits](https://github.com/pradyunsg/furo/compare/2023.07.26...2023.09.10) + +--- +updated-dependencies: +- dependency-name: furo + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`39aa5c9`](https://github.com/supabase-community/postgrest-py/commit/39aa5c94afa39326406297998040725d2601f0a4)) + +* chore(deps): bump sphinx from 7.0.1 to 7.1.2 (#281) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 7.0.1 to 7.1.2. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v7.0.1...v7.1.2) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`c7a77a1`](https://github.com/supabase-community/postgrest-py/commit/c7a77a1a78004bc30ae54a1286245edc31a89e64)) + +* chore(deps-dev): bump gitpython from 3.1.34 to 3.1.35 (#296) + +Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 3.1.34 to 3.1.35. 
+- [Release notes](https://github.com/gitpython-developers/GitPython/releases) +- [Changelog](https://github.com/gitpython-developers/GitPython/blob/main/CHANGES) +- [Commits](https://github.com/gitpython-developers/GitPython/compare/3.1.34...3.1.35) + +--- +updated-dependencies: +- dependency-name: gitpython + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`324fa53`](https://github.com/supabase-community/postgrest-py/commit/324fa53cbde8616b4fcfdb50a6768437c00a321b)) + +* chore(deps-dev): bump gitpython from 3.1.32 to 3.1.34 (#295) + +Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 3.1.32 to 3.1.34. +- [Release notes](https://github.com/gitpython-developers/GitPython/releases) +- [Changelog](https://github.com/gitpython-developers/GitPython/blob/main/CHANGES) +- [Commits](https://github.com/gitpython-developers/GitPython/compare/3.1.32...3.1.34) + +--- +updated-dependencies: +- dependency-name: gitpython + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`ed6928a`](https://github.com/supabase-community/postgrest-py/commit/ed6928afb3b200e82cb55d36fedfe916da5e2eef)) + +### Feature + +* feat: generic query builders (#309) + +* feat: make all query builders generic + +* feat: return generic request builders from client methods + +* chore: use typing.List instead of builtin + +* chore: use typing.List + +* fix: correct type of APIResponse.data + +* feat: make RPCFilterRequestBuilder + +This makes sure the return types of rpc() and other +query methods are correct. +See https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf +for an explanation. 
+ +* chore: use typing.List + +* feat: make get_origin_and_cast + +This fixes the type-checker error raised while accessing +RequestBuilder[T].__origin__ + +* fix: use typing.List ([`ba9ad8d`](https://github.com/supabase-community/postgrest-py/commit/ba9ad8dc92778a31a25fa14218545f82b1885329)) + +* feat: update semver, add CODEOWNERS (#299) + +* Update ci.yml + +* chore: add CODEOWNERS + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`18b5838`](https://github.com/supabase-community/postgrest-py/commit/18b58383e4c5651a0e1b773af1d4d1ee04050505)) + +### Fix + +* fix: pre-commit hook to stop checks on md files (#315) + +* fix: pre-commit hook to stop checks on md files + +* fix(ci): using correct token to publish a release + +* fix: correct semantic release variable names ([`e8fbe61`](https://github.com/supabase-community/postgrest-py/commit/e8fbe61b0c2904f46461171fc35cf8cab3ea771b)) + +* fix: update upsert type (#307) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`3329234`](https://github.com/supabase-community/postgrest-py/commit/332923432c20a5898f6f27702a59abb6144676cb)) + +* fix: add semver (#297) + +* fix: add semver + +* fix: add environ and perms + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`166fa7c`](https://github.com/supabase-community/postgrest-py/commit/166fa7c24004e769eb36283682be2889a695c539)) + +* fix: maybe_single with no matching rows returns None (#289) ([`a5efce6`](https://github.com/supabase-community/postgrest-py/commit/a5efce6acd932b6a9922ccf4882ea79606f97175)) + +### Unknown + +* re-enable pydantic 1.9 (#283) ([`8d1f249`](https://github.com/supabase-community/postgrest-py/commit/8d1f249c4ed89e6ed6843647177c6ae4d3edf601)) + +## v0.10.8 (2023-08-04) + +### Chore + +* chore: bump httpx to 0.24.1 (#277) + +* fix: use new httpx parameter encoding in tests + +httpx changed how it formats query parameters in 0.24.0 - see here +https://github.com/encode/httpx/blob/master/CHANGELOG.md#0240-6th-april-2023 + +* chore: bump version + +* 'Refactored by Sourcery' (#280) + +Co-authored-by: Sourcery AI <> + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> +Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> ([`561548e`](https://github.com/supabase-community/postgrest-py/commit/561548ea4c17d89cef1777d2843176efe6ead614)) + +## v0.10.7 (2023-08-04) + +### Chore + +* chore: bump postgrest version (#279) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`abb074f`](https://github.com/supabase-community/postgrest-py/commit/abb074f4a2ca1239ff4ab17c632a648e01fada84)) + +* chore(deps): bump sphinx from 7.0.1 to 7.1.2 (#275) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 7.0.1 to 7.1.2. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v7.0.1...v7.1.2) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`398610e`](https://github.com/supabase-community/postgrest-py/commit/398610ec55d1d0ccd82a922ad5ed177361fcd189)) + +* chore(deps): bump strenum from 0.4.10 to 0.4.15 (#272) + +Bumps [strenum](https://github.com/irgeek/StrEnum) from 0.4.10 to 0.4.15. +- [Release notes](https://github.com/irgeek/StrEnum/releases) +- [Commits](https://github.com/irgeek/StrEnum/compare/v0.4.10...v0.4.15) + +--- +updated-dependencies: +- dependency-name: strenum + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`c20a7d9`](https://github.com/supabase-community/postgrest-py/commit/c20a7d95bdb1fff3e590fa65ca23172c7dae4405)) + +* chore(deps): bump sphinx from 4.3.2 to 7.0.1 (#263) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.3.2 to 7.0.1. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.3.2...v7.0.1) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`3bda953`](https://github.com/supabase-community/postgrest-py/commit/3bda9534a630600538f45ddc34d9c7eebdb19767)) + +* chore(deps): bump pydantic from 1.10.9 to 2.0.3 (#270) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.9 to 2.0.3. +- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v1.10.9...v2.0.3) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`043cf2f`](https://github.com/supabase-community/postgrest-py/commit/043cf2fa7248ca4e9ddeb5e2a1a615a154ecb6cd)) + +* chore(deps-dev): bump pre-commit from 3.2.0 to 3.3.3 (#255) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.2.0 to 3.3.3. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.2.0...v3.3.3) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`c56878f`](https://github.com/supabase-community/postgrest-py/commit/c56878f37b3be9638b403b6d35a52514c6a81f63)) + +* chore(deps-dev): bump black from 23.1.0 to 23.3.0 (#256) + +Bumps [black](https://github.com/psf/black) from 23.1.0 to 23.3.0. 
+- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/23.1.0...23.3.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`3fde0fd`](https://github.com/supabase-community/postgrest-py/commit/3fde0fd58b66c02f6a9e6c44ccfc65e40806c2ca)) + +* chore(deps): bump sphinx from 4.3.2 to 7.0.1 (#253) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.3.2 to 7.0.1. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.3.2...v7.0.1) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7e18b17`](https://github.com/supabase-community/postgrest-py/commit/7e18b17430564f1fa974803cba8cc424cf96fad1)) + +* chore(deps): bump strenum from 0.4.9 to 0.4.10 (#234) + +Bumps [strenum](https://github.com/irgeek/StrEnum) from 0.4.9 to 0.4.10. +- [Release notes](https://github.com/irgeek/StrEnum/releases) +- [Commits](https://github.com/irgeek/StrEnum/compare/v0.4.9...v0.4.10) + +--- +updated-dependencies: +- dependency-name: strenum + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1a8a2ad`](https://github.com/supabase-community/postgrest-py/commit/1a8a2ade080d2a107e7edfef01b49b73fb5e2e16)) + +* chore(deps): bump cryptography from 39.0.1 to 41.0.0 (#246) + +Bumps [cryptography](https://github.com/pyca/cryptography) from 39.0.1 to 41.0.0. +- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pyca/cryptography/compare/39.0.1...41.0.0) + +--- +updated-dependencies: +- dependency-name: cryptography + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`779d827`](https://github.com/supabase-community/postgrest-py/commit/779d827494568fa9764f20189851cb4f0b492a97)) + +* chore(deps-dev): bump pytest from 7.2.2 to 7.3.2 (#252) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.2 to 7.3.2. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.2.2...7.3.2) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`120786e`](https://github.com/supabase-community/postgrest-py/commit/120786e8d4905ae6ae3d153a11ea8d72ea79352a)) + +* chore(deps): bump furo from 2022.12.7 to 2023.5.20 (#243) + +Bumps [furo](https://github.com/pradyunsg/furo) from 2022.12.7 to 2023.5.20. +- [Release notes](https://github.com/pradyunsg/furo/releases) +- [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) +- [Commits](https://github.com/pradyunsg/furo/compare/2022.12.07...2023.05.20) + +--- +updated-dependencies: +- dependency-name: furo + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`ed7719c`](https://github.com/supabase-community/postgrest-py/commit/ed7719cdbab13c3717dcbdeaa2c91647676379e7)) + +* chore(deps-dev): bump python-semantic-release from 7.33.2 to 7.34.6 (#250) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.33.2 to 7.34.6. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.33.2...v7.34.6) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`f2931f9`](https://github.com/supabase-community/postgrest-py/commit/f2931f9aa1fba763d1371aab42ca0016af727479)) + +* chore(deps): bump pydantic from 1.10.5 to 1.10.9 (#247) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.5 to 1.10.9. +- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v1.10.5...v1.10.9) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`371c73d`](https://github.com/supabase-community/postgrest-py/commit/371c73d22e95702c367dc2afd02a05d1a5fe1374)) + +* chore(deps): bump requests from 2.28.2 to 2.31.0 (#244) + +Bumps [requests](https://github.com/psf/requests) from 2.28.2 to 2.31.0. +- [Release notes](https://github.com/psf/requests/releases) +- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) +- [Commits](https://github.com/psf/requests/compare/v2.28.2...v2.31.0) + +--- +updated-dependencies: +- dependency-name: requests + dependency-type: indirect +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`9153733`](https://github.com/supabase-community/postgrest-py/commit/9153733f6267a4e2e742d3f6d3149aac5458e871)) + +* chore(deps-dev): bump pre-commit from 3.1.0 to 3.2.0 (#235) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.1.0 to 3.2.0. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.1.0...v3.2.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`b319f64`](https://github.com/supabase-community/postgrest-py/commit/b319f64435054f91f4588adcb13e6baffa42d963)) + +* chore(deps-dev): bump pytest from 7.2.1 to 7.2.2 (#229) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.1 to 7.2.2. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.2.1...7.2.2) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`67572ba`](https://github.com/supabase-community/postgrest-py/commit/67572ba52ec1f49769b93446a2faba4c2135ffeb)) + +### Feature + +* feat: add py.typed (#258) + +* feat: add py.typed + +* fix: remove trailing line + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`39ae07d`](https://github.com/supabase-community/postgrest-py/commit/39ae07dfc3fc32bd54518676fd8444305993c2d4)) + +### Unknown + +* Migrate postgrest-py from pydantic v1 to v2. (#276) + +* Update package to pydantic 2.1 + +* Update poetry.lock + +* Specify pydantic minor version + +* isort fix + +* Update poetry lock + +* lock hash update ([`85ff406`](https://github.com/supabase-community/postgrest-py/commit/85ff4063d25ae859155fa42a152268c6cc138deb)) + +* feat explain (#241) ([`5be79ec`](https://github.com/supabase-community/postgrest-py/commit/5be79ec1499648705fb30c052c308bfdae4630a1)) + +* `maybe_single` with no matching rows returns None (#231) ([`d148298`](https://github.com/supabase-community/postgrest-py/commit/d148298195ea34c0048ea8e11b8a903f5a6f2342)) + +## v0.10.6 (2023-02-26) + +### Chore + +* chore: bump version to 0.10.6 (#225) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`167c401`](https://github.com/supabase-community/postgrest-py/commit/167c40125b179f5c698e92cfe831837cf3017d65)) + +* chore(deps-dev): bump pre-commit from 3.0.4 to 3.1.0 (#224) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.0.4 to 3.1.0. 
+- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.0.4...v3.1.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`a98883e`](https://github.com/supabase-community/postgrest-py/commit/a98883e979161a5c50095af421c1252b5c5d0370)) + +* chore(deps): bump sphinx from 5.3.0 to 6.1.3 (#221) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.3.0 to 6.1.3. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.3.0...v6.1.3) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`a2cdd3e`](https://github.com/supabase-community/postgrest-py/commit/a2cdd3ea37337db49834d73be610af90765f06e6)) + +### Unknown + +* Fix sanitize_params to correctly resolve nested columns (#222) + +* Add test for sanitize_params in utils + +* Remove dot character from sanitize_params util + +* Add tests for filter queries that include special characters in column name + +* Add missing test for equals operator ([`36a0702`](https://github.com/supabase-community/postgrest-py/commit/36a070262444832d3f43af0d82803ab3a953ac77)) + +## v0.10.5 (2023-02-19) + +### Chore + +* chore: bump version (#220) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`ea579fd`](https://github.com/supabase-community/postgrest-py/commit/ea579fd782e7d4ef13820356c8cc7fba0a4bec86)) + +* chore(deps): bump furo from 2022.9.29 to 2022.12.7 (#216) + +Bumps [furo](https://github.com/pradyunsg/furo) from 2022.9.29 to 2022.12.7. +- [Release notes](https://github.com/pradyunsg/furo/releases) +- [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) +- [Commits](https://github.com/pradyunsg/furo/compare/2022.09.29...2022.12.07) + +--- +updated-dependencies: +- dependency-name: furo + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`c1fb15f`](https://github.com/supabase-community/postgrest-py/commit/c1fb15ff353ea0979218fba22221a9ac6c502c8b)) + +* chore(deps): bump pydantic from 1.10.4 to 1.10.5 (#217) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.4 to 1.10.5. +- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/v1.10.5/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v1.10.4...v1.10.5) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-patch +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`bb3318e`](https://github.com/supabase-community/postgrest-py/commit/bb3318ee618595588268f048fa7b709c851f0e38)) + +* chore(deps): bump sphinx from 4.3.2 to 5.3.0 (#214) + +Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.3.2 to 5.3.0. +- [Release notes](https://github.com/sphinx-doc/sphinx/releases) +- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) +- [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.3.2...v5.3.0) + +--- +updated-dependencies: +- dependency-name: sphinx + dependency-type: direct:production + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`6646131`](https://github.com/supabase-community/postgrest-py/commit/6646131c727251c3aed199913d8d46ad58525bbd)) + +* chore(deps): bump cryptography from 39.0.0 to 39.0.1 (#213) + +Bumps [cryptography](https://github.com/pyca/cryptography) from 39.0.0 to 39.0.1. +- [Release notes](https://github.com/pyca/cryptography/releases) +- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pyca/cryptography/compare/39.0.0...39.0.1) + +--- +updated-dependencies: +- dependency-name: cryptography + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`08c5cf7`](https://github.com/supabase-community/postgrest-py/commit/08c5cf752e97bf50922f8e7e81563a11c3334320)) + +* chore(deps-dev): bump pre-commit from 2.21.0 to 3.0.4 (#212) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 2.21.0 to 3.0.4. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v2.21.0...v3.0.4) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`da41692`](https://github.com/supabase-community/postgrest-py/commit/da4169208c8c17e4de97cca0f4d4e058c888c866)) + +* chore(deps-dev): bump isort from 5.11.5 to 5.12.0 (#211) + +Bumps [isort](https://github.com/pycqa/isort) from 5.11.5 to 5.12.0. +- [Release notes](https://github.com/pycqa/isort/releases) +- [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pycqa/isort/compare/5.11.5...5.12.0) + +--- +updated-dependencies: +- dependency-name: isort + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`f10e569`](https://github.com/supabase-community/postgrest-py/commit/f10e569d67dc73cfdadf8101940c00170a50694e)) + +* chore(deps-dev): bump black from 22.12.0 to 23.1.0 (#208) + +Bumps [black](https://github.com/psf/black) from 22.12.0 to 23.1.0. 
+- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/22.12.0...23.1.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`261f8ce`](https://github.com/supabase-community/postgrest-py/commit/261f8ce315279bc8166026b0bc3452c99f3273a4)) + +* chore: update pre-commit (#209) + +* chore: update pre-commit + +* fix: convert to string + +* fix: drop py37 + +* 'Refactored by Sourcery' (#210) + +Co-authored-by: Sourcery AI <> + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> +Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> ([`4f451a3`](https://github.com/supabase-community/postgrest-py/commit/4f451a39fb03924fa499a82b9cf8403911e9fb35)) + +* chore: bump version (#197) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`0fa4e4e`](https://github.com/supabase-community/postgrest-py/commit/0fa4e4eeaa76dd7575b75d36ec7a04c04b4c0917)) + +* chore: bump ci poetry version (#186) + +* chore: bump poetry lock + +* fix: bump ci poetry version + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`95db586`](https://github.com/supabase-community/postgrest-py/commit/95db5866b281e509541c75f66be55d8645a38500)) + +* chore(deps-dev): bump isort from 5.10.1 to 5.11.4 (#180) + +Bumps [isort](https://github.com/pycqa/isort) from 5.10.1 to 5.11.4. +- [Release notes](https://github.com/pycqa/isort/releases) +- [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pycqa/isort/compare/5.10.1...5.11.4) + +--- +updated-dependencies: +- dependency-name: isort + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`4afd352`](https://github.com/supabase-community/postgrest-py/commit/4afd352cfa16e5d5288f0678275996bd1422e3eb)) + +* chore(deps): bump wheel from 0.37.1 to 0.38.1 (#173) + +Bumps [wheel](https://github.com/pypa/wheel) from 0.37.1 to 0.38.1. +- [Release notes](https://github.com/pypa/wheel/releases) +- [Changelog](https://github.com/pypa/wheel/blob/main/docs/news.rst) +- [Commits](https://github.com/pypa/wheel/compare/0.37.1...0.38.1) + +--- +updated-dependencies: +- dependency-name: wheel + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1448861`](https://github.com/supabase-community/postgrest-py/commit/1448861c3e65fe9dd01f818cff6469257105b938)) + +* chore(deps): bump certifi from 2022.9.24 to 2022.12.7 (#171) + +Bumps [certifi](https://github.com/certifi/python-certifi) from 2022.9.24 to 2022.12.7. 
+- [Release notes](https://github.com/certifi/python-certifi/releases) +- [Commits](https://github.com/certifi/python-certifi/compare/2022.09.24...2022.12.07) + +--- +updated-dependencies: +- dependency-name: certifi + dependency-type: indirect +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`745be97`](https://github.com/supabase-community/postgrest-py/commit/745be979676f82c9e941f1c7814cec4994b99873)) + +* chore(deps): bump httpx from 0.23.0 to 0.23.3 (#175) + +Bumps [httpx](https://github.com/encode/httpx) from 0.23.0 to 0.23.3. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.23.0...0.23.3) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`9d3a3d5`](https://github.com/supabase-community/postgrest-py/commit/9d3a3d5819a07c9d6dfd47dcad0a67e6f1899672)) + +### Feature + +* feat: add text_search (#215) + +* feat: add text_search + +* fix: run pre-commit hooks + +* test: add tests for text search + +* fix: run black + +* fix: update poetry deps + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`0d16b47`](https://github.com/supabase-community/postgrest-py/commit/0d16b47d90a55544e3c296bf48b7efbec71e3e42)) + +* feat: upsert with on-conflict support (#142) + +* feat: upsert with on-conflict support + +* fix: lint + +* Update postgrest/base_request_builder.py + +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> + +* chore: docs + +* chore: docs + +--------- + +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> ([`ecc6e79`](https://github.com/supabase-community/postgrest-py/commit/ecc6e796b94e1995362823a124e3a34918cec46f)) + +* feat: add support for 3.11 (#188) + +* fix: add StrEnum + +* fix: add 3.11 to ci + +* fix: run black and autoflake + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`08f156a`](https://github.com/supabase-community/postgrest-py/commit/08f156a74c82b69137de57160077a456a2e9f598)) + +### Fix + +* fix: handle Py311 Validation errors (#219) + +* fix: handle NoneType response + +* fix: add default error message for non JSONDecodable objects + +* fix: rename message -> details + +* fix: handle exception instead of checking for no content + +--------- + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`29cb042`](https://github.com/supabase-community/postgrest-py/commit/29cb0425ab6b2d36e25284a4ca00777636d6c2eb)) + +* fix: update types for insert (#187) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`228fe53`](https://github.com/supabase-community/postgrest-py/commit/228fe53009df3c46d8568e72b811e2013145e834)) + +### Unknown + +* Update README.md ([`7e87364`](https://github.com/supabase-community/postgrest-py/commit/7e873646da5b37279708c651fd8bcb759ff658e9)) + +* Implementation of `maybe_single` (#118) + +* add initial implementation on maybe_single + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* add sync maybe_single and fix error implementation + 
+Signed-off-by: Bariq <bariqhibat@gmail.com> + +* use relative import + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* implement new design for sync method + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* remove error from APIResponse + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* shift changes to async part + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* change class design to factory pattern + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* black and isort + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix: CI errors + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix tests and add additional test + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix new test + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* revamp class design + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix CI test + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix CI test 2 + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix unasync error and add typing + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* make tests for new methods + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* generate code and test for sync + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix docstring + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix docstring and remove unwanted changes + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* fix tests on CI + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +* remove single ok tests + +Signed-off-by: Bariq <bariqhibat@gmail.com> + +Signed-off-by: Bariq <bariqhibat@gmail.com> ([`5d17f81`](https://github.com/supabase-community/postgrest-py/commit/5d17f81054d9b753c117b342528ab41cc8b7f9f7)) + +## v0.10.3 (2022-10-11) + +### Chore + +* chore(deps-dev): bump flake8 from 4.0.1 to 5.0.4 (#157) + +Bumps [flake8](https://github.com/pycqa/flake8) from 4.0.1 to 5.0.4. +- [Release notes](https://github.com/pycqa/flake8/releases) +- [Commits](https://github.com/pycqa/flake8/compare/4.0.1...5.0.4) + +--- +updated-dependencies: +- dependency-name: flake8 + dependency-type: direct:development + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`28145b1`](https://github.com/supabase-community/postgrest-py/commit/28145b179936a92fb2e01bcf7f0e8c3be18c5b66)) + +* chore(deps): bump pydantic from 1.9.1 to 1.10.2 (#159) + +Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.9.1 to 1.10.2. +- [Release notes](https://github.com/pydantic/pydantic/releases) +- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) +- [Commits](https://github.com/pydantic/pydantic/compare/v1.9.1...v1.10.2) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`8dec08f`](https://github.com/supabase-community/postgrest-py/commit/8dec08f064c1a41de7845775a3b8952c76d3a39b)) + +* chore(deps-dev): bump pytest from 7.1.2 to 7.1.3 (#158) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.2 to 7.1.3. 
+- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.1.2...7.1.3) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`56ddf21`](https://github.com/supabase-community/postgrest-py/commit/56ddf2103a69b1be7a61f6899e977f8ad37546cd)) + +* chore(deps-dev): bump pytest-cov from 3.0.0 to 4.0.0 (#156) + +Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 3.0.0 to 4.0.0. +- [Release notes](https://github.com/pytest-dev/pytest-cov/releases) +- [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest-cov/compare/v3.0.0...v4.0.0) + +--- +updated-dependencies: +- dependency-name: pytest-cov + dependency-type: direct:development + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1e47d6d`](https://github.com/supabase-community/postgrest-py/commit/1e47d6dbc129b7b240f6230a34c3428f2542d770)) + +* chore(deps-dev): bump black from 22.3.0 to 22.10.0 (#155) + +Bumps [black](https://github.com/psf/black) from 22.3.0 to 22.10.0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/22.3.0...22.10.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7636691`](https://github.com/supabase-community/postgrest-py/commit/763669140cbf3eb53f1974401b1bf81339e20f92)) + +* chore(deps-dev): bump python-semantic-release from 7.28.1 to 7.32.1 (#154) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.28.1 to 7.32.1. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.28.1...v7.32.1) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`4eb490a`](https://github.com/supabase-community/postgrest-py/commit/4eb490a856e4d2f0fd36814d5d8c8b39d9d5483c)) + +* chore(deps-dev): bump pre-commit from 2.19.0 to 2.20.0 (#138) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 2.19.0 to 2.20.0. 
+- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v2.19.0...v2.20.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`6d948a7`](https://github.com/supabase-community/postgrest-py/commit/6d948a76e6b38b7265fe51904100a094b59f00af)) + +* chore(deps): bump furo from 2022.6.4.1 to 2022.9.15 (#152) + +Bumps [furo](https://github.com/pradyunsg/furo) from 2022.6.4.1 to 2022.9.15. +- [Release notes](https://github.com/pradyunsg/furo/releases) +- [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) +- [Commits](https://github.com/pradyunsg/furo/compare/2022.06.04.1...2022.09.15) + +--- +updated-dependencies: +- dependency-name: furo + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Signed-off-by: dependabot[bot] <support@github.com> +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`d1e50a2`](https://github.com/supabase-community/postgrest-py/commit/d1e50a2a83e55b2895fd4de19b0da11c34de09d8)) + +* chore(deps): bump furo from 2022.4.7 to 2022.6.4.1 (#130) + +Bumps [furo](https://github.com/pradyunsg/furo) from 2022.4.7 to 2022.6.4.1. +- [Release notes](https://github.com/pradyunsg/furo/releases) +- [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) +- [Commits](https://github.com/pradyunsg/furo/compare/2022.04.07...2022.06.04.1) + +--- +updated-dependencies: +- dependency-name: furo + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`09142ab`](https://github.com/supabase-community/postgrest-py/commit/09142ab374f51a5ff7b339f071fc8d369c14f144)) + +* chore(deps): bump httpx from 0.22.0 to 0.23.0 (#127) + +Bumps [httpx](https://github.com/encode/httpx) from 0.22.0 to 0.23.0. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.22.0...0.23.0) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`0daf278`](https://github.com/supabase-community/postgrest-py/commit/0daf278b290d63565443737f6e1939b0fb040e08)) + +* chore(deps): bump pydantic from 1.9.0 to 1.9.1 (#126) + +Bumps [pydantic](https://github.com/samuelcolvin/pydantic) from 1.9.0 to 1.9.1. 
+- [Release notes](https://github.com/samuelcolvin/pydantic/releases) +- [Changelog](https://github.com/samuelcolvin/pydantic/blob/v1.9.1/HISTORY.md) +- [Commits](https://github.com/samuelcolvin/pydantic/compare/v1.9.0...v1.9.1) + +--- +updated-dependencies: +- dependency-name: pydantic + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`e13a6e0`](https://github.com/supabase-community/postgrest-py/commit/e13a6e0ea4b9761fde2d66e343a4b527761748f5)) + +* chore(deps-dev): bump pre-commit from 2.18.1 to 2.19.0 (#124) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 2.18.1 to 2.19.0. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v2.18.1...v2.19.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`5c0c128`](https://github.com/supabase-community/postgrest-py/commit/5c0c12874576728e9ffc13e97435d6fb0a4fbf76)) + +* chore(deps-dev): bump pytest from 7.1.1 to 7.1.2 (#117) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.1 to 7.1.2. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.1.1...7.1.2) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`36b7328`](https://github.com/supabase-community/postgrest-py/commit/36b732893710d3c5440a889d3027feb5a0ef2ad2)) + +### Fix + +* fix: update version (#160) + +Co-authored-by: joel@joellee.org <joel@joellee.org> ([`c1105dc`](https://github.com/supabase-community/postgrest-py/commit/c1105dc33d99d034fad0d9081ee59796ab990441)) + +### Unknown + +* limit and order on foreign tables (#120) + +* limit and order on foreign tables + +* Apply suggestions from code review + +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> + +* Updated docstrings for order and limit + +* Changed limit modifier to use limit param instead of range headers + +Co-authored-by: privaterepo <hauntedanon420@gmail.com> +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> ([`bf27b85`](https://github.com/supabase-community/postgrest-py/commit/bf27b850367a8126d8262fc5921e2e4f57bc6d60)) + +## v0.10.2 (2022-04-18) + +### Chore + +* chore(release): bump version to v0.10.2 + +Automatically generated by python-semantic-release ([`f30e688`](https://github.com/supabase-community/postgrest-py/commit/f30e6880f3a6dd125557aa67a631ef56120605f4)) + +* chore(deps-dev): bump python-semantic-release from 7.28.0 to 7.28.1 (#115) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.28.0 to 7.28.1. 
+- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.28.0...v7.28.1) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`29e91a2`](https://github.com/supabase-community/postgrest-py/commit/29e91a2123d1363963c1ba50c87fce38f0bef263)) + +* chore(deps-dev): bump pre-commit from 2.17.0 to 2.18.1 (#110) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 2.17.0 to 2.18.1. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v2.17.0...v2.18.1) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`c4fbd29`](https://github.com/supabase-community/postgrest-py/commit/c4fbd29f84fbb1baa806cfb9018a368954d4d91d)) + +* chore(deps-dev): bump python-semantic-release from 7.27.0 to 7.28.0 (#113) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.27.0 to 7.28.0. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.27.0...v7.28.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`89177f2`](https://github.com/supabase-community/postgrest-py/commit/89177f26e2f6d3b98408c61a91ed37b1c3a3e0cc)) + +### Documentation + +* docs: remove rtd config file + +this seems to conflict with the config set in the dashboard; we can add this back later if we need more fine-grained control ([`14100a1`](https://github.com/supabase-community/postgrest-py/commit/14100a15a7d9c526df3e504a676d2d1018be3e04)) + +### Fix + +* fix: include source directory name (#116) + +Poetry by default looks for a directory with the same name as the +project as the source directory. However as our project is named +postgrest-py, but we migrated to the postgrest namespace, we need to +explicitly tell poetry where to look for the source code. ([`18334f8`](https://github.com/supabase-community/postgrest-py/commit/18334f880d5e4e769a9e843007bc2f46b597a777)) + +### Unknown + +* Namespace change (#114) + +* docs: add rtd config + +* chore: move to the postgrest namespace + +* chore: move constants to its own file + +* chore: pass headers/params down builders + +We were earlier modifying session.headers/session.params for every +query. 
Instead of this we follow what postgrest-js does and add +headers and params as arguments to the query builders, and pass them +down the chain of builders, and finally pass it to the execute method. + +* docs: add examples + +* fix: order of filters in examples + +* docs: add example for closing the client ([`6493154`](https://github.com/supabase-community/postgrest-py/commit/64931544f4d2c8a8bbfb5e133c7e5b761ad5a10a)) + +* Add documentation (#111) + +* deps: add furo + +* docs: document public classes + +* docs: setup sphinx + furo + +* docs: fix bullet point + +* fix: remove test file + +* tests: check if params purged after execute + +* fix: remove the `asyncio` mark from sync tests + +* docs: add project version + +* docs: add rtd config ([`442a45a`](https://github.com/supabase-community/postgrest-py/commit/442a45a5638253888d7675f3c664e01c1e61d7d3)) + +## v0.10.1 (2022-04-07) + +### Chore + +* chore(release): bump version to v0.10.1 + +Automatically generated by python-semantic-release ([`9ddc6f5`](https://github.com/supabase-community/postgrest-py/commit/9ddc6f5e186c7670db417bafdb2f7bc8a5610c4f)) + +* chore(deps-dev): bump black from 22.1.0 to 22.3.0 (#107) + +Bumps [black](https://github.com/psf/black) from 22.1.0 to 22.3.0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/compare/22.1.0...22.3.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`05d2e01`](https://github.com/supabase-community/postgrest-py/commit/05d2e01f7a60e8141f5a37ea9e47b44eb2f653a6)) + +* chore(deps-dev): bump pytest-asyncio from 0.18.2 to 0.18.3 (#106) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.18.2 to 0.18.3. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Changelog](https://github.com/pytest-dev/pytest-asyncio/blob/master/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.18.2...v0.18.3) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`cf0d3ac`](https://github.com/supabase-community/postgrest-py/commit/cf0d3acfe3e02fde914fa034f21ebf34c28db254)) + +* chore(deps-dev): bump pytest from 7.1.0 to 7.1.1 (#105) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.0 to 7.1.1. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.1.0...7.1.1) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`f8ad56e`](https://github.com/supabase-community/postgrest-py/commit/f8ad56e2083b925773a06331579f3cfff7182185)) + +* chore(deps-dev): bump python-semantic-release from 7.26.0 to 7.27.0 (#104) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.26.0 to 7.27.0. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.26.0...v7.27.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`36b861c`](https://github.com/supabase-community/postgrest-py/commit/36b861cedaf8a381291a9add47e14eca7db6d38d)) + +* chore(deps-dev): bump pytest from 7.0.1 to 7.1.0 (#103) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.0.1 to 7.1.0. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.0.1...7.1.0) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`8abde61`](https://github.com/supabase-community/postgrest-py/commit/8abde6117f62dd920bb9c7a690fb856cadfd9273)) + +### Fix + +* fix: escape chars only when necessary (#108) + +* fix: escape chars only when necessary + +* fix: escape column names + +* deps: upgrade black in pre-commit ([`53f7d18`](https://github.com/supabase-community/postgrest-py/commit/53f7d18807aa292aa7326af573bd55828a3bb6e4)) + +## v0.10.0 (2022-03-13) + +### Chore + +* chore(release): bump version to v0.10.0 + +Automatically generated by python-semantic-release ([`cbbdf5c`](https://github.com/supabase-community/postgrest-py/commit/cbbdf5cb6e6ad9380242b6b4fa6ff29867fe6e03)) + +### Feature + +* feat: add .contains and .contained_by operators to match JS client (#100) + +* Add .contains and .contained_by operators to match JS client + +* Fix whitespace + +* Add tests + +* Describe percent-encoded strings ([`7189e09`](https://github.com/supabase-community/postgrest-py/commit/7189e095bd792fcbc5b89e4f03ef7174e1dd30b7)) + +## v0.9.2 (2022-03-12) + +### Chore + +* chore(release): bump version to v0.9.2 + +Automatically generated by python-semantic-release ([`7d156b3`](https://github.com/supabase-community/postgrest-py/commit/7d156b33d8dd78e45ad0c727e5c5e4bd9c89b1e3)) + +* chore(deps-dev): bump python-semantic-release from 7.25.2 to 7.26.0 (#98) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.25.2 to 7.26.0. 
+- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.25.2...v7.26.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`796687d`](https://github.com/supabase-community/postgrest-py/commit/796687d5f5fcf72c7b4f824f6d1ce3b255232c22)) + +### Fix + +* fix: make api error properties optionals (#101) + +For avoid linter error ([`eb92326`](https://github.com/supabase-community/postgrest-py/commit/eb92326db0088fbf2d96bb68b206160b03e63747)) + +## v0.9.1 (2022-03-08) + +### Chore + +* chore(release): bump version to v0.9.1 + +Automatically generated by python-semantic-release ([`d4204ef`](https://github.com/supabase-community/postgrest-py/commit/d4204ef4dc33a9fccf8684dc13c90df3e843c1c9)) + +* chore(deps-dev): bump pytest-asyncio from 0.18.1 to 0.18.2 (#96) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.18.1 to 0.18.2. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.18.1...v0.18.2) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`8cf3de1`](https://github.com/supabase-community/postgrest-py/commit/8cf3de18b787d5c2407be5264f981381d747ea8b)) + +* chore(deps-dev): bump python-semantic-release from 7.25.1 to 7.25.2 (#95) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.25.1 to 7.25.2. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.25.1...v7.25.2) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`392845d`](https://github.com/supabase-community/postgrest-py/commit/392845d698570d48f8412f7c7396c41d0987d5cc)) + +* chore(deps-dev): bump python-semantic-release from 7.25.0 to 7.25.1 (#93) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.25.0 to 7.25.1. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.25.0...v7.25.1) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-patch +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`a7cee63`](https://github.com/supabase-community/postgrest-py/commit/a7cee6309a37f310fd6cf2079ec2142a05db71b6)) + +### Fix + +* fix: fix APIError (#97) ([`ff29024`](https://github.com/supabase-community/postgrest-py/commit/ff290240cf9364902ffca19e854604d6a40770f9)) + +## v0.9.0 (2022-02-19) + +### Chore + +* chore(release): bump version to v0.9.0 + +Automatically generated by python-semantic-release ([`032fc5e`](https://github.com/supabase-community/postgrest-py/commit/032fc5ef89e16bc42eaf7c4dff335930394448a2)) + +### Feature + +* feat: export APIError and APIResponse ([`83e7799`](https://github.com/supabase-community/postgrest-py/commit/83e77991101c8e8aec42552344b02ce8db6bd04a)) + +### Unknown + +* Export APIResponse and APIError (#92) + +* Export APIResponse and APIError + +* Reorder imports ([`b237d62`](https://github.com/supabase-community/postgrest-py/commit/b237d62eaa825e72b9069b0a6cc40c6da58f0ab4)) + +* Bump python-semantic-release from 7.24.0 to 7.25.0 (#91) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.24.0 to 7.25.0. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.24.0...v7.25.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`3a9419c`](https://github.com/supabase-community/postgrest-py/commit/3a9419c212b5892a03b67969d789981a83352e5a)) + +* Bump pytest from 7.0.0 to 7.0.1 (#90) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.0.0 to 7.0.1. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/7.0.0...7.0.1) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1b6c6a5`](https://github.com/supabase-community/postgrest-py/commit/1b6c6a574796dc14b67774de54d9c1bd67dc09d4)) + +* Bump pytest-asyncio from 0.17.2 to 0.18.1 (#89) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.17.2 to 0.18.1. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.17.2...v0.18.1) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`eda1892`](https://github.com/supabase-community/postgrest-py/commit/eda189204895c26d336d5f68900c05acfefa3c33)) + +* Bump pytest from 6.2.5 to 7.0.0 (#87) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 6.2.5 to 7.0.0. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/6.2.5...7.0.0) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-major +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`f12ffa5`](https://github.com/supabase-community/postgrest-py/commit/f12ffa5d0e6b24099a5bed89fb1976edf8b05a5c)) + +* Bump black from 21.12b0 to 22.1.0 (#85) + +Bumps [black](https://github.com/psf/black) from 21.12b0 to 22.1.0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/commits/22.1.0) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7bf8b47`](https://github.com/supabase-community/postgrest-py/commit/7bf8b47a0b9be3adb3305488f07a4afebe65b141)) + +## v0.8.2 (2022-01-30) + +### Chore + +* chore(release): bump version to v0.8.2 + +Automatically generated by python-semantic-release ([`34fd1bd`](https://github.com/supabase-community/postgrest-py/commit/34fd1bda6893782a955340e39bcdba6633034a69)) + +### Fix + +* fix: Add-response-model ([`4c0259d`](https://github.com/supabase-community/postgrest-py/commit/4c0259d1658c07bf3e78fe03d98b304f7a6f0c7a)) + +### Unknown + +* Add-response-model (#64) + +* add poetry dependency + +* create APIResponse model + +* return APIResponse model in execute method + +* sort imports + +* mypy bug workaround (https://github.com/python/mypy/issues/9319) + +* split logic, validate error existance and better type APIResponse + +* Implement APIError + +* add missing black config in pre-commit config + +* type APIError properties + +* fix: rm unused code and use returning param in update + +* refactor: reorder lines + +* chore: rebuild sync + +* chore: rebuild poetry.lock + +* fix: remove wrong parameter + +* chore: format + +* Chore: add missing return types + +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> + +* chore: replace builtin dict by Dict to support python < 3.9 + +* chore: update precommit hooks + +* chore: apply format + +* update return type in execute method + +* use relative import + +* add link to mypy issue + +* switch super init by class init to avoid future errors + +* chore: apply future annotations notation to return + +* chore: rebuild sync + +* tests: Add tests for response model (#74) + +* initial commit + +* tests: add fixtures for APIResponse + +* tests: [WIP] Test methods that don't interact with RequestResponse + +* tests: replace builtin type by typing type and add type annotations + +* tests: add requests Response fixtures + +* chore: change return order to improve 
readability + +* tests: add tests for left methods + +Co-authored-by: Joel Lee <joel@joellee.org> +Co-authored-by: Dani Reinón <dani@dribo.es> + +* chore: modify ValueError with ValidationError + +* chore: add "_" to internal methods + +Co-authored-by: Anand <40204976+anand2312@users.noreply.github.com> +Co-authored-by: Lee Yi Jie Joel <lee.yi.jie.joel@gmail.com> +Co-authored-by: Joel Lee <joel@joellee.org> ([`07ef4d4`](https://github.com/supabase-community/postgrest-py/commit/07ef4d4c03f014207ec1707786e601aa7f21b97d)) + +* Bump httpx from 0.21.3 to 0.22.0 (#84) + +Bumps [httpx](https://github.com/encode/httpx) from 0.21.3 to 0.22.0. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.21.3...0.22.0) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`e190621`](https://github.com/supabase-community/postgrest-py/commit/e1906211c42bc6e3f7918f5e0c4bf342690f64d8)) + +* Bump python-semantic-release from 7.23.0 to 7.24.0 (#82) + +Bumps [python-semantic-release](https://github.com/relekang/python-semantic-release) from 7.23.0 to 7.24.0. +- [Release notes](https://github.com/relekang/python-semantic-release/releases) +- [Changelog](https://github.com/relekang/python-semantic-release/blob/master/CHANGELOG.md) +- [Commits](https://github.com/relekang/python-semantic-release/compare/v7.23.0...v7.24.0) + +--- +updated-dependencies: +- dependency-name: python-semantic-release + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`0cefdc7`](https://github.com/supabase-community/postgrest-py/commit/0cefdc7895319ab0fdba25662d62fc54bcaffc7e)) + +## v0.8.1 (2022-01-22) + +### Chore + +* chore(release): bump version to v0.8.1 + +Automatically generated by python-semantic-release ([`1560d8f`](https://github.com/supabase-community/postgrest-py/commit/1560d8f27b7a9466da834f60b309b26e8b897d27)) + +* chore: set upload_to_repository to true ([`c65fe95`](https://github.com/supabase-community/postgrest-py/commit/c65fe9553dcc2b42f404d0cb350eb4b704cdf59c)) + +### Fix + +* fix: order filter ([`094dbad`](https://github.com/supabase-community/postgrest-py/commit/094dbadb26bef4238536579ede71d46a4ef67899)) + +### Unknown + +* Bump pre-commit from 2.16.0 to 2.17.0 (#79) + +Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 2.16.0 to 2.17.0. +- [Release notes](https://github.com/pre-commit/pre-commit/releases) +- [Changelog](https://github.com/pre-commit/pre-commit/blob/master/CHANGELOG.md) +- [Commits](https://github.com/pre-commit/pre-commit/compare/v2.16.0...v2.17.0) + +--- +updated-dependencies: +- dependency-name: pre-commit + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`542bd95`](https://github.com/supabase-community/postgrest-py/commit/542bd95ae84f5522cde9ee2ed286de901198c02e)) + +* Bump pytest-asyncio from 0.17.1 to 0.17.2 (#77) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.17.1 to 0.17.2. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.17.1...v0.17.2) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`5150c17`](https://github.com/supabase-community/postgrest-py/commit/5150c17ef80ca75b164f91ca7e1d61f38eaf271d)) + +* Bump pytest-asyncio from 0.17.0 to 0.17.1 (#76) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.17.0 to 0.17.1. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.17.0...v0.17.1) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`1d80268`](https://github.com/supabase-community/postgrest-py/commit/1d80268f62b3e196f4a56a1930d34644b5a3d1e8)) + +## v0.8.0 (2022-01-16) + +### Chore + +* chore(release): bump version to v0.8.0 + +Automatically generated by python-semantic-release ([`828de1a`](https://github.com/supabase-community/postgrest-py/commit/828de1a6ee5564492469f4f09717e2993e4e2776)) + +* chore: filter deploy section by repo owner (#69) + +* fix: interpolations erros and other things reported by sourcery-ai + +* chore: filter deploy section by repo owner ([`82820e4`](https://github.com/supabase-community/postgrest-py/commit/82820e45d84b511a55ddc5115b1c7f7b2a95264a)) + +* chore: add ignore md rules to dev container and fix changelog (#67) + +* fix: interpolations erros and other things reported by sourcery-ai + +* chore: add ignore md rules to dev container and fix changelog ([`19c949d`](https://github.com/supabase-community/postgrest-py/commit/19c949d1757763ecfb299932e75fec33b0920c71)) + +### Feature + +* feat: add timeout as a parameter of clients (#75) + +* feat: add timeout as a parameter of clients + +This feature is for evicting the use of the default timeout of httpx. + +* feat: use union and constants default value for timeout ([`1ea965a`](https://github.com/supabase-community/postgrest-py/commit/1ea965a6cb32dacb5f41cd1198f8a970a24731b6)) + +### Unknown + +* Bump pytest-asyncio from 0.16.0 to 0.17.0 (#73) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.16.0 to 0.17.0. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.16.0...v0.17.0) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7897d97`](https://github.com/supabase-community/postgrest-py/commit/7897d97bde394a16b8de9c76f1e57813bfc32daf)) + +* Bump httpx from 0.21.2 to 0.21.3 (#71) + +Bumps [httpx](https://github.com/encode/httpx) from 0.21.2 to 0.21.3. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.21.2...0.21.3) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`6d54ba4`](https://github.com/supabase-community/postgrest-py/commit/6d54ba477d19ff4badcdcd1c0746a1e26166c01b)) + +* Bump httpx from 0.21.1 to 0.21.2 (#70) + +Bumps [httpx](https://github.com/encode/httpx) from 0.21.1 to 0.21.2. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.21.1...0.21.2) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`cc9bd9e`](https://github.com/supabase-community/postgrest-py/commit/cc9bd9e766c53c96235af155489e380a64acbd67)) + +* Fix codecov badge (#68) ([`5697d8e`](https://github.com/supabase-community/postgrest-py/commit/5697d8e677f48044a963b3bc44df757d4a3219d1)) + +## v0.7.1 (2022-01-04) + +### Chore + +* chore(release): bump version to v0.7.1 + +Automatically generated by python-semantic-release ([`c80c3ff`](https://github.com/supabase-community/postgrest-py/commit/c80c3ff377852380aa78ce190eb98f86f0699075)) + +### Performance + +* perf: sync configurations with gotrue-py (#66) + +* fix: interpolations errors and other things reported by sourcery-ai + +* perf: sync configurations with gotrue-py + +* fix: warning of precommits rules ([`d5a97da`](https://github.com/supabase-community/postgrest-py/commit/d5a97daad42a431b2d36f16e3969b38b9dded288)) + +### Unknown + +* add poetry local config to gitignore (#63) ([`031cb5f`](https://github.com/supabase-community/postgrest-py/commit/031cb5f4863a25b5be87f047133d293956377d46)) + +* delete poetry.toml file from repo (#62) ([`8b04ae0`](https://github.com/supabase-community/postgrest-py/commit/8b04ae07047ec6a33e36d84b37c99ac9fb07834f)) + +* fix-sanitize_param-double-quote-error (#61) + +* replace utf-8 character by character + +* avoid escaping characters by using single quotes + +* fix tests ([`0eb871a`](https://github.com/supabase-community/postgrest-py/commit/0eb871a53b91bca5edc68f9cc3ba67e83e7ae0a0)) + +## v0.7.0 (2022-01-02) + +### Chore + +* chore: bump version to v0.7.0 (#60) + +* fix: interpolations erros and other things reported by sourcery-ai + +* chore: bump version to v0.7.0 ([`a936820`](https://github.com/supabase-community/postgrest-py/commit/a93682082283f2dbaef679705fd71a5620150f90)) + +### Feature + +* feat: non str arguments to filters (#58) + +* fix: interpolations erros and other things 
reported by sourcery-ai + +* feat: non str arguments to filters ([`46802db`](https://github.com/supabase-community/postgrest-py/commit/46802db317b4313d9f0241809bcc75312404aac3)) + +* feat: add return mode like a parameter (#59) + +* fix: interpolations erros and other things reported by sourcery-ai + +* feat: add return mode like a parameter + +* chore: change constants.py by types.py ([`8728ee8`](https://github.com/supabase-community/postgrest-py/commit/8728ee8e840a453332a814d00d622d76589fb2a8)) + +### Fix + +* fix: query params are immutable when using order (#57) + +* fix: interpolations erros and other things reported by sourcery-ai + +* fix: query params are immutable when using order ([`d1254a6`](https://github.com/supabase-community/postgrest-py/commit/d1254a60697f67fbf5c837afd1fe047b3ef4ea6e)) + +* fix: params and headers of session are shared between queries (#55) + +* fix: interpolations erros and other things reported by sourcery-ai + +* fix: params and headers of session are shared between queries + +* fix: suggestion of sourcery + +* fix: suggestion of sourcery ([`b631e3b`](https://github.com/supabase-community/postgrest-py/commit/b631e3be6ae2e47477813feae85780219c6c6baf)) + +## v0.6.0 (2022-01-01) + +### Chore + +* chore: update versions (#50) + +Co-authored-by: Joel Lee <joel@joellee.org> ([`c8ba57a`](https://github.com/supabase-community/postgrest-py/commit/c8ba57af60202e5cb98a5388a11cda9954cfd75d)) + +### Feature + +* feat: implement async sync with `unasync-cli` (#30) + +Co-authored-by: Dani Reinón <dani@dribo.es> ([`b1423b5`](https://github.com/supabase-community/postgrest-py/commit/b1423b5e026399b038348a4f25914bf4bdb4e8f4)) + +### Fix + +* fix: interpolations erros and other things reported by sourcery-ai (#37) ([`2fc29b2`](https://github.com/supabase-community/postgrest-py/commit/2fc29b272323203bfa0b4f5f59ae12bde08dc530)) + +### Performance + +* perf: use inheritance to improve our code base (#47) + +* fix: interpolations erros and other things reported by sourcery-ai + +* feat: use inheritance to improve the code base + +* fix: sourcery refactored + +* chore: update pre commit rules + +* fix: remove noqa F401 comments + +* fix: remove duplicate and unused imports in base_client.py + +* feat: use enum instance literals in base_request_builder.py + +* pref: cast session only once in __init__ + +* pref: remove unnecesary cast + +* tests: update tests + +* chore: generate sync code + +* feat: add support for upsert + +* Rm cast from rpc in async client + +* Rm cast from rpc in sync client + +* Add table method as an alias for from_ + +Co-authored-by: dreinon <67071425+dreinon@users.noreply.github.com> +Co-authored-by: Dani Reinón <dani@dribo.es> ([`315f596`](https://github.com/supabase-community/postgrest-py/commit/315f596386e26974595f15f34dad930b37d08e15)) + +### Unknown + +* Bump black from 21.11b1 to 21.12b0 + +Bumps [black](https://github.com/psf/black) from 21.11b1 to 21.12b0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/commits) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> ([`7a54d58`](https://github.com/supabase-community/postgrest-py/commit/7a54d580350ef12480d9df207561bd6a70dd08d8)) + +* Bump mypy from 0.910 to 0.930 (#52) + +Bumps [mypy](https://github.com/python/mypy) from 0.910 to 0.930. +- [Release notes](https://github.com/python/mypy/releases) +- [Commits](https://github.com/python/mypy/compare/v0.910...v0.930) + +--- +updated-dependencies: +- dependency-name: mypy + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`f7217d5`](https://github.com/supabase-community/postgrest-py/commit/f7217d5bfb6ca2c2ab378555c3c3330c007d24ba)) + +* Bump black from 21.10b0 to 21.11b1 (#46) ([`3934fb2`](https://github.com/supabase-community/postgrest-py/commit/3934fb2bd7c755962fa2fe490419d3e967e3555a)) + +* Bump pre-commit from 2.15.0 to 2.16.0 (#45) ([`8788c18`](https://github.com/supabase-community/postgrest-py/commit/8788c184e0fed98aa5b613a7d68d5756372543e8)) + +* Bump httpx from 0.20.0 to 0.21.1 (#44) + +Bumps [httpx](https://github.com/encode/httpx) from 0.20.0 to 0.21.1. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.20.0...0.21.1) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`60ef2ce`](https://github.com/supabase-community/postgrest-py/commit/60ef2ce661448f3ee740e297713c213f75b60427)) + +* Revert "Add Sourcery to pre-commit (#38)" (#41) + +This reverts commit 25f23586774f5a73661c9da92d0035e667d0df2c. ([`f019aaa`](https://github.com/supabase-community/postgrest-py/commit/f019aaaafeb9a899052cd406f1900c9c0b8611ac)) + +* Add Sourcery to pre-commit (#38) ([`25f2358`](https://github.com/supabase-community/postgrest-py/commit/25f23586774f5a73661c9da92d0035e667d0df2c)) + +* Bump black from 21.7b0 to 21.10b0 (#33) + +Bumps [black](https://github.com/psf/black) from 21.7b0 to 21.10b0. +- [Release notes](https://github.com/psf/black/releases) +- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) +- [Commits](https://github.com/psf/black/commits) + +--- +updated-dependencies: +- dependency-name: black + dependency-type: direct:development +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`ba83ba4`](https://github.com/supabase-community/postgrest-py/commit/ba83ba43c6cfba906fbb710d3913e5dc070fdde3)) + +* Bump pytest-cov from 2.12.1 to 3.0.0 (#34) + +Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 2.12.1 to 3.0.0. +- [Release notes](https://github.com/pytest-dev/pytest-cov/releases) +- [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest-cov/compare/v2.12.1...v3.0.0) + +--- +updated-dependencies: +- dependency-name: pytest-cov + dependency-type: direct:development + update-type: version-update:semver-major +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`8af95a7`](https://github.com/supabase-community/postgrest-py/commit/8af95a7dacff9d40e36d2274dcce34a527595a0a)) + +* Bump pytest from 6.2.4 to 6.2.5 (#15) + +Bumps [pytest](https://github.com/pytest-dev/pytest) from 6.2.4 to 6.2.5. +- [Release notes](https://github.com/pytest-dev/pytest/releases) +- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) +- [Commits](https://github.com/pytest-dev/pytest/compare/6.2.4...6.2.5) + +--- +updated-dependencies: +- dependency-name: pytest + dependency-type: direct:development + update-type: version-update:semver-patch +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`fc0434d`](https://github.com/supabase-community/postgrest-py/commit/fc0434d067876d859410f2849cf7db6a405efd1e)) + +* Bump pytest-asyncio from 0.15.1 to 0.16.0 (#32) + +Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.15.1 to 0.16.0. +- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) +- [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.15.1...v0.16.0) + +--- +updated-dependencies: +- dependency-name: pytest-asyncio + dependency-type: direct:development + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`765046e`](https://github.com/supabase-community/postgrest-py/commit/765046ef3f17cbe5ea8ed2567b09e7254ed42d2c)) + +* Bump httpx from 0.19.0 to 0.20.0 (#31) + +Bumps [httpx](https://github.com/encode/httpx) from 0.19.0 to 0.20.0. +- [Release notes](https://github.com/encode/httpx/releases) +- [Changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) +- [Commits](https://github.com/encode/httpx/compare/0.19.0...0.20.0) + +--- +updated-dependencies: +- dependency-name: httpx + dependency-type: direct:production + update-type: version-update:semver-minor +... + +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`36408d7`](https://github.com/supabase-community/postgrest-py/commit/36408d78b2b1fe0f37495cd9a6df1b052561d647)) + +* Bump mypy from 0.902 to 0.910 (#12) + +Bumps [mypy](https://github.com/python/mypy) from 0.902 to 0.910. +- [Release notes](https://github.com/python/mypy/releases) +- [Commits](https://github.com/python/mypy/compare/v0.902...v0.910) + +--- +updated-dependencies: +- dependency-name: mypy + dependency-type: direct:development + update-type: version-update:semver-minor +... 
+ +Signed-off-by: dependabot[bot] <support@github.com> + +Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`7d5bf09`](https://github.com/supabase-community/postgrest-py/commit/7d5bf095f6a4be9b328ff1b2ab8c8b19b6c3974f)) + +* Fix 3.10 pipelines ([`457b70c`](https://github.com/supabase-community/postgrest-py/commit/457b70cf24f4db3e9c59b8d997d65cc70270c080)) + +* Add pre-commit (#28) ([`c5032b5`](https://github.com/supabase-community/postgrest-py/commit/c5032b5ae6c9f80f3801193d706f56820a98f70b)) + +* Update and reformat README.md, docker-compose.yaml ([`7188fa2`](https://github.com/supabase-community/postgrest-py/commit/7188fa2063c09a68e8b40c0db559fb9b6b89f567)) + +* Add docker-compose setup for local development (#22) ([`1cb42a1`](https://github.com/supabase-community/postgrest-py/commit/1cb42a14e8cbec9c0d4d79f5ba03b81435f91eca)) + +* Add Python 3.10 to the build matrix ([`3632d75`](https://github.com/supabase-community/postgrest-py/commit/3632d7521df70ce564a356024450c7a5e5d65fe1)) + +* Implement counting feature (#26) ([`735cefd`](https://github.com/supabase-community/postgrest-py/commit/735cefd9aa6ecea99a392c0bdfa1ef3b633a6067)) + +* Fix bug on sanitizing params (#24) ([`3e7b60e`](https://github.com/supabase-community/postgrest-py/commit/3e7b60eb645f08239d8fb37653f62fbf827e7a12)) + +* Add Code of Conduct and Contributing guide (#23) + +Co-authored-by: Joel Lee <joel@joellee.org> ([`9031297`](https://github.com/supabase-community/postgrest-py/commit/903129738dd782cad495f1c747df8dd694f32328)) + +* Add Match Command (#18) + +Co-authored-by: Joel Lee <joel@joellee.org> ([`9eadbe1`](https://github.com/supabase-community/postgrest-py/commit/9eadbe1275f8c4154ab021f600a57ec2d6f926eb)) + +## v0.5.0 (2021-09-09) + +### Unknown + +* Bump version to 0.5.0 ([`1144895`](https://github.com/supabase-community/postgrest-py/commit/11448957a18eca506ade0430b0c1d5ac554b41cb)) + +* Improve PostgrestClient.auth() (#14) ([`6321ffe`](https://github.com/supabase-community/postgrest-py/commit/6321ffeac7b3bb15e52a8881c4f527479bf885a6)) + +* Update httpx to v0.19.0 (#13) ([`71456e5`](https://github.com/supabase-community/postgrest-py/commit/71456e5382d0062168e5045f3065a69a31fc0e60)) + +* Add dependabot.yml ([`d83fa15`](https://github.com/supabase-community/postgrest-py/commit/d83fa15b8c7d53e3321213d95dbf257db98610ab)) + +* Allow setting headers in PostgrestClient's constructor (#10) ([`1737e69`](https://github.com/supabase-community/postgrest-py/commit/1737e698d1ab5c8b740acef8d506dfc56cac9ca9)) + +* Update Python workflow ([`3ffb7fb`](https://github.com/supabase-community/postgrest-py/commit/3ffb7fb20f1f8b5c9f10b8c7d08980d7bb3016dd)) + +* Upgrade dependencies ([`cf743d9`](https://github.com/supabase-community/postgrest-py/commit/cf743d95a2d045eebeff5eafc1941e1db1e18a79)) + +## v0.4.0 (2021-09-09) + +### Unknown + +* Bump version to 0.4.0 ([`7efa19c`](https://github.com/supabase-community/postgrest-py/commit/7efa19c63f1b16b87db8ace9513165b2327de66f)) + +* Revert the last 2 commits. Drop Python 3.6 support + +This reverts commit 899f75bd6a477a95eef47f2aabb8fdce7cbba200. 
([`b3e7df2`](https://github.com/supabase-community/postgrest-py/commit/b3e7df2459218a3fde775e921a0cad54755d5148)) + +* Lower minimum required Python version to 3.6 ([`a3164bd`](https://github.com/supabase-community/postgrest-py/commit/a3164bd20d65281858f257bfbd47bde75d87ad4c)) + +* Add Python 3.6 to build matrix ([`899f75b`](https://github.com/supabase-community/postgrest-py/commit/899f75bd6a477a95eef47f2aabb8fdce7cbba200)) + +* Update tests for httpx v0.16.x (#4) ([`dd90a57`](https://github.com/supabase-community/postgrest-py/commit/dd90a573d99bdce6f2b39bd660208f46c1429d0e)) + +* Upgrade httpx to v0.16.1 ([`cac7fe2`](https://github.com/supabase-community/postgrest-py/commit/cac7fe235e009430b54220c4f0ea84e7c4a0566c)) + +* Allow multivalued query parameters (#2) ([`4f588f8`](https://github.com/supabase-community/postgrest-py/commit/4f588f800b303c04f2cebafaf1576e8409aafbce)) + +* Add some tests ([`275c233`](https://github.com/supabase-community/postgrest-py/commit/275c2332a1d9ce0cd64e23ab618e9cc7b18810ca)) + +* Rename some symbols ([`71c3a33`](https://github.com/supabase-community/postgrest-py/commit/71c3a33c06d8df1bf032e17709394d5c651083ab)) + +* Code refactoring ([`8f4a702`](https://github.com/supabase-community/postgrest-py/commit/8f4a7023df6207fa90673e2ad746a3d55fcba50f)) + +* Add tests for RequestBuilder ([`4f0ed78`](https://github.com/supabase-community/postgrest-py/commit/4f0ed783d1aa7e994358dba2e835171f07d61775)) + +## v0.3.2 (2020-08-20) + +### Documentation + +* docs: adds enterprise sponsors ([`9df43d5`](https://github.com/supabase-community/postgrest-py/commit/9df43d59954b191128dd755057190ca62762d404)) + +### Unknown + +* Bump version to 0.3.2 ([`7d00675`](https://github.com/supabase-community/postgrest-py/commit/7d0067552ecafb698b5a651e2b7fe5af5ff950b8)) + +* Move to supabase/postgrest-py ([`84c847f`](https://github.com/supabase-community/postgrest-py/commit/84c847fe8e3ff32015ab41bd0caf170f39186964)) + +* Merge remote-tracking branch 'supabase/master' into master ([`12e268b`](https://github.com/supabase-community/postgrest-py/commit/12e268b2f1516949991f3153b314716d734cb756)) + +* Add badges to README.md ([`9d328d8`](https://github.com/supabase-community/postgrest-py/commit/9d328d8cdc71f34bc29af428668ec9e3794874b1)) + +## v0.3.1 (2020-08-19) + +### Unknown + +* Bump version to 0.3.1 ([`71c6ea6`](https://github.com/supabase-community/postgrest-py/commit/71c6ea64730979bf76142d1dad944ba58fbb51db)) + +* Remove dummy test cases and PyPy3 from Travis CI ([`a185327`](https://github.com/supabase-community/postgrest-py/commit/a18532780b0ab5d4ba0b37c63c4c60727a093687)) + +## v0.3.0 (2020-08-19) + +### Unknown + +* Bump version to 0.3.0 ([`67dc8d3`](https://github.com/supabase-community/postgrest-py/commit/67dc8d3d19075be18aa20e799dffb71be5f3db62)) + +* Add .travis.yml ([`4e39921`](https://github.com/supabase-community/postgrest-py/commit/4e3992199d12898842d1bfd4888d2f4af52fee67)) + +* Fix PostgrestClient.schema() not work. 
Add tests for PostgrestClient ([`20a0120`](https://github.com/supabase-community/postgrest-py/commit/20a0120e5b61e6027e150892092b1686eea0ec27)) + +* Add pytest ([`d8d9e2c`](https://github.com/supabase-community/postgrest-py/commit/d8d9e2ce1cc36188bb8da65605e066359376c120)) + +* Support multi-criteria ordering ([`5066c58`](https://github.com/supabase-community/postgrest-py/commit/5066c58345237f787ce119a60f62ae8163975c26)) + +* Code refactoring ([`0803e65`](https://github.com/supabase-community/postgrest-py/commit/0803e658bacce2c3c370a15df856b9710c6bc3c8)) + +* Update RequestBuilder ([`d115f0d`](https://github.com/supabase-community/postgrest-py/commit/d115f0de857b5dd03ac67b81caf6b331db3eee62)) + +* Rename project ([`12734e1`](https://github.com/supabase-community/postgrest-py/commit/12734e198935f14316d07572fe7ca5e857af2798)) + +## v0.2.0 (2020-08-11) + +### Unknown + +* Bump version to 0.2.0 ([`5cfc52b`](https://github.com/supabase-community/postgrest-py/commit/5cfc52b66606e6e6b18e5d37bb414d6ecf84fe14)) + +* Deprecate PostgrestClient.from_table() ([`32f9fba`](https://github.com/supabase-community/postgrest-py/commit/32f9fbac53ba5a151baec751eae91a631e32a35c)) + +* Update ([`4f2accb`](https://github.com/supabase-community/postgrest-py/commit/4f2accb2871e6b783c68115455d1fe80dc15ef49)) + +* Update README and TODO ([`2d8fcbf`](https://github.com/supabase-community/postgrest-py/commit/2d8fcbf7fa071f90e58cdbb0e4e4eb50c7c3c687)) + +* RequestBuilder.select() now accepts columns as *args ([`8901f86`](https://github.com/supabase-community/postgrest-py/commit/8901f86244f4885c7033316deef47ae912d39043)) + +* Rename Client to PostgrestClient and deprecate the old name ([`1200265`](https://github.com/supabase-community/postgrest-py/commit/1200265737b51c5b0d861cd635ee13512b082471)) + +* Support RPC ([`040ad6c`](https://github.com/supabase-community/postgrest-py/commit/040ad6c6e5a5431e18184c29258608c26f98cf47)) + +* Support basic authentication ([`1e8166d`](https://github.com/supabase-community/postgrest-py/commit/1e8166da1ac9c50003dad051dd82c1fbf311b078)) + +* Remove dead code ([`f9ee777`](https://github.com/supabase-community/postgrest-py/commit/f9ee777f1cc90a99ad1d187ed31825862111ecdc)) + +## v0.1.1 (2020-08-07) + +### Unknown + +* Bump version to 0.1.1 ([`25da534`](https://github.com/supabase-community/postgrest-py/commit/25da5340f74565170b71decc40837c0f735c25c5)) + +## v0.1.0 (2020-08-07) + +### Unknown + +* Bump version to 0.1.0 ([`82e7f55`](https://github.com/supabase-community/postgrest-py/commit/82e7f5529ec2a01e6426dc40a20fe8b8094958ba)) + +* Complete basic features ([`96fed3a`](https://github.com/supabase-community/postgrest-py/commit/96fed3a3d2d9e921c0ce9f8225dfa7948fa60f2b)) + +* Add GET only filters ([`3c1cbf2`](https://github.com/supabase-community/postgrest-py/commit/3c1cbf291151d3eb1380d6cc3bbdd392eaff22f0)) + +* Code refactoring ([`4379b7f`](https://github.com/supabase-community/postgrest-py/commit/4379b7fc760707d025143f218e6cee7391c781b1)) + +* Build a basic structure of the project ([`57e90da`](https://github.com/supabase-community/postgrest-py/commit/57e90dabd32e065b8651d74742817b48b366498a)) + +* Rename project to avoid collision ([`a5ff81b`](https://github.com/supabase-community/postgrest-py/commit/a5ff81bde0f38452b4d52c64c41b08b468deb80d)) + +* Update README.md and things 
([`c77c5ea`](https://github.com/supabase-community/postgrest-py/commit/c77c5ea9d40d32fb8da3c6f21192c00687268e36)) + +* Poetry init ([`ec3df47`](https://github.com/supabase-community/postgrest-py/commit/ec3df475c35857cc5879c1cc0efd7305ee833a5f)) + +* Initial commit ([`d18b594`](https://github.com/supabase-community/postgrest-py/commit/d18b59465456b0b240d89dfe7236ad93f98c64bd)) diff --git a/src/postgrest/LICENSE b/src/postgrest/LICENSE new file mode 100644 index 00000000..ddeba6a0 --- /dev/null +++ b/src/postgrest/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Supabase + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/postgrest/Makefile b/src/postgrest/Makefile new file mode 100644 index 00000000..3f10eace --- /dev/null +++ b/src/postgrest/Makefile @@ -0,0 +1,37 @@ +tests: pytest + +pytest: start-infra + uv run --package postgrest pytest --cov=./ --cov-report=xml -vv + +start-infra: + cd infra &&\ + docker compose down &&\ + docker compose up -d + +clean-infra: + cd infra &&\ + docker compose down --remove-orphans &&\ + docker system prune -a --volumes -f + +stop-infra: + cd infra &&\ + docker compose down --remove-orphans + +clean: + rm -rf htmlcov .pytest_cache .mypy_cache .ruff_cache + rm -f .coverage coverage.xml + +unasync: + uv run unasync postgrest tests + +build_sync: unasync + sed -i 's/@pytest.mark.asyncio//g' tests/_sync/test_client.py + sed -i 's/_async/_sync/g' tests/_sync/test_client.py + sed -i 's/Async/Sync/g' src/postgrest/_sync/request_builder.py tests/_sync/test_client.py + sed -i 's/_client\.SyncClient/_client\.Client/g' tests/_sync/test_client.py + sed -i 's/SyncHTTPTransport/HTTPTransport/g' tests/_sync/**.py + sed -i 's/SyncClient/Client/g' src/postgrest/_sync/**.py tests/_sync/**.py + sed -i 's/self\.session\.aclose/self\.session\.close/g' src/postgrest/_sync/client.py + +build: + uv build --package supabase diff --git a/src/postgrest/README.md b/src/postgrest/README.md new file mode 100644 index 00000000..53e264d7 --- /dev/null +++ b/src/postgrest/README.md @@ -0,0 +1,109 @@ +# postgrest-py + +[PostgREST](https://postgrest.org) client for Python. This library provides an "ORM-like" interface to PostgREST. + +## INSTALLATION + +### Requirements + +- Python >= 3.9 +- PostgreSQL >= 13 +- PostgREST >= 11 + +### Local PostgREST server + +If you want to use a local PostgREST server for development, you can use our preconfigured instance via Docker Compose. + +```sh +docker-compose up +``` + +Once Docker Compose has started, PostgREST is accessible at http://localhost:3000.
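+To check that the local instance is reachable, you can run a single query against the `countries` table seeded by `infra/init.sql`. The snippet below is a minimal sketch; it assumes the Docker Compose stack above is running with PostgREST listening on `http://localhost:3000`.
+
+```py
+from postgrest import SyncPostgrestClient
+
+# Assumes the local docker-compose stack above is up and the seed data is loaded.
+client = SyncPostgrestClient("http://localhost:3000")
+r = client.from_("countries").select("*").execute()
+print(len(r.data))  # 12 rows from the infra/init.sql seed data
+```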
+ +### Instructions + +#### With Poetry (recommended) + +```sh +poetry add postgrest +``` + +#### With Pip + +```sh +pip install postgrest +``` + +## USAGE + +### Getting started + +```py +import asyncio +from postgrest import AsyncPostgrestClient + +async def main(): + async with AsyncPostgrestClient("http://localhost:3000") as client: + r = await client.from_("countries").select("*").execute() + countries = r.data + +asyncio.run(main()) +``` + +### Create + +```py +await client.from_("countries").insert({ "name": "Việt Nam", "capital": "Hà Nội" }).execute() +``` + +### Read + +```py +r = await client.from_("countries").select("id", "name").execute() +countries = r.data +``` + +### Update + +```py +await client.from_("countries").update({"capital": "Hà Nội"}).eq("name", "Việt Nam").execute() +``` + +### Delete + +```py +await client.from_("countries").delete().eq("name", "Việt Nam").execute() +``` + +### General filters + +### Stored procedures (RPC) +```py +await client.rpc("foobar", {"arg1": "value1", "arg2": "value2"}).execute() +``` + +## DEVELOPMENT + +```sh +git clone https://github.com/supabase/postgrest-py.git +cd postgrest-py +poetry install +poetry run pre-commit install +``` + +### Testing + +```sh +poetry run pytest +``` + +## CHANGELOG + +Read more [here](https://github.com/supabase/postgrest-py/blob/main/CHANGELOG.md). + +## SPONSORS + +We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone. + +[![Worklife VC](https://user-images.githubusercontent.com/10214025/90451355-34d71200-e11e-11ea-81f9-1592fd1e9146.png)](https://www.worklife.vc) +[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) diff --git a/src/postgrest/docs/Makefile b/src/postgrest/docs/Makefile new file mode 100644 index 00000000..d4bb2cbb --- /dev/null +++ b/src/postgrest/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/src/postgrest/docs/api/client.rst b/src/postgrest/docs/api/client.rst new file mode 100644 index 00000000..926c5ddd --- /dev/null +++ b/src/postgrest/docs/api/client.rst @@ -0,0 +1,14 @@ +Client +====== + +To run any queries, the first step is to construct a client. + +The library offers both synchronous and asynchronous clients. + +.. autoclass:: postgrest.AsyncPostgrestClient + :members: + :inherited-members: + +.. 
autoclass:: postgrest.SyncPostgrestClient + :members: + :inherited-members: diff --git a/src/postgrest/docs/api/exceptions.rst b/src/postgrest/docs/api/exceptions.rst new file mode 100644 index 00000000..f83ef4ba --- /dev/null +++ b/src/postgrest/docs/api/exceptions.rst @@ -0,0 +1,5 @@ +Exceptions +========== + +.. autoexception:: postgrest.APIError + :members: diff --git a/src/postgrest/docs/api/filters.rst b/src/postgrest/docs/api/filters.rst new file mode 100644 index 00000000..50fe0b7d --- /dev/null +++ b/src/postgrest/docs/api/filters.rst @@ -0,0 +1,31 @@ +Filter Builder +============== + +This is a kind of `request builder `_. It contains all the methods used to +filter data during queries. + +.. note:: + In the source code, there are separate AsyncFilterRequestBuilders and SyncFilterRequestBuilders. + These classes are otherwise exactly the same, and provide the same interface. + +.. warning:: + These classes are not meant to be constructed by the user. + +.. tip:: + The full list of supported filter operators are on the `PostgREST documentation `_ + +.. tip:: + All the filter methods return a modified instance of the filter builder, allowing fluent chaining of filters. + + +.. autoclass:: postgrest.AsyncFilterRequestBuilder + :members: + :undoc-members: + :inherited-members: + :member-order: bysource + +.. autoclass:: postgrest.SyncFilterRequestBuilder + :members: + :undoc-members: + :inherited-members: + :member-order: bysource diff --git a/src/postgrest/docs/api/index.rst b/src/postgrest/docs/api/index.rst new file mode 100644 index 00000000..0431351e --- /dev/null +++ b/src/postgrest/docs/api/index.rst @@ -0,0 +1,16 @@ +API Reference +============= + +The library offers both synchronous and asynchronous clients. +Note that the synchronous and asynchronous classes all provide the exact same interface. + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + + Client + Request Builders + Filters + Responses + Types + Exceptions diff --git a/src/postgrest/docs/api/request_builders.rst b/src/postgrest/docs/api/request_builders.rst new file mode 100644 index 00000000..9a2826cb --- /dev/null +++ b/src/postgrest/docs/api/request_builders.rst @@ -0,0 +1,33 @@ +Request Builders +================ + +.. note:: + In the source code, there are separate synchronous and asynchronous request builder classes. + These classes are otherwise exactly the same, and provide the same interfaces. + +.. warning:: + These classes are not meant to be constructed by the user. + +.. autoclass:: postgrest.AsyncRequestBuilder + :members: + :inherited-members: + +.. autoclass:: postgrest.AsyncSelectRequestBuilder + :members: + :inherited-members: + +.. autoclass:: postgrest.AsyncQueryRequestBuilder + :members: + :inherited-members: + +.. autoclass:: postgrest.SyncRequestBuilder + :members: + :inherited-members: + +.. autoclass:: postgrest.SyncSelectRequestBuilder + :members: + :inherited-members: + +.. autoclass:: postgrest.SyncQueryRequestBuilder + :members: + :inherited-members: diff --git a/src/postgrest/docs/api/responses.rst b/src/postgrest/docs/api/responses.rst new file mode 100644 index 00000000..145365b7 --- /dev/null +++ b/src/postgrest/docs/api/responses.rst @@ -0,0 +1,7 @@ +Responses +========= + +Once a query is run, the library parses the server's response into an APIResponse object. + +.. 
autoclass:: postgrest.APIResponse + :members: diff --git a/src/postgrest/docs/api/types.rst b/src/postgrest/docs/api/types.rst new file mode 100644 index 00000000..af85c9a4 --- /dev/null +++ b/src/postgrest/docs/api/types.rst @@ -0,0 +1,16 @@ +Types +===== + +Some type aliases and enums used in the library. + +.. autoclass:: postgrest.types.CountMethod + :members: + +.. autoclass:: postgrest.types.Filters + :members: + +.. autoclass:: postgrest.types.RequestMethod + :members: + +.. autoclass:: postgrest.types.ReturnMethod + :members: diff --git a/src/postgrest/docs/conf.py b/src/postgrest/docs/conf.py new file mode 100644 index 00000000..54f265d5 --- /dev/null +++ b/src/postgrest/docs/conf.py @@ -0,0 +1,67 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- +import postgrest + +project = "postgrest-py" +version = postgrest.__version__ +release = version +copyright = ( + "2022, Anand Krishna, Daniel Reinón García, Joel Lee, Leynier Gutiérrez González" +) +author = "Anand Krishna, Daniel Reinón García, Joel Lee, Leynier Gutiérrez González" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinx.ext.extlinks", +] + +# Napolean config +napoleon_google_docstring = True + +# autodoc config +autodoc_member_order = "bysource" +autodoc_class_signature = "separated" + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "furo" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = [] diff --git a/src/postgrest/docs/examples/basic_queries.rst b/src/postgrest/docs/examples/basic_queries.rst new file mode 100644 index 00000000..b8da7653 --- /dev/null +++ b/src/postgrest/docs/examples/basic_queries.rst @@ -0,0 +1,68 @@ +Getting Started +=============== + +We connect to the API and authenticate, and fetch some data. + +.. 
code-block:: python + :linenos: + + import asyncio + from postgrest import AsyncPostgrestClient + + async def main(): + async with AsyncPostgrestClient("http://localhost:3000") as client: + client.auth("Bearer ") + r = await client.from_("countries").select("*").execute() + countries = r.data + + asyncio.run(main()) + + +**CRUD** + +.. code-block:: python + + await client.from_("countries").insert({ "name": "Việt Nam", "capital": "Hà Nội" }).execute() + + +.. code-block:: python + + r = await client.from_("countries").select("id", "name").execute() + countries = r.data + + +.. code-block:: python + + await client.from_("countries").update({"capital": "Hà Nội"}).eq("name", "Việt Nam").execute() + +.. code-block:: python + + await client.from_("countries").delete().eq("name", "Việt Nam").execute() + +**Calling RPCs** + +.. code-block:: python + + await client.rpc("foo").execute() + +.. code-block:: python + + await client.rpc("bar", {"arg1": "value1", "arg2": "value2"}).execute() + + +**Closing the connection** + +Once you have finished running your queries, close the connection: + +.. code-block:: python + + await client.aclose() + + +You can also use the client with a context manager, which will close the client for you. + +.. code-block:: python + + async with AsyncPostgrestClient("url") as client: + # run queries + # the client is closed when the async with block ends diff --git a/src/postgrest/docs/examples/index.rst b/src/postgrest/docs/examples/index.rst new file mode 100644 index 00000000..e869d961 --- /dev/null +++ b/src/postgrest/docs/examples/index.rst @@ -0,0 +1,14 @@ +Examples +======== + +.. note:: + The library offers both synchronous and asynchronous clients. In the examples, we use the + async client. However, they should work the same for the sync client as well. + + +.. toctree:: + :maxdepth: 1 + :caption: More examples: + + Basic Queries + Logging Requests diff --git a/src/postgrest/docs/examples/logging.rst b/src/postgrest/docs/examples/logging.rst new file mode 100644 index 00000000..4dee8b28 --- /dev/null +++ b/src/postgrest/docs/examples/logging.rst @@ -0,0 +1,25 @@ +Logging Requests +================ + +While debugging, you might want to see the API requests that are being sent for every query. +To do this, just set the logging level to "DEBUG": + +.. code-block:: python + :linenos: + + from logging import basicConfig, DEBUG + from postgrest import SyncPostgrestClient + + basicConfig(level=DEBUG) + + client = SyncPostgrestClient(...) + + client.from_("test").select("*").eq("a", "b").execute() + client.from_("test").select("*").eq("foo", "bar").eq("baz", "spam").execute() + +Output: + +.. code-block:: + + DEBUG:httpx._client:HTTP Request: GET https:///rest/v1/test?select=%2A&a=eq.b "HTTP/1.1 200 OK" + DEBUG:httpx._client:HTTP Request: GET https:///rest/v1/test?select=%2A&foo=eq.bar&baz=eq.spam "HTTP/1.1 200 OK" diff --git a/src/postgrest/docs/index.rst b/src/postgrest/docs/index.rst new file mode 100644 index 00000000..3e552121 --- /dev/null +++ b/src/postgrest/docs/index.rst @@ -0,0 +1,31 @@ +Welcome to postgrest-py's documentation! +======================================== + +`PostgREST `_ client library for Python. This library provides an ORM interface to PostgREST. + +.. attention:: + This library is currently unstable. If you find any bugs, please file an `issue `_. + +Installation +============ +Requirements: + +- Python >= 3.7 + +**With pip:** +:: + + pip install postgrest-py + +**With poetry:** +:: + + poetry add postgrest-py + + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + API Reference + Examples diff --git a/src/postgrest/docs/make.bat b/src/postgrest/docs/make.bat new file mode 100644 index 00000000..8084272b --- /dev/null +++ b/src/postgrest/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/src/postgrest/docs/requirements.txt b/src/postgrest/docs/requirements.txt new file mode 100644 index 00000000..bd86fc24 --- /dev/null +++ b/src/postgrest/docs/requirements.txt @@ -0,0 +1,2 @@ +furo >= 2022.4.7 +Sphinx == 7.4.7 diff --git a/src/postgrest/infra/docker-compose.yaml b/src/postgrest/infra/docker-compose.yaml new file mode 100644 index 00000000..783ed1dc --- /dev/null +++ b/src/postgrest/infra/docker-compose.yaml @@ -0,0 +1,28 @@ +# docker-compose.yml +version: '3' +services: + rest: + image: postgrest/postgrest:v11.2.2 + ports: + - '3000:3000' + environment: + PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres + PGRST_DB_SCHEMAS: public,personal + PGRST_DB_EXTRA_SEARCH_PATH: extensions + PGRST_DB_ANON_ROLE: postgres + PGRST_DB_PLAN_ENABLED: 1 + PGRST_DB_TX_END: commit-allow-override + depends_on: + - db + db: + image: supabase/postgres:15.1.0.37 + ports: + - '5432:5432' + volumes: + - .:/docker-entrypoint-initdb.d/ + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_HOST: /var/run/postgresql + POSTGRES_PORT: 5432 diff --git a/src/postgrest/infra/init.sql b/src/postgrest/infra/init.sql new file mode 100644 index 00000000..20edecee --- /dev/null +++ b/src/postgrest/infra/init.sql @@ -0,0 +1,105 @@ +CREATE TABLE public.countries ( + id int8 PRIMARY KEY, + iso CHAR (2) NOT NULL, + country_name VARCHAR (80) NOT NULL, + nicename VARCHAR (80) NOT NULL, + iso3 CHAR (3) DEFAULT NULL, + numcode SMALLINT DEFAULT NULL, + phonecode INT NOT NULL +); + +INSERT INTO public.countries (id, iso, country_name, nicename, iso3, numcode, phonecode) VALUES + (1, 'AF', 'AFGHANISTAN', 'Afghanistan', 'AFG', 4, 93), + (2, 'AL', 'ALBANIA', 'Albania', 'ALB', 8, 355), + (3, 'DZ', 'ALGERIA', 'Algeria', 'DZA', 12, 213), + (4, 'AQ', 'ANTARCTICA', 'Antarctica', NULL, NULL, 0), + (5, 'CR', 'COSTA RICA', 'Costa Rica', 'CRI', 188, 506), + (6, 'ES', 'SPAIN', 'Spain', 'ESP', 724, 34), + (7, 'TH', 'THAILAND', 'Thailand', 'THA', 764, 66), + (8, 'TG', 'TOGO', 'Togo', 'TGO', 768, 228), + (9, 'TT', 'TRINIDAD AND TOBAGO', 'Trinidad and Tobago', 'TTO', 780, 1868), + (10, 'GB', 'UNITED KINGDOM', 'United Kingdom', 'GBR', 826, 44), + (11, 'US', 'UNITED STATES', 'United States', 'USA', 840, 1), + (12, 'ZW', 'ZIMBABWE', 'Zimbabwe', 'ZWE', 716, 263); + +create table public.cities ( + id int8 primary key, + country_id int8 not null references public.countries, + name text +); + +insert into public.cities (id, name, 
country_id) values + (1, 'London', 10), + (2, 'Manchester', 10), + (3, 'Liverpool', 10), + (4, 'Bristol', 10), + (5, 'Miami', 11), + (6, 'Huston', 11), + (7, 'Atlanta', 11); + +create table public.users ( + id int8 primary key, + name text, + address jsonb +); + +insert into public.users (id, name, address) values + (1, 'Michael', '{ "postcode": 90210, "street": "Melrose Place" }'), + (2, 'Jane', '{}'); + +create table public.reservations ( + id int8 primary key, + room_name text, + during tsrange +); + +insert into public.reservations (id, room_name, during) values + (1, 'Emerald', '[2000-01-01 13:00, 2000-01-01 15:00)'), + (2, 'Topaz', '[2000-01-02 09:00, 2000-01-02 10:00)'); + + +create table public.issues ( + id int8 primary key, + title text, + tags text[] +); + +insert into public.issues (id, title, tags) values + (1, 'Cache invalidation is not working', array['is:open', 'severity:high', 'priority:low']), + (2, 'Use better names', array['is:open', 'severity:low', 'priority:medium']), + (3, 'Add missing postgrest filters', array['is:open', 'severity:low', 'priority:high']), + (4, 'Add alias to filters', array['is:closed', 'severity:low', 'priority:medium']); + +create or replace function public.list_stored_countries() + returns setof countries + language sql +as $function$ + select * from countries; +$function$; + +create or replace function public.search_countries_by_name(search_name text) + returns setof countries + language sql +as $function$ + select * from countries where nicename ilike '%' || search_name || '%'; +$function$; + +create table + orchestral_sections (id int8 primary key, name text); +create table + instruments ( + id int8 primary key, + section_id int8 not null references orchestral_sections, + name text + ); + +insert into + orchestral_sections (id, name) +values + (1, 'strings'), + (2, 'woodwinds'); +insert into + instruments (id, section_id, name) +values + (1, 1, 'harp'), + (2, 1, 'violin'); diff --git a/src/postgrest/pyproject.toml b/src/postgrest/pyproject.toml new file mode 100644 index 00000000..4a5bc3fd --- /dev/null +++ b/src/postgrest/pyproject.toml @@ -0,0 +1,82 @@ +[project] +name = "postgrest" +version = "1.1.1" # {x-release-please-version} +description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
+authors = [ + { name = "Lương Quang Mạnh", email = "luongquangmanh85@gmail.com"}, + { name = "Joel Lee", email = "joel@joellee.org"}, + { name = "Anand"}, + { name = "Oliver Rice"}, + { name = "Andrew Smith", email = "a.smith@silentworks.co.uk"}, +] +readme = "README.md" +license = "MIT" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent" +] +requires-python = ">=3.9" +dependencies = [ + "httpx[http2] >=0.26,<0.29", + "deprecation >=2.1.0", + "pydantic >=1.9,<3.0", + "strenum >=0.4.9; python_version < \"3.11\"", +] + +[project.urls] +homepage = "https://github.com/supabase/postgrest-py" +repository = "https://github.com/supabase/postgrest-py" +documentation = "https://postgrest-py.rtfd.io" + +[dependency-groups] +test = [ + "pytest >= 8.4.1", + "pytest-cov >=6.2.1", + "pytest-depends >=1.0.1", + "pytest-asyncio >=1.0.0", +] +lints = [ + "pre-commit >=4.2.0", + "ruff >=0.12.1", +] +docs = [ + "sphinx >=7.1.2", + "furo >=2023.9.10,<2026.0.0", +] +dev = [ { include-group = "lints" }, { include-group="test" }] + +[tool.uv] +default-groups = [ "dev" ] + +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + # "B", + # flake8-simplify + # "SIM", + # isort + "I", +] +ignore = ["F401", "F403", "F841", "E712", "E501", "E402", "UP006", "UP035"] +# isort.required-imports = ["from __future__ import annotations"] + +[tool.ruff.lint.pyupgrade] +# Preserve types, even if a file imports `from __future__ import annotations`. +keep-runtime-typing = true + +[tool.pytest.ini_options] +asyncio_mode = "auto" +filterwarnings = [ + "ignore::DeprecationWarning", # ignore deprecation warnings globally +] + +[build-system] +requires = ["uv_build>=0.8.3,<0.9.0"] +build-backend = "uv_build" diff --git a/src/postgrest/src/postgrest/__init__.py b/src/postgrest/src/postgrest/__init__.py new file mode 100644 index 00000000..edb87f2e --- /dev/null +++ b/src/postgrest/src/postgrest/__init__.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +from httpx import Timeout + +from ._async.client import AsyncPostgrestClient +from ._async.request_builder import ( + AsyncFilterRequestBuilder, + AsyncMaybeSingleRequestBuilder, + AsyncQueryRequestBuilder, + AsyncRequestBuilder, + AsyncRPCFilterRequestBuilder, + AsyncSelectRequestBuilder, + AsyncSingleRequestBuilder, +) +from ._sync.client import SyncPostgrestClient +from ._sync.request_builder import ( + SyncFilterRequestBuilder, + SyncMaybeSingleRequestBuilder, + SyncQueryRequestBuilder, + SyncRequestBuilder, + SyncRPCFilterRequestBuilder, + SyncSelectRequestBuilder, + SyncSingleRequestBuilder, +) +from .base_request_builder import APIResponse +from .constants import DEFAULT_POSTGREST_CLIENT_HEADERS +from .exceptions import APIError +from .types import ( + CountMethod, + Filters, + RequestMethod, + ReturnMethod, +) +from .version import __version__ + +__all__ = [ + "AsyncPostgrestClient", + "AsyncFilterRequestBuilder", + "AsyncQueryRequestBuilder", + "AsyncRequestBuilder", + "AsyncRPCFilterRequestBuilder", + "AsyncSelectRequestBuilder", + "AsyncSingleRequestBuilder", + "AsyncMaybeSingleRequestBuilder", + "SyncPostgrestClient", + "SyncFilterRequestBuilder", + "SyncMaybeSingleRequestBuilder", + "SyncQueryRequestBuilder", + "SyncRequestBuilder", + "SyncRPCFilterRequestBuilder", + "SyncSelectRequestBuilder", + "SyncSingleRequestBuilder", + "APIResponse", + "DEFAULT_POSTGREST_CLIENT_HEADERS", + "APIError", + "CountMethod", 
+ "Filters", + "RequestMethod", + "ReturnMethod", + "Timeout", + "__version__", +] diff --git a/src/postgrest/src/postgrest/_async/__init__.py b/src/postgrest/src/postgrest/_async/__init__.py new file mode 100644 index 00000000..9d48db4f --- /dev/null +++ b/src/postgrest/src/postgrest/_async/__init__.py @@ -0,0 +1 @@ +from __future__ import annotations diff --git a/src/postgrest/src/postgrest/_async/client.py b/src/postgrest/src/postgrest/_async/client.py new file mode 100644 index 00000000..63fb95e5 --- /dev/null +++ b/src/postgrest/src/postgrest/_async/client.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +from typing import Any, Dict, Optional, Union, cast +from warnings import warn + +from deprecation import deprecated +from httpx import AsyncClient, Headers, QueryParams, Timeout + +from ..base_client import BasePostgrestClient +from ..constants import ( + DEFAULT_POSTGREST_CLIENT_HEADERS, + DEFAULT_POSTGREST_CLIENT_TIMEOUT, +) +from ..types import CountMethod +from ..version import __version__ +from .request_builder import AsyncRequestBuilder, AsyncRPCFilterRequestBuilder + +_TableT = Dict[str, Any] + + +class AsyncPostgrestClient(BasePostgrestClient): + """PostgREST client.""" + + def __init__( + self, + base_url: str, + *, + schema: str = "public", + headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS, + timeout: Union[int, float, Timeout, None] = None, + verify: Optional[bool] = None, + proxy: Optional[str] = None, + http_client: Optional[AsyncClient] = None, + ) -> None: + if timeout is not None: + warn( + "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + if verify is not None: + warn( + "The 'verify' parameter is deprecated. Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + if proxy is not None: + warn( + "The 'proxy' parameter is deprecated. 
Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + + self.verify = bool(verify) if verify is not None else True + self.timeout = ( + timeout + if isinstance(timeout, Timeout) + else ( + int(abs(timeout)) + if timeout is not None + else DEFAULT_POSTGREST_CLIENT_TIMEOUT + ) + ) + + BasePostgrestClient.__init__( + self, + base_url, + schema=schema, + headers=headers, + timeout=self.timeout, + verify=self.verify, + proxy=proxy, + http_client=http_client, + ) + self.session = cast(AsyncClient, self.session) + + def create_session( + self, + base_url: str, + headers: Dict[str, str], + timeout: Union[int, float, Timeout], + verify: bool = True, + proxy: Optional[str] = None, + ) -> AsyncClient: + http_client = None + if isinstance(self.http_client, AsyncClient): + http_client = self.http_client + + if http_client is not None: + http_client.base_url = base_url + http_client.headers.update({**headers}) + return http_client + + return AsyncClient( + base_url=base_url, + headers=headers, + timeout=timeout, + verify=verify, + proxy=proxy, + follow_redirects=True, + http2=True, + ) + + def schema(self, schema: str): + """Switch to another schema.""" + return AsyncPostgrestClient( + base_url=self.base_url, + schema=schema, + headers=self.headers, + timeout=self.timeout, + verify=self.verify, + proxy=self.proxy, + ) + + async def __aenter__(self) -> AsyncPostgrestClient: + return self + + async def __aexit__(self, exc_type, exc, tb) -> None: + await self.aclose() + + async def aclose(self) -> None: + """Close the underlying HTTP connections.""" + await self.session.aclose() + + def from_(self, table: str) -> AsyncRequestBuilder[_TableT]: + """Perform a table operation. + + Args: + table: The name of the table + Returns: + :class:`AsyncRequestBuilder` + """ + return AsyncRequestBuilder[_TableT](self.session, f"/{table}") + + def table(self, table: str) -> AsyncRequestBuilder[_TableT]: + """Alias to :meth:`from_`.""" + return self.from_(table) + + @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead") + def from_table(self, table: str) -> AsyncRequestBuilder: + """Alias to :meth:`from_`.""" + return self.from_(table) + + def rpc( + self, + func: str, + params: dict, + count: Optional[CountMethod] = None, + head: bool = False, + get: bool = False, + ) -> AsyncRPCFilterRequestBuilder[Any]: + """Perform a stored procedure call. + + Args: + func: The name of the remote procedure to run. + params: The parameters to be passed to the remote procedure. + count: The method to use to get the count of rows returned. + head: When set to `true`, `data` will not be returned. Useful if you only need the count. + get: When set to `true`, the function will be called with read-only access mode. + Returns: + :class:`AsyncRPCFilterRequestBuilder` + Example: + .. code-block:: python + + await client.rpc("foobar", {"arg": "value"}).execute() + + .. versionchanged:: 0.10.9 + This method now returns a :class:`AsyncRPCFilterRequestBuilder`. + .. versionchanged:: 0.10.2 + This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to + filter on the RPC's resultset. + """ + method = "HEAD" if head else "GET" if get else "POST" + + headers = Headers({"Prefer": f"count={count}"}) if count else Headers() + + if method in ("HEAD", "GET"): + return AsyncRPCFilterRequestBuilder[Any]( + self.session, + f"/rpc/{func}", + method, + headers, + QueryParams(params), + json={}, + ) + # the params here are params to be sent to the RPC and not the queryparams! 
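+        # Illustrative sketch ("add_them" and its arguments are hypothetical, not
+        # part of this module): the default POST call below sends the params as the
+        # JSON body, whereas ``get=True`` or ``head=True`` above routes them through
+        # the query string instead.
+        #
+        #     await client.rpc("add_them", {"a": 1, "b": 2}).execute()
+        #     await client.rpc("add_them", {"a": 1, "b": 2}, get=True).execute()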
+ return AsyncRPCFilterRequestBuilder[Any]( + self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params + ) diff --git a/src/postgrest/src/postgrest/_async/request_builder.py b/src/postgrest/src/postgrest/_async/request_builder.py new file mode 100644 index 00000000..3c2f5949 --- /dev/null +++ b/src/postgrest/src/postgrest/_async/request_builder.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +from typing import Any, Generic, Optional, TypeVar, Union + +from httpx import AsyncClient, Headers, QueryParams +from pydantic import ValidationError + +from ..base_request_builder import ( + APIResponse, + BaseFilterRequestBuilder, + BaseRPCRequestBuilder, + BaseSelectRequestBuilder, + CountMethod, + SingleAPIResponse, + pre_delete, + pre_insert, + pre_select, + pre_update, + pre_upsert, +) +from ..exceptions import APIError, APIErrorFromJSON, generate_default_error_message +from ..types import ReturnMethod +from ..utils import get_origin_and_cast, model_validate_json + +_ReturnT = TypeVar("_ReturnT") + + +class AsyncQueryRequestBuilder(Generic[_ReturnT]): + def __init__( + self, + session: AsyncClient, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + self.session = session + self.path = path + self.http_method = http_method + self.headers = headers + self.params = params + self.json = None if http_method in {"GET", "HEAD"} else json + + async def execute(self) -> APIResponse[_ReturnT]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`APIResponse` + + Raises: + :class:`APIError` If the API raised an error. + """ + r = await self.session.request( + self.http_method, + self.path, + json=self.json, + params=self.params, + headers=self.headers, + ) + try: + if r.is_success: + if self.http_method != "HEAD": + body = r.text + if self.headers.get("Accept") == "text/csv": + return body + if self.headers.get( + "Accept" + ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"): + if "+json" not in self.headers.get("Accept"): + return body + return APIResponse[_ReturnT].from_http_request_response(r) + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError as e: + raise APIError(generate_default_error_message(r)) + + +class AsyncSingleRequestBuilder(Generic[_ReturnT]): + def __init__( + self, + session: AsyncClient, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + self.session = session + self.path = path + self.http_method = http_method + self.headers = headers + self.params = params + self.json = json + + async def execute(self) -> SingleAPIResponse[_ReturnT]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`SingleAPIResponse` + + Raises: + :class:`APIError` If the API raised an error. 
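+        Example:
+            .. code-block:: python
+
+                # "countries" is a hypothetical table used only for illustration
+                response = await client.from_("countries").select("*").eq("id", 1).single().execute()
+                row = response.data  # a single row, not a list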
+ """ + r = await self.session.request( + self.http_method, + self.path, + json=self.json, + params=self.params, + headers=self.headers, + ) + try: + if ( + 200 <= r.status_code <= 299 + ): # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok) + return SingleAPIResponse[_ReturnT].from_http_request_response(r) + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError as e: + raise APIError(generate_default_error_message(r)) + + +class AsyncMaybeSingleRequestBuilder(AsyncSingleRequestBuilder[_ReturnT]): + async def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]: + r = None + try: + r = await AsyncSingleRequestBuilder[_ReturnT].execute(self) + except APIError as e: + if e.details and "The result contains 0 rows" in e.details: + return None + if not r: + raise APIError( + { + "message": "Missing response", + "code": "204", + "hint": "Please check traceback of the code", + "details": "Postgrest couldn't retrieve response, please check traceback of the code. Please create an issue in `supabase-community/postgrest-py` if needed.", + } + ) + return r + + +# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319 +class AsyncFilterRequestBuilder( + BaseFilterRequestBuilder[_ReturnT], AsyncQueryRequestBuilder[_ReturnT] +): # type: ignore + def __init__( + self, + session: AsyncClient, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + +# this exists for type-safety. see https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf +class AsyncRPCFilterRequestBuilder( + BaseRPCRequestBuilder[_ReturnT], AsyncSingleRequestBuilder[_ReturnT] +): + def __init__( + self, + session: AsyncClient, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(AsyncSingleRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + +# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319 +class AsyncSelectRequestBuilder( + BaseSelectRequestBuilder[_ReturnT], AsyncQueryRequestBuilder[_ReturnT] +): # type: ignore + def __init__( + self, + session: AsyncClient, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + def single(self) -> AsyncSingleRequestBuilder[_ReturnT]: + """Specify that the query will only return a single row in response. + + .. caution:: + The API will raise an error if the query returned more than one row. 
+ """ + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return AsyncSingleRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def maybe_single(self) -> AsyncMaybeSingleRequestBuilder[_ReturnT]: + """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return AsyncMaybeSingleRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def text_search( + self, column: str, query: str, options: dict[str, Any] = {} + ) -> AsyncFilterRequestBuilder[_ReturnT]: + type_ = options.get("type") + type_part = "" + if type_ == "plain": + type_part = "pl" + elif type_ == "phrase": + type_part = "ph" + elif type_ == "web_search": + type_part = "w" + config_part = f"({options.get('config')})" if options.get("config") else "" + self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}") + + return AsyncQueryRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def csv(self) -> AsyncSingleRequestBuilder[str]: + """Specify that the query must retrieve data as a single CSV string.""" + self.headers["Accept"] = "text/csv" + return AsyncSingleRequestBuilder[str]( + session=self.session, # type: ignore + path=self.path, + http_method=self.http_method, + headers=self.headers, + params=self.params, + json=self.json, + ) + + +class AsyncRequestBuilder(Generic[_ReturnT]): + def __init__(self, session: AsyncClient, path: str) -> None: + self.session = session + self.path = path + + def select( + self, + *columns: str, + count: Optional[CountMethod] = None, + head: Optional[bool] = None, + ) -> AsyncSelectRequestBuilder[_ReturnT]: + """Run a SELECT query. + + Args: + *columns: The names of the columns to fetch. + count: The method to use to get the count of rows returned. + Returns: + :class:`AsyncSelectRequestBuilder` + """ + method, params, headers, json = pre_select(*columns, count=count, head=head) + return AsyncSelectRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def insert( + self, + json: Union[dict, list], + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + upsert: bool = False, + default_to_null: bool = True, + ) -> AsyncQueryRequestBuilder[_ReturnT]: + """Run an INSERT query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + upsert: Whether the query should be an upsert. + default_to_null: Make missing fields default to `null`. + Otherwise, use the default value for the column. + Only applies for bulk inserts. 
+ Returns: + :class:`AsyncQueryRequestBuilder` + """ + method, params, headers, json = pre_insert( + json, + count=count, + returning=returning, + upsert=upsert, + default_to_null=default_to_null, + ) + return AsyncQueryRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def upsert( + self, + json: Union[dict, list], + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ignore_duplicates: bool = False, + on_conflict: str = "", + default_to_null: bool = True, + ) -> AsyncQueryRequestBuilder[_ReturnT]: + """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + ignore_duplicates: Whether duplicate rows should be ignored. + on_conflict: Specified columns to be made to work with UNIQUE constraint. + default_to_null: Make missing fields default to `null`. Otherwise, use the + default value for the column. This only applies when inserting new rows, + not when merging with existing rows under `ignoreDuplicates: false`. + This also only applies when doing bulk upserts. + Returns: + :class:`AsyncQueryRequestBuilder` + """ + method, params, headers, json = pre_upsert( + json, + count=count, + returning=returning, + ignore_duplicates=ignore_duplicates, + on_conflict=on_conflict, + default_to_null=default_to_null, + ) + return AsyncQueryRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def update( + self, + json: dict, + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> AsyncFilterRequestBuilder[_ReturnT]: + """Run an UPDATE query. + + Args: + json: The updated fields. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + Returns: + :class:`AsyncFilterRequestBuilder` + """ + method, params, headers, json = pre_update( + json, + count=count, + returning=returning, + ) + return AsyncFilterRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def delete( + self, + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> AsyncFilterRequestBuilder[_ReturnT]: + """Run a DELETE query. + + Args: + count: The method to use to get the count of rows returned. 
+ returning: Either 'minimal' or 'representation' + Returns: + :class:`AsyncFilterRequestBuilder` + """ + method, params, headers, json = pre_delete( + count=count, + returning=returning, + ) + return AsyncFilterRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) diff --git a/src/postgrest/src/postgrest/_sync/__init__.py b/src/postgrest/src/postgrest/_sync/__init__.py new file mode 100644 index 00000000..9d48db4f --- /dev/null +++ b/src/postgrest/src/postgrest/_sync/__init__.py @@ -0,0 +1 @@ +from __future__ import annotations diff --git a/src/postgrest/src/postgrest/_sync/client.py b/src/postgrest/src/postgrest/_sync/client.py new file mode 100644 index 00000000..908db515 --- /dev/null +++ b/src/postgrest/src/postgrest/_sync/client.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +from typing import Any, Dict, Optional, Union, cast +from warnings import warn + +from deprecation import deprecated +from httpx import Client, Headers, QueryParams, Timeout + +from ..base_client import BasePostgrestClient +from ..constants import ( + DEFAULT_POSTGREST_CLIENT_HEADERS, + DEFAULT_POSTGREST_CLIENT_TIMEOUT, +) +from ..types import CountMethod +from ..version import __version__ +from .request_builder import SyncRequestBuilder, SyncRPCFilterRequestBuilder + +_TableT = Dict[str, Any] + + +class SyncPostgrestClient(BasePostgrestClient): + """PostgREST client.""" + + def __init__( + self, + base_url: str, + *, + schema: str = "public", + headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS, + timeout: Union[int, float, Timeout, None] = None, + verify: Optional[bool] = None, + proxy: Optional[str] = None, + http_client: Optional[Client] = None, + ) -> None: + if timeout is not None: + warn( + "The 'timeout' parameter is deprecated. Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + if verify is not None: + warn( + "The 'verify' parameter is deprecated. Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + if proxy is not None: + warn( + "The 'proxy' parameter is deprecated. 
Please configure it in the http client instead.", + DeprecationWarning, + stacklevel=2, + ) + + self.verify = bool(verify) if verify is not None else True + self.timeout = ( + timeout + if isinstance(timeout, Timeout) + else ( + int(abs(timeout)) + if timeout is not None + else DEFAULT_POSTGREST_CLIENT_TIMEOUT + ) + ) + + BasePostgrestClient.__init__( + self, + base_url, + schema=schema, + headers=headers, + timeout=self.timeout, + verify=self.verify, + proxy=proxy, + http_client=http_client, + ) + self.session = cast(Client, self.session) + + def create_session( + self, + base_url: str, + headers: Dict[str, str], + timeout: Union[int, float, Timeout], + verify: bool = True, + proxy: Optional[str] = None, + ) -> Client: + http_client = None + if isinstance(self.http_client, Client): + http_client = self.http_client + + if http_client is not None: + http_client.base_url = base_url + http_client.headers.update({**headers}) + return http_client + + return Client( + base_url=base_url, + headers=headers, + timeout=timeout, + verify=verify, + proxy=proxy, + follow_redirects=True, + http2=True, + ) + + def schema(self, schema: str): + """Switch to another schema.""" + return SyncPostgrestClient( + base_url=self.base_url, + schema=schema, + headers=self.headers, + timeout=self.timeout, + verify=self.verify, + proxy=self.proxy, + ) + + def __enter__(self) -> SyncPostgrestClient: + return self + + def __exit__(self, exc_type, exc, tb) -> None: + self.aclose() + + def aclose(self) -> None: + """Close the underlying HTTP connections.""" + self.session.close() + + def from_(self, table: str) -> SyncRequestBuilder[_TableT]: + """Perform a table operation. + + Args: + table: The name of the table + Returns: + :class:`AsyncRequestBuilder` + """ + return SyncRequestBuilder[_TableT](self.session, f"/{table}") + + def table(self, table: str) -> SyncRequestBuilder[_TableT]: + """Alias to :meth:`from_`.""" + return self.from_(table) + + @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead") + def from_table(self, table: str) -> SyncRequestBuilder: + """Alias to :meth:`from_`.""" + return self.from_(table) + + def rpc( + self, + func: str, + params: dict, + count: Optional[CountMethod] = None, + head: bool = False, + get: bool = False, + ) -> SyncRPCFilterRequestBuilder[Any]: + """Perform a stored procedure call. + + Args: + func: The name of the remote procedure to run. + params: The parameters to be passed to the remote procedure. + count: The method to use to get the count of rows returned. + head: When set to `true`, `data` will not be returned. Useful if you only need the count. + get: When set to `true`, the function will be called with read-only access mode. + Returns: + :class:`AsyncRPCFilterRequestBuilder` + Example: + .. code-block:: python + + await client.rpc("foobar", {"arg": "value"}).execute() + + .. versionchanged:: 0.10.9 + This method now returns a :class:`AsyncRPCFilterRequestBuilder`. + .. versionchanged:: 0.10.2 + This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to + filter on the RPC's resultset. + """ + method = "HEAD" if head else "GET" if get else "POST" + + headers = Headers({"Prefer": f"count={count}"}) if count else Headers() + + if method in ("HEAD", "GET"): + return SyncRPCFilterRequestBuilder[Any]( + self.session, + f"/rpc/{func}", + method, + headers, + QueryParams(params), + json={}, + ) + # the params here are params to be sent to the RPC and not the queryparams! 
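+        # Illustrative sketch ("add_them" and its arguments are hypothetical): the
+        # sync client makes the same call without ``await``, e.g.
+        #     client.rpc("add_them", {"a": 1, "b": 2}).execute()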
+ return SyncRPCFilterRequestBuilder[Any]( + self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params + ) diff --git a/src/postgrest/src/postgrest/_sync/request_builder.py b/src/postgrest/src/postgrest/_sync/request_builder.py new file mode 100644 index 00000000..efb0f7a7 --- /dev/null +++ b/src/postgrest/src/postgrest/_sync/request_builder.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +from typing import Any, Generic, Optional, TypeVar, Union + +from httpx import Client, Headers, QueryParams +from pydantic import ValidationError + +from ..base_request_builder import ( + APIResponse, + BaseFilterRequestBuilder, + BaseRPCRequestBuilder, + BaseSelectRequestBuilder, + CountMethod, + SingleAPIResponse, + pre_delete, + pre_insert, + pre_select, + pre_update, + pre_upsert, +) +from ..exceptions import APIError, APIErrorFromJSON, generate_default_error_message +from ..types import ReturnMethod +from ..utils import get_origin_and_cast, model_validate_json + +_ReturnT = TypeVar("_ReturnT") + + +class SyncQueryRequestBuilder(Generic[_ReturnT]): + def __init__( + self, + session: Client, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + self.session = session + self.path = path + self.http_method = http_method + self.headers = headers + self.params = params + self.json = None if http_method in {"GET", "HEAD"} else json + + def execute(self) -> APIResponse[_ReturnT]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`APIResponse` + + Raises: + :class:`APIError` If the API raised an error. + """ + r = self.session.request( + self.http_method, + self.path, + json=self.json, + params=self.params, + headers=self.headers, + ) + try: + if r.is_success: + if self.http_method != "HEAD": + body = r.text + if self.headers.get("Accept") == "text/csv": + return body + if self.headers.get( + "Accept" + ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"): + if "+json" not in self.headers.get("Accept"): + return body + return APIResponse[_ReturnT].from_http_request_response(r) + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError as e: + raise APIError(generate_default_error_message(r)) + + +class SyncSingleRequestBuilder(Generic[_ReturnT]): + def __init__( + self, + session: Client, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + self.session = session + self.path = path + self.http_method = http_method + self.headers = headers + self.params = params + self.json = json + + def execute(self) -> SingleAPIResponse[_ReturnT]: + """Execute the query. + + .. tip:: + This is the last method called, after the query is built. + + Returns: + :class:`SingleAPIResponse` + + Raises: + :class:`APIError` If the API raised an error. 
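+        Example:
+            .. code-block:: python
+
+                # "countries" is a hypothetical table used only for illustration
+                response = client.from_("countries").select("*").eq("id", 1).single().execute()
+                row = response.data  # a single row, not a list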
+ """ + r = self.session.request( + self.http_method, + self.path, + json=self.json, + params=self.params, + headers=self.headers, + ) + try: + if ( + 200 <= r.status_code <= 299 + ): # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok) + return SingleAPIResponse[_ReturnT].from_http_request_response(r) + else: + json_obj = model_validate_json(APIErrorFromJSON, r.content) + raise APIError(dict(json_obj)) + except ValidationError as e: + raise APIError(generate_default_error_message(r)) + + +class SyncMaybeSingleRequestBuilder(SyncSingleRequestBuilder[_ReturnT]): + def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]: + r = None + try: + r = SyncSingleRequestBuilder[_ReturnT].execute(self) + except APIError as e: + if e.details and "The result contains 0 rows" in e.details: + return None + if not r: + raise APIError( + { + "message": "Missing response", + "code": "204", + "hint": "Please check traceback of the code", + "details": "Postgrest couldn't retrieve response, please check traceback of the code. Please create an issue in `supabase-community/postgrest-py` if needed.", + } + ) + return r + + +# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319 +class SyncFilterRequestBuilder( + BaseFilterRequestBuilder[_ReturnT], SyncQueryRequestBuilder[_ReturnT] +): # type: ignore + def __init__( + self, + session: Client, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + +# this exists for type-safety. see https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf +class SyncRPCFilterRequestBuilder( + BaseRPCRequestBuilder[_ReturnT], SyncSingleRequestBuilder[_ReturnT] +): + def __init__( + self, + session: Client, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(SyncSingleRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + +# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319 +class SyncSelectRequestBuilder( + BaseSelectRequestBuilder[_ReturnT], SyncQueryRequestBuilder[_ReturnT] +): # type: ignore + def __init__( + self, + session: Client, + path: str, + http_method: str, + headers: Headers, + params: QueryParams, + json: dict, + ) -> None: + get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__( + self, session, path, http_method, headers, params, json + ) + + def single(self) -> SyncSingleRequestBuilder[_ReturnT]: + """Specify that the query will only return a single row in response. + + .. caution:: + The API will raise an error if the query returned more than one row. 
+ """ + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return SyncSingleRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def maybe_single(self) -> SyncMaybeSingleRequestBuilder[_ReturnT]: + """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return SyncMaybeSingleRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def text_search( + self, column: str, query: str, options: dict[str, Any] = {} + ) -> SyncFilterRequestBuilder[_ReturnT]: + type_ = options.get("type") + type_part = "" + if type_ == "plain": + type_part = "pl" + elif type_ == "phrase": + type_part = "ph" + elif type_ == "web_search": + type_part = "w" + config_part = f"({options.get('config')})" if options.get("config") else "" + self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}") + + return SyncQueryRequestBuilder[_ReturnT]( + headers=self.headers, + http_method=self.http_method, + json=self.json, + params=self.params, + path=self.path, + session=self.session, # type: ignore + ) + + def csv(self) -> SyncSingleRequestBuilder[str]: + """Specify that the query must retrieve data as a single CSV string.""" + self.headers["Accept"] = "text/csv" + return SyncSingleRequestBuilder[str]( + session=self.session, # type: ignore + path=self.path, + http_method=self.http_method, + headers=self.headers, + params=self.params, + json=self.json, + ) + + +class SyncRequestBuilder(Generic[_ReturnT]): + def __init__(self, session: Client, path: str) -> None: + self.session = session + self.path = path + + def select( + self, + *columns: str, + count: Optional[CountMethod] = None, + head: Optional[bool] = None, + ) -> SyncSelectRequestBuilder[_ReturnT]: + """Run a SELECT query. + + Args: + *columns: The names of the columns to fetch. + count: The method to use to get the count of rows returned. + Returns: + :class:`SyncSelectRequestBuilder` + """ + method, params, headers, json = pre_select(*columns, count=count, head=head) + return SyncSelectRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def insert( + self, + json: Union[dict, list], + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + upsert: bool = False, + default_to_null: bool = True, + ) -> SyncQueryRequestBuilder[_ReturnT]: + """Run an INSERT query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + upsert: Whether the query should be an upsert. + default_to_null: Make missing fields default to `null`. + Otherwise, use the default value for the column. + Only applies for bulk inserts. 
+ Returns: + :class:`SyncQueryRequestBuilder` + """ + method, params, headers, json = pre_insert( + json, + count=count, + returning=returning, + upsert=upsert, + default_to_null=default_to_null, + ) + return SyncQueryRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def upsert( + self, + json: Union[dict, list], + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ignore_duplicates: bool = False, + on_conflict: str = "", + default_to_null: bool = True, + ) -> SyncQueryRequestBuilder[_ReturnT]: + """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. + + Args: + json: The row to be inserted. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + ignore_duplicates: Whether duplicate rows should be ignored. + on_conflict: Specified columns to be made to work with UNIQUE constraint. + default_to_null: Make missing fields default to `null`. Otherwise, use the + default value for the column. This only applies when inserting new rows, + not when merging with existing rows under `ignoreDuplicates: false`. + This also only applies when doing bulk upserts. + Returns: + :class:`SyncQueryRequestBuilder` + """ + method, params, headers, json = pre_upsert( + json, + count=count, + returning=returning, + ignore_duplicates=ignore_duplicates, + on_conflict=on_conflict, + default_to_null=default_to_null, + ) + return SyncQueryRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def update( + self, + json: dict, + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> SyncFilterRequestBuilder[_ReturnT]: + """Run an UPDATE query. + + Args: + json: The updated fields. + count: The method to use to get the count of rows returned. + returning: Either 'minimal' or 'representation' + Returns: + :class:`SyncFilterRequestBuilder` + """ + method, params, headers, json = pre_update( + json, + count=count, + returning=returning, + ) + return SyncFilterRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) + + def delete( + self, + *, + count: Optional[CountMethod] = None, + returning: ReturnMethod = ReturnMethod.representation, + ) -> SyncFilterRequestBuilder[_ReturnT]: + """Run a DELETE query. + + Args: + count: The method to use to get the count of rows returned. 
+ returning: Either 'minimal' or 'representation' + Returns: + :class:`SyncFilterRequestBuilder` + """ + method, params, headers, json = pre_delete( + count=count, + returning=returning, + ) + return SyncFilterRequestBuilder[_ReturnT]( + self.session, self.path, method, headers, params, json + ) diff --git a/src/postgrest/src/postgrest/base_client.py b/src/postgrest/src/postgrest/base_client.py new file mode 100644 index 00000000..f4a819d8 --- /dev/null +++ b/src/postgrest/src/postgrest/base_client.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Dict, Optional, Union + +from httpx import AsyncClient, BasicAuth, Client, Timeout + +from .utils import is_http_url + + +class BasePostgrestClient(ABC): + """Base PostgREST client.""" + + def __init__( + self, + base_url: str, + *, + schema: str, + headers: Dict[str, str], + timeout: Union[int, float, Timeout], + verify: bool = True, + proxy: Optional[str] = None, + http_client: Union[Client, AsyncClient, None] = None, + ) -> None: + if not is_http_url(base_url): + ValueError("base_url must be a valid HTTP URL string") + + self.base_url = base_url + self.headers = { + **headers, + "Accept-Profile": schema, + "Content-Profile": schema, + } + self.timeout = timeout + self.verify = verify + self.proxy = proxy + self.http_client = http_client + self.session = self.create_session( + self.base_url, + self.headers, + self.timeout, + self.verify, + self.proxy, + ) + + @abstractmethod + def create_session( + self, + base_url: str, + headers: Dict[str, str], + timeout: Union[int, float, Timeout], + verify: bool = True, + proxy: Optional[str] = None, + ) -> Union[Client, AsyncClient]: + raise NotImplementedError() + + def auth( + self, + token: Optional[str], + *, + username: Union[str, bytes, None] = None, + password: Union[str, bytes] = "", + ): + """ + Authenticate the client with either bearer token or basic authentication. + + Raises: + `ValueError`: If neither authentication scheme is provided. + + .. note:: + Bearer token is preferred if both ones are provided. 
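+
+        Example (token and credentials are placeholders):
+
+        .. code-block:: python
+
+            client.auth("my-jwt-token")
+            # or, with HTTP basic authentication:
+            client.auth(None, username="admin", password="secret")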
+ """ + if token: + self.session.headers["Authorization"] = f"Bearer {token}" + elif username: + self.session.auth = BasicAuth(username, password) + else: + raise ValueError( + "Neither bearer token or basic authentication scheme is provided" + ) + return self diff --git a/src/postgrest/src/postgrest/base_request_builder.py b/src/postgrest/src/postgrest/base_request_builder.py new file mode 100644 index 00000000..973316fe --- /dev/null +++ b/src/postgrest/src/postgrest/base_request_builder.py @@ -0,0 +1,687 @@ +from __future__ import annotations + +import json +from json import JSONDecodeError +from re import search +from typing import ( + Any, + Dict, + Generic, + Iterable, + List, + Literal, + NamedTuple, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from httpx import AsyncClient, Client, Headers, QueryParams +from httpx import Response as RequestResponse +from pydantic import BaseModel + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +try: + # >= 2.0.0 + from pydantic import field_validator +except ImportError: + # < 2.0.0 + from pydantic import validator as field_validator + +from .types import CountMethod, Filters, RequestMethod, ReturnMethod +from .utils import get_origin_and_cast, sanitize_param + + +class QueryArgs(NamedTuple): + # groups the method, json, headers and params for a query in a single object + method: RequestMethod + params: QueryParams + headers: Headers + json: Dict[Any, Any] + + +def _unique_columns(json: List[Dict]): + unique_keys = {key for row in json for key in row.keys()} + columns = ",".join([f'"{k}"' for k in unique_keys]) + return columns + + +def _cleaned_columns(columns: Tuple[str, ...]) -> str: + quoted = False + cleaned = [] + + for column in columns: + clean_column = "" + for char in column: + if char.isspace() and not quoted: + continue + if char == '"': + quoted = not quoted + clean_column += char + cleaned.append(clean_column) + + return ",".join(cleaned) + + +def pre_select( + *columns: str, + count: Optional[CountMethod] = None, + head: Optional[bool] = None, +) -> QueryArgs: + method = RequestMethod.HEAD if head else RequestMethod.GET + cleaned_columns = _cleaned_columns(columns or "*") + params = QueryParams({"select": cleaned_columns}) + + headers = Headers({"Prefer": f"count={count}"}) if count else Headers() + return QueryArgs(method, params, headers, {}) + + +def pre_insert( + json: Union[dict, list], + *, + count: Optional[CountMethod], + returning: ReturnMethod, + upsert: bool, + default_to_null: bool = True, +) -> QueryArgs: + prefer_headers = [f"return={returning}"] + if count: + prefer_headers.append(f"count={count}") + if upsert: + prefer_headers.append("resolution=merge-duplicates") + if not default_to_null: + prefer_headers.append("missing=default") + headers = Headers({"Prefer": ",".join(prefer_headers)}) + # Adding 'columns' query parameters + query_params = {} + if isinstance(json, list): + query_params = {"columns": _unique_columns(json)} + return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) + + +def pre_upsert( + json: Union[dict, list], + *, + count: Optional[CountMethod], + returning: ReturnMethod, + ignore_duplicates: bool, + on_conflict: str = "", + default_to_null: bool = True, +) -> QueryArgs: + query_params = {} + prefer_headers = [f"return={returning}"] + if count: + prefer_headers.append(f"count={count}") + resolution = "ignore" if ignore_duplicates else "merge" + prefer_headers.append(f"resolution={resolution}-duplicates") + if not 
default_to_null: + prefer_headers.append("missing=default") + headers = Headers({"Prefer": ",".join(prefer_headers)}) + if on_conflict: + query_params["on_conflict"] = on_conflict + # Adding 'columns' query parameters + if isinstance(json, list): + query_params["columns"] = _unique_columns(json) + return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) + + +def pre_update( + json: dict, + *, + count: Optional[CountMethod], + returning: ReturnMethod, +) -> QueryArgs: + prefer_headers = [f"return={returning}"] + if count: + prefer_headers.append(f"count={count}") + headers = Headers({"Prefer": ",".join(prefer_headers)}) + return QueryArgs(RequestMethod.PATCH, QueryParams(), headers, json) + + +def pre_delete( + *, + count: Optional[CountMethod], + returning: ReturnMethod, +) -> QueryArgs: + prefer_headers = [f"return={returning}"] + if count: + prefer_headers.append(f"count={count}") + headers = Headers({"Prefer": ",".join(prefer_headers)}) + return QueryArgs(RequestMethod.DELETE, QueryParams(), headers, {}) + + +_ReturnT = TypeVar("_ReturnT") + + +# the APIResponse.data is marked as _ReturnT instead of list[_ReturnT] +# as it is also returned in the case of rpc() calls; and rpc calls do not +# necessarily return lists. +# https://github.com/supabase-community/postgrest-py/issues/200 +class APIResponse(BaseModel, Generic[_ReturnT]): + data: List[_ReturnT] + """The data returned by the query.""" + count: Optional[int] = None + """The number of rows returned.""" + + @field_validator("data") + @classmethod + def raise_when_api_error(cls: Type[Self], value: Any) -> Any: + if isinstance(value, dict) and value.get("message"): + raise ValueError("You are passing an API error to the data field.") + return value + + @staticmethod + def _get_count_from_content_range_header( + content_range_header: str, + ) -> Optional[int]: + content_range = content_range_header.split("/") + return None if len(content_range) < 2 else int(content_range[1]) + + @staticmethod + def _is_count_in_prefer_header(prefer_header: str) -> bool: + pattern = f"count=({'|'.join([cm.value for cm in CountMethod])})" + return bool(search(pattern, prefer_header)) + + @classmethod + def _get_count_from_http_request_response( + cls: Type[Self], + request_response: RequestResponse, + ) -> Optional[int]: + prefer_header: Optional[str] = request_response.request.headers.get("prefer") + if not prefer_header: + return None + is_count_in_prefer_header = cls._is_count_in_prefer_header(prefer_header) + content_range_header: Optional[str] = request_response.headers.get( + "content-range" + ) + return ( + cls._get_count_from_content_range_header(content_range_header) + if (is_count_in_prefer_header and content_range_header) + else None + ) + + @classmethod + def from_http_request_response( + cls: Type[Self], request_response: RequestResponse + ) -> Self: + count = cls._get_count_from_http_request_response(request_response) + try: + data = request_response.json() + except JSONDecodeError: + data = request_response.text if len(request_response.text) > 0 else [] + # the type-ignore here is as pydantic needs us to pass the type parameter + # here explicitly, but pylance already knows that cls is correctly parametrized + return cls[_ReturnT](data=data, count=count) # type: ignore + + @classmethod + def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self: + keys = dict.keys() + assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys + return cls[_ReturnT]( # type: ignore + data=dict.get("data"), 
count=dict.get("count"), error=dict.get("error") + ) + + +class SingleAPIResponse(APIResponse[_ReturnT], Generic[_ReturnT]): + data: _ReturnT # type: ignore + """The data returned by the query.""" + + @classmethod + def from_http_request_response( + cls: Type[Self], request_response: RequestResponse + ) -> Self: + count = cls._get_count_from_http_request_response(request_response) + try: + data = request_response.json() + except JSONDecodeError: + data = request_response.text if len(request_response.text) > 0 else [] + return cls[_ReturnT](data=data, count=count) # type: ignore + + @classmethod + def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self: + keys = dict.keys() + assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys + return cls[_ReturnT]( # type: ignore + data=dict.get("data"), count=dict.get("count"), error=dict.get("error") + ) + + +class BaseFilterRequestBuilder(Generic[_ReturnT]): + def __init__( + self, + session: Union[AsyncClient, Client], + headers: Headers, + params: QueryParams, + ) -> None: + self.session = session + self.headers = headers + self.params = params + self.negate_next = False + + @property + def not_(self: Self) -> Self: + """Whether the filter applied next should be negated.""" + self.negate_next = True + return self + + def filter(self: Self, column: str, operator: str, criteria: str) -> Self: + """Apply filters on a query. + + Args: + column: The name of the column to apply a filter on + operator: The operator to use while filtering + criteria: The value to filter by + """ + if self.negate_next is True: + self.negate_next = False + operator = f"{Filters.NOT}.{operator}" + key, val = sanitize_param(column), f"{operator}.{criteria}" + self.params = self.params.add(key, val) + return self + + def eq(self: Self, column: str, value: Any) -> Self: + """An 'equal to' filter. 
+ + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.EQ, value) + + def neq(self: Self, column: str, value: Any) -> Self: + """A 'not equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.NEQ, value) + + def gt(self: Self, column: str, value: Any) -> Self: + """A 'greater than' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.GT, value) + + def gte(self: Self, column: str, value: Any) -> Self: + """A 'greater than or equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.GTE, value) + + def lt(self: Self, column: str, value: Any) -> Self: + """A 'less than' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.LT, value) + + def lte(self: Self, column: str, value: Any) -> Self: + """A 'less than or equal to' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + return self.filter(column, Filters.LTE, value) + + def is_(self: Self, column: str, value: Any) -> Self: + """An 'is' filter + + Args: + column: The name of the column to apply a filter on + value: The value to filter by + """ + if value is None: + value = "null" + return self.filter(column, Filters.IS, value) + + def like(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + return self.filter(column, Filters.LIKE, pattern) + + def like_all_of(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.LIKE_ALL, f"{{{pattern}}}") + + def like_any_of(self: Self, column: str, pattern: str) -> Self: + """A 'LIKE' filter, to use for pattern matching. + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.LIKE_ANY, f"{{{pattern}}}") + + def ilike_all_of(self: Self, column: str, pattern: str) -> Self: + """A 'ILIKE' filter, to use for pattern matching (case insensitive). + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.ILIKE_ALL, f"{{{pattern}}}") + + def ilike_any_of(self: Self, column: str, pattern: str) -> Self: + """A 'ILIKE' filter, to use for pattern matching (case insensitive). + + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + + return self.filter(column, Filters.ILIKE_ANY, f"{{{pattern}}}") + + def ilike(self: Self, column: str, pattern: str) -> Self: + """An 'ILIKE' filter, to use for pattern matching (case insensitive). 
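+        For example, ``.ilike("name", "%rome%")`` adds the query parameter
+        ``name=ilike.%rome%`` (column and pattern are illustrative).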
+ + Args: + column: The name of the column to apply a filter on + pattern: The pattern to filter by + """ + return self.filter(column, Filters.ILIKE, pattern) + + def or_(self: Self, filters: str, reference_table: Optional[str] = None) -> Self: + """An 'or' filter + + Args: + filters: The filters to use, following PostgREST syntax + reference_table: Set this to filter on referenced tables instead of the parent table + """ + key = f"{sanitize_param(reference_table)}.or" if reference_table else "or" + self.params = self.params.add(key, f"({filters})") + return self + + def fts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.FTS, query) + + def plfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.PLFTS, query) + + def phfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.PHFTS, query) + + def wfts(self: Self, column: str, query: Any) -> Self: + return self.filter(column, Filters.WFTS, query) + + def in_(self: Self, column: str, values: Iterable[Any]) -> Self: + values = map(sanitize_param, values) + values = ",".join(values) + return self.filter(column, Filters.IN, f"({values})") + + def cs(self: Self, column: str, values: Iterable[Any]) -> Self: + values = ",".join(values) + return self.filter(column, Filters.CS, f"{{{values}}}") + + def cd(self: Self, column: str, values: Iterable[Any]) -> Self: + values = ",".join(values) + return self.filter(column, Filters.CD, f"{{{values}}}") + + def contains( + self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]] + ) -> Self: + if isinstance(value, str): + # range types can be inclusive '[', ']' or exclusive '(', ')' so just + # keep it simple and accept a string + return self.filter(column, Filters.CS, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + # Expected to be some type of iterable + stringified_values = ",".join(value) + return self.filter(column, Filters.CS, f"{{{stringified_values}}}") + + return self.filter(column, Filters.CS, json.dumps(value)) + + def contained_by( + self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]] + ) -> Self: + if isinstance(value, str): + # range + return self.filter(column, Filters.CD, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + stringified_values = ",".join(value) + return self.filter(column, Filters.CD, f"{{{stringified_values}}}") + return self.filter(column, Filters.CD, json.dumps(value)) + + def ov(self: Self, column: str, value: Iterable[Any]) -> Self: + if isinstance(value, str): + # range types can be inclusive '[', ']' or exclusive '(', ')' so just + # keep it simple and accept a string + return self.filter(column, Filters.OV, value) + if not isinstance(value, dict) and isinstance(value, Iterable): + # Expected to be some type of iterable + stringified_values = ",".join(value) + return self.filter(column, Filters.OV, f"{{{stringified_values}}}") + return self.filter(column, Filters.OV, json.dumps(value)) + + def sl(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.filter(column, Filters.SL, f"({range[0]},{range[1]})") + + def sr(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.filter(column, Filters.SR, f"({range[0]},{range[1]})") + + def nxl(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.filter(column, Filters.NXL, f"({range[0]},{range[1]})") + + def nxr(self: Self, column: str, range: Tuple[int, int]) -> Self: + return 
self.filter(column, Filters.NXR, f"({range[0]},{range[1]})") + + def adj(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.filter(column, Filters.ADJ, f"({range[0]},{range[1]})") + + def range_gt(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.sr(column, range) + + def range_gte(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.nxl(column, range) + + def range_lt(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.sl(column, range) + + def range_lte(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.nxr(column, range) + + def range_adjacent(self: Self, column: str, range: Tuple[int, int]) -> Self: + return self.adj(column, range) + + def overlaps(self: Self, column: str, values: Iterable[Any]) -> Self: + return self.ov(column, values) + + def match(self: Self, query: Dict[str, Any]) -> Self: + updated_query = self + + if not query: + raise ValueError( + "query dictionary should contain at least one key-value pair" + ) + + for key, value in query.items(): + updated_query = self.eq(key, value) + + return updated_query + + +class BaseSelectRequestBuilder(BaseFilterRequestBuilder[_ReturnT]): + def __init__( + self, + session: Union[AsyncClient, Client], + headers: Headers, + params: QueryParams, + ) -> None: + # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__ + # tries to call _GenericAlias.__init__ - which is the wrong method + # The __origin__ attribute of the _GenericAlias is the actual class + get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + + def explain( + self: Self, + analyze: bool = False, + verbose: bool = False, + settings: bool = False, + buffers: bool = False, + wal: bool = False, + format: Literal["text", "json"] = "text", + ) -> Self: + options = [ + key + for key, value in locals().items() + if key not in ["self", "format"] and value + ] + options_str = "|".join(options) + self.headers["Accept"] = ( + f"application/vnd.pgrst.plan+{format}; options={options_str}" + ) + return self + + def order( + self: Self, + column: str, + *, + desc: bool = False, + nullsfirst: Optional[bool] = None, + foreign_table: Optional[str] = None, + ) -> Self: + """Sort the returned rows in some specific order. + + Args: + column: The column to order by + desc: Whether the rows should be ordered in descending order or not. + nullsfirst: nullsfirst + foreign_table: Foreign table name whose results are to be ordered. + .. versionchanged:: 0.10.3 + Allow ordering results for foreign tables with the foreign_table parameter. + """ + key = f"{foreign_table}.order" if foreign_table else "order" + existing_order = self.params.get(key) + + self.params = self.params.set( + key, + f"{existing_order + ',' if existing_order else ''}" + + f"{column}.{'desc' if desc else 'asc'}" + + ( + f".{'nullsfirst' if nullsfirst else 'nullslast'}" + if nullsfirst is not None + else "" + ), + ) + return self + + def limit(self: Self, size: int, *, foreign_table: Optional[str] = None) -> Self: + """Limit the number of rows returned by a query. + + Args: + size: The number of rows to be returned + foreign_table: Foreign table name to limit + .. versionchanged:: 0.10.3 + Allow limiting results returned for foreign tables with the foreign_table parameter. 
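+
+        Example (``query`` is a previously built select request; ``cities`` is a
+        hypothetical embedded foreign table):
+
+        .. code-block:: python
+
+            query.limit(10)
+            query.limit(3, foreign_table="cities")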
+ """ + self.params = self.params.add( + f"{foreign_table}.limit" if foreign_table else "limit", + size, + ) + return self + + def offset(self: Self, size: int) -> Self: + """Set the starting row index returned by a query. + Args: + size: The number of the row to start at + """ + self.params = self.params.add( + "offset", + size, + ) + return self + + def range( + self: Self, start: int, end: int, foreign_table: Optional[str] = None + ) -> Self: + self.params = self.params.add( + f"{foreign_table}.offset" if foreign_table else "offset", start + ) + self.params = self.params.add( + f"{foreign_table}.limit" if foreign_table else "limit", + end - start + 1, + ) + return self + + +class BaseRPCRequestBuilder(BaseSelectRequestBuilder[_ReturnT]): + def __init__( + self, + session: Union[AsyncClient, Client], + headers: Headers, + params: QueryParams, + ) -> None: + # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__ + # tries to call _GenericAlias.__init__ - which is the wrong method + # The __origin__ attribute of the _GenericAlias is the actual class + get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__( + self, session, headers, params + ) + + def select( + self, + *columns: str, + ) -> Self: + """Run a SELECT query. + + Args: + *columns: The names of the columns to fetch. + Returns: + :class:`BaseSelectRequestBuilder` + """ + method, params, headers, json = pre_select(*columns, count=None) + self.params = self.params.add("select", params.get("select")) + if self.headers.get("Prefer"): + self.headers["Prefer"] += ",return=representation" + else: + self.headers["Prefer"] = "return=representation" + + return self + + def single(self) -> Self: + """Specify that the query will only return a single row in response. + + .. caution:: + The API will raise an error if the query returned more than one row. + """ + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return self + + def maybe_single(self) -> Self: + """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" + self.headers["Accept"] = "application/vnd.pgrst.object+json" + return self + + def csv(self) -> Self: + """Specify that the query must retrieve data as a single CSV string.""" + self.headers["Accept"] = "text/csv" + return self diff --git a/src/postgrest/src/postgrest/constants.py b/src/postgrest/src/postgrest/constants.py new file mode 100644 index 00000000..4c3c17c8 --- /dev/null +++ b/src/postgrest/src/postgrest/constants.py @@ -0,0 +1,6 @@ +DEFAULT_POSTGREST_CLIENT_HEADERS = { + "Accept": "application/json", + "Content-Type": "application/json", +} + +DEFAULT_POSTGREST_CLIENT_TIMEOUT = 120 diff --git a/src/postgrest/src/postgrest/exceptions.py b/src/postgrest/src/postgrest/exceptions.py new file mode 100644 index 00000000..d4ef668d --- /dev/null +++ b/src/postgrest/src/postgrest/exceptions.py @@ -0,0 +1,68 @@ +from typing import Any, Dict, Optional + +from pydantic import BaseModel + + +class APIErrorFromJSON(BaseModel): + """ + A pydantic object to validate an error info object + from a json string. + """ + + message: Optional[str] + """The error message.""" + code: Optional[str] + """The error code.""" + hint: Optional[str] + """The error hint.""" + details: Optional[str] + """The error details.""" + + +class APIError(Exception): + """ + Base exception for all API errors. 
+ """ + + _raw_error: Dict[str, str] + message: Optional[str] + """The error message.""" + code: Optional[str] + """The error code.""" + hint: Optional[str] + """The error hint.""" + details: Optional[str] + """The error details.""" + + def __init__(self, error: Dict[str, Any]) -> None: + self._raw_error = error + self.message = error.get("message") + self.code = error.get("code") + self.hint = error.get("hint") + self.details = error.get("details") + Exception.__init__(self, str(self)) + + def __repr__(self) -> str: + error_text = f"Error {self.code}:" if self.code else "" + message_text = f"\nMessage: {self.message}" if self.message else "" + hint_text = f"\nHint: {self.hint}" if self.hint else "" + details_text = f"\nDetails: {self.details}" if self.details else "" + complete_error_text = f"{error_text}{message_text}{hint_text}{details_text}" + return complete_error_text or "Empty error" + + def json(self) -> Dict[str, str]: + """Convert the error into a dictionary. + + Returns: + :class:`dict` + """ + return self._raw_error + + +def generate_default_error_message(r): + return { + "message": "JSON could not be generated", + "code": r.status_code, + "hint": "Refer to full message for details", + "details": str(r.content), + } diff --git a/src/postgrest/src/postgrest/py.typed b/src/postgrest/src/postgrest/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/postgrest/src/postgrest/types.py b/src/postgrest/src/postgrest/types.py new file mode 100644 index 00000000..fa6f94ce --- /dev/null +++ b/src/postgrest/src/postgrest/types.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import sys + +if sys.version_info >= (3, 11): + from enum import StrEnum +else: + from strenum import StrEnum + + +class CountMethod(StrEnum): + exact = "exact" + planned = "planned" + estimated = "estimated" + + +class Filters(StrEnum): + NOT = "not" + EQ = "eq" + NEQ = "neq" + GT = "gt" + GTE = "gte" + LT = "lt" + LTE = "lte" + IS = "is" + LIKE = "like" + LIKE_ALL = "like(all)" + LIKE_ANY = "like(any)" + ILIKE = "ilike" + ILIKE_ALL = "ilike(all)" + ILIKE_ANY = "ilike(any)" + FTS = "fts" + PLFTS = "plfts" + PHFTS = "phfts" + WFTS = "wfts" + IN = "in" + CS = "cs" + CD = "cd" + OV = "ov" + SL = "sl" + SR = "sr" + NXL = "nxl" + NXR = "nxr" + ADJ = "adj" + + +class RequestMethod(StrEnum): + GET = "GET" + POST = "POST" + PATCH = "PATCH" + PUT = "PUT" + DELETE = "DELETE" + HEAD = "HEAD" + + +class ReturnMethod(StrEnum): + minimal = "minimal" + representation = "representation" diff --git a/src/postgrest/src/postgrest/utils.py b/src/postgrest/src/postgrest/utils.py new file mode 100644 index 00000000..c458af26 --- /dev/null +++ b/src/postgrest/src/postgrest/utils.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from typing import Any, Type, TypeVar, cast, get_origin +from urllib.parse import urlparse + +from deprecation import deprecated +from httpx import AsyncClient # noqa: F401 +from httpx import Client as BaseClient # noqa: F401 +from pydantic import BaseModel + +from .version import __version__ + + +class SyncClient(BaseClient): + @deprecated( + "1.0.2", "1.3.0", __version__, "Use `Client` from the httpx package instead" + ) + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + @deprecated( + "1.0.2", + "1.3.0", + __version__, + "Use `close` method from `Client` in the httpx package instead", + ) + def aclose(self) -> None: + self.close() + + +def sanitize_param(param: Any) -> str: + param_str = str(param) + reserved_chars = ",:()" + if any(char in 
param_str for char in reserved_chars): + return f'"{param_str}"' + return param_str + + +def sanitize_pattern_param(pattern: str) -> str: + return sanitize_param(pattern.replace("%", "*")) + + +_T = TypeVar("_T") + + +def get_origin_and_cast(typ: type[type[_T]]) -> type[_T]: + # Base[T] is an instance of typing._GenericAlias, so doing Base[T].__init__ + # tries to call _GenericAlias.__init__ - which is the wrong method + # get_origin(Base[T]) returns Base + # This function casts Base back to Base[T] to maintain type-safety + # while still allowing us to access the methods of `Base` at runtime + # See: definitions of request builders that use multiple-inheritance + # like AsyncFilterRequestBuilder + return cast(Type[_T], get_origin(typ)) + + +def is_http_url(url: str) -> bool: + return urlparse(url).scheme in {"https", "http"} + + +TBaseModel = TypeVar("TBaseModel", bound=BaseModel) + + +def model_validate_json(model: Type[TBaseModel], contents) -> TBaseModel: + """Compatibility layer between pydantic 1 and 2 for parsing an instance + of a BaseModel from varied""" + try: + # pydantic > 2 + return model.model_validate_json(contents) + except AttributeError: + # pydantic < 2 + return model.parse_raw(contents) diff --git a/src/postgrest/src/postgrest/version.py b/src/postgrest/src/postgrest/version.py new file mode 100644 index 00000000..349bf79f --- /dev/null +++ b/src/postgrest/src/postgrest/version.py @@ -0,0 +1 @@ +__version__ = "1.1.1" # {x-release-please-version} diff --git a/src/postgrest/tests/__init__.py b/src/postgrest/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/postgrest/tests/_async/__init__.py b/src/postgrest/tests/_async/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/postgrest/tests/_async/client.py b/src/postgrest/tests/_async/client.py new file mode 100644 index 00000000..25cdeb0e --- /dev/null +++ b/src/postgrest/tests/_async/client.py @@ -0,0 +1,28 @@ +from httpx import AsyncClient, AsyncHTTPTransport, Limits + +from postgrest import AsyncPostgrestClient + +REST_URL = "http://127.0.0.1:3000" + + +def rest_client(): + return AsyncPostgrestClient( + base_url=REST_URL, + ) + + +def rest_client_httpx(): + transport = AsyncHTTPTransport( + retries=4, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = AsyncClient(transport=transport, headers=headers) + return AsyncPostgrestClient( + base_url=REST_URL, + http_client=http_client, + ) diff --git a/src/postgrest/tests/_async/test_client.py b/src/postgrest/tests/_async/test_client.py new file mode 100644 index 00000000..24fc43cc --- /dev/null +++ b/src/postgrest/tests/_async/test_client.py @@ -0,0 +1,189 @@ +from unittest.mock import patch + +import pytest +from httpx import ( + AsyncClient, + AsyncHTTPTransport, + BasicAuth, + Headers, + Limits, + Request, + Response, + Timeout, +) + +from postgrest import AsyncPostgrestClient +from postgrest.exceptions import APIError + + +@pytest.fixture +async def postgrest_client(): + async with AsyncPostgrestClient("https://example.com") as client: + yield client + + +class TestConstructor: + def test_simple(self, postgrest_client: AsyncPostgrestClient): + session = postgrest_client.session + + assert session.base_url == "https://example.com" + headers = Headers( + { + "Accept": "application/json", + "Content-Type": "application/json", + "Accept-Profile": "public", + "Content-Profile": "public", + } + ) + assert 
session.headers.items() >= headers.items() + + @pytest.mark.asyncio + async def test_custom_headers(self): + async with AsyncPostgrestClient( + "https://example.com", schema="pub", headers={"Custom-Header": "value"} + ) as client: + session = client.session + + assert session.base_url == "https://example.com" + headers = Headers( + { + "Accept-Profile": "pub", + "Content-Profile": "pub", + "Custom-Header": "value", + } + ) + assert session.headers.items() >= headers.items() + + +class TestHttpxClientConstructor: + @pytest.mark.asyncio + async def test_custom_httpx_client(self): + transport = AsyncHTTPTransport( + retries=10, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = AsyncClient(transport=transport, headers=headers) + async with AsyncPostgrestClient( + "https://example.com", http_client=http_client, timeout=20.0 + ) as client: + session = client.session + + assert session.base_url == "https://example.com" + assert session.timeout == Timeout( + timeout=5.0 + ) # Should be the default 5 since we use custom httpx client + assert session.headers.get("x-user-agent") == "my-app/0.0.1" + assert isinstance(session, AsyncClient) + + +class TestAuth: + def test_auth_token(self, postgrest_client: AsyncPostgrestClient): + postgrest_client.auth("s3cr3t") + session = postgrest_client.session + + assert session.headers["Authorization"] == "Bearer s3cr3t" + + def test_auth_basic(self, postgrest_client: AsyncPostgrestClient): + postgrest_client.auth(None, username="admin", password="s3cr3t") + session = postgrest_client.session + + assert isinstance(session.auth, BasicAuth) + assert session.auth._auth_header == BasicAuth("admin", "s3cr3t")._auth_header + + +def test_schema(postgrest_client: AsyncPostgrestClient): + client = postgrest_client.schema("private") + session = client.session + subheaders = { + "accept-profile": "private", + "content-profile": "private", + } + + assert subheaders.items() < dict(session.headers).items() + + +@pytest.mark.asyncio +async def test_params_purged_after_execute(postgrest_client: AsyncPostgrestClient): + assert len(postgrest_client.session.params) == 0 + with pytest.raises(APIError): + await postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() + assert len(postgrest_client.session.params) == 0 + + +@pytest.mark.asyncio +async def test_response_status_code_outside_ok(postgrest_client: AsyncPostgrestClient): + with patch( + "postgrest._async.request_builder.AsyncSelectRequestBuilder.execute", + side_effect=APIError( + { + "message": "mock error", + "code": "400", + "hint": "mock", + "details": "mock", + "errors": [{"code": 400}], + } + ), + ): + with pytest.raises(APIError) as exc_info: + await ( + postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() + ) # gives status_code = 400 + exc_response = exc_info.value.json() + assert not exc_response.get("success") + assert isinstance(exc_response.get("errors"), list) + assert ( + isinstance(exc_response["errors"][0], dict) + and "code" in exc_response["errors"][0] + ) + assert exc_response["errors"][0].get("code") == 400 + + +@pytest.mark.asyncio +async def test_response_maybe_single(postgrest_client: AsyncPostgrestClient): + with patch( + "postgrest._async.request_builder.AsyncSingleRequestBuilder.execute", + side_effect=APIError( + {"message": "mock error", "code": "400", "hint": "mock", "details": "mock"} + ), + ): + client = ( + postgrest_client.from_("test").select("a", 
"b").eq("c", "d").maybe_single() + ) + assert "Accept" in client.headers + assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" + with pytest.raises(APIError) as exc_info: + await client.execute() + assert isinstance(exc_info, pytest.ExceptionInfo) + exc_response = exc_info.value.json() + assert isinstance(exc_response.get("message"), str) + assert "code" in exc_response and int(exc_response["code"]) == 204 + + +# https://github.com/supabase/postgrest-py/issues/595 +@pytest.mark.asyncio +async def test_response_client_invalid_response_but_valid_json( + postgrest_client: AsyncPostgrestClient, +): + with patch( + "httpx._client.AsyncClient.request", + return_value=Response( + status_code=502, + text='"gateway error: Error: Network connection lost."', # quotes makes this text a valid non-dict JSON object + request=Request(method="GET", url="http://example.com"), + ), + ): + client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single() + assert "Accept" in client.headers + assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" + with pytest.raises(APIError) as exc_info: + await client.execute() + assert isinstance(exc_info, pytest.ExceptionInfo) + exc_response = exc_info.value.json() + assert isinstance(exc_response.get("message"), str) + assert exc_response.get("message") == "JSON could not be generated" + assert "code" in exc_response and int(exc_response["code"]) == 502 diff --git a/src/postgrest/tests/_async/test_filter_request_builder.py b/src/postgrest/tests/_async/test_filter_request_builder.py new file mode 100644 index 00000000..b381e801 --- /dev/null +++ b/src/postgrest/tests/_async/test_filter_request_builder.py @@ -0,0 +1,243 @@ +import pytest +from httpx import AsyncClient, Headers, QueryParams + +from postgrest import AsyncFilterRequestBuilder + + +@pytest.fixture +async def filter_request_builder(): + async with AsyncClient() as client: + yield AsyncFilterRequestBuilder( + client, "/example_table", "GET", Headers(), QueryParams(), {} + ) + + +def test_constructor(filter_request_builder: AsyncFilterRequestBuilder): + builder = filter_request_builder + + assert builder.path == "/example_table" + assert len(builder.headers) == 0 + assert len(builder.params) == 0 + assert builder.http_method == "GET" + assert builder.json is None + assert not builder.negate_next + + +def test_not_(filter_request_builder): + builder = filter_request_builder.not_ + + assert builder.negate_next + + +def test_filter(filter_request_builder): + builder = filter_request_builder.filter(":col.name", "eq", "val") + + assert builder.params['":col.name"'] == "eq.val" + + +@pytest.mark.parametrize( + "col_name, expected_query_prefix", + [ + ("col:name", "%22col%3Aname%22"), + ("col.name", "col.name"), + ], +) +def test_filter_special_characters( + filter_request_builder, col_name, expected_query_prefix +): + builder = filter_request_builder.filter(col_name, "eq", "val") + + assert str(builder.params) == f"{expected_query_prefix}=eq.val" + + +def test_multivalued_param(filter_request_builder): + builder = filter_request_builder.lte("x", "a").gte("x", "b") + + assert str(builder.params) == "x=lte.a&x=gte.b" + + +def test_match(filter_request_builder): + builder = filter_request_builder.match({"id": "1", "done": "false"}) + assert str(builder.params) == "id=eq.1&done=eq.false" + + +def test_equals(filter_request_builder): + builder = filter_request_builder.eq("x", "a") + + assert str(builder.params) == "x=eq.a" + + +def 
test_not_equal(filter_request_builder): + builder = filter_request_builder.neq("x", "a") + + assert str(builder.params) == "x=neq.a" + + +def test_greater_than(filter_request_builder): + builder = filter_request_builder.gt("x", "a") + + assert str(builder.params) == "x=gt.a" + + +def test_greater_than_or_equals_to(filter_request_builder): + builder = filter_request_builder.gte("x", "a") + + assert str(builder.params) == "x=gte.a" + + +def test_contains(filter_request_builder): + builder = filter_request_builder.contains("x", "a") + + assert str(builder.params) == "x=cs.a" + + +def test_contains_dictionary(filter_request_builder): + builder = filter_request_builder.contains("x", {"a": "b"}) + + # {"a":"b"} + assert str(builder.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D" + + +def test_contains_any_item(filter_request_builder): + builder = filter_request_builder.contains("x", ["a", "b"]) + + # {a,b} + assert str(builder.params) == "x=cs.%7Ba%2Cb%7D" + + +def test_contains_in_list(filter_request_builder): + builder = filter_request_builder.contains("x", '[{"a": "b"}]') + + # [{"a":+"b"}] (the + represents the space) + assert str(builder.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D" + + +def test_contained_by_mixed_items(filter_request_builder): + builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D" + + +def test_range_greater_than(filter_request_builder): + builder = filter_request_builder.range_gt( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_greater_than_or_equal_to(filter_request_builder): + builder = filter_request_builder.range_gte( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_less_than(filter_request_builder): + builder = filter_request_builder.range_lt( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_less_than_or_equal_to(filter_request_builder): + builder = filter_request_builder.range_lte( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_adjacent(filter_request_builder): + builder = filter_request_builder.range_adjacent( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_overlaps(filter_request_builder): + builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"]) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D" + + +def test_overlaps_with_timestamp_range(filter_request_builder): + builder = filter_request_builder.overlaps( + "x", "[2000-01-01 12:45, 2000-01-01 13:15)" + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29" + + +def test_like(filter_request_builder): + builder = filter_request_builder.like("x", "%a%") + + assert str(builder.params) == "x=like.%25a%25" + + +def test_ilike(filter_request_builder): + builder = filter_request_builder.ilike("x", "%a%") + + assert str(builder.params) == "x=ilike.%25a%25" 
+ + +def test_like_all_of(filter_request_builder): + builder = filter_request_builder.like_all_of("x", "A*,*b") + + assert str(builder.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D" + + +def test_like_any_of(filter_request_builder): + builder = filter_request_builder.like_any_of("x", "a*,*b") + + assert str(builder.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D" + + +def test_ilike_all_of(filter_request_builder): + builder = filter_request_builder.ilike_all_of("x", "A*,*b") + + assert str(builder.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D" + + +def test_ilike_any_of(filter_request_builder): + builder = filter_request_builder.ilike_any_of("x", "A*,*b") + + assert str(builder.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D" + + +def test_is_(filter_request_builder): + builder = filter_request_builder.is_("x", "a") + + assert str(builder.params) == "x=is.a" + + +def test_in_(filter_request_builder): + builder = filter_request_builder.in_("x", ["a", "b"]) + + assert str(builder.params) == "x=in.%28a%2Cb%29" + + +def test_or_(filter_request_builder): + builder = filter_request_builder.or_("x.eq.1") + + assert str(builder.params) == "or=%28x.eq.1%29" + + +def test_or_in_contain(filter_request_builder): + builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") + + assert ( + str(builder.params) + == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29" + ) diff --git a/src/postgrest/tests/_async/test_filter_request_builder_integration.py b/src/postgrest/tests/_async/test_filter_request_builder_integration.py new file mode 100644 index 00000000..b4ff44fc --- /dev/null +++ b/src/postgrest/tests/_async/test_filter_request_builder_integration.py @@ -0,0 +1,586 @@ +from postgrest import CountMethod + +from .client import rest_client, rest_client_httpx + + +async def test_multivalued_param_httpx(): + res = ( + await rest_client_httpx() + .from_("countries") + .select("country_name, iso", count=CountMethod.exact) + .lte("numcode", 8) + .gte("numcode", 4) + .execute() + ) + + assert res.count == 2 + assert res.data == [ + {"country_name": "AFGHANISTAN", "iso": "AF"}, + {"country_name": "ALBANIA", "iso": "AL"}, + ] + + +async def test_multivalued_param(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso", count=CountMethod.exact) + .lte("numcode", 8) + .gte("numcode", 4) + .execute() + ) + + assert res.count == 2 + assert res.data == [ + {"country_name": "AFGHANISTAN", "iso": "AF"}, + {"country_name": "ALBANIA", "iso": "AL"}, + ] + + +async def test_match(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .match({"numcode": 8, "nicename": "Albania"}) + .single() + .execute() + ) + + assert res.data == {"country_name": "ALBANIA", "iso": "AL"} + + +async def test_equals(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .eq("nicename", "Albania") + .single() + .execute() + ) + + assert res.data == {"country_name": "ALBANIA", "iso": "AL"} + + +async def test_not_equal(): + res = ( + await rest_client() + .from_("users") + .select("id, name") + .neq("name", "Jane") + .single() + .execute() + ) + + assert res.data == {"id": 1, "name": "Michael"} + + +async def test_greater_than(): + res = ( + await rest_client() + .from_("users") + .select("id, name") + .gt("id", 1) + .single() + .execute() + ) + + assert res.data == {"id": 2, "name": "Jane"} + + +async def test_greater_than_or_equals_to(): + res = await rest_client().from_("users").select("id, 
name").gte("id", 1).execute() + + assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}] + + +async def test_contains_dictionary(): + res = ( + await rest_client() + .from_("users") + .select("name") + .contains("address", {"postcode": 90210}) + .single() + .execute() + ) + + assert res.data == {"name": "Michael"} + + +async def test_contains_any_item(): + res = ( + await rest_client() + .from_("issues") + .select("title") + .contains("tags", ["is:open", "priority:low"]) + .execute() + ) + + assert res.data == [{"title": "Cache invalidation is not working"}] + + +async def test_contains_on_range(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)") + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +async def test_contained_by_mixed_items(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .contained_by("during", "[2000-01-01 00:00, 2000-01-01 23:59)") + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +async def test_range_greater_than(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"]) + .execute() + ) + + assert res.data == [{"id": 2, "room_name": "Topaz"}] + + +async def test_range_greater_than_or_equal_to(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .range_gte("during", ["2000-01-02 08:30", "2000-01-02 09:30"]) + .execute() + ) + + assert res.data == [{"id": 2, "room_name": "Topaz"}] + + +async def test_range_less_than(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +async def test_range_less_than_or_equal_to(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +async def test_range_adjacent(): + res = ( + await rest_client() + .from_("reservations") + .select("id, room_name") + .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +async def test_overlaps(): + res = ( + await rest_client() + .from_("issues") + .select("title") + .overlaps("tags", ["is:closed", "severity:high"]) + .execute() + ) + + assert res.data == [ + {"title": "Cache invalidation is not working"}, + {"title": "Add alias to filters"}, + ] + + +async def test_overlaps_with_timestamp_range(): + res = ( + await rest_client() + .from_("reservations") + .select("room_name") + .overlaps("during", "[2000-01-01 12:45, 2000-01-01 13:15)") + .execute() + ) + + assert res.data == [ + {"room_name": "Emerald"}, + ] + + +async def test_like(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .like("nicename", "%Alba%") + .execute() + ) + + assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] + + +async def test_ilike(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .ilike("nicename", "%alban%") + .execute() + ) + + assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] + + +async def test_like_all_of(): + res = ( + await rest_client() + 
.from_("countries") + .select("nicename, iso") + .like_all_of("nicename", "A*,*n") + .execute() + ) + + assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] + + +async def test_like_any_of(): + res = ( + await rest_client() + .from_("countries") + .select("nicename, iso") + .like_any_of("nicename", "Al*,*ia") + .execute() + ) + + assert res.data == [ + {"iso": "AL", "nicename": "Albania"}, + {"iso": "DZ", "nicename": "Algeria"}, + ] + + +async def test_ilike_all_of(): + res = ( + await rest_client() + .from_("countries") + .select("nicename, iso") + .ilike_all_of("nicename", "a*,*n") + .execute() + ) + + assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] + + +async def test_ilike_any_of(): + res = ( + await rest_client() + .from_("countries") + .select("nicename, iso") + .ilike_any_of("nicename", "al*,*ia") + .execute() + ) + + assert res.data == [ + {"iso": "AL", "nicename": "Albania"}, + {"iso": "DZ", "nicename": "Algeria"}, + ] + + +async def test_is_(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .is_("numcode", "null") + .limit(1) + .order("nicename") + .execute() + ) + + assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}] + + +async def test_is_not(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .not_.is_("numcode", "null") + .limit(1) + .order("nicename") + .execute() + ) + + assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}] + + +async def test_in_(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .in_("nicename", ["Albania", "Algeria"]) + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "ALGERIA", "iso": "DZ"}, + ] + + +async def test_or_(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .or_("iso.eq.DZ,nicename.eq.Albania") + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "ALGERIA", "iso": "DZ"}, + ] + + +async def test_or_with_and(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)") + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "TRINIDAD AND TOBAGO", "iso": "TT"}, + ] + + +async def test_or_in(): + res = ( + await rest_client() + .from_("issues") + .select("id, title") + .or_("id.in.(1,4),tags.cs.{is:open,priority:high}") + .execute() + ) + + assert res.data == [ + {"id": 1, "title": "Cache invalidation is not working"}, + {"id": 3, "title": "Add missing postgrest filters"}, + {"id": 4, "title": "Add alias to filters"}, + ] + + +async def test_or_on_reference_table(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .execute() + ) + + assert res.data == [ + { + "country_name": "UNITED KINGDOM", + "cities": [ + {"name": "London"}, + {"name": "Manchester"}, + {"name": "Liverpool"}, + {"name": "Bristol"}, + ], + }, + ] + + +async def test_explain_json(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .explain(format="json", analyze=True) + .execute() + ) + assert res.data[0]["Plan"]["Node Type"] == "Aggregate" + + +async def test_csv(): + res = ( + await rest_client() + 
.from_("countries") + .select("country_name, iso") + .in_("nicename", ["Albania", "Algeria"]) + .csv() + .execute() + ) + assert "ALBANIA,AL\nALGERIA,DZ" in res.data + + +async def test_explain_text(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True) + .execute() + ) + assert ( + "((cities_1.country_id = countries.id) AND ((cities_1.country_id = '10'::bigint) OR (cities_1.name = 'Paris'::text)))" + in res + ) + + +async def test_rpc_with_single(): + res = ( + await rest_client() + .rpc("list_stored_countries", {}) + .select("nicename, country_name, iso") + .eq("nicename", "Albania") + .single() + .execute() + ) + + assert res.data == {"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"} + + +async def test_rpc_with_limit(): + res = ( + await rest_client() + .rpc("list_stored_countries", {}) + .select("nicename, country_name, iso") + .eq("nicename", "Albania") + .limit(1) + .execute() + ) + + assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}] + + +async def test_rpc_with_range(): + res = ( + await rest_client() + .rpc("list_stored_countries", {}) + .select("nicename, iso") + .range(1, 2) + .execute() + ) + + assert res.data == [ + {"nicename": "Albania", "iso": "AL"}, + {"nicename": "Algeria", "iso": "DZ"}, + ] + + +async def test_rpc_post_with_args(): + res = ( + await rest_client() + .rpc("search_countries_by_name", {"search_name": "Alban"}) + .select("nicename, iso") + .execute() + ) + assert res.data == [{"nicename": "Albania", "iso": "AL"}] + + +async def test_rpc_get_with_args(): + res = ( + await rest_client() + .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True) + .select("nicename, iso") + .execute() + ) + assert res.data == [{"nicename": "Algeria", "iso": "DZ"}] + + +async def test_rpc_get_with_count(): + res = ( + await rest_client() + .rpc( + "search_countries_by_name", + {"search_name": "Al"}, + get=True, + count=CountMethod.exact, + ) + .select("nicename") + .execute() + ) + assert res.count == 2 + assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] + + +async def test_rpc_head_count(): + res = ( + await rest_client() + .rpc( + "search_countries_by_name", + {"search_name": "Al"}, + head=True, + count=CountMethod.exact, + ) + .execute() + ) + + assert res.count == 2 + assert res.data == [] + + +async def test_order(): + res = ( + await rest_client() + .from_("countries") + .select("country_name, iso") + .limit(3) + .order("nicename", desc=True) + .execute() + ) + + assert res.data == [ + {"country_name": "ZIMBABWE", "iso": "ZW"}, + {"country_name": "UNITED STATES", "iso": "US"}, + {"country_name": "UNITED KINGDOM", "iso": "GB"}, + ] + + +async def test_order_on_foreign_table(): + res = ( + await rest_client() + .from_("orchestral_sections") + .select("name, instruments(name)") + .order("name", desc=True, foreign_table="instruments") + .execute() + ) + + assert res.data == [ + {"name": "strings", "instruments": [{"name": "violin"}, {"name": "harp"}]}, + {"name": "woodwinds", "instruments": []}, + ] diff --git a/src/postgrest/tests/_async/test_query_request_builder.py b/src/postgrest/tests/_async/test_query_request_builder.py new file mode 100644 index 00000000..ccbf5c5b --- /dev/null +++ b/src/postgrest/tests/_async/test_query_request_builder.py @@ -0,0 +1,22 @@ +import pytest +from httpx import AsyncClient, 
Headers, QueryParams + +from postgrest import AsyncQueryRequestBuilder + + +@pytest.fixture +async def query_request_builder(): + async with AsyncClient() as client: + yield AsyncQueryRequestBuilder( + client, "/example_table", "GET", Headers(), QueryParams(), {} + ) + + +def test_constructor(query_request_builder: AsyncQueryRequestBuilder): + builder = query_request_builder + + assert builder.path == "/example_table" + assert len(builder.headers) == 0 + assert len(builder.params) == 0 + assert builder.http_method == "GET" + assert builder.json is None diff --git a/src/postgrest/tests/_async/test_request_builder.py b/src/postgrest/tests/_async/test_request_builder.py new file mode 100644 index 00000000..07224344 --- /dev/null +++ b/src/postgrest/tests/_async/test_request_builder.py @@ -0,0 +1,492 @@ +from typing import Any, Dict, List + +import pytest +from httpx import AsyncClient, Request, Response + +from postgrest import AsyncRequestBuilder, AsyncSingleRequestBuilder +from postgrest.base_request_builder import APIResponse, SingleAPIResponse +from postgrest.types import CountMethod + + +@pytest.fixture +async def request_builder(): + async with AsyncClient() as client: + yield AsyncRequestBuilder(client, "/example_table") + + +def test_constructor(request_builder): + assert request_builder.path == "/example_table" + + +class TestSelect: + def test_select(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("col1", "col2") + + assert builder.params["select"] == "col1,col2" + assert builder.headers.get("prefer") is None + assert builder.http_method == "GET" + assert builder.json is None + + def test_select_with_count(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select(count=CountMethod.exact) + + assert builder.params["select"] == "*" + assert builder.headers["prefer"] == "count=exact" + assert builder.http_method == "GET" + assert builder.json is None + + def test_select_with_head(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("col1", "col2", head=True) + + assert builder.params.get("select") == "col1,col2" + assert builder.headers.get("prefer") is None + assert builder.http_method == "HEAD" + assert builder.json is None + + def test_select_as_csv(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("*").csv() + + assert builder.headers["Accept"] == "text/csv" + assert isinstance(builder, AsyncSingleRequestBuilder) + + +class TestInsert: + def test_insert(self, request_builder: AsyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_insert_with_count(self, request_builder: AsyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_insert_with_upsert(self, request_builder: AsyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}, upsert=True) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_upsert_with_default_single(self, request_builder: AsyncRequestBuilder): + builder 
= request_builder.upsert([{"key1": "val1"}], default_to_null=False) + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1"}] + assert builder.params.get("columns") == '"key1"' + + def test_bulk_insert_using_default(self, request_builder: AsyncRequestBuilder): + builder = request_builder.insert( + [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False + ) + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] + assert set(builder.params["columns"].split(",")) == set( + '"key1","key2","key3"'.split(",") + ) + + def test_upsert(self, request_builder: AsyncRequestBuilder): + builder = request_builder.upsert({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_bulk_upsert_with_default(self, request_builder: AsyncRequestBuilder): + builder = request_builder.upsert( + [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False + ) + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] + assert set(builder.params["columns"].split(",")) == set( + '"key1","key2","key3"'.split(",") + ) + + +class TestUpdate: + def test_update(self, request_builder: AsyncRequestBuilder): + builder = request_builder.update({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "PATCH" + assert builder.json == {"key1": "val1"} + + def test_update_with_count(self, request_builder: AsyncRequestBuilder): + builder = request_builder.update({"key1": "val1"}, count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "PATCH" + assert builder.json == {"key1": "val1"} + + +class TestDelete: + def test_delete(self, request_builder: AsyncRequestBuilder): + builder = request_builder.delete() + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "DELETE" + assert builder.json == {} + + def test_delete_with_count(self, request_builder: AsyncRequestBuilder): + builder = request_builder.delete(count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "DELETE" + assert builder.json == {} + + +class TestTextSearch: + def test_text_search(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("catchphrase").text_search( + "catchphrase", + "'fat' & 'cat'", + { + "type": "plain", + "config": "english", + }, + ) + assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str( + builder.params + ) + + +class TestExplain: + def test_explain_plain(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("*").explain() + assert builder.params["select"] == "*" + assert "application/vnd.pgrst.plan" in 
str(builder.headers.get("accept")) + + def test_explain_options(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("*").explain( + format="json", analyze=True, verbose=True, buffers=True, wal=True + ) + assert builder.params["select"] == "*" + assert "application/vnd.pgrst.plan+json;" in str(builder.headers.get("accept")) + assert "options=analyze|verbose|buffers|wal" in str( + builder.headers.get("accept") + ) + + +class TestOrder: + def test_order(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select().order("country_name", desc=True) + assert str(builder.params) == "select=%2A&order=country_name.desc" + + def test_multiple_orders(self, request_builder: AsyncRequestBuilder): + builder = ( + request_builder.select() + .order("country_name", desc=True) + .order("iso", desc=True) + ) + assert str(builder.params) == "select=%2A&order=country_name.desc%2Ciso.desc" + + def test_multiple_orders_on_foreign_table( + self, request_builder: AsyncRequestBuilder + ): + foreign_table = "cities" + builder = ( + request_builder.select() + .order("city_name", desc=True, foreign_table=foreign_table) + .order("id", desc=True, foreign_table=foreign_table) + ) + assert str(builder.params) == "select=%2A&cities.order=city_name.desc%2Cid.desc" + + +class TestRange: + def test_range_on_own_table(self, request_builder: AsyncRequestBuilder): + builder = request_builder.select("*").range(0, 1) + assert builder.params["select"] == "*" + assert builder.params["limit"] == "2" + assert builder.params["offset"] == "0" + + def test_range_on_foreign_table(self, request_builder: AsyncRequestBuilder): + foreign_table = "cities" + builder = request_builder.select("*").range(1, 2, foreign_table) + assert builder.params["select"] == "*" + assert builder.params[f"{foreign_table}.limit"] == "2" + assert builder.params[f"{foreign_table}.offset"] == "1" + + +@pytest.fixture +def csv_api_response() -> str: + return "id,name\n1,foo\n" + + +@pytest.fixture +def api_response_with_error() -> Dict[str, Any]: + return { + "message": "Route GET:/countries?select=%2A not found", + "error": "Not Found", + "statusCode": 404, + } + + +@pytest.fixture +def api_response() -> List[Dict[str, Any]]: + return [ + { + "id": 1, + "name": "Bonaire, Sint Eustatius and Saba", + "iso2": "BQ", + "iso3": "BES", + "local_name": None, + "continent": None, + }, + { + "id": 2, + "name": "Curaçao", + "iso2": "CW", + "iso3": "CUW", + "local_name": None, + "continent": None, + }, + ] + + +@pytest.fixture +def single_api_response() -> Dict[str, Any]: + return { + "id": 1, + "name": "Bonaire, Sint Eustatius and Saba", + "iso2": "BQ", + "iso3": "BES", + "local_name": None, + "continent": None, + } + + +@pytest.fixture +def content_range_header_with_count() -> str: + return "0-1/2" + + +@pytest.fixture +def content_range_header_without_count() -> str: + return "0-1" + + +@pytest.fixture +def prefer_header_with_count() -> str: + return "count=exact" + + +@pytest.fixture +def prefer_header_without_count() -> str: + return "random prefer header" + + +@pytest.fixture +def request_response_without_prefer_header() -> Response: + return Response( + status_code=200, request=Request(method="GET", url="http://example.com") + ) + + +@pytest.fixture +def request_response_with_prefer_header_without_count( + prefer_header_without_count: str, +) -> Response: + return Response( + status_code=200, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_without_count}, + ), + ) + + 
+@pytest.fixture +def request_response_with_prefer_header_with_count_and_content_range( + prefer_header_with_count: str, content_range_header_with_count: str +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_data( + prefer_header_with_count: str, + content_range_header_with_count: str, + api_response: List[Dict[str, Any]], +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + json=api_response, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_single_data( + prefer_header_with_count: str, + content_range_header_with_count: str, + single_api_response: Dict[str, Any], +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + json=single_api_response, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_csv_data(csv_api_response: str) -> Response: + return Response( + status_code=200, + text=csv_api_response, + request=Request(method="GET", url="http://example.com"), + ) + + +class TestApiResponse: + def test_response_raises_when_api_error( + self, api_response_with_error: Dict[str, Any] + ): + with pytest.raises(ValueError): + APIResponse(data=api_response_with_error) + + def test_parses_valid_response_only_data(self, api_response: List[Dict[str, Any]]): + result = APIResponse(data=api_response) + assert result.data == api_response + + def test_parses_valid_response_data_and_count( + self, api_response: List[Dict[str, Any]] + ): + count = len(api_response) + result = APIResponse(data=api_response, count=count) + assert result.data == api_response + assert result.count == count + + def test_get_count_from_content_range_header_with_count( + self, content_range_header_with_count: str + ): + assert ( + APIResponse._get_count_from_content_range_header( + content_range_header_with_count + ) + == 2 + ) + + def test_get_count_from_content_range_header_without_count( + self, content_range_header_without_count: str + ): + assert ( + APIResponse._get_count_from_content_range_header( + content_range_header_without_count + ) + is None + ) + + def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str): + assert APIResponse._is_count_in_prefer_header(prefer_header_with_count) + + def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str): + assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count) + + def test_get_count_from_http_request_response_without_prefer_header( + self, request_response_without_prefer_header: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_without_prefer_header + ) + is None + ) + + def test_get_count_from_http_request_response_with_prefer_header_without_count( + self, request_response_with_prefer_header_without_count: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_with_prefer_header_without_count + ) + is None + ) + + def test_get_count_from_http_request_response_with_count_and_content_range( + self, 
request_response_with_prefer_header_with_count_and_content_range: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_with_prefer_header_with_count_and_content_range + ) + == 2 + ) + + def test_from_http_request_response_constructor( + self, request_response_with_data: Response, api_response: List[Dict[str, Any]] + ): + result = APIResponse.from_http_request_response(request_response_with_data) + assert result.data == api_response + assert result.count == 2 + + def test_single_from_http_request_response_constructor( + self, + request_response_with_single_data: Response, + single_api_response: Dict[str, Any], + ): + result = SingleAPIResponse.from_http_request_response( + request_response_with_single_data + ) + assert isinstance(result.data, dict) + assert result.data == single_api_response + assert result.count == 2 + + def test_single_with_csv_data( + self, request_response_with_csv_data: Response, csv_api_response: str + ): + result = SingleAPIResponse.from_http_request_response( + request_response_with_csv_data + ) + assert isinstance(result.data, str) + assert result.data == csv_api_response diff --git a/src/postgrest/tests/_sync/__init__.py b/src/postgrest/tests/_sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/postgrest/tests/_sync/client.py b/src/postgrest/tests/_sync/client.py new file mode 100644 index 00000000..a4b2e132 --- /dev/null +++ b/src/postgrest/tests/_sync/client.py @@ -0,0 +1,28 @@ +from httpx import Client, HTTPTransport, Limits + +from postgrest import SyncPostgrestClient + +REST_URL = "http://127.0.0.1:3000" + + +def rest_client(): + return SyncPostgrestClient( + base_url=REST_URL, + ) + + +def rest_client_httpx(): + transport = HTTPTransport( + retries=4, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = Client(transport=transport, headers=headers) + return SyncPostgrestClient( + base_url=REST_URL, + http_client=http_client, + ) diff --git a/src/postgrest/tests/_sync/test_client.py b/src/postgrest/tests/_sync/test_client.py new file mode 100644 index 00000000..ba07fd89 --- /dev/null +++ b/src/postgrest/tests/_sync/test_client.py @@ -0,0 +1,185 @@ +from unittest.mock import patch + +import pytest +from httpx import ( + BasicAuth, + Client, + Headers, + HTTPTransport, + Limits, + Request, + Response, + Timeout, +) + +from postgrest import SyncPostgrestClient +from postgrest.exceptions import APIError + + +@pytest.fixture +def postgrest_client(): + with SyncPostgrestClient("https://example.com") as client: + yield client + + +class TestConstructor: + def test_simple(self, postgrest_client: SyncPostgrestClient): + session = postgrest_client.session + + assert session.base_url == "https://example.com" + headers = Headers( + { + "Accept": "application/json", + "Content-Type": "application/json", + "Accept-Profile": "public", + "Content-Profile": "public", + } + ) + assert session.headers.items() >= headers.items() + + def test_custom_headers(self): + with SyncPostgrestClient( + "https://example.com", schema="pub", headers={"Custom-Header": "value"} + ) as client: + session = client.session + + assert session.base_url == "https://example.com" + headers = Headers( + { + "Accept-Profile": "pub", + "Content-Profile": "pub", + "Custom-Header": "value", + } + ) + assert session.headers.items() >= headers.items() + + +class TestHttpxClientConstructor: + def test_custom_httpx_client(self): + 
transport = HTTPTransport( + retries=10, + limits=Limits( + max_connections=1, + max_keepalive_connections=1, + keepalive_expiry=None, + ), + ) + headers = {"x-user-agent": "my-app/0.0.1"} + http_client = Client(transport=transport, headers=headers) + with SyncPostgrestClient( + "https://example.com", http_client=http_client, timeout=20.0 + ) as client: + session = client.session + + assert session.base_url == "https://example.com" + assert session.timeout == Timeout( + timeout=5.0 + ) # Should be the default 5 since we use custom httpx client + assert session.headers.get("x-user-agent") == "my-app/0.0.1" + assert isinstance(session, Client) + + +class TestAuth: + def test_auth_token(self, postgrest_client: SyncPostgrestClient): + postgrest_client.auth("s3cr3t") + session = postgrest_client.session + + assert session.headers["Authorization"] == "Bearer s3cr3t" + + def test_auth_basic(self, postgrest_client: SyncPostgrestClient): + postgrest_client.auth(None, username="admin", password="s3cr3t") + session = postgrest_client.session + + assert isinstance(session.auth, BasicAuth) + assert session.auth._auth_header == BasicAuth("admin", "s3cr3t")._auth_header + + +def test_schema(postgrest_client: SyncPostgrestClient): + client = postgrest_client.schema("private") + session = client.session + subheaders = { + "accept-profile": "private", + "content-profile": "private", + } + + assert subheaders.items() < dict(session.headers).items() + + +def test_params_purged_after_execute(postgrest_client: SyncPostgrestClient): + assert len(postgrest_client.session.params) == 0 + with pytest.raises(APIError): + postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() + assert len(postgrest_client.session.params) == 0 + + +def test_response_status_code_outside_ok(postgrest_client: SyncPostgrestClient): + with patch( + "postgrest._sync.request_builder.SyncSelectRequestBuilder.execute", + side_effect=APIError( + { + "message": "mock error", + "code": "400", + "hint": "mock", + "details": "mock", + "errors": [{"code": 400}], + } + ), + ): + with pytest.raises(APIError) as exc_info: + ( + postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() + ) # gives status_code = 400 + exc_response = exc_info.value.json() + assert not exc_response.get("success") + assert isinstance(exc_response.get("errors"), list) + assert ( + isinstance(exc_response["errors"][0], dict) + and "code" in exc_response["errors"][0] + ) + assert exc_response["errors"][0].get("code") == 400 + + +def test_response_maybe_single(postgrest_client: SyncPostgrestClient): + with patch( + "postgrest._sync.request_builder.SyncSingleRequestBuilder.execute", + side_effect=APIError( + {"message": "mock error", "code": "400", "hint": "mock", "details": "mock"} + ), + ): + client = ( + postgrest_client.from_("test").select("a", "b").eq("c", "d").maybe_single() + ) + assert "Accept" in client.headers + assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" + with pytest.raises(APIError) as exc_info: + client.execute() + assert isinstance(exc_info, pytest.ExceptionInfo) + exc_response = exc_info.value.json() + assert isinstance(exc_response.get("message"), str) + assert "code" in exc_response and int(exc_response["code"]) == 204 + + +# https://github.com/supabase/postgrest-py/issues/595 + + +def test_response_client_invalid_response_but_valid_json( + postgrest_client: SyncPostgrestClient, +): + with patch( + "httpx._client.Client.request", + return_value=Response( + status_code=502, + text='"gateway error: 
Error: Network connection lost."', # quotes makes this text a valid non-dict JSON object + request=Request(method="GET", url="http://example.com"), + ), + ): + client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single() + assert "Accept" in client.headers + assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" + with pytest.raises(APIError) as exc_info: + client.execute() + assert isinstance(exc_info, pytest.ExceptionInfo) + exc_response = exc_info.value.json() + assert isinstance(exc_response.get("message"), str) + assert exc_response.get("message") == "JSON could not be generated" + assert "code" in exc_response and int(exc_response["code"]) == 502 diff --git a/src/postgrest/tests/_sync/test_filter_request_builder.py b/src/postgrest/tests/_sync/test_filter_request_builder.py new file mode 100644 index 00000000..ef28f210 --- /dev/null +++ b/src/postgrest/tests/_sync/test_filter_request_builder.py @@ -0,0 +1,243 @@ +import pytest +from httpx import Client, Headers, QueryParams + +from postgrest import SyncFilterRequestBuilder + + +@pytest.fixture +def filter_request_builder(): + with Client() as client: + yield SyncFilterRequestBuilder( + client, "/example_table", "GET", Headers(), QueryParams(), {} + ) + + +def test_constructor(filter_request_builder: SyncFilterRequestBuilder): + builder = filter_request_builder + + assert builder.path == "/example_table" + assert len(builder.headers) == 0 + assert len(builder.params) == 0 + assert builder.http_method == "GET" + assert builder.json is None + assert not builder.negate_next + + +def test_not_(filter_request_builder): + builder = filter_request_builder.not_ + + assert builder.negate_next + + +def test_filter(filter_request_builder): + builder = filter_request_builder.filter(":col.name", "eq", "val") + + assert builder.params['":col.name"'] == "eq.val" + + +@pytest.mark.parametrize( + "col_name, expected_query_prefix", + [ + ("col:name", "%22col%3Aname%22"), + ("col.name", "col.name"), + ], +) +def test_filter_special_characters( + filter_request_builder, col_name, expected_query_prefix +): + builder = filter_request_builder.filter(col_name, "eq", "val") + + assert str(builder.params) == f"{expected_query_prefix}=eq.val" + + +def test_multivalued_param(filter_request_builder): + builder = filter_request_builder.lte("x", "a").gte("x", "b") + + assert str(builder.params) == "x=lte.a&x=gte.b" + + +def test_match(filter_request_builder): + builder = filter_request_builder.match({"id": "1", "done": "false"}) + assert str(builder.params) == "id=eq.1&done=eq.false" + + +def test_equals(filter_request_builder): + builder = filter_request_builder.eq("x", "a") + + assert str(builder.params) == "x=eq.a" + + +def test_not_equal(filter_request_builder): + builder = filter_request_builder.neq("x", "a") + + assert str(builder.params) == "x=neq.a" + + +def test_greater_than(filter_request_builder): + builder = filter_request_builder.gt("x", "a") + + assert str(builder.params) == "x=gt.a" + + +def test_greater_than_or_equals_to(filter_request_builder): + builder = filter_request_builder.gte("x", "a") + + assert str(builder.params) == "x=gte.a" + + +def test_contains(filter_request_builder): + builder = filter_request_builder.contains("x", "a") + + assert str(builder.params) == "x=cs.a" + + +def test_contains_dictionary(filter_request_builder): + builder = filter_request_builder.contains("x", {"a": "b"}) + + # {"a":"b"} + assert str(builder.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D" + + +def 
test_contains_any_item(filter_request_builder): + builder = filter_request_builder.contains("x", ["a", "b"]) + + # {a,b} + assert str(builder.params) == "x=cs.%7Ba%2Cb%7D" + + +def test_contains_in_list(filter_request_builder): + builder = filter_request_builder.contains("x", '[{"a": "b"}]') + + # [{"a":+"b"}] (the + represents the space) + assert str(builder.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D" + + +def test_contained_by_mixed_items(filter_request_builder): + builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D" + + +def test_range_greater_than(filter_request_builder): + builder = filter_request_builder.range_gt( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_greater_than_or_equal_to(filter_request_builder): + builder = filter_request_builder.range_gte( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_less_than(filter_request_builder): + builder = filter_request_builder.range_lt( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_less_than_or_equal_to(filter_request_builder): + builder = filter_request_builder.range_lte( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_range_adjacent(filter_request_builder): + builder = filter_request_builder.range_adjacent( + "x", ["2000-01-02 08:30", "2000-01-02 09:30"] + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" + + +def test_overlaps(filter_request_builder): + builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"]) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D" + + +def test_overlaps_with_timestamp_range(filter_request_builder): + builder = filter_request_builder.overlaps( + "x", "[2000-01-01 12:45, 2000-01-01 13:15)" + ) + + # {a,["b",+"c"]} + assert str(builder.params) == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29" + + +def test_like(filter_request_builder): + builder = filter_request_builder.like("x", "%a%") + + assert str(builder.params) == "x=like.%25a%25" + + +def test_ilike(filter_request_builder): + builder = filter_request_builder.ilike("x", "%a%") + + assert str(builder.params) == "x=ilike.%25a%25" + + +def test_like_all_of(filter_request_builder): + builder = filter_request_builder.like_all_of("x", "A*,*b") + + assert str(builder.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D" + + +def test_like_any_of(filter_request_builder): + builder = filter_request_builder.like_any_of("x", "a*,*b") + + assert str(builder.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D" + + +def test_ilike_all_of(filter_request_builder): + builder = filter_request_builder.ilike_all_of("x", "A*,*b") + + assert str(builder.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D" + + +def test_ilike_any_of(filter_request_builder): + builder = filter_request_builder.ilike_any_of("x", "A*,*b") + + assert str(builder.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D" + + +def test_is_(filter_request_builder): + builder 
= filter_request_builder.is_("x", "a") + + assert str(builder.params) == "x=is.a" + + +def test_in_(filter_request_builder): + builder = filter_request_builder.in_("x", ["a", "b"]) + + assert str(builder.params) == "x=in.%28a%2Cb%29" + + +def test_or_(filter_request_builder): + builder = filter_request_builder.or_("x.eq.1") + + assert str(builder.params) == "or=%28x.eq.1%29" + + +def test_or_in_contain(filter_request_builder): + builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") + + assert ( + str(builder.params) + == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29" + ) diff --git a/src/postgrest/tests/_sync/test_filter_request_builder_integration.py b/src/postgrest/tests/_sync/test_filter_request_builder_integration.py new file mode 100644 index 00000000..896bd9ee --- /dev/null +++ b/src/postgrest/tests/_sync/test_filter_request_builder_integration.py @@ -0,0 +1,579 @@ +from postgrest import CountMethod + +from .client import rest_client, rest_client_httpx + + +def test_multivalued_param_httpx(): + res = ( + rest_client_httpx() + .from_("countries") + .select("country_name, iso", count=CountMethod.exact) + .lte("numcode", 8) + .gte("numcode", 4) + .execute() + ) + + assert res.count == 2 + assert res.data == [ + {"country_name": "AFGHANISTAN", "iso": "AF"}, + {"country_name": "ALBANIA", "iso": "AL"}, + ] + + +def test_multivalued_param(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso", count=CountMethod.exact) + .lte("numcode", 8) + .gte("numcode", 4) + .execute() + ) + + assert res.count == 2 + assert res.data == [ + {"country_name": "AFGHANISTAN", "iso": "AF"}, + {"country_name": "ALBANIA", "iso": "AL"}, + ] + + +def test_match(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .match({"numcode": 8, "nicename": "Albania"}) + .single() + .execute() + ) + + assert res.data == {"country_name": "ALBANIA", "iso": "AL"} + + +def test_equals(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .eq("nicename", "Albania") + .single() + .execute() + ) + + assert res.data == {"country_name": "ALBANIA", "iso": "AL"} + + +def test_not_equal(): + res = ( + rest_client() + .from_("users") + .select("id, name") + .neq("name", "Jane") + .single() + .execute() + ) + + assert res.data == {"id": 1, "name": "Michael"} + + +def test_greater_than(): + res = rest_client().from_("users").select("id, name").gt("id", 1).single().execute() + + assert res.data == {"id": 2, "name": "Jane"} + + +def test_greater_than_or_equals_to(): + res = rest_client().from_("users").select("id, name").gte("id", 1).execute() + + assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}] + + +def test_contains_dictionary(): + res = ( + rest_client() + .from_("users") + .select("name") + .contains("address", {"postcode": 90210}) + .single() + .execute() + ) + + assert res.data == {"name": "Michael"} + + +def test_contains_any_item(): + res = ( + rest_client() + .from_("issues") + .select("title") + .contains("tags", ["is:open", "priority:low"]) + .execute() + ) + + assert res.data == [{"title": "Cache invalidation is not working"}] + + +def test_contains_on_range(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)") + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +def test_contained_by_mixed_items(): + res = ( + rest_client() + .from_("reservations") + 
.select("id, room_name") + .contained_by("during", "[2000-01-01 00:00, 2000-01-01 23:59)") + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +def test_range_greater_than(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"]) + .execute() + ) + + assert res.data == [{"id": 2, "room_name": "Topaz"}] + + +def test_range_greater_than_or_equal_to(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .range_gte("during", ["2000-01-02 08:30", "2000-01-02 09:30"]) + .execute() + ) + + assert res.data == [{"id": 2, "room_name": "Topaz"}] + + +def test_range_less_than(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +def test_range_less_than_or_equal_to(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +def test_range_adjacent(): + res = ( + rest_client() + .from_("reservations") + .select("id, room_name") + .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"]) + .execute() + ) + + assert res.data == [{"id": 1, "room_name": "Emerald"}] + + +def test_overlaps(): + res = ( + rest_client() + .from_("issues") + .select("title") + .overlaps("tags", ["is:closed", "severity:high"]) + .execute() + ) + + assert res.data == [ + {"title": "Cache invalidation is not working"}, + {"title": "Add alias to filters"}, + ] + + +def test_overlaps_with_timestamp_range(): + res = ( + rest_client() + .from_("reservations") + .select("room_name") + .overlaps("during", "[2000-01-01 12:45, 2000-01-01 13:15)") + .execute() + ) + + assert res.data == [ + {"room_name": "Emerald"}, + ] + + +def test_like(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .like("nicename", "%Alba%") + .execute() + ) + + assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] + + +def test_ilike(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .ilike("nicename", "%alban%") + .execute() + ) + + assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] + + +def test_like_all_of(): + res = ( + rest_client() + .from_("countries") + .select("nicename, iso") + .like_all_of("nicename", "A*,*n") + .execute() + ) + + assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] + + +def test_like_any_of(): + res = ( + rest_client() + .from_("countries") + .select("nicename, iso") + .like_any_of("nicename", "Al*,*ia") + .execute() + ) + + assert res.data == [ + {"iso": "AL", "nicename": "Albania"}, + {"iso": "DZ", "nicename": "Algeria"}, + ] + + +def test_ilike_all_of(): + res = ( + rest_client() + .from_("countries") + .select("nicename, iso") + .ilike_all_of("nicename", "a*,*n") + .execute() + ) + + assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] + + +def test_ilike_any_of(): + res = ( + rest_client() + .from_("countries") + .select("nicename, iso") + .ilike_any_of("nicename", "al*,*ia") + .execute() + ) + + assert res.data == [ + {"iso": "AL", "nicename": "Albania"}, + {"iso": "DZ", "nicename": "Algeria"}, + ] + + +def test_is_(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .is_("numcode", "null") + .limit(1) + 
.order("nicename") + .execute() + ) + + assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}] + + +def test_is_not(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .not_.is_("numcode", "null") + .limit(1) + .order("nicename") + .execute() + ) + + assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}] + + +def test_in_(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .in_("nicename", ["Albania", "Algeria"]) + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "ALGERIA", "iso": "DZ"}, + ] + + +def test_or_(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .or_("iso.eq.DZ,nicename.eq.Albania") + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "ALGERIA", "iso": "DZ"}, + ] + + +def test_or_with_and(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)") + .execute() + ) + + assert res.data == [ + {"country_name": "ALBANIA", "iso": "AL"}, + {"country_name": "TRINIDAD AND TOBAGO", "iso": "TT"}, + ] + + +def test_or_in(): + res = ( + rest_client() + .from_("issues") + .select("id, title") + .or_("id.in.(1,4),tags.cs.{is:open,priority:high}") + .execute() + ) + + assert res.data == [ + {"id": 1, "title": "Cache invalidation is not working"}, + {"id": 3, "title": "Add missing postgrest filters"}, + {"id": 4, "title": "Add alias to filters"}, + ] + + +def test_or_on_reference_table(): + res = ( + rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .execute() + ) + + assert res.data == [ + { + "country_name": "UNITED KINGDOM", + "cities": [ + {"name": "London"}, + {"name": "Manchester"}, + {"name": "Liverpool"}, + {"name": "Bristol"}, + ], + }, + ] + + +def test_explain_json(): + res = ( + rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .explain(format="json", analyze=True) + .execute() + ) + assert res.data[0]["Plan"]["Node Type"] == "Aggregate" + + +def test_csv(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .in_("nicename", ["Albania", "Algeria"]) + .csv() + .execute() + ) + assert "ALBANIA,AL\nALGERIA,DZ" in res.data + + +def test_explain_text(): + res = ( + rest_client() + .from_("countries") + .select("country_name, cities!inner(name)") + .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") + .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True) + .execute() + ) + assert ( + "((cities_1.country_id = countries.id) AND ((cities_1.country_id = '10'::bigint) OR (cities_1.name = 'Paris'::text)))" + in res + ) + + +def test_rpc_with_single(): + res = ( + rest_client() + .rpc("list_stored_countries", {}) + .select("nicename, country_name, iso") + .eq("nicename", "Albania") + .single() + .execute() + ) + + assert res.data == {"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"} + + +def test_rpc_with_limit(): + res = ( + rest_client() + .rpc("list_stored_countries", {}) + .select("nicename, country_name, iso") + .eq("nicename", "Albania") + .limit(1) + .execute() + ) + + assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}] + + +def test_rpc_with_range(): + res = ( + rest_client() 
+ .rpc("list_stored_countries", {}) + .select("nicename, iso") + .range(1, 2) + .execute() + ) + + assert res.data == [ + {"nicename": "Albania", "iso": "AL"}, + {"nicename": "Algeria", "iso": "DZ"}, + ] + + +def test_rpc_post_with_args(): + res = ( + rest_client() + .rpc("search_countries_by_name", {"search_name": "Alban"}) + .select("nicename, iso") + .execute() + ) + assert res.data == [{"nicename": "Albania", "iso": "AL"}] + + +def test_rpc_get_with_args(): + res = ( + rest_client() + .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True) + .select("nicename, iso") + .execute() + ) + assert res.data == [{"nicename": "Algeria", "iso": "DZ"}] + + +def test_rpc_get_with_count(): + res = ( + rest_client() + .rpc( + "search_countries_by_name", + {"search_name": "Al"}, + get=True, + count=CountMethod.exact, + ) + .select("nicename") + .execute() + ) + assert res.count == 2 + assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] + + +def test_rpc_head_count(): + res = ( + rest_client() + .rpc( + "search_countries_by_name", + {"search_name": "Al"}, + head=True, + count=CountMethod.exact, + ) + .execute() + ) + + assert res.count == 2 + assert res.data == [] + + +def test_order(): + res = ( + rest_client() + .from_("countries") + .select("country_name, iso") + .limit(3) + .order("nicename", desc=True) + .execute() + ) + + assert res.data == [ + {"country_name": "ZIMBABWE", "iso": "ZW"}, + {"country_name": "UNITED STATES", "iso": "US"}, + {"country_name": "UNITED KINGDOM", "iso": "GB"}, + ] + + +def test_order_on_foreign_table(): + res = ( + rest_client() + .from_("orchestral_sections") + .select("name, instruments(name)") + .order("name", desc=True, foreign_table="instruments") + .execute() + ) + + assert res.data == [ + {"name": "strings", "instruments": [{"name": "violin"}, {"name": "harp"}]}, + {"name": "woodwinds", "instruments": []}, + ] diff --git a/src/postgrest/tests/_sync/test_query_request_builder.py b/src/postgrest/tests/_sync/test_query_request_builder.py new file mode 100644 index 00000000..81b3a3ef --- /dev/null +++ b/src/postgrest/tests/_sync/test_query_request_builder.py @@ -0,0 +1,22 @@ +import pytest +from httpx import Client, Headers, QueryParams + +from postgrest import SyncQueryRequestBuilder + + +@pytest.fixture +def query_request_builder(): + with Client() as client: + yield SyncQueryRequestBuilder( + client, "/example_table", "GET", Headers(), QueryParams(), {} + ) + + +def test_constructor(query_request_builder: SyncQueryRequestBuilder): + builder = query_request_builder + + assert builder.path == "/example_table" + assert len(builder.headers) == 0 + assert len(builder.params) == 0 + assert builder.http_method == "GET" + assert builder.json is None diff --git a/src/postgrest/tests/_sync/test_request_builder.py b/src/postgrest/tests/_sync/test_request_builder.py new file mode 100644 index 00000000..6f89d52e --- /dev/null +++ b/src/postgrest/tests/_sync/test_request_builder.py @@ -0,0 +1,492 @@ +from typing import Any, Dict, List + +import pytest +from httpx import Client, Request, Response + +from postgrest import SyncRequestBuilder, SyncSingleRequestBuilder +from postgrest.base_request_builder import APIResponse, SingleAPIResponse +from postgrest.types import CountMethod + + +@pytest.fixture +def request_builder(): + with Client() as client: + yield SyncRequestBuilder(client, "/example_table") + + +def test_constructor(request_builder): + assert request_builder.path == "/example_table" + + +class TestSelect: + def test_select(self, 
request_builder: SyncRequestBuilder): + builder = request_builder.select("col1", "col2") + + assert builder.params["select"] == "col1,col2" + assert builder.headers.get("prefer") is None + assert builder.http_method == "GET" + assert builder.json is None + + def test_select_with_count(self, request_builder: SyncRequestBuilder): + builder = request_builder.select(count=CountMethod.exact) + + assert builder.params["select"] == "*" + assert builder.headers["prefer"] == "count=exact" + assert builder.http_method == "GET" + assert builder.json is None + + def test_select_with_head(self, request_builder: SyncRequestBuilder): + builder = request_builder.select("col1", "col2", head=True) + + assert builder.params.get("select") == "col1,col2" + assert builder.headers.get("prefer") is None + assert builder.http_method == "HEAD" + assert builder.json is None + + def test_select_as_csv(self, request_builder: SyncRequestBuilder): + builder = request_builder.select("*").csv() + + assert builder.headers["Accept"] == "text/csv" + assert isinstance(builder, SyncSingleRequestBuilder) + + +class TestInsert: + def test_insert(self, request_builder: SyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_insert_with_count(self, request_builder: SyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_insert_with_upsert(self, request_builder: SyncRequestBuilder): + builder = request_builder.insert({"key1": "val1"}, upsert=True) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_upsert_with_default_single(self, request_builder: SyncRequestBuilder): + builder = request_builder.upsert([{"key1": "val1"}], default_to_null=False) + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1"}] + assert builder.params.get("columns") == '"key1"' + + def test_bulk_insert_using_default(self, request_builder: SyncRequestBuilder): + builder = request_builder.insert( + [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False + ) + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] + assert set(builder.params["columns"].split(",")) == set( + '"key1","key2","key3"'.split(",") + ) + + def test_upsert(self, request_builder: SyncRequestBuilder): + builder = request_builder.upsert({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + ] + assert builder.http_method == "POST" + assert builder.json == {"key1": "val1"} + + def test_bulk_upsert_with_default(self, request_builder: SyncRequestBuilder): + builder = request_builder.upsert( + [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False + ) + assert 
builder.headers.get_list("prefer", True) == [ + "return=representation", + "resolution=merge-duplicates", + "missing=default", + ] + assert builder.http_method == "POST" + assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] + assert set(builder.params["columns"].split(",")) == set( + '"key1","key2","key3"'.split(",") + ) + + +class TestUpdate: + def test_update(self, request_builder: SyncRequestBuilder): + builder = request_builder.update({"key1": "val1"}) + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "PATCH" + assert builder.json == {"key1": "val1"} + + def test_update_with_count(self, request_builder: SyncRequestBuilder): + builder = request_builder.update({"key1": "val1"}, count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "PATCH" + assert builder.json == {"key1": "val1"} + + +class TestDelete: + def test_delete(self, request_builder: SyncRequestBuilder): + builder = request_builder.delete() + + assert builder.headers.get_list("prefer", True) == ["return=representation"] + assert builder.http_method == "DELETE" + assert builder.json == {} + + def test_delete_with_count(self, request_builder: SyncRequestBuilder): + builder = request_builder.delete(count=CountMethod.exact) + + assert builder.headers.get_list("prefer", True) == [ + "return=representation", + "count=exact", + ] + assert builder.http_method == "DELETE" + assert builder.json == {} + + +class TestTextSearch: + def test_text_search(self, request_builder: SyncRequestBuilder): + builder = request_builder.select("catchphrase").text_search( + "catchphrase", + "'fat' & 'cat'", + { + "type": "plain", + "config": "english", + }, + ) + assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str( + builder.params + ) + + +class TestExplain: + def test_explain_plain(self, request_builder: SyncRequestBuilder): + builder = request_builder.select("*").explain() + assert builder.params["select"] == "*" + assert "application/vnd.pgrst.plan" in str(builder.headers.get("accept")) + + def test_explain_options(self, request_builder: SyncRequestBuilder): + builder = request_builder.select("*").explain( + format="json", analyze=True, verbose=True, buffers=True, wal=True + ) + assert builder.params["select"] == "*" + assert "application/vnd.pgrst.plan+json;" in str(builder.headers.get("accept")) + assert "options=analyze|verbose|buffers|wal" in str( + builder.headers.get("accept") + ) + + +class TestOrder: + def test_order(self, request_builder: SyncRequestBuilder): + builder = request_builder.select().order("country_name", desc=True) + assert str(builder.params) == "select=%2A&order=country_name.desc" + + def test_multiple_orders(self, request_builder: SyncRequestBuilder): + builder = ( + request_builder.select() + .order("country_name", desc=True) + .order("iso", desc=True) + ) + assert str(builder.params) == "select=%2A&order=country_name.desc%2Ciso.desc" + + def test_multiple_orders_on_foreign_table( + self, request_builder: SyncRequestBuilder + ): + foreign_table = "cities" + builder = ( + request_builder.select() + .order("city_name", desc=True, foreign_table=foreign_table) + .order("id", desc=True, foreign_table=foreign_table) + ) + assert str(builder.params) == "select=%2A&cities.order=city_name.desc%2Cid.desc" + + +class TestRange: + def test_range_on_own_table(self, request_builder: SyncRequestBuilder): + builder = 
request_builder.select("*").range(0, 1) + assert builder.params["select"] == "*" + assert builder.params["limit"] == "2" + assert builder.params["offset"] == "0" + + def test_range_on_foreign_table(self, request_builder: SyncRequestBuilder): + foreign_table = "cities" + builder = request_builder.select("*").range(1, 2, foreign_table) + assert builder.params["select"] == "*" + assert builder.params[f"{foreign_table}.limit"] == "2" + assert builder.params[f"{foreign_table}.offset"] == "1" + + +@pytest.fixture +def csv_api_response() -> str: + return "id,name\n1,foo\n" + + +@pytest.fixture +def api_response_with_error() -> Dict[str, Any]: + return { + "message": "Route GET:/countries?select=%2A not found", + "error": "Not Found", + "statusCode": 404, + } + + +@pytest.fixture +def api_response() -> List[Dict[str, Any]]: + return [ + { + "id": 1, + "name": "Bonaire, Sint Eustatius and Saba", + "iso2": "BQ", + "iso3": "BES", + "local_name": None, + "continent": None, + }, + { + "id": 2, + "name": "Curaçao", + "iso2": "CW", + "iso3": "CUW", + "local_name": None, + "continent": None, + }, + ] + + +@pytest.fixture +def single_api_response() -> Dict[str, Any]: + return { + "id": 1, + "name": "Bonaire, Sint Eustatius and Saba", + "iso2": "BQ", + "iso3": "BES", + "local_name": None, + "continent": None, + } + + +@pytest.fixture +def content_range_header_with_count() -> str: + return "0-1/2" + + +@pytest.fixture +def content_range_header_without_count() -> str: + return "0-1" + + +@pytest.fixture +def prefer_header_with_count() -> str: + return "count=exact" + + +@pytest.fixture +def prefer_header_without_count() -> str: + return "random prefer header" + + +@pytest.fixture +def request_response_without_prefer_header() -> Response: + return Response( + status_code=200, request=Request(method="GET", url="http://example.com") + ) + + +@pytest.fixture +def request_response_with_prefer_header_without_count( + prefer_header_without_count: str, +) -> Response: + return Response( + status_code=200, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_without_count}, + ), + ) + + +@pytest.fixture +def request_response_with_prefer_header_with_count_and_content_range( + prefer_header_with_count: str, content_range_header_with_count: str +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_data( + prefer_header_with_count: str, + content_range_header_with_count: str, + api_response: List[Dict[str, Any]], +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + json=api_response, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_single_data( + prefer_header_with_count: str, + content_range_header_with_count: str, + single_api_response: Dict[str, Any], +) -> Response: + return Response( + status_code=200, + headers={"content-range": content_range_header_with_count}, + json=single_api_response, + request=Request( + method="GET", + url="http://example.com", + headers={"prefer": prefer_header_with_count}, + ), + ) + + +@pytest.fixture +def request_response_with_csv_data(csv_api_response: str) -> Response: + return Response( + status_code=200, + text=csv_api_response, + 
request=Request(method="GET", url="http://example.com"), + ) + + +class TestApiResponse: + def test_response_raises_when_api_error( + self, api_response_with_error: Dict[str, Any] + ): + with pytest.raises(ValueError): + APIResponse(data=api_response_with_error) + + def test_parses_valid_response_only_data(self, api_response: List[Dict[str, Any]]): + result = APIResponse(data=api_response) + assert result.data == api_response + + def test_parses_valid_response_data_and_count( + self, api_response: List[Dict[str, Any]] + ): + count = len(api_response) + result = APIResponse(data=api_response, count=count) + assert result.data == api_response + assert result.count == count + + def test_get_count_from_content_range_header_with_count( + self, content_range_header_with_count: str + ): + assert ( + APIResponse._get_count_from_content_range_header( + content_range_header_with_count + ) + == 2 + ) + + def test_get_count_from_content_range_header_without_count( + self, content_range_header_without_count: str + ): + assert ( + APIResponse._get_count_from_content_range_header( + content_range_header_without_count + ) + is None + ) + + def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str): + assert APIResponse._is_count_in_prefer_header(prefer_header_with_count) + + def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str): + assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count) + + def test_get_count_from_http_request_response_without_prefer_header( + self, request_response_without_prefer_header: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_without_prefer_header + ) + is None + ) + + def test_get_count_from_http_request_response_with_prefer_header_without_count( + self, request_response_with_prefer_header_without_count: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_with_prefer_header_without_count + ) + is None + ) + + def test_get_count_from_http_request_response_with_count_and_content_range( + self, request_response_with_prefer_header_with_count_and_content_range: Response + ): + assert ( + APIResponse._get_count_from_http_request_response( + request_response_with_prefer_header_with_count_and_content_range + ) + == 2 + ) + + def test_from_http_request_response_constructor( + self, request_response_with_data: Response, api_response: List[Dict[str, Any]] + ): + result = APIResponse.from_http_request_response(request_response_with_data) + assert result.data == api_response + assert result.count == 2 + + def test_single_from_http_request_response_constructor( + self, + request_response_with_single_data: Response, + single_api_response: Dict[str, Any], + ): + result = SingleAPIResponse.from_http_request_response( + request_response_with_single_data + ) + assert isinstance(result.data, dict) + assert result.data == single_api_response + assert result.count == 2 + + def test_single_with_csv_data( + self, request_response_with_csv_data: Response, csv_api_response: str + ): + result = SingleAPIResponse.from_http_request_response( + request_response_with_csv_data + ) + assert isinstance(result.data, str) + assert result.data == csv_api_response diff --git a/src/postgrest/tests/test_utils.py b/src/postgrest/tests/test_utils.py new file mode 100644 index 00000000..66772925 --- /dev/null +++ b/src/postgrest/tests/test_utils.py @@ -0,0 +1,29 @@ +import pytest +from deprecation import fail_if_not_removed + +from postgrest.utils import 
SyncClient, sanitize_param + + +@fail_if_not_removed +def test_sync_client(): + client = SyncClient() + # Verify that aclose method exists and calls close + assert hasattr(client, "aclose") + assert callable(client.aclose) + client.aclose() # Should not raise any exception + + +@pytest.mark.parametrize( + "value, expected", + [ + ("param,name", '"param,name"'), + ("param:name", '"param:name"'), + ("param(name", '"param(name"'), + ("param)name", '"param)name"'), + ("param,name", '"param,name"'), + ("table.column", "table.column"), + ("table_column", "table_column"), + ], +) +def test_sanitize_params(value, expected): + assert sanitize_param(value) == expected diff --git a/src/supabase/pyproject.toml b/src/supabase/pyproject.toml index 6d290e2b..9247d76b 100644 --- a/src/supabase/pyproject.toml +++ b/src/supabase/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ "supabase_functions", "storage3", "supabase_auth", - "postgrest == 1.1.1", + "postgrest", "httpx >=0.26,<0.29", ] diff --git a/test.ps1 b/test.ps1 deleted file mode 100644 index e2dd982b..00000000 --- a/test.ps1 +++ /dev/null @@ -1,5 +0,0 @@ -powershell -Command { - poetry install; - poetry run pytest --cov=./ --cov-report=xml --cov-report=html -vv; - poetry run pre-commit run --all-files; -} diff --git a/uv.lock b/uv.lock index 5029702a..28776b91 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,7 @@ resolution-markers = [ [manifest] members = [ + "postgrest", "realtime", "storage3", "supabase", @@ -17,6 +18,18 @@ members = [ "supabase-functions", ] +[[package]] +name = "accessible-pygments" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/c1/bbac6a50d02774f91572938964c582fff4270eee73ab822a4aeea4d8b11b/accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872", size = 1377899, upload-time = "2024-05-10T11:23:10.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/3f/95338030883d8c8b91223b4e21744b04d11b161a3ef117295d8241f50ab4/accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7", size = 1395903, upload-time = "2024-05-10T11:23:08.421Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -824,7 +837,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -963,6 +976,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, ] +[[package]] +name = "furo" +version = "2025.7.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "accessible-pygments" }, + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx", version = "7.4.7", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/69/312cd100fa45ddaea5a588334d2defa331ff427bcb61f5fe2ae61bdc3762/furo-2025.7.19.tar.gz", hash = "sha256:4164b2cafcf4023a59bb3c594e935e2516f6b9d35e9a5ea83d8f6b43808fe91f", size = 1662054, upload-time = "2025-07-19T10:52:09.754Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/34/2b07b72bee02a63241d654f5d8af87a2de977c59638eec41ca356ab915cd/furo-2025.7.19-py3-none-any.whl", hash = "sha256:bdea869822dfd2b494ea84c0973937e35d1575af088b6721a29c7f7878adc9e3", size = 342175, upload-time = "2025-07-19T10:52:02.399Z" }, +] + [[package]] name = "future-fstrings" version = "1.2.0" @@ -1581,16 +1612,70 @@ wheels = [ [[package]] name = "postgrest" version = "1.1.1" -source = { registry = "https://pypi.org/simple" } +source = { editable = "src/postgrest" } dependencies = [ { name = "deprecation" }, { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, { name = "strenum", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/3e/1b50568e1f5db0bdced4a82c7887e37326585faef7ca43ead86849cb4861/postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322", size = 15431, upload-time = "2025-06-23T19:21:34.742Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/71/188a50ea64c17f73ff4df5196ec1553a8f1723421eb2d1069c73bab47d78/postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a", size = 22366, upload-time = "2025-06-23T19:21:33.637Z" }, + +[package.dev-dependencies] +dev = [ + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-depends" }, + { name = "ruff" }, +] +docs = [ + { name = "furo" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +lints = [ + { name = "pre-commit" }, + { name = "ruff" }, +] +test = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-depends" }, +] + +[package.metadata] +requires-dist = [ + { name = "deprecation", specifier = ">=2.1.0" }, + { name = "httpx", extras = ["http2"], specifier = ">=0.26,<0.29" }, + { name = "pydantic", specifier = ">=1.9,<3.0" }, + { name = "strenum", marker = "python_full_version < '3.11'", specifier = ">=0.4.9" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-asyncio", specifier = ">=1.0.0" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-depends", specifier = ">=1.0.1" }, + { name = "ruff", specifier = ">=0.12.1" }, +] +docs = [ + { name = "furo", specifier = ">=2023.9.10,<2026.0.0" }, + { name = "sphinx", specifier = 
">=7.1.2" }, +] +lints = [ + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "ruff", specifier = ">=0.12.1" }, +] +test = [ + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-asyncio", specifier = ">=1.0.0" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-depends", specifier = ">=1.0.1" }, ] [[package]] @@ -2015,7 +2100,6 @@ dependencies = [ { name = "black" }, { name = "docstring-to-markdown" }, { name = "importlib-metadata", version = "8.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "importlib-metadata", version = "8.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_version < '0'" }, { name = "jedi" }, { name = "pluggy" }, { name = "python-lsp-jsonrpc" }, @@ -2477,6 +2561,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/c7/8aab362e86cbf887e58be749a78d20ad743e1eb2c73c2b13d4761f39a104/sphinx_autodoc_typehints-3.2.0-py3-none-any.whl", hash = "sha256:884b39be23b1d884dcc825d4680c9c6357a476936e3b381a67ae80091984eb49", size = 20563, upload-time = "2025-04-25T16:53:24.492Z" }, ] +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736, upload-time = "2023-07-08T18:40:54.166Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496, upload-time = "2023-07-08T18:40:52.659Z" }, +] + [[package]] name = "sphinx-jinja2-compat" version = "0.4.1" @@ -2789,7 +2887,7 @@ tests = [ [package.metadata] requires-dist = [ { name = "httpx", specifier = ">=0.26,<0.29" }, - { name = "postgrest", specifier = "==1.1.1" }, + { name = "postgrest", editable = "src/postgrest" }, { name = "realtime", editable = "src/realtime" }, { name = "storage3", editable = "src/storage" }, { name = "supabase-auth", editable = "src/auth" }, From b08af16908417c86ec1fd989555b78df1b035039 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 28 Aug 2025 11:49:00 -0300 Subject: [PATCH 02/25] chore: add postgrest to release-please --- .release-please-manifest.json | 1 + release-please-config.json | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 72c2693d..2021d7ba 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -3,5 +3,6 @@ "src/realtime": "2.7.0", "src/functions": "0.10.1", "src/storage": "0.12.1", + "src/postgrest": "1.1.1", "src/auth": "2.12.3" } diff --git a/release-please-config.json b/release-please-config.json index 1e88490d..e0fd6701 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -17,6 +17,10 @@ 
"changelog-path": "src/storage/CHANGELOG.md", "release-type": "python" }, + "src/storage": { + "changelog-path": "src/postgrest/CHANGELOG.md", + "release-type": "python" + }, "src/supabase": { "changelog-path": "src/supabase/CHANGELOG.md", "release-type": "python" From 40da11e3c3e1de312d488d07a269889fea517720 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 28 Aug 2025 11:51:28 -0300 Subject: [PATCH 03/25] chore: modify READMEs --- README.md | 3 ++- src/postgrest/README.md | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 55a1f7c7..90eef06b 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,12 @@ # `supabase-py` -Python monorepo for all [Supabase](https://supabase.com) libraries. This is a work in progress, and currently these are the ones contained in this repository: +Python monorepo for all [Supabase](https://supabase.com) libraries. - [supabase](src/supabase/README.md) - [realtime](src/realtime/README.md) - [supabase_functions](src/functions/README.md) - [storage3](src/storage/README.md) +- [postgrest](src/postgrest/README.md) - [supabase_auth](src/auth/README.md) Relevant links: diff --git a/src/postgrest/README.md b/src/postgrest/README.md index 53e264d7..5d0cd923 100644 --- a/src/postgrest/README.md +++ b/src/postgrest/README.md @@ -22,10 +22,10 @@ Once Docker Compose started, PostgREST is accessible at . ### Instructions -#### With Poetry (recommended) +#### With uv (recommended) ```sh -poetry add postgrest +uv add postgrest ``` #### With Pip From 70f0c160d4970659bfa5c69e5100616bce610183 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 28 Aug 2025 13:01:56 -0300 Subject: [PATCH 04/25] chore: improve `make help` message --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index c417286a..370dbb98 100644 --- a/Makefile +++ b/Makefile @@ -27,10 +27,13 @@ publish: $(call FORALL_PKGS,build) # not all packages have infra, so just manually instantiate the ones that do for now start-infra: realtime.start-infra storage.start-infra auth.start-infra postgrest.start-infra help:: - @echo " start-infra -- Start all containers necessary for tests. NOTE: it is not necessary to this before running CI tests, they start the infra by themselves" + @echo " start-infra -- Start all containers necessary for tests." + @echo " NOTE: it is not necessary to this command before running CI tests" + stop-infra: realtime.stop-infra storage.stop-infra auth.stop-infra postgrest.stop-infra help:: - @echo " stop-infra -- Stop all infra used by tests. NOTE: tests do leave their infra running, so run this to ensure all containers are stopped" + @echo " stop-infra -- Stop all infra used by tests." 
+ @echo " NOTE: run this command to ensure all containers are stopped after tests" realtime.%: From f3d1e2e8174f3db85f70eb0088a75abaa9f6e19a Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 28 Aug 2025 13:05:47 -0300 Subject: [PATCH 05/25] fix: fix reference in `make postgrest.build` --- src/postgrest/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/postgrest/Makefile b/src/postgrest/Makefile index 3f10eace..2f4e2334 100644 --- a/src/postgrest/Makefile +++ b/src/postgrest/Makefile @@ -34,4 +34,4 @@ build_sync: unasync sed -i 's/self\.session\.aclose/self\.session\.close/g' src/postgrest/_sync/client.py build: - uv build --package supabase + uv build --package postgrest From edec3f5de9341dd574e0eea21d5de1483d99d7d1 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 14:26:54 -0300 Subject: [PATCH 06/25] chore: improve the README to reflect the current setup --- README.md | 50 +++++++++++++++++++++++++++++++++----------------- flake.nix | 1 + 2 files changed, 34 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 90eef06b..50a04161 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Relevant links: - [GitHub OAuth in your Python Flask app](https://supabase.com/blog/oauth2-login-python-flask-apps) - [Python data loading with Supabase](https://supabase.com/blog/loading-data-supabase-python) -## Set up a Local Development Environment +## Local Development ### Clone the Repository @@ -25,39 +25,55 @@ git clone https://github.com/supabase/supabase-py.git cd supabase-py ``` -### Create and Activate a Virtual Environment +### Dependencies -We recommend activating your virtual environment. For example, we like `uv`, `conda` and `nix`! Click [here](https://docs.python.org/3/library/venv.html) for more about Python virtual environments and working with [conda](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#activating-an-environment) and [uv](https://docs.astral.sh/uv/getting-started/features/). For nix, just install it with flakes enabled. +This repo relies on the following dev dependencies: +- `uv` for python project management. +- `make` for command running. +- `docker` for both `storage` and `auth` test containers. +- `supabase-cli` for both `functions` and `realtime` test containers. + +All of these dependencies are included in the nix shell environment, through `flake.nix`. If you've got `nix` installed, you may prefer to use it through `nix develop`. + +### Use a Virtual Environment + +We recommend using a virtual environment, preferrably through `uv`, given it is currently the only tool that understands the workspace setup (you can read more about it in [the uv docs](https://docs.astral.sh/uv/concepts/projects/workspaces/). -Using uv: ``` uv venv supabase-py source supabase-py/bin/activate uv sync ``` -Using venv (Python 3 built-in): +If you're using nix, the `python` instance in the shell should have the correct dependencies installed for the whole workspace, given it is derived from the root's `pyproject.toml` using [uv2nix](https://github.com/pyproject-nix/uv2nix). -```bash -python3 -m venv env -source env/bin/activate # On Windows, use .\env\Scripts\activate -``` +### Running tests and other commands -Using conda: +We use `make` to store and run the relevant commands. The structure is setup such that each sub package can individually set its command in its own `Makefile`, and the job of the main `Makefile` is just coordinate calling each of them. 
+For instance, in order to run all tests of all packages, you should use the following root command: +```bash +make ci +``` +This internally dispatches `make -C src/{package} tests` calls to each package in the monorepo. + +You should also consider using +```bash +make ci -jN # where N is the number of max concurrent jobs, or just -j for infinite jobs +``` +to run each of the packages' tests in parallel. This is generally faster than running with a single job, but it has the downside of garbling the CLI output, so parsing error messages might not be easy. 
+Other relevant commands include: +```bash +make pre-commit # run lints and formatting before committing +make stop-infra # stops all running containers from all packages +make clean # delete all intermediate files created by testing +``` +All of the sub-package commands are available from the repository root by prefixing them with `{package_name}.`. Examples: +```bash +make realtime.tests # run only realtime tests +make storage.clean # delete temporary files only in the storage package +``` ## Badges diff --git a/flake.nix b/flake.nix index 96f43e9a..baac2098 100644 --- a/flake.nix +++ b/flake.nix @@ -33,6 +33,7 @@ pkgs.supabase-cli pkgs.uv pkgs.gnumake + pkgs.docker ]; workspace = uv2nix.lib.workspace.loadWorkspace { workspaceRoot = ./.; }; From 044c71975b86e7e130096cd4a7565b3ff7c97925 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 14:28:06 -0300 Subject: [PATCH 07/25] fix: fix release please config --- release-please-config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release-please-config.json b/release-please-config.json index e0fd6701..4e325854 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -17,7 +17,7 @@ "changelog-path": "src/storage/CHANGELOG.md", "release-type": "python" }, - "src/storage": { + "src/postgrest": { "changelog-path": "src/postgrest/CHANGELOG.md", "release-type": "python" }, From 181ba573ccb2057c1d3818b8f05dd565f771a4b1 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 14:37:14 -0300 Subject: [PATCH 08/25] chore: improve readme some more --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 50a04161..abe60ff2 100644 --- a/README.md +++ b/README.md @@ -27,17 +27,17 @@ cd supabase-py ### Dependencies -This repo relies on the following dev dependencies: +This repository relies on the following dependencies for development: - `uv` for python project management. - `make` for command running. -- `docker` for both `storage` and `auth` test containers. -- `supabase-cli` for both `functions` and `realtime` test containers. +- `docker` for both `postgrest` and `auth` test containers. +- `supabase-cli` for both `storage` and `realtime` test containers. All of these dependencies are included in the nix shell environment, through `flake.nix`. If you've got `nix` installed, you may prefer to use it through `nix develop`. 
### Use a Virtual Environment -We recommend using a virtual environment, preferrably through `uv`, given it is currently the only tool that understands the workspace setup (you can read more about it in [the uv docs](https://docs.astral.sh/uv/concepts/projects/workspaces/). +We recommend using a virtual environment, preferrably through `uv`, given it is currently the only tool that understands the workspace setup (you can read more about it in [the uv docs](https://docs.astral.sh/uv/concepts/projects/workspaces/)). ``` uv venv supabase-py @@ -45,7 +45,7 @@ source supabase-py/bin/activate uv sync ``` -If you're using nix, the `python` instance in the shell should have the correct dependencies installed for the whole workspace, given it is derived from the root's `pyproject.toml` using [uv2nix](https://github.com/pyproject-nix/uv2nix). +If you're using nix, the generated `python` executable should have the correct dependencies installed for the whole workspace, given it is derived from the root's `pyproject.toml` using [uv2nix](https://github.com/pyproject-nix/uv2nix). ### Running tests and other commands From c95b5932b81e0fc0bc265a0250bb5090f9e63916 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 15:11:39 -0300 Subject: [PATCH 09/25] fix: add `run-unasync.py` --- src/postgrest/Makefile | 4 ++-- src/postgrest/pyproject.toml | 1 + src/postgrest/run-unasync.py | 12 ++++++++++++ uv.lock | 4 ++++ 4 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 src/postgrest/run-unasync.py diff --git a/src/postgrest/Makefile b/src/postgrest/Makefile index 2f4e2334..4fb7641a 100644 --- a/src/postgrest/Makefile +++ b/src/postgrest/Makefile @@ -22,9 +22,9 @@ clean: rm -f .coverage coverage.xml unasync: - uv run unasync postgrest tests + uv run --package postgrest run-unasync.py -build_sync: unasync +build-sync: unasync sed -i 's/@pytest.mark.asyncio//g' tests/_sync/test_client.py sed -i 's/_async/_sync/g' tests/_sync/test_client.py sed -i 's/Async/Sync/g' src/postgrest/_sync/request_builder.py tests/_sync/test_client.py diff --git a/src/postgrest/pyproject.toml b/src/postgrest/pyproject.toml index 4a5bc3fd..16292013 100644 --- a/src/postgrest/pyproject.toml +++ b/src/postgrest/pyproject.toml @@ -35,6 +35,7 @@ test = [ "pytest-cov >=6.2.1", "pytest-depends >=1.0.1", "pytest-asyncio >=1.0.0", + "unasync >= 0.6.0", ] lints = [ "pre-commit >=4.2.0", diff --git a/src/postgrest/run-unasync.py b/src/postgrest/run-unasync.py new file mode 100644 index 00000000..3811899e --- /dev/null +++ b/src/postgrest/run-unasync.py @@ -0,0 +1,12 @@ +import unasync +from pathlib import Path + +paths = Path("src/supabase").glob("**/*.py") +tests = Path("tests").glob("**/*.py") + +rules = (unasync._DEFAULT_RULE,) + +files = [str(p) for p in list(paths) + list(tests)] + +if __name__ == "__main__": + unasync.unasync_files(files, rules=rules) diff --git a/uv.lock b/uv.lock index 28776b91..1a57c612 100644 --- a/uv.lock +++ b/uv.lock @@ -1628,6 +1628,7 @@ dev = [ { name = "pytest-cov" }, { name = "pytest-depends" }, { name = "ruff" }, + { name = "unasync" }, ] docs = [ { name = "furo" }, @@ -1644,6 +1645,7 @@ test = [ { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "pytest-depends" }, + { name = "unasync" }, ] [package.metadata] @@ -1662,6 +1664,7 @@ dev = [ { name = "pytest-cov", specifier = ">=6.2.1" }, { name = "pytest-depends", specifier = ">=1.0.1" }, { name = "ruff", specifier = ">=0.12.1" }, + { name = "unasync", specifier = ">=0.6.0" }, ] docs = [ { name = "furo", 
specifier = ">=2023.9.10,<2026.0.0" }, @@ -1676,6 +1679,7 @@ test = [ { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.2.1" }, { name = "pytest-depends", specifier = ">=1.0.1" }, + { name = "unasync", specifier = ">=0.6.0" }, ] [[package]] From 988249abde19f701ecf233c87752da1cb4c9192a Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:04:44 -0300 Subject: [PATCH 10/25] chore: move most badges to top of repo --- README.md | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index abe60ff2..bca3d2a3 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,10 @@ # `supabase-py` +[![CI](https://github.com/supabase/supabase-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase/supabase-py/actions/workflows/ci.yml) +[![Python](https://img.shields.io/pypi/pyversions/supabase)](https://pypi.org/project/supabase) +[![Version](https://img.shields.io/pypi/v/supabase?color=%2334D058)](https://pypi.org/project/supabase) +[![Coverage status](https://codecov.io/gh/supabase/supabase-py/branch/develop/graph/badge.svg)](https://codecov.io/gh/supabase/supabase-py) + Python monorepo for all [Supabase](https://supabase.com) libraries. - [supabase](src/supabase/README.md) @@ -74,17 +79,3 @@ All the sub packages command are available from the main root by prefixing the c make realtime.tests # run only realtime tests make storage.clean # delete temporary files only in the storage package ``` - -## Badges - -[![License: MIT](https://img.shields.io/badge/License-MIT-green.svg?label=license)](https://opensource.org/licenses/MIT) -[![CI](https://github.com/supabase/supabase-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase/supabase-py/actions/workflows/ci.yml) -[![Python](https://img.shields.io/pypi/pyversions/supabase)](https://pypi.org/project/supabase) -[![Version](https://img.shields.io/pypi/v/supabase?color=%2334D058)](https://pypi.org/project/supabase) -[![Codecov](https://codecov.io/gh/supabase/supabase-py/branch/develop/graph/badge.svg)](https://codecov.io/gh/supabase/supabase-py) -[![Last commit](https://img.shields.io/github/last-commit/supabase/supabase-py.svg?style=flat)](https://github.com/supabase/supabase-py/commits) -[![GitHub commit activity](https://img.shields.io/github/commit-activity/m/supabase/supabase-py)](https://github.com/supabase/supabase-py/commits) -[![Github Stars](https://img.shields.io/github/stars/supabase/supabase-py?style=flat&logo=github)](https://github.com/supabase/supabase-py/stargazers) -[![Github Forks](https://img.shields.io/github/forks/supabase/supabase-py?style=flat&logo=github)](https://github.com/supabase/supabase-py/network/members) -[![Github Watchers](https://img.shields.io/github/watchers/supabase/supabase-py?style=flat&logo=github)](https://github.com/supabase/supabase-py) -[![GitHub contributors](https://img.shields.io/github/contributors/supabase/supabase-py)](https://github.com/supabase/supabase-py/graphs/contributors) From 03a4aa5dc3d0643afa995eb766fdec5e21eec2ef Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:06:46 -0300 Subject: [PATCH 11/25] chore: rename realtime to pypi package name --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bca3d2a3..62f4f2ce 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Python monorepo for all [Supabase](https://supabase.com) libraries. 
- [supabase](src/supabase/README.md) -- [realtime](src/realtime/README.md) +- [realtime-py](src/realtime/README.md) - [supabase_functions](src/functions/README.md) - [storage3](src/storage/README.md) - [postgrest](src/postgrest/README.md) From 8ae4d15622db15383cc4c4316f15a9d4b9e95bdc Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:10:29 -0300 Subject: [PATCH 12/25] chore: fix more readmes to mention uv, remove poetry backwards compat in realtime --- src/auth/README.md | 17 ++++++++++++----- src/postgrest/README.md | 15 --------------- src/realtime/README.md | 6 ++++++ src/realtime/pyproject.toml | 16 ---------------- 4 files changed, 18 insertions(+), 36 deletions(-) diff --git a/src/auth/README.md b/src/auth/README.md index a764beab..b3790424 100644 --- a/src/auth/README.md +++ b/src/auth/README.md @@ -9,18 +9,25 @@ This is a Python port of the [supabase js gotrue client](https://github.com/supa ## Installation -The package can be installed using pip or poetry: +The package can be installed using pip, uv or poetry: -### Poetry +### Pip ```bash -poetry add supabase_auth +pip install supabase_auth ``` -### Pip + +### Uv ```bash -pip install supabase_auth +uv add supabase_auth +``` + +### Poetry + +```bash +poetry add supabase_auth ``` ## Features diff --git a/src/postgrest/README.md b/src/postgrest/README.md index 5d0cd923..e71b03db 100644 --- a/src/postgrest/README.md +++ b/src/postgrest/README.md @@ -82,21 +82,6 @@ await client.from_("countries").delete().eq("name", "Việt Nam").execute() await client.rpc("foobar", {"arg1": "value1", "arg2": "value2"}).execute() ``` -## DEVELOPMENT - -```sh -git clone https://github.com/supabase/postgrest-py.git -cd postgrest-py -poetry install -poetry run pre-commit install -``` - -### Testing - -```sh -poetry run pytest -``` - ## CHANGELOG Read more [here](https://github.com/supabase/postgrest-py/blob/main/CHANGELOG.md). diff --git a/src/realtime/README.md b/src/realtime/README.md index 78922fed..03670215 100644 --- a/src/realtime/README.md +++ b/src/realtime/README.md @@ -37,6 +37,12 @@ This client enables you to use the following Supabase Realtime's features: pip3 install realtime ``` +or using `uv` + +```bash +uv add realtime +``` + ## Creating a Channel ```python diff --git a/src/realtime/pyproject.toml b/src/realtime/pyproject.toml index 44ba5f4e..ce79dc5f 100644 --- a/src/realtime/pyproject.toml +++ b/src/realtime/pyproject.toml @@ -16,22 +16,6 @@ dependencies = [ "pydantic (>=2.11.7,<3.0.0)", ] -[tool.poetry.group.dev.dependencies] -aiohttp = "^3.12.15" -pytest = "^8.4.1" -pytest-cov = "^6.2.1" -python-dotenv = "^1.1.1" -pytest-asyncio = "^1.1.0" -pre-commit = "^4.2.0" -ruff = "^0.12.5" -python-lsp-server = ">=1.12.2,<2.0.0" -pylsp-mypy = ">=0.7.0,<0.8.0" -python-lsp-ruff = ">=2.2.2,<3.0.0" - -# maintain two copies of the groups while poetry -# doesn't implement pep 735 support. 
-# https://peps.python.org/pep-0735/
-# https://github.com/python-poetry/poetry/issues/9751
 [dependency-groups]
 tests = [
     "aiohttp >= 3.12.13",

From 49a528af1d71ac479f03fbcea7555a612095bf00 Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Fri, 29 Aug 2025 17:17:23 -0300
Subject: [PATCH 13/25] chore: modify pyproject's urls to reflect monorepo

---
 src/auth/pyproject.toml      | 6 +++---
 src/functions/pyproject.toml | 1 +
 src/postgrest/pyproject.toml | 4 ++--
 src/postgrest/run-unasync.py | 3 ++-
 src/realtime/pyproject.toml  | 5 +++++
 src/supabase/pyproject.toml  | 2 +-
 6 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/src/auth/pyproject.toml b/src/auth/pyproject.toml
index cb932f32..a1904cf6 100644
--- a/src/auth/pyproject.toml
+++ b/src/auth/pyproject.toml
@@ -20,9 +20,9 @@ dependencies = [
 ]

 [project.urls]
-homepage = "https://github.com/supabase/auth-py"
-repository = "https://github.com/supabase/auth-py"
-documentation = "https://github.com/supabase/auth-py"
+homepage = "https://github.com/supabase/supabase-py/tree/main/src/auth"
+repository = "https://github.com/supabase/supabase-py"
+documentation = "https://github.com/supabase/supabase-py/tree/main/src/auth"

 # [project.scripts]
 # gh-download = "scripts.gh-download:main"
diff --git a/src/functions/pyproject.toml b/src/functions/pyproject.toml
index f8fdb668..d57f8fef 100644
--- a/src/functions/pyproject.toml
+++ b/src/functions/pyproject.toml
@@ -16,6 +16,7 @@ dependencies = [

 [project.urls]
 repository = "https://github.com/supabase/supabase-py"
+homepage = "https://github.com/supabase/supabase/tree/main/src/functions"

 [dependency-groups]
 tests = [
diff --git a/src/postgrest/pyproject.toml b/src/postgrest/pyproject.toml
index 16292013..a78cebb2 100644
--- a/src/postgrest/pyproject.toml
+++ b/src/postgrest/pyproject.toml
@@ -25,8 +25,8 @@ dependencies = [
 ]

 [project.urls]
-homepage = "https://github.com/supabase/postgrest-py"
-repository = "https://github.com/supabase/postgrest-py"
+homepage = "https://github.com/supabase/supabase/tree/main/src/postgrest"
+repository = "https://github.com/supabase/supabase-py"
 documentation = "https://postgrest-py.rtfd.io"

 [dependency-groups]
diff --git a/src/postgrest/run-unasync.py b/src/postgrest/run-unasync.py
index 3811899e..034cfdc4 100644
--- a/src/postgrest/run-unasync.py
+++ b/src/postgrest/run-unasync.py
@@ -1,6 +1,7 @@
-import unasync
 from pathlib import Path

+import unasync
+
 paths = Path("src/supabase").glob("**/*.py")
 tests = Path("tests").glob("**/*.py")

diff --git a/src/realtime/pyproject.toml b/src/realtime/pyproject.toml
index ce79dc5f..b1a1f533 100644
--- a/src/realtime/pyproject.toml
+++ b/src/realtime/pyproject.toml
@@ -16,6 +16,11 @@ dependencies = [
     "pydantic (>=2.11.7,<3.0.0)",
 ]

+[project.urls]
+homepage = "https://github.com/supabase/supabase/tree/main/src/realime"
+repository = "https://github.com/supabase/supabase-py"
+documentation = "https://github.com/supabase/supabase/tree/main/src/realime"
+
 [dependency-groups]
 tests = [
     "aiohttp >= 3.12.13",
diff --git a/src/supabase/pyproject.toml b/src/supabase/pyproject.toml
index 9247d76b..3862c62b 100644
--- a/src/supabase/pyproject.toml
+++ b/src/supabase/pyproject.toml
@@ -30,7 +30,7 @@ dependencies = [
 [project.urls]
 homepage = "https://github.com/supabase/supabase-py"
 repository = "https://github.com/supabase/supabase-py"
-documentation = "https://github.com/supabase/supabase-py"
+documentation = "https://github.com/supabase/supabase-py/src/supabase"

 [dependency-groups]
 dev = [

From fef7417bdbc0411e11cf3864f7f31878fb52205e Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Fri, 29 Aug 2025 17:22:47 -0300
Subject: [PATCH 14/25] chore: add myself as a maintainer to all the projects

---
 src/auth/pyproject.toml      | 3 +++
 src/functions/pyproject.toml | 4 ++++
 src/postgrest/pyproject.toml | 3 +++
 src/realtime/pyproject.toml  | 3 +++
 src/storage/pyproject.toml   | 3 +++
 src/supabase/pyproject.toml  | 3 +++
 6 files changed, 19 insertions(+)

diff --git a/src/auth/pyproject.toml b/src/auth/pyproject.toml
index a1904cf6..15060378 100644
--- a/src/auth/pyproject.toml
+++ b/src/auth/pyproject.toml
@@ -5,6 +5,9 @@ description = "Python Client Library for Supabase Auth"
 authors = [
   {name = "Joel Lee", email = "joel@joellee.org" }
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 readme = "README.md"
 license = "MIT"
 classifiers = [
diff --git a/src/functions/pyproject.toml b/src/functions/pyproject.toml
index d57f8fef..131e900f 100644
--- a/src/functions/pyproject.toml
+++ b/src/functions/pyproject.toml
@@ -6,6 +6,9 @@ authors = [
   { name = "Joel Lee", email = "joel@joellee.org" },
   { name = "Andrew Smith", email = "a.smith@silentworks.co.uk" },
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 license = "MIT"
 readme = "README.md"
 requires-python = ">=3.9"
@@ -14,6 +17,7 @@ dependencies = [
   "strenum >=0.4.15",
 ]

+
 [project.urls]
 repository = "https://github.com/supabase/supabase-py"
 homepage = "https://github.com/supabase/supabase/tree/main/src/functions"
diff --git a/src/postgrest/pyproject.toml b/src/postgrest/pyproject.toml
index a78cebb2..f0f125d2 100644
--- a/src/postgrest/pyproject.toml
+++ b/src/postgrest/pyproject.toml
@@ -9,6 +9,9 @@ authors = [
   { name = "Oliver Rice"},
   { name = "Andrew Smith", email = "a.smith@silentworks.co.uk"},
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 readme = "README.md"
 license = "MIT"
 classifiers = [
diff --git a/src/realtime/pyproject.toml b/src/realtime/pyproject.toml
index b1a1f533..b8a8c45b 100644
--- a/src/realtime/pyproject.toml
+++ b/src/realtime/pyproject.toml
@@ -6,6 +6,9 @@ authors = [
   { name = "Joel Lee", email="joel@joellee.org"},
   { name = "Andrew Smith", email="a.smith@silentworks.co.uk"},
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 license = "MIT"
 readme = "README.md"
 repository = "https://github.com/supabase/supabase-py"
diff --git a/src/storage/pyproject.toml b/src/storage/pyproject.toml
index 049b8ab2..bddef39f 100644
--- a/src/storage/pyproject.toml
+++ b/src/storage/pyproject.toml
@@ -8,6 +8,9 @@ authors = [
   { name = "Joel Lee", email="joel@joellee.org"},
   { name = "Leynier Gutiérrez González" , email="leynier41@gmail.com"},
   { name = "Anand Krishna" , email="anand2312@proton.me"},
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 classifiers = [
   "Programming Language :: Python :: 3",
   "License :: OSI Approved :: MIT License",
diff --git a/src/supabase/pyproject.toml b/src/supabase/pyproject.toml
index 3862c62b..d2fd6e54 100644
--- a/src/supabase/pyproject.toml
+++ b/src/supabase/pyproject.toml
@@ -10,6 +10,9 @@ authors = [
   { name = "Anand" },
   { name = "Andrew Smith", email = "a.smith@silentworks.co.uk" },
 ]
+maintainers = [
+  { name = "Leonardo Santiago", email = "leonardo.santiago@supabase.io" }
+]
 readme = "README.md"
 license = "MIT"
license-files = [ "LICENSE" ] From fd5da1024e8d8bf25b9c8f6d5c4aab70fe8ee797 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:26:57 -0300 Subject: [PATCH 15/25] fix: fix coverage badge to use coveralls --- README.md | 3 +-- src/auth/README.md | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index 62f4f2ce..e524bf5c 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,8 @@ # `supabase-py` [![CI](https://github.com/supabase/supabase-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase/supabase-py/actions/workflows/ci.yml) -[![Python](https://img.shields.io/pypi/pyversions/supabase)](https://pypi.org/project/supabase) [![Version](https://img.shields.io/pypi/v/supabase?color=%2334D058)](https://pypi.org/project/supabase) -[![Coverage status](https://codecov.io/gh/supabase/supabase-py/branch/develop/graph/badge.svg)](https://codecov.io/gh/supabase/supabase-py) +[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main)](https://coveralls.io/github/supabase/auth-py?branch=main) Python monorepo for all [Supabase](https://supabase.com) libraries. diff --git a/src/auth/README.md b/src/auth/README.md index b3790424..fd56ce44 100644 --- a/src/auth/README.md +++ b/src/auth/README.md @@ -3,7 +3,6 @@ [![CI](https://github.com/supabase-community/gotrue-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase-community/gotrue-py/actions/workflows/ci.yml) [![Python](https://img.shields.io/pypi/pyversions/gotrue)](https://pypi.org/project/gotrue) [![Version](https://img.shields.io/pypi/v/gotrue?color=%2334D058)](https://pypi.org/project/gotrue) -[![Coverage Status](https://coveralls.io/repos/github/supabase/auth-py/badge.svg?branch=main)](https://coveralls.io/github/supabase/auth-py?branch=main) This is a Python port of the [supabase js gotrue client](https://github.com/supabase/gotrue-js). The current state is that there is a features parity but with small differences that are mentioned in the section **Differences to the JS client**. As of December 14th, we renamed to repo from `gotrue-py` to `auth-py` to mirror the changes in the JavaScript library. From 4f48d3e71c145c6173e71e6cf4dbc7e3b8bb3c31 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:28:13 -0300 Subject: [PATCH 16/25] fix: make main coveralls badge point to correct repo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e524bf5c..5ae51b1c 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![CI](https://github.com/supabase/supabase-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase/supabase-py/actions/workflows/ci.yml) [![Version](https://img.shields.io/pypi/v/supabase?color=%2334D058)](https://pypi.org/project/supabase) -[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main)](https://coveralls.io/github/supabase/auth-py?branch=main) +[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main)](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main) Python monorepo for all [Supabase](https://supabase.com) libraries. 
From bbe82f0b1234c2d0942cee324402eac1f9fa566c Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 29 Aug 2025 17:30:42 -0300 Subject: [PATCH 17/25] fix: actually grab badge from coveralls website so it is correct --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5ae51b1c..a2d7713a 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![CI](https://github.com/supabase/supabase-py/actions/workflows/ci.yml/badge.svg)](https://github.com/supabase/supabase-py/actions/workflows/ci.yml) [![Version](https://img.shields.io/pypi/v/supabase?color=%2334D058)](https://pypi.org/project/supabase) -[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main)](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main) +[![Coverage Status](https://coveralls.io/repos/github/supabase/supabase-py/badge.svg?branch=main)](https://coveralls.io/github/supabase/supabase-py?branch=main) Python monorepo for all [Supabase](https://supabase.com) libraries. From c261594c333cc1ed456084f6abc0a22e14ebb3ce Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 1 Sep 2025 11:34:31 -0300 Subject: [PATCH 18/25] chore: remove unused whitespace --- src/functions/README.md | 1 - src/storage/README.md | 2 -- 2 files changed, 3 deletions(-) diff --git a/src/functions/README.md b/src/functions/README.md index 38e15a41..9f7f24a5 100644 --- a/src/functions/README.md +++ b/src/functions/README.md @@ -1,6 +1,5 @@ # Functions-py - ## Installation `pip3 install supabase_functions` diff --git a/src/storage/README.md b/src/storage/README.md index a2c077c0..fc14ca19 100644 --- a/src/storage/README.md +++ b/src/storage/README.md @@ -2,8 +2,6 @@ Python Client library to interact with Supabase Storage. - - ## How to use As it takes some effort to get the headers. 
We suggest that you use the storage functionality through the main [Supabase Python Client](https://github.com/supabase-community/supabase-py) From fe8a66e33df81fb594796d9f60700061a4c4f516 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 1 Sep 2025 12:05:58 -0300 Subject: [PATCH 19/25] chore: add name parameter to auth and postgrest docker compose --- src/auth/infra/docker-compose.yml | 1 + src/postgrest/infra/docker-compose.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/src/auth/infra/docker-compose.yml b/src/auth/infra/docker-compose.yml index b3a4fe19..6cfa0a0a 100644 --- a/src/auth/infra/docker-compose.yml +++ b/src/auth/infra/docker-compose.yml @@ -1,4 +1,5 @@ # docker-compose.yml +name: auth-tests version: '3' services: gotrue: # Signup enabled, autoconfirm off diff --git a/src/postgrest/infra/docker-compose.yaml b/src/postgrest/infra/docker-compose.yaml index 783ed1dc..191346e4 100644 --- a/src/postgrest/infra/docker-compose.yaml +++ b/src/postgrest/infra/docker-compose.yaml @@ -1,4 +1,5 @@ # docker-compose.yml +name: postgrest-tests version: '3' services: rest: From a428e7c21f7ac9226dbd5eb3817dbb7249729ecf Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 1 Sep 2025 13:56:51 -0300 Subject: [PATCH 20/25] fix: change postgres port in postgrest to not conflict with auth --- src/postgrest/infra/docker-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/postgrest/infra/docker-compose.yaml b/src/postgrest/infra/docker-compose.yaml index 191346e4..cf587f8d 100644 --- a/src/postgrest/infra/docker-compose.yaml +++ b/src/postgrest/infra/docker-compose.yaml @@ -18,7 +18,7 @@ services: db: image: supabase/postgres:15.1.0.37 ports: - - '5432:5432' + - '5732:5732' volumes: - .:/docker-entrypoint-initdb.d/ environment: @@ -26,4 +26,4 @@ services: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_HOST: /var/run/postgresql - POSTGRES_PORT: 5432 + POSTGRES_PORT: 5732 From 6204de6c12a58e6d54f5f7169532305b7bdb9630 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 1 Sep 2025 14:04:11 -0300 Subject: [PATCH 21/25] fix: do not run ci with -j for now --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5a61d359..e086d619 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,7 +40,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Run Tests - run: make ci -j + run: make ci - name: Upload coverage to Coveralls uses: coverallsapp/github-action@v2 From 12b60b881bd171681a36c5520062a4a3faa904aa Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Tue, 2 Sep 2025 09:21:49 -0300 Subject: [PATCH 22/25] fix: improve nix setup, add manual override --- flake.lock | 6 +++--- flake.nix | 26 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index acbde409..ad5b6fc1 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "nixpkgs": { "locked": { - "lastModified": 1755615617, - "narHash": "sha256-HMwfAJBdrr8wXAkbGhtcby1zGFvs+StOp19xNsbqdOg=", + "lastModified": 1756542300, + "narHash": "sha256-tlOn88coG5fzdyqz6R93SQL5Gpq+m/DsWpekNFhqPQk=", "owner": "nixos", "repo": "nixpkgs", - "rev": "20075955deac2583bb12f07151c2df830ef346b4", + "rev": "d7600c775f877cd87b4f5a831c28aa94137377aa", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index baac2098..8f6a34d7 100644 --- a/flake.nix +++ b/flake.nix @@ -41,10 
         sourcePreference = "wheel"; # or sourcePreference = "sdist";
       };
+      pyproject-overlay = final: prev: {
+        ruamel-yaml-clib = prev.ruamel-yaml-clib.overrideAttrs (old: {
+          nativeBuildInputs = old.nativeBuildInputs ++ [
+            (final.resolveBuildSystem {
+              setuptools = [];
+            })
+          ];
+        });
+
+      };
+
       python-for = pkgs:
         let
           extensions = pkgs.lib.composeManyExtensions [
             pyproject-build-systems.overlays.default
             workspace-overlay
+            pyproject-overlay
          ];
           base-python = pkgs.callPackage pyproject-nix.build.packages {
             python = pkgs.python311;
@@ -56,6 +68,20 @@
           python-env = python.mkVirtualEnv "supabase-py" workspace.deps.all;
         in {
           default = pkgs.mkShell {
+            env = {
+              # Don't create venv using uv
+              UV_NO_SYNC = "1";
+
+              # Force uv to use nixpkgs Python interpreter
+              UV_PYTHON = pkgs.python311.interpreter;
+
+              # Prevent uv from downloading managed Python's
+              UV_PYTHON_DOWNLOADS = "never";
+            };
+            shellHook = ''
+              # Undo dependency propagation by nixpkgs.
+              unset PYTHONPATH
+            '';
             packages = [ python-env ] ++ (dev-tools pkgs);
           };
         });

From 3ee900ee966812c4c450a66e898d160c06a4038f Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Tue, 2 Sep 2025 13:48:31 -0300
Subject: [PATCH 23/25] fix: set UV_PROJECT_ENVIRONMENT in nix so that uv doesn't generate a venv for it

---
 flake.nix | 1 +
 1 file changed, 1 insertion(+)

diff --git a/flake.nix b/flake.nix
index 8f6a34d7..ba8e0ad7 100644
--- a/flake.nix
+++ b/flake.nix
@@ -73,6 +73,7 @@
             UV_NO_SYNC = "1";

             # Force uv to use nixpkgs Python interpreter
+            UV_PROJECT_ENVIRONMENT = python-env;
             UV_PYTHON = pkgs.python311.interpreter;

             # Prevent uv from downloading managed Python's

From c42995708afc842541576144f4107b3ded39cd9a Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Tue, 2 Sep 2025 13:51:25 -0300
Subject: [PATCH 24/25] fix: remove obsolete `version` keywords from docker-compose

---
 src/auth/infra/docker-compose.yml       | 1 -
 src/postgrest/infra/docker-compose.yaml | 1 -
 2 files changed, 2 deletions(-)

diff --git a/src/auth/infra/docker-compose.yml b/src/auth/infra/docker-compose.yml
index 6cfa0a0a..6506cea4 100644
--- a/src/auth/infra/docker-compose.yml
+++ b/src/auth/infra/docker-compose.yml
@@ -1,6 +1,5 @@
 # docker-compose.yml
 name: auth-tests
-version: '3'
 services:
   gotrue: # Signup enabled, autoconfirm off
     image: supabase/auth:v2.178.0
diff --git a/src/postgrest/infra/docker-compose.yaml b/src/postgrest/infra/docker-compose.yaml
index cf587f8d..30d847e1 100644
--- a/src/postgrest/infra/docker-compose.yaml
+++ b/src/postgrest/infra/docker-compose.yaml
@@ -1,6 +1,5 @@
 # docker-compose.yml
 name: postgrest-tests
-version: '3'
 services:
   rest:
     image: postgrest/postgrest:v11.2.2

From cc1848fe9ec602f1610166369f4e50146addb26c Mon Sep 17 00:00:00 2001
From: Leonardo Santiago
Date: Tue, 2 Sep 2025 14:38:09 -0300
Subject: [PATCH 25/25] fix: add a sleep after `postgrest.start-infra` to ensure reproducibility

---
 src/postgrest/Makefile | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/postgrest/Makefile b/src/postgrest/Makefile
index 4fb7641a..228ed2c6 100644
--- a/src/postgrest/Makefile
+++ b/src/postgrest/Makefile
@@ -7,6 +7,7 @@ start-infra:
 	cd infra &&\
 	docker compose down &&\
 	docker compose up -d
+	sleep 2

 clean-infra:
 	cd infra &&\