diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 2561bd69..64e88f66 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -55,8 +55,8 @@ jobs: strategy: matrix: version: - - gateway: "0.35.0" - cli: "0.91.0" + - gateway: "0.36.0" + cli: "0.92.0" env: RUSTFLAGS: -D warnings steps: diff --git a/.gitignore b/.gitignore index 70078fc9..4e29e830 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ devenv.local.nix # pre-commit .pre-commit-config.yaml +.aider* diff --git a/Cargo.lock b/Cargo.lock index 19867de0..bc5611b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,7 +33,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "version_check", ] @@ -68,6 +68,56 @@ dependencies = [ "libc", ] +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + 
+[[package]] +name = "anstyle-wincon" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +dependencies = [ + "anstyle", + "once_cell", + "windows-sys 0.59.0", +] + [[package]] name = "anyhow" version = "1.0.98" @@ -227,7 +277,7 @@ dependencies = [ "rand 0.8.5", "regex", "ring", - "rustls-native-certs", + "rustls-native-certs 0.7.3", "rustls-pemfile", "rustls-webpki 0.102.8", "serde", @@ -295,6 +345,15 @@ dependencies = [ "tungstenite 0.24.0", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-polyfill" version = "1.0.3" @@ -310,6 +369,17 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + [[package]] name = "auditable-serde" version = "0.8.0" @@ -342,6 +412,29 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +[[package]] +name = "aws-lc-rs" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b756939cb2f8dc900aa6dcd505e6e2428e9cae7ff7b028c49e3946efa70878" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa9b6986f250236c27e5a204062434a773a13243d2ffc2955f37bdba4c5c6a1" +dependencies = [ + 
"bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", +] + [[package]] name = "axum" version = "0.7.9" @@ -491,6 +584,29 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags 2.9.0", + "cexpr", + "clang-sys", + "itertools 0.12.1", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.100", + "which 4.4.2", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -502,6 +618,9 @@ name = "bitflags" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +dependencies = [ + "serde", +] [[package]] name = "bitvec" @@ -596,9 +715,20 @@ version = "1.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362" dependencies = [ + "jobserver", + "libc", "shlex", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -653,12 +783,117 @@ dependencies = [ "half", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_derive 3.2.25", + "clap_lex 0.2.4", + "indexmap 1.9.3", + "once_cell", + "strsim 0.10.0", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap" +version = "4.5.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" +dependencies = [ + "clap_builder", + "clap_derive 4.5.32", +] + +[[package]] +name = "clap_builder" +version = "4.5.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.7.4", + "strsim 0.11.1", +] + +[[package]] +name = "clap_derive" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_derive" +version = "4.5.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + [[package]] name = "cobs" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -696,6 +931,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -711,6 +956,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.4.2" @@ -726,6 +986,15 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -852,7 +1121,7 @@ dependencies = [ "ident_case", "proc-macro2", 
"quote", - "strsim", + "strsim 0.11.1", "syn 2.0.100", ] @@ -893,9 +1162,9 @@ checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" [[package]] name = "der" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", "pem-rfc7468", @@ -950,6 +1219,12 @@ dependencies = [ "litrs", ] +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "duct" version = "0.13.7" @@ -962,6 +1237,12 @@ dependencies = [ "shared_child", ] +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "duration-str" version = "0.16.1" @@ -1012,7 +1293,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9b3460f44bea8cd47f45a0c70892f1eff856d97cd55358b2f73f663789f6190" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -1035,6 +1316,9 @@ name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] [[package]] name = "elliptic-curve" @@ -1106,6 +1390,17 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + [[package]] name = "event-listener" version = "5.4.0" @@ 
-1174,6 +1469,17 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1210,6 +1516,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "fslock" version = "0.2.1" @@ -1268,6 +1580,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -1356,9 +1679,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", @@ -1387,11 +1710,62 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "grafbase-database-definition" +version = "0.1.0" +dependencies = [ + "Inflector", + "grafbase-sdk", + "indexmap 2.9.0", + "itertools 0.14.0", + "serde", +] + +[[package]] 
+name = "grafbase-postgres" +version = "0.1.0" +dependencies = [ + "Inflector", + "anyhow", + "chrono", + "clap 4.5.37", + "grafbase-database-definition", + "grafbase-postgres-introspection", + "indexmap 2.9.0", + "indoc", + "itertools 0.14.0", + "semver", + "serde", + "serde_json", + "sqlx", + "tokio", + "url", +] + +[[package]] +name = "grafbase-postgres-introspection" +version = "0.1.0" +dependencies = [ + "Inflector", + "anyhow", + "grafbase-database-definition", + "indenter", + "indoc", + "itertools 0.14.0", + "sqlx", +] + [[package]] name = "grafbase-sdk" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bca6bc10dabebb8b0e7dc60b7ca84bceb13145848587feeb909f56c906fc443f" +checksum = "dfd107a9ac6c58beb5936054b7c756c1f16b46ed0dbf18b01cd64491b9d86f16" dependencies = [ "anyhow", "async-tungstenite", @@ -1428,7 +1802,7 @@ dependencies = [ "tungstenite 0.26.2", "url", "uuid", - "which", + "which 7.0.3", "wit-bindgen", "zerocopy", ] @@ -1470,12 +1844,13 @@ checksum = "ebfd7fd1a0e3427f3e6c9ef7bc4e0d2f05c9e94ecf859176cdf297212fe8f06e" [[package]] name = "graphql-composition" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93340435553102d769e19893add506aef5e313eb3bf67e9a03dcde1b8c8c0dd4" +checksum = "65f7438445ff2c1f00a226ec51ce764b4a6c634e550344935c4d61346c206673" dependencies = [ "cynic-parser 0.9.1", "cynic-parser-deser", + "fixedbitset", "graphql-federated-graph", "indexmap 2.9.0", "itertools 0.14.0", @@ -1484,9 +1859,9 @@ dependencies = [ [[package]] name = "graphql-federated-graph" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ec235bbd27dc271777a6bcca6dda46132272788018bfadbe0b5b68b066eab73" +checksum = "e29530bfdd7322086524c5e6ade8c9e019c655d1808edc2290f6401a4598e667" dependencies = [ "bitflags 2.9.0", "cynic-parser 0.9.1", @@ -1502,9 +1877,9 @@ dependencies = [ 
[[package]] name = "graphql-wrapping-types" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "165aca8d4b7bba95daaac55f7e1e139475043a674768a659a4da80792ff74f96" +checksum = "be5b0a74822603c5fd4d656eff8736a13806964cd74e49d5c64b544be2e430e0" dependencies = [ "grafbase-workspace-hack", "serde", @@ -1552,7 +1927,7 @@ dependencies = [ "jiff", "pretty_assertions", "prost", - "rand 0.8.5", + "rand 0.9.1", "serde", "serde_json", "tokio", @@ -1653,10 +2028,19 @@ dependencies = [ ] [[package]] -name = "heapless" -version = "0.7.17" +name = "hashlink" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.2", +] + +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" dependencies = [ "atomic-polyfill", "hash32", @@ -1666,12 +2050,27 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + [[package]] name = "hermit-abi" version = "0.3.9" @@ -1708,6 +2107,15 @@ dependencies = [ "digest", ] +[[package]] +name = "home" +version = "0.5.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "http" version = "0.2.12" @@ -2063,6 +2471,12 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + [[package]] name = "indexmap" version = "1.9.3" @@ -2111,6 +2525,12 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "itertools" version = "0.10.5" @@ -2120,6 +2540,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.14.0" @@ -2178,9 +2607,9 @@ dependencies = [ [[package]] name = "jiff" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ad87c89110f55e4cd4dc2893a9790820206729eaf221555f742d540b0724a0" +checksum = "5a064218214dc6a10fbae5ec5fa888d80c45d611aba169222fc272072bf7aef6" dependencies = [ "jiff-static", "jiff-tzdb-platform", @@ -2193,9 +2622,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d076d5b64a7e2fe6f0743f02c43ca4a6725c0f904203bfe276a5b3e793103605" +checksum = 
"199b7932d97e325aff3a7030e141eafe7f2c6268e1d1b24859b753a627f45254" dependencies = [ "proc-macro2", "quote", @@ -2217,6 +2646,16 @@ dependencies = [ "jiff-tzdb", ] +[[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.2", + "libc", +] + [[package]] name = "js-sys" version = "0.3.77" @@ -2290,6 +2729,12 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "leb128fmt" version = "0.1.0" @@ -2298,15 +2743,35 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "libc" -version = "0.2.171" +version = "0.2.172" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" + +[[package]] +name = "libloading" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +dependencies = [ + "cfg-if", + "windows-targets 0.52.6", +] [[package]] name = "libm" -version = "0.2.11" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +checksum = "c9627da5196e5d8ed0b0495e61e518847578da83483c37288316d9b2e03a7f72" + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", +] [[package]] name = "linked-hash-map" @@ 
-2314,6 +2779,12 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "linux-raw-sys" version = "0.9.4" @@ -2368,7 +2839,7 @@ dependencies = [ "lazy_static", "proc-macro2", "quote", - "regex-syntax", + "regex-syntax 0.8.5", "syn 2.0.100", ] @@ -2381,6 +2852,15 @@ dependencies = [ "logos-codegen", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "matchit" version = "0.7.3" @@ -2393,6 +2873,16 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + [[package]] name = "memchr" version = "2.7.4" @@ -2445,6 +2935,12 @@ dependencies = [ "serde", ] +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.8.8" @@ -2488,6 +2984,16 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +[[package]] +name = "names" +version = "0.14.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bddcd3bf5144b6392de80e04c347cd7fab2508f6df16a85fc496ecd5cec39bc" +dependencies = [ + "clap 3.2.25", + "rand 0.8.5", +] + [[package]] name = "native-tls" version = "0.2.14" @@ -2500,7 +3006,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -2528,12 +3034,32 @@ dependencies = [ "data-encoding", "ed25519", "ed25519-dalek", - "getrandom 0.2.15", + "getrandom 0.2.16", "log", "rand 0.8.5", "signatory", ] +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "nuid" version = "0.5.0" @@ -2602,7 +3128,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] @@ -2612,9 +3138,9 @@ version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51e219e79014df21a225b1860a479e2dcd7cbd9130f4defd4bd0e191ea31d67d" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "chrono", - "getrandom 0.2.15", + "getrandom 0.2.16", "http 1.3.1", "rand 0.8.5", "reqwest 0.12.15", @@ -2749,6 +3275,18 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + +[[package]] +name 
= "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "p256" version = "0.13.2" @@ -2969,6 +3507,30 @@ dependencies = [ "serde", ] +[[package]] +name = "postgres-extension" +version = "0.1.0" +dependencies = [ + "Inflector", + "chrono", + "futures", + "grafbase-database-definition", + "grafbase-postgres-introspection", + "grafbase-sdk", + "indexmap 2.9.0", + "indoc", + "insta", + "itertools 0.14.0", + "names", + "serde", + "serde_json", + "sql-ast", + "sqlx", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -3022,11 +3584,35 @@ dependencies = [ "toml_edit", ] +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -3047,7 +3633,7 @@ version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ - "heck", + "heck 0.5.0", "itertools 0.14.0", "log", "multimap", @@ -3129,7 +3715,7 @@ dependencies = [ 
"pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash", + "rustc-hash 2.1.1", "rustls", "socket2", "thiserror 2.0.12", @@ -3140,15 +3726,15 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.10" +version = "0.11.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc" +checksum = "bcbafbbdbb0f638fe3f35f3c56739f77a8a1d070cb25603226c83339b391472b" dependencies = [ "bytes", "getrandom 0.3.2", - "rand 0.9.0", + "rand 0.9.1", "ring", - "rustc-hash", + "rustc-hash 2.1.1", "rustls", "rustls-pki-types", "slab", @@ -3206,13 +3792,12 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", - "zerocopy", ] [[package]] @@ -3241,7 +3826,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -3270,8 +3855,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -3282,7 +3876,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.5", ] [[package]] @@ 
-3291,6 +3885,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.8.5" @@ -3439,7 +4039,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", @@ -3516,6 +4116,12 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustc-hash" version = "2.1.1" @@ -3531,6 +4137,19 @@ dependencies = [ "semver", ] +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags 2.9.0", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", +] + [[package]] name = "rustix" version = "1.0.5" @@ -3540,7 +4159,7 @@ dependencies = [ "bitflags 2.9.0", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.9.4", "windows-sys 0.59.0", ] @@ -3550,6 +4169,7 @@ version = "0.23.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" dependencies = [ + "aws-lc-rs", "once_cell", "ring", "rustls-pki-types", @@ -3568,7 +4188,19 @@ dependencies = [ "rustls-pemfile", "rustls-pki-types", "schannel", - 
"security-framework", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.2.0", ] [[package]] @@ -3605,6 +4237,7 @@ version = "0.103.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -3664,7 +4297,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.9.0", - "core-foundation", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +dependencies = [ + "bitflags 2.9.0", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -3844,6 +4490,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shared_child" version = "1.0.1" @@ -3862,9 +4517,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = 
"9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" dependencies = [ "libc", ] @@ -3917,6 +4572,9 @@ name = "smallvec" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +dependencies = [ + "serde", +] [[package]] name = "snowflake" @@ -3977,6 +4635,205 @@ dependencies = [ "der", ] +[[package]] +name = "sql-ast" +version = "0.1.0" +dependencies = [ + "grafbase-sdk", +] + +[[package]] +name = "sqlx" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3" +dependencies = [ + "base64 0.22.1", + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.2", + "hashlink", + "indexmap 2.9.0", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rustls", + "rustls-native-certs 0.8.1", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.12", + "tokio", + "tokio-stream", + "tracing", + "url", + "webpki-roots", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.100", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7" 
+dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.100", + "tempfile", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.0", + "byteorder", + "bytes", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.0", + "byteorder", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc" +dependencies = [ + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + 
"serde_urlencoded", + "sqlx-core", + "thiserror 2.0.12", + "tracing", + "url", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -3989,6 +4846,23 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + [[package]] name = "strsim" version = "0.11.1" @@ -4019,7 +4893,7 @@ version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "rustversion", @@ -4032,7 +4906,7 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "rustversion", @@ -4100,7 +4974,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys 0.5.0", ] @@ -4111,7 +4985,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.9.0", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -4150,10 +5024,19 @@ dependencies = 
[ "fastrand", "getrandom 0.3.2", "once_cell", - "rustix", + "rustix 1.0.5", "windows-sys 0.59.0", ] +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "test-matrix" version = "0.1.0" @@ -4164,6 +5047,12 @@ dependencies = [ "toml", ] +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" + [[package]] name = "thiserror" version = "1.0.69" @@ -4204,6 +5093,16 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + [[package]] name = "time" version = "0.3.41" @@ -4334,9 +5233,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.14" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034" +checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" dependencies = [ "bytes", "futures-core", @@ -4529,6 +5428,49 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", ] [[package]] @@ -4577,7 +5519,7 @@ dependencies = [ "http 1.3.1", "httparse", "log", - "rand 0.9.0", + "rand 0.9.1", "sha1", "thiserror 2.0.12", "utf-8", @@ -4607,12 +5549,33 @@ version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + [[package]] name = "unicode-width" version = "0.1.14" @@ -4667,12 +5630,24 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + [[package]] name = "uuid" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vcpkg" version = "0.2.15" @@ -4709,6 +5684,12 @@ dependencies = [ "wit-bindgen-rt 0.39.0", ] +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -4850,6 +5831,18 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.44", +] + [[package]] name = "which" version = "7.0.3" @@ -4858,10 +5851,20 @@ checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" dependencies = [ "either", "env_home", - "rustix", + "rustix 1.0.5", "winsafe", ] +[[package]] +name = "whoami" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +dependencies = [ + "redox_syscall", + "wasite", +] + [[package]] name = "winapi" version = "0.3.9" @@ -4878,6 +5881,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" @@ -5177,9 +6189,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63d3fcd9bba44b03821e7d699eeee959f3126dcc4aa8e4ae18ec617c2a5cea10" +checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" dependencies = [ "memchr", ] @@ -5241,7 +6253,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" dependencies = [ "anyhow", - "heck", + "heck 0.5.0", "wit-parser", ] @@ -5272,7 +6284,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" dependencies = [ "anyhow", - "heck", + "heck 0.5.0", "indexmap 2.9.0", "prettyplease", "syn 2.0.100", diff --git a/Cargo.toml b/Cargo.toml index 46a50208..36917191 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["extensions/*", "publish-extensions", "test-matrix"] +members = ["crates/*", "extensions/*", "cli/*", "publish-extensions", "test-matrix"] [workspace.package] edition = "2024" @@ -52,20 +52,28 @@ unused-self = "allow" wildcard-imports = "allow" [workspace.dependencies] +Inflector = "0.11.4" +anyhow = "1.0.98" async-nats = "0.40" async-stream = "0.3.6" base64 = "0.22.1" bytes = { version = "1.10.1", features = ["serde"] } chrono = "0.4.40" +clap = "4.5.36" duration-str = "0.16.1" futures = "0.3" futures-util = "0.3.31" -grafbase-sdk = 
"0.14.0" +grafbase-database-definition = { version = "0.1.0", path = "crates/database-definition" } +grafbase-postgres-introspection = { version = "0.1.0", path = "crates/postgres-introspection" } +grafbase-sdk = "0.15.0" http = "1.3" +indexmap = "2.9.0" indoc = "2.0.6" insta = { version = "1.42.2", features = ["json"] } +itertools = "0.14.0" jiff = "0.2.6" jwt-compact = "0.8.0" +names = "0.14.0" openidconnect = "4.0.0" ory-client = "=1.9.0" pem = "3.0.5" @@ -79,11 +87,15 @@ serde = "1.0.219" serde_json = "1" serde_with = "3.12.0" sha2 = "0.10.8" +sql-ast = { version = "0.1.0", path = "crates/sql-ast" } +sqlx = { version = "0.8.4", default-features = false } strum = { version = "0.27.1", features = ["derive"] } tokio = { version = "1", features = ["rt-multi-thread", "macros", "test-util"] } tokio-stream = "0.1.17" toml = "0.8" tonic = "0.12.3" tonic-build = { version = "0.12.3", features = ["prost"] } +tracing = "0.1.41" +tracing-subscriber = "0.3.19" url = { version = "2.5.4", features = ["serde"] } wiremock = "0.6.3" diff --git a/cli/postgres/Cargo.toml b/cli/postgres/Cargo.toml new file mode 100644 index 00000000..a72ce91a --- /dev/null +++ b/cli/postgres/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "grafbase-postgres" +version = "0.1.0" +edition = "2024" +license = "Apache-2.0" + +[dependencies] +Inflector.workspace = true +chrono.workspace = true +clap = { workspace = true, features = ["derive", "env"] } +indexmap = { workspace = true, features = ["serde"] } +itertools.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +anyhow.workspace = true +indoc.workspace = true +tokio = { workspace = true, features = ["rt", "macros"] } +grafbase-database-definition.workspace = true +grafbase-postgres-introspection.workspace = true +sqlx = { workspace = true, features = [ + "sqlx-postgres", + "json", + "runtime-tokio", + "tls-rustls-aws-lc-rs", + "tls-rustls-ring-native-roots", + "postgres", +] } +semver = { version = 
"1.0.26", features = ["serde"] } +url = { version = "2.5.4", features = ["serde"] } diff --git a/cli/postgres/src/args.rs b/cli/postgres/src/args.rs new file mode 100644 index 00000000..96790950 --- /dev/null +++ b/cli/postgres/src/args.rs @@ -0,0 +1,61 @@ +use std::path::PathBuf; + +use clap::{ArgGroup, Parser, Subcommand}; +use semver::Version; +use url::Url; + +#[derive(Parser, Debug)] +#[command(name = "grafbase-postgres")] +#[command(about = "Grafbase Postgres Extension")] +pub struct Args { + /// Connection string to the database + #[arg(short, long, env = "DATABASE_URL")] + pub database_url: String, + + #[command(subcommand)] + pub command: Commands, +} + +#[derive(Debug, Subcommand)] +pub enum Commands { + /// Introspect a PostgreSQL database + #[command(name = "introspect")] + Introspect(IntrospectCommand), +} + +#[derive(Parser, Debug)] +#[command(group( + ArgGroup::new("extension_identifier") + .required(true) + .args(["extension_url", "extension_version"]), +))] +pub struct IntrospectCommand { + /// Output file path. If not provided, the SDL will be printed to stdout. 
+ #[arg(short, long)] + pub output_file: Option, + /// The name of the database to be used in the GraphQL SDL + #[arg(short, long, default_value = "default")] + pub database_name: String, + /// Default schema to be used in the GraphQL SDL (will be omitted from definitions) + #[arg(short = 's', long, default_value = "public")] + pub default_schema: String, + /// URL to the extension + #[arg(long, short = 'u')] + pub extension_url: Option, + /// Extension version following semver + #[arg(long, short = 'v')] + pub extension_version: Option, +} + +impl IntrospectCommand { + pub fn extension_url(&self) -> String { + match self.extension_version.as_ref() { + Some(version) => format!("https://grafbase.com/extensions/postgres/{version}"), + None => self.extension_url.as_ref().unwrap().to_string(), + } + } +} + +pub fn parse() -> Args { + Args::parse() +} diff --git a/cli/postgres/src/main.rs b/cli/postgres/src/main.rs new file mode 100644 index 00000000..946a473b --- /dev/null +++ b/cli/postgres/src/main.rs @@ -0,0 +1,37 @@ +use args::IntrospectCommand; +use grafbase_postgres_introspection::IntrospectionOptions; +use sqlx::{Connection, PgConnection}; + +mod args; + +#[tokio::main(flavor = "current_thread")] +async fn main() -> anyhow::Result<()> { + let args = args::parse(); + + let mut conn = PgConnection::connect(&args.database_url).await?; + + match args.command { + args::Commands::Introspect(introspect_cmd) => { + introspect(&mut conn, introspect_cmd).await?; + } + } + + Ok(()) +} + +async fn introspect(conn: &mut PgConnection, cmd: IntrospectCommand) -> anyhow::Result<()> { + let opts = IntrospectionOptions { + database_name: &cmd.database_name, + extension_url: &cmd.extension_url(), + default_schema: &cmd.default_schema, + }; + + let sdl = grafbase_postgres_introspection::introspect(conn, opts).await?; + + match cmd.output_file { + Some(path) => std::fs::write(path, sdl)?, + None => println!("{sdl}"), + } + + Ok(()) +} diff --git a/compose.yaml b/compose.yaml index 
b5611fb1..085c056d 100644 --- a/compose.yaml +++ b/compose.yaml @@ -7,15 +7,15 @@ services: hydra: image: oryd/hydra:v2.2.0 ports: - - '4444:4444' # Public port - - '4445:4445' # Admin port + - "4444:4444" # Public port + - "4445:4445" # Admin port command: serve -c /etc/config/hydra/hydra.yml all --dev volumes: - hydra-sqlite:/var/lib/sqlite:Z - ./docker/hydra-config:/etc/config/hydra:Z environment: - DSN: 'sqlite:///var/lib/sqlite/db.sqlite?_fk=true' - URLS_SELF_ISSUER: 'http://127.0.0.1:4444' + DSN: "sqlite:///var/lib/sqlite/db.sqlite?_fk=true" + URLS_SELF_ISSUER: "http://127.0.0.1:4444" restart: unless-stopped depends_on: - hydra-migrate @@ -25,7 +25,7 @@ services: hydra-migrate: image: oryd/hydra:v2.2.0 environment: - DSN: 'sqlite:///var/lib/sqlite/db.sqlite?_fk=true' + DSN: "sqlite:///var/lib/sqlite/db.sqlite?_fk=true" command: migrate -c /etc/config/hydra/hydra.yml sql -e --yes volumes: - hydra-sqlite:/var/lib/sqlite:Z @@ -38,17 +38,17 @@ services: hydra-2: image: oryd/hydra:v2.2.0 ports: - - '4454:4454' # Public port - - '4455:4455' # Admin port + - "4454:4454" # Public port + - "4455:4455" # Admin port command: serve -c /etc/config/hydra/hydra.yml all --dev volumes: - hydra-2-sqlite:/var/lib/sqlite:Z - ./docker/hydra-config:/etc/config/hydra:Z environment: - DSN: 'sqlite:///var/lib/sqlite/db.sqlite?_fk=true' - URLS_SELF_ISSUER: 'http://127.0.0.1:4454' - SERVE_PUBLIC_PORT: '4454' - SERVE_ADMIN_PORT: '4455' + DSN: "sqlite:///var/lib/sqlite/db.sqlite?_fk=true" + URLS_SELF_ISSUER: "http://127.0.0.1:4454" + SERVE_PUBLIC_PORT: "4454" + SERVE_ADMIN_PORT: "4455" restart: unless-stopped depends_on: - hydra-migrate @@ -58,7 +58,7 @@ services: hydra-2-migrate: image: oryd/hydra:v2.2.0 environment: - DSN: 'sqlite:///var/lib/sqlite/db.sqlite?_fk=true' + DSN: "sqlite:///var/lib/sqlite/db.sqlite?_fk=true" command: migrate -c /etc/config/hydra/hydra.yml sql -e --yes volumes: - hydra-2-sqlite:/var/lib/sqlite:Z @@ -70,20 +70,33 @@ services: nats: image: nats ports: - - 
'4222:4222' - - '8222:8222' + - "4222:4222" + - "8222:8222" command: > --jetstream --http_port=8222 --user=grafbase --pass=grafbase - networks: ['nats'] + networks: ["nats"] + + postgres: + image: postgres:17 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: grafbase + ports: + - "5432:5432" + command: postgres -c 'max_connections=200' + networks: ["postgres"] + restart: unless-stopped networks: hydra: hydra-2: nats: + postgres: volumes: hydra-sqlite: hydra-2-sqlite: + postgres-data: diff --git a/crates/database-definition/Cargo.toml b/crates/database-definition/Cargo.toml new file mode 100644 index 00000000..e6454755 --- /dev/null +++ b/crates/database-definition/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "grafbase-database-definition" +version = "0.1.0" +edition = "2024" +license = "Apache-2.0" + +[dependencies] +Inflector.workspace = true +grafbase-sdk.workspace = true +serde.workspace = true +itertools.workspace = true +indexmap.workspace = true diff --git a/crates/database-definition/src/enum.rs b/crates/database-definition/src/enum.rs new file mode 100644 index 00000000..7998a207 --- /dev/null +++ b/crates/database-definition/src/enum.rs @@ -0,0 +1,62 @@ +use inflector::Inflector; + +use crate::StringId; + +use super::SchemaId; + +#[derive(Debug, Clone)] +pub struct Enum { + pub(super) schema_id: SchemaId, + pub(super) database_name: T, + pub(super) client_name: T, + pub(super) description: Option, +} + +impl Enum { + pub(crate) fn schema_id(&self) -> SchemaId { + self.schema_id + } + + pub(crate) fn set_client_name(&mut self, client_name: T) { + self.client_name = client_name; + } +} + +impl Enum { + pub fn new(schema_id: SchemaId, database_name: String, client_name: Option) -> Self { + let client_name = client_name.unwrap_or_else(|| database_name.to_pascal_case()); + + Self { + schema_id, + database_name, + client_name, + description: None, + } + } + + pub fn set_description(&mut self, description: String) { + self.description = 
Some(description); + } + + pub fn database_name(&self) -> &str { + &self.database_name + } + + pub fn client_name(&self) -> &str { + &self.client_name + } +} + +impl Enum { + pub fn database_name(&self) -> StringId { + self.database_name + } + + pub fn client_name(&self) -> StringId { + self.client_name + } + + pub fn description(&self) -> Option { + self.description + } +} diff --git a/crates/database-definition/src/enum_variant.rs b/crates/database-definition/src/enum_variant.rs new file mode 100644 index 00000000..782c308a --- /dev/null +++ b/crates/database-definition/src/enum_variant.rs @@ -0,0 +1,56 @@ +use inflector::Inflector; + +use super::{EnumId, StringId}; + +#[derive(Debug, Clone)] +pub struct EnumVariant { + pub(super) enum_id: EnumId, + pub(super) database_name: T, + pub(super) client_name: T, + pub(super) description: Option, +} + +impl EnumVariant { + pub(crate) fn enum_id(&self) -> EnumId { + self.enum_id + } +} + +impl EnumVariant { + pub fn new(enum_id: EnumId, database_name: String, client_name: Option) -> Self { + let client_name = client_name.unwrap_or_else(|| database_name.to_screaming_snake_case()); + + Self { + enum_id, + database_name, + client_name, + description: None, + } + } + + pub(crate) fn database_name(&self) -> &str { + &self.database_name + } + + pub(crate) fn client_name(&self) -> &str { + &self.client_name + } + + pub fn set_description(&mut self, description: String) { + self.description = Some(description); + } +} + +impl EnumVariant { + pub(crate) fn database_name(&self) -> StringId { + self.database_name + } + + pub(crate) fn client_name(&self) -> StringId { + self.client_name + } + + pub fn description(&self) -> Option { + self.description + } +} diff --git a/crates/database-definition/src/foreign_key.rs b/crates/database-definition/src/foreign_key.rs new file mode 100644 index 00000000..ac19c863 --- /dev/null +++ b/crates/database-definition/src/foreign_key.rs @@ -0,0 +1,59 @@ +use super::{SchemaId, StringId, TableId}; + 
+#[derive(Debug, Clone)] +pub struct ForeignKey { + pub(super) constraint_name: T, + pub(super) schema_id: SchemaId, + pub(super) constrained_table_id: TableId, + pub(super) referenced_table_id: TableId, + pub(super) description: Option, +} + +impl ForeignKey { + pub(crate) fn schema_id(&self) -> SchemaId { + self.schema_id + } + + pub(crate) fn constrained_table_id(&self) -> TableId { + self.constrained_table_id + } + + pub(crate) fn referenced_table_id(&self) -> TableId { + self.referenced_table_id + } +} + +impl ForeignKey { + pub fn new( + constraint_name: String, + schema_id: SchemaId, + constrained_table_id: TableId, + referenced_table_id: TableId, + ) -> Self { + Self { + constraint_name, + schema_id, + constrained_table_id, + referenced_table_id, + description: None, + } + } + + pub(crate) fn constraint_name(&self) -> &str { + &self.constraint_name + } + + pub fn set_description(&mut self, description: String) { + self.description = Some(description); + } +} + +impl ForeignKey { + pub(crate) fn constraint_name(&self) -> StringId { + self.constraint_name + } + + pub fn description(&self) -> Option { + self.description + } +} diff --git a/crates/database-definition/src/foreign_key_column.rs b/crates/database-definition/src/foreign_key_column.rs new file mode 100644 index 00000000..35185eed --- /dev/null +++ b/crates/database-definition/src/foreign_key_column.rs @@ -0,0 +1,34 @@ +use super::{ForeignKeyId, TableColumnId}; + +#[derive(Debug, Clone)] +pub struct ForeignKeyColumn { + foreign_key_id: ForeignKeyId, + constrained_column_id: TableColumnId, + referenced_column_id: TableColumnId, +} + +impl ForeignKeyColumn { + pub fn new( + foreign_key_id: ForeignKeyId, + constrained_column_id: TableColumnId, + referenced_column_id: TableColumnId, + ) -> Self { + Self { + foreign_key_id, + constrained_column_id, + referenced_column_id, + } + } + + pub(crate) fn foreign_key_id(&self) -> ForeignKeyId { + self.foreign_key_id + } + + pub(crate) fn 
constrained_column_id(&self) -> TableColumnId { + self.constrained_column_id + } + + pub(crate) fn referenced_column_id(&self) -> TableColumnId { + self.referenced_column_id + } +} diff --git a/crates/database-definition/src/ids.rs b/crates/database-definition/src/ids.rs new file mode 100644 index 00000000..b6bb7b60 --- /dev/null +++ b/crates/database-definition/src/ids.rs @@ -0,0 +1,62 @@ +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +pub struct SchemaId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TableId(pub(crate) u32); + +impl From for TableId { + fn from(value: u32) -> Self { + TableId(value) + } +} + +impl From for u32 { + fn from(value: TableId) -> Self { + value.0 + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TableColumnId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct EnumId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct EnumVariantId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct KeyId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct KeyColumnId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ForeignKeyId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ForeignKeyColumnId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ForwardRelationId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct BackRelationId(pub(crate) u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum RelationId { + Forward(ForwardRelationId), + Back(BackRelationId), +} + +impl RelationId { + /// Returns `true` if the 
relation is a forward relation. + pub fn is_forward(&self) -> bool { + matches!(self, RelationId::Forward(_)) + } + + /// Returns `true` if the relation is a back relation. + pub fn is_backward(&self) -> bool { + matches!(self, RelationId::Back(_)) + } +} diff --git a/crates/database-definition/src/indices.rs b/crates/database-definition/src/indices.rs new file mode 100644 index 00000000..c62e0001 --- /dev/null +++ b/crates/database-definition/src/indices.rs @@ -0,0 +1,46 @@ +use std::collections::HashMap; + +use grafbase_sdk::types::DefinitionId; + +use crate::{Operation, StringId}; + +use super::{EnumId, EnumVariantId, ForeignKeyId, KeyId, RelationId, SchemaId, TableColumnId, TableId}; + +/// Various indices used to quickly look up items within the schema. +#[derive(Default, Debug, Clone)] +pub(super) struct Indices { + /// Provides a fast lookup for a table by its schema ID and name. + pub(super) tables: HashMap<(SchemaId, StringId), TableId>, + /// Provides a fast lookup for a table column by its table ID and column name. + pub(super) table_columns: HashMap<(TableId, StringId), TableColumnId>, + /// Provides a fast lookup for a table column by its table ID and *client* field name. + /// Note: This might be redundant with `client_fields`. + pub(super) table_fields: HashMap<(TableId, StringId), TableColumnId>, + /// Provides a fast lookup for an enum by its schema ID and name. + pub(super) enums: HashMap<(SchemaId, StringId), EnumId>, + /// Provides a fast lookup for an enum variant by its enum ID and variant name. + pub(super) enum_variants: HashMap<(EnumId, StringId), EnumVariantId>, + /// Provides a fast lookup for a foreign key by its schema ID and name. + pub(super) foreign_keys: HashMap<(SchemaId, StringId), ForeignKeyId>, + /// Provides a fast lookup for a key (e.g., primary, unique) by its table ID and name. + pub(super) keys: HashMap<(TableId, StringId), KeyId>, + /// Provides a fast lookup for a table ID by its client-facing type name. 
+ pub(super) client_types: HashMap, + /// Provides a fast lookup for a table column ID by its table ID and client-facing field name. + pub(super) client_fields: HashMap<(TableId, StringId), TableColumnId>, + /// Provides a fast lookup for a key ID by its table ID and client-facing unique constraint name. + pub(super) client_unique_constraints: HashMap<(TableId, StringId), KeyId>, + /// Provides a fast lookup for a relation ID by the `DefinitionId` of the client-facing relation field. + pub(super) client_relations: HashMap, + /// Provides a fast lookup for a relation ID by its table ID and client-facing relation field name. + pub(super) client_name_relations: HashMap<(TableId, StringId), RelationId>, + /// Maps the `DefinitionId` of a client-facing scalar field to its corresponding `TableColumnId`. + pub(super) field_definition_to_column: HashMap, + /// Maps a `DefinitionId` (e.g., of a field) to its client-facing name (`StringId`). + pub(super) definition_to_field_name: HashMap, + /// Maps the `DefinitionId` of a client-facing field to the `DefinitionId` of its return type. + pub(super) field_definition_to_return_type_definition_id: HashMap, + /// A mapping from a field definition ID to a full SQL operation. Mapped from + /// query and mutation definitions. + pub(super) operations: HashMap, +} diff --git a/crates/database-definition/src/interner.rs b/crates/database-definition/src/interner.rs new file mode 100644 index 00000000..d0a03439 --- /dev/null +++ b/crates/database-definition/src/interner.rs @@ -0,0 +1,30 @@ +use indexmap::IndexSet; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] +pub(crate) struct StringId(usize); + +#[derive(Default, Debug, Clone)] +pub(crate) struct StringInterner { + map: IndexSet, +} + +impl StringInterner { + /// Get an already-interned string. 
+ pub(crate) fn lookup(&self, s: &str) -> Option { + self.map.get_index_of(s).map(StringId) + } + + pub(crate) fn get(&self, id: StringId) -> &str { + &self.map[id.0] + } + + pub(crate) fn intern(&mut self, s: &str) -> StringId { + if let Some(id) = self.lookup(s) { + id + } else { + let (idx, is_new) = self.map.insert_full(s.to_owned()); + debug_assert!(is_new); + StringId(idx) + } + } +} diff --git a/crates/database-definition/src/key.rs b/crates/database-definition/src/key.rs new file mode 100644 index 00000000..c8133c80 --- /dev/null +++ b/crates/database-definition/src/key.rs @@ -0,0 +1,45 @@ +use super::{StringId, TableId}; + +#[derive(serde::Deserialize, Debug, Clone, Copy, PartialEq)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum KeyType { + Primary, + Unique, +} + +#[derive(Debug, Clone)] +pub struct Key { + pub(super) table_id: TableId, + pub(super) r#type: KeyType, + pub(super) constraint_name: T, +} + +impl Key { + pub(crate) fn table_id(&self) -> TableId { + self.table_id + } +} + +impl Key { + pub fn new(table_id: TableId, constraint_name: String, r#type: KeyType) -> Self { + Self { + table_id, + r#type, + constraint_name, + } + } + + pub(crate) fn name(&self) -> &str { + &self.constraint_name + } +} + +impl Key { + pub(crate) fn name(&self) -> StringId { + self.constraint_name + } + + pub(crate) fn r#type(&self) -> KeyType { + self.r#type + } +} diff --git a/crates/database-definition/src/key_column.rs b/crates/database-definition/src/key_column.rs new file mode 100644 index 00000000..3de1ccfe --- /dev/null +++ b/crates/database-definition/src/key_column.rs @@ -0,0 +1,21 @@ +use super::{KeyId, TableColumnId}; + +#[derive(Debug, Clone)] +pub struct KeyColumn { + key_id: KeyId, + column_id: TableColumnId, +} + +impl KeyColumn { + pub fn new(key_id: KeyId, column_id: TableColumnId) -> Self { + Self { key_id, column_id } + } + + pub(crate) fn key_id(&self) -> KeyId { + self.key_id + } + + pub(crate) fn column_id(&self) -> TableColumnId { + 
self.column_id + } +} diff --git a/crates/database-definition/src/lib.rs b/crates/database-definition/src/lib.rs new file mode 100644 index 00000000..9cdc8a64 --- /dev/null +++ b/crates/database-definition/src/lib.rs @@ -0,0 +1,589 @@ +mod r#enum; +mod enum_variant; +mod foreign_key; +mod foreign_key_column; +mod ids; +mod indices; +mod interner; +mod key; +mod key_column; +mod relations; +mod table; +mod table_column; +mod r#type; +mod walkers; + +use std::collections::HashMap; + +pub use r#enum::Enum; +pub use enum_variant::EnumVariant; +pub use foreign_key::ForeignKey; +pub use foreign_key_column::ForeignKeyColumn; +use grafbase_sdk::types::DefinitionId; +pub use ids::{ + BackRelationId, EnumId, EnumVariantId, ForeignKeyColumnId, ForeignKeyId, ForwardRelationId, KeyColumnId, KeyId, + RelationId, SchemaId, TableColumnId, TableId, +}; +use indices::Indices; +use inflector::Inflector; +pub(crate) use interner::StringId; +use interner::StringInterner; +pub use key::{Key, KeyType}; +pub use key_column::KeyColumn; +use relations::Relations; +pub use table::Table; +pub use table_column::{IdentityGeneration, TableColumn}; +pub use r#type::{ColumnType, DatabaseType, EnumType, ScalarKind, ScalarType}; +pub use walkers::{EnumWalker, KeyWalker, RelationWalker, TableColumnWalker, TableWalker, Walker}; + +/// Definition of a PostgreSQL database. Contains all the +/// tables, enums, columns, constraints etc. for us to render +/// a GraphQL schema, and for us to allow querying the database +/// efficiently. +/// +/// Due to Grafbase dependency tree, mutating this structure +/// outside of introspection is not recommended. Some of the +/// mutations are public, but from the perspective of the user, +/// the important call points are the table and enum iterators, +/// and the find methods with string slices. +/// +/// Be aware that this structure is serialized in a cache for +/// fast worker startup. Any changes here must be backwards-compatible. 
+/// +/// There will be a test failure if something changes to alert you. +#[derive(Debug)] +pub struct DatabaseDefinition { + /// Used for deduplicating strings in the definition. + interner: StringInterner, + /// The name of the database. + database_name: String, + /// Ordered by name. + schemas: Vec, + /// Ordered by schema id, then table name. + tables: Vec>, + /// Ordered by schema id, table id and then column position. + table_columns: Vec>, + /// Ordered by schema id, then enum name. + enums: Vec>, + /// Ordered by schema id, enum id and finally the variant position. + enum_variants: Vec>, + /// Ordered by schema id, table id and foreign key constraint name. + foreign_keys: Vec>, + /// Ordered by schema id, table id, foreign key id and the column position. + foreign_key_columns: Vec, + /// Ordered by schema id, table id and constraint name. + keys: Vec>, + /// Ordered by schema id, table id, constraint id and the column position. + key_columns: Vec, + /// Various indices for faster lookups. + indices: Indices, + /// Stores the relations between tables based on foreign keys. + relations: Relations, +} + +#[derive(Debug, Clone, Copy)] +pub enum Operation { + FindOne(TableId), + FindMany(TableId), + DeleteOne(TableId), + DeleteMany(TableId), + CreateOne(TableId), + CreateMany(TableId), + UpdateOne(TableId), + UpdateMany(TableId), +} + +impl DatabaseDefinition { + /// Creates a new database definition with the given name. 
+ /// + /// # Arguments + /// + /// * `name` - The name of the database + /// + /// # Returns + /// + /// A new `DatabaseDefinition` instance with default empty collections + pub fn new(name: String) -> Self { + Self { + database_name: name, + schemas: Vec::new(), + tables: Vec::new(), + table_columns: Vec::new(), + enums: Vec::new(), + enum_variants: Vec::new(), + foreign_keys: Vec::new(), + foreign_key_columns: Vec::new(), + keys: Vec::new(), + key_columns: Vec::new(), + indices: Indices::default(), + relations: Relations::default(), + interner: Default::default(), + } + } + + /// The name of the database. + pub fn name(&self) -> &str { + &self.database_name + } + + /// Iterates over all tables of the introspected database. + pub fn tables(&self) -> impl ExactSizeIterator> + '_ { + (0..self.tables.len()).map(move |id| self.walk(TableId(id as u32))) + } + + /// Iterates over all enums of the introspected database. + pub fn enums(&self) -> impl ExactSizeIterator> + '_ { + (0..self.enums.len()).map(move |id| self.walk(EnumId(id as u32))) + } + + /// Find a table that represents the given client type. + pub fn find_table_for_client_type(&self, client_type: &str) -> Option> { + self.interner + .lookup(client_type) + .and_then(|string_id| self.indices.client_types.get(&string_id)) + .map(|id| self.walk(*id)) + } + + /// Find a unique constraint that represents the given client field. + pub fn find_unique_constraint_for_client_field( + &self, + client_field: &str, + table_id: TableId, + ) -> Option> { + self.interner + .lookup(client_field) + .and_then(|string_id| self.indices.client_unique_constraints.get(&(table_id, string_id))) + .copied() + .map(|id| self.walk(id)) + } + + /// Retrieves a TableColumnWalker for a given definition ID. 
+ pub fn column_for_field_definition(&self, field_definition_id: DefinitionId) -> Option> { + self.indices + .field_definition_to_column + .get(&field_definition_id) + .copied() + .map(|id| self.walk(id)) + } + + /// Adds a schema to the definition. + pub fn push_schema(&mut self, schema: String) -> SchemaId { + let id = self.next_schema_id(); + self.schemas.push(schema); + + id + } + + /// Adds an SQL operation for a given field definition ID. + pub fn push_operation(&mut self, definition_id: DefinitionId, operation: Operation) { + self.indices.operations.insert(definition_id, operation); + } + + pub fn get_operation(&self, definition_id: DefinitionId) -> Option { + self.indices.operations.get(&definition_id).copied() + } + + /// Adds a table to the definition. + pub fn push_table(&mut self, table: Table) -> TableId { + let id = self.next_table_id(); + let string_id = self.interner.intern(table.database_name()); + + self.indices.tables.insert((table.schema_id(), string_id), id); + self.push_client_type_mapping(table.client_name(), id); + + self.tables.push(Table { + schema_id: table.schema_id(), + database_name: self.interner.intern(table.database_name()), + client_name: self.interner.intern(table.client_name()), + client_field_name: self.interner.intern(table.client_field_name()), + client_field_name_plural: self.interner.intern(table.client_field_name_plural()), + description: table.description.map(|desc| self.interner.intern(&desc)), + }); + + id + } + + /// Adds a table column to the definition. 
+ pub fn push_table_column( + &mut self, + column: TableColumn, + definition_id: Option, + ) -> TableColumnId { + let id = self.next_table_column_id(); + + let string_id = self.interner.intern(column.database_name()); + self.indices.table_columns.insert((column.table_id(), string_id), id); + + if column.database_name() != column.client_name() { + let string_id = self.interner.intern(column.client_name()); + self.indices.table_fields.insert((column.table_id(), string_id), id); + + let string_id = self.interner.intern(column.client_name()); + self.indices.table_columns.insert((column.table_id(), string_id), id); + } + + if let Some(definition_id) = definition_id { + self.push_column_to_definition(definition_id, id); + } + + self.push_client_field_mapping(column.client_name(), column.table_id(), id); + + self.table_columns.push(TableColumn { + table_id: column.table_id(), + database_name: self.interner.intern(column.database_name()), + database_type: column.database_type(), + client_name: self.interner.intern(column.client_name()), + nullable: column.nullable, + has_default: column.has_default, + identity_generation: column.identity_generation, + description: column.description.map(|d| self.interner.intern(&d)), + }); + + id + } + + /// Associates a column with a field definition in the GraphQL schema. + pub fn push_column_to_definition(&mut self, definition_id: DefinitionId, column_id: TableColumnId) { + self.indices.field_definition_to_column.insert(definition_id, column_id); + } + + /// Adds an enum to the definition. 
+ pub fn push_enum(&mut self, r#enum: Enum) -> EnumId { + let id = self.next_enum_id(); + + let string_id = self.interner.intern(r#enum.database_name()); + self.indices.enums.insert((r#enum.schema_id(), string_id), id); + + if r#enum.database_name() != r#enum.client_name() { + let string_id = self.interner.intern(r#enum.client_name()); + self.indices.enums.insert((r#enum.schema_id(), string_id), id); + } + + self.enums.push(Enum { + schema_id: r#enum.schema_id(), + database_name: self.interner.intern(r#enum.database_name()), + client_name: self.interner.intern(r#enum.client_name()), + description: r#enum.description.map(|d| self.interner.intern(&d)), + }); + + id + } + + /// Adds an enum variant to the definition. + pub fn push_enum_variant(&mut self, enum_variant: EnumVariant) -> EnumVariantId { + let id = self.next_enum_variant_id(); + let string_id = self.interner.intern(enum_variant.database_name()); + + self.indices + .enum_variants + .insert((enum_variant.enum_id(), string_id), id); + + self.enum_variants.push(EnumVariant { + enum_id: enum_variant.enum_id(), + database_name: self.interner.intern(enum_variant.database_name()), + client_name: self.interner.intern(enum_variant.client_name()), + description: enum_variant.description.map(|d| self.interner.intern(&d)), + }); + + id + } + + /// Adds a foreign key to the definition. 
+ pub fn push_foreign_key( + &mut self, + foreign_key: ForeignKey, + ) -> (ForeignKeyId, ForwardRelationId, BackRelationId) { + let id = self.next_foreign_key_id(); + + let (forward, back) = self.relations.push_relation(&foreign_key, id); + let string_id = self.interner.intern(foreign_key.constraint_name()); + + self.indices + .foreign_keys + .insert((foreign_key.schema_id(), string_id), id); + + self.foreign_keys.push(ForeignKey { + constraint_name: self.interner.intern(foreign_key.constraint_name()), + schema_id: foreign_key.schema_id(), + constrained_table_id: foreign_key.constrained_table_id(), + referenced_table_id: foreign_key.referenced_table_id(), + description: foreign_key.description.map(|d| self.interner.intern(&d)), + }); + + (id, forward, back) + } + + /// Adds a foreign key column to the definition. + pub fn push_foreign_key_column(&mut self, foreign_key_column: ForeignKeyColumn) -> ForeignKeyColumnId { + let id = self.next_foreign_key_column_id(); + self.foreign_key_columns.push(foreign_key_column); + + id + } + + /// Adds a unique constraint to the definition. + pub fn push_key(&mut self, key: Key) -> KeyId { + let id = self.next_key_id(); + let string_id = self.interner.intern(key.name()); + + self.indices.keys.insert((key.table_id(), string_id), id); + + self.keys.push(Key { + table_id: key.table_id(), + constraint_name: self.interner.intern(key.name()), + r#type: key.r#type, + }); + + id + } + + /// Adds a unique constraint column to the definition. + pub fn push_key_column(&mut self, key_column: KeyColumn) -> KeyColumnId { + let id = self.next_key_column_id(); + + self.key_columns.push(key_column); + + id + } + + /// Adds an index from client type name to table id. + pub fn push_client_type_mapping(&mut self, type_name: &str, table_id: TableId) { + let string_id = self.interner.intern(type_name); + self.indices.client_types.insert(string_id, table_id); + } + + /// Adds an index from client field name and table id to table column id. 
+ pub fn push_client_field_mapping(&mut self, field_name: &str, table_id: TableId, column_id: TableColumnId) { + let string_id = self.interner.intern(field_name); + self.indices.client_fields.insert((table_id, string_id), column_id); + } + + /// Adds an index from client field name and table id to unique constraint id. + pub fn push_client_field_key_mapping(&mut self, field_name: &str, table_id: TableId, constraint_id: KeyId) { + let string_id = self.interner.intern(field_name); + + self.indices + .client_unique_constraints + .insert((table_id, string_id), constraint_id); + } + + /// Adds an index from a field definition ID to its name. + pub fn push_client_definition_to_name(&mut self, field_name: &str, definition_id: DefinitionId) { + let string_id = self.interner.intern(field_name); + self.indices.definition_to_field_name.insert(definition_id, string_id); + } + + /// Adds an index from a field definition ID to its return type definition ID. + pub fn push_field_definition_to_type_definition( + &mut self, + field_definition_id: DefinitionId, + return_type_definition_id: DefinitionId, + ) { + self.indices + .field_definition_to_return_type_definition_id + .insert(field_definition_id, return_type_definition_id); + } + + /// Adds an index from a field definition ID to a relation id. + pub fn push_client_id_relation_mapping(&mut self, field_definition_id: DefinitionId, relation_id: RelationId) { + self.indices.client_relations.insert(field_definition_id, relation_id); + } + + /// Adds an index from a table id and a client field name to a relation id. + pub fn push_client_name_relation_mapping(&mut self, table_id: TableId, name: &str, relation_id: RelationId) { + let string_id = self.interner.intern(name); + self.indices + .client_name_relations + .insert((table_id, string_id), relation_id); + } + + /// Gets the client field name for a field definition ID. 
+ pub fn get_name_for_field_definition(&self, field_definition_id: DefinitionId) -> Option<&str> { + self.indices + .definition_to_field_name + .get(&field_definition_id) + .map(|id| self.interner.get(*id)) + } + + /// Gets the relation ID for a client field. + pub fn get_relation_id_for_client_field_id(&self, definition_id: DefinitionId) -> Option { + self.indices.client_relations.get(&definition_id).copied() + } + + /// Gets the relation walker for a given client field name and table ID. + pub fn get_relation_for_client_name(&self, table_id: TableId, field_name: &str) -> Option> { + self.interner + .lookup(field_name) + .and_then(|string_id| self.indices.client_name_relations.get(&(table_id, string_id))) + .copied() + .map(|id| self.walk(id)) + } + + /// Finds the id of a schema with the given name, if existing. + pub fn get_schema_id(&self, schema: &str) -> Option { + self.schemas + .binary_search_by(|schema_name| schema_name.as_str().cmp(schema)) + .ok() + .map(|position| SchemaId(position as u32)) + } + + /// Finds the id of a table with the given name, if existing. + pub fn get_table_id(&self, schema_id: SchemaId, table_name: &str) -> Option { + self.interner + .lookup(table_name) + .and_then(|string_id| self.indices.tables.get(&(schema_id, string_id))) + .copied() + } + + /// Finds the id of a column in a table with the given name, if existing. + pub fn get_table_column_id(&self, table_id: TableId, column_name: &str) -> Option { + self.interner + .lookup(column_name) + .and_then(|string_id| self.indices.table_columns.get(&(table_id, string_id))) + .copied() + } + + /// Finds the id of a column in a table by its client field name, if existing. + pub fn get_table_column_id_for_field(&self, table_id: TableId, field_name: &str) -> Option { + self.interner + .lookup(field_name) + .and_then(|string_id| self.indices.client_fields.get(&(table_id, string_id))) + .copied() + } + + /// Finds the column walker for a given client field name and table ID. 
+ pub fn find_column_for_client_field(&self, field_name: &str, id: TableId) -> Option> { + self.get_table_column_id_for_field(id, field_name) + .map(|id| self.walk(id)) + } + + /// Finds the id of an enum with the given name, if existing. + pub fn get_enum_id(&self, schema_id: SchemaId, enum_name: &str) -> Option { + self.interner + .lookup(enum_name) + .and_then(|string_id| self.indices.enums.get(&(schema_id, string_id))) + .copied() + } + + /// Finds the id of an enum with the given name, if existing. + pub fn get_foreign_key_id(&self, schema_id: SchemaId, constraint_name: &str) -> Option { + self.interner + .lookup(constraint_name) + .and_then(|string_id| self.indices.foreign_keys.get(&(schema_id, string_id))) + .copied() + } + + /// Finds the id of a unique constraint with the given name, if existing. + pub fn get_key_id(&self, table_id: TableId, constraint_name: &str) -> Option { + self.interner + .lookup(constraint_name) + .and_then(|string_id| self.indices.keys.get(&(table_id, string_id))) + .copied() + } + + /// Finalizes the definition. Handles name deduplication, and sorts the internal data structures + /// accordingly. + pub fn finalize(&mut self) { + self.deduplicate_names(); + + self.relations.from.sort_by_key(|(table_id, _)| *table_id); + self.relations.to.sort_by_key(|(table_id, _)| *table_id); + } + + /// Walk an item in the definition by its ID. + pub fn walk(&self, id: Id) -> Walker<'_, Id> { + Walker { + id, + database_definition: self, + } + } + + /// Tables and enums are namespaced per schema in PostgreSQL, but in GraphQL all schemas are in the same namespace. + /// + /// If a table or enum has a duplicate name in different schemas, we'll prefix the name with the name of the schema. 
+ fn deduplicate_names(&mut self) { + let mut names = HashMap::new(); + + for table in &self.tables { + let counter = names.entry(table.client_name()).or_default(); + *counter += 1; + } + + for table in &mut self.tables { + if names.get(&table.client_name()).copied().unwrap_or(0) < 2 { + continue; + } + + let schema_name = &self.schemas[table.schema_id().0 as usize]; + let client_name = self.interner.get(table.client_name()); + + let new_client_name = format!("{schema_name}_{client_name}").to_pascal_case(); + let client_name = self.interner.intern(&new_client_name); + + let new_client_field_name = self.interner.intern(&new_client_name.to_camel_case()); + let new_client_field_name_plural = self.interner.intern(&new_client_name.to_camel_case().to_plural()); + + table.set_client_name(client_name); + table.set_client_field_name(new_client_field_name); + table.set_client_field_name_plural(new_client_field_name_plural); + } + + names.clear(); + + for r#enum in &self.enums { + let counter = names.entry(r#enum.client_name()).or_default(); + *counter += 1; + } + + for r#enum in &mut self.enums { + if names.get(&r#enum.client_name()).copied().unwrap_or(0) < 2 { + continue; + } + + let schema_name = &self.schemas[r#enum.schema_id().0 as usize]; + let client_name = self.interner.get(r#enum.client_name()); + + let client_name = self + .interner + .intern(&format!("{schema_name}_{client_name}").to_pascal_case()); + + r#enum.set_client_name(client_name); + } + } + + fn next_schema_id(&self) -> SchemaId { + SchemaId(self.schemas.len() as u32) + } + + fn next_table_id(&self) -> TableId { + TableId(self.tables.len() as u32) + } + + fn next_table_column_id(&self) -> TableColumnId { + TableColumnId(self.table_columns.len() as u32) + } + + fn next_enum_id(&self) -> EnumId { + EnumId(self.enums.len() as u32) + } + + fn next_enum_variant_id(&self) -> EnumVariantId { + EnumVariantId(self.enum_variants.len() as u32) + } + + fn next_foreign_key_id(&self) -> ForeignKeyId { + 
ForeignKeyId(self.foreign_keys.len() as u32) + } + + fn next_foreign_key_column_id(&self) -> ForeignKeyColumnId { + ForeignKeyColumnId(self.foreign_key_columns.len() as u32) + } + + fn next_key_id(&self) -> KeyId { + KeyId(self.keys.len() as u32) + } + + fn next_key_column_id(&self) -> KeyColumnId { + KeyColumnId(self.key_columns.len() as u32) + } +} diff --git a/crates/database-definition/src/relations.rs b/crates/database-definition/src/relations.rs new file mode 100644 index 00000000..7203a055 --- /dev/null +++ b/crates/database-definition/src/relations.rs @@ -0,0 +1,25 @@ +use super::{BackRelationId, ForeignKey, ForeignKeyId, ForwardRelationId, TableId}; + +#[derive(Debug, Default, Clone)] +pub(super) struct Relations { + /// Ordered by table id + pub(super) from: Vec<(TableId, ForeignKeyId)>, + /// Ordered by table id + pub(super) to: Vec<(TableId, ForeignKeyId)>, +} + +impl Relations { + pub(super) fn push_relation( + &mut self, + foreign_key: &ForeignKey, + id: ForeignKeyId, + ) -> (ForwardRelationId, BackRelationId) { + let forward = ForwardRelationId(self.from.len() as u32); + let back = BackRelationId(self.to.len() as u32); + + self.from.push((foreign_key.constrained_table_id(), id)); + self.to.push((foreign_key.referenced_table_id(), id)); + + (forward, back) + } +} diff --git a/crates/database-definition/src/table.rs b/crates/database-definition/src/table.rs new file mode 100644 index 00000000..d6ad99ca --- /dev/null +++ b/crates/database-definition/src/table.rs @@ -0,0 +1,92 @@ +use inflector::Inflector; + +use super::{SchemaId, StringId}; + +#[derive(Debug, Clone)] +pub struct Table { + pub(super) schema_id: SchemaId, + pub(super) database_name: T, + pub(super) client_name: T, + pub(super) client_field_name: T, + pub(super) client_field_name_plural: T, + pub(super) description: Option, +} + +impl Copy for Table where T: Copy {} + +impl Table { + pub(crate) fn schema_id(&self) -> SchemaId { + self.schema_id + } + + pub(super) fn set_client_name(&mut 
self, client_name: T) { + self.client_name = client_name; + } + + pub(super) fn set_client_field_name(&mut self, client_field_name: T) { + self.client_field_name = client_field_name; + } + + pub(super) fn set_client_field_name_plural(&mut self, client_field_name_plural: T) { + self.client_field_name_plural = client_field_name_plural; + } + + pub fn set_description(&mut self, description: T) { + self.description = Some(description); + } +} + +impl Table { + pub fn new(schema_id: SchemaId, database_name: String, client_name: Option) -> Self { + let client_name = client_name.unwrap_or_else(|| database_name.to_pascal_case().to_singular()); + let client_field_name = client_name.to_camel_case(); + let client_field_name_plural = client_field_name.to_plural(); + + Self { + schema_id, + database_name, + client_name, + client_field_name, + client_field_name_plural, + description: None, + } + } + + pub(crate) fn database_name(&self) -> &str { + &self.database_name + } + + pub(crate) fn client_name(&self) -> &str { + &self.client_name + } + + pub(crate) fn client_field_name(&self) -> &str { + &self.client_field_name + } + + pub(crate) fn client_field_name_plural(&self) -> &str { + &self.client_field_name_plural + } +} + +impl Table { + pub(crate) fn database_name(&self) -> StringId { + self.database_name + } + + pub(crate) fn client_name(&self) -> StringId { + self.client_name + } + + pub(crate) fn client_field_name(&self) -> StringId { + self.client_field_name + } + + pub(crate) fn client_field_name_plural(&self) -> StringId { + self.client_field_name_plural + } + + pub(crate) fn description(&self) -> Option { + self.description + } +} diff --git a/crates/database-definition/src/table_column.rs b/crates/database-definition/src/table_column.rs new file mode 100644 index 00000000..c5c641e4 --- /dev/null +++ b/crates/database-definition/src/table_column.rs @@ -0,0 +1,116 @@ +use std::str::FromStr; + +use grafbase_sdk::SdkError; +use inflector::Inflector; + +use 
super::{ColumnType, StringId, TableId}; + +#[derive(Debug, Clone, Copy)] +pub enum IdentityGeneration { + /// Cannot insert a custom value to the column, always generated. + Always, + /// Can optionally insert a custom value to the column, by default generated. + ByDefault, +} + +impl FromStr for IdentityGeneration { + type Err = SdkError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "always" => Ok(IdentityGeneration::Always), + "by default" => Ok(IdentityGeneration::ByDefault), + _ => Err(SdkError::from(format!("Invalid identity generation: {}", s))), + } + } +} + +#[derive(Debug, Clone)] +pub struct TableColumn { + pub(super) table_id: TableId, + pub(super) database_name: T, + pub(super) database_type: ColumnType, + pub(super) client_name: T, + pub(super) nullable: bool, + pub(super) has_default: bool, + pub(super) identity_generation: Option, + pub(super) description: Option, +} + +impl TableColumn { + pub(crate) fn database_type(&self) -> ColumnType { + self.database_type + } + + pub(crate) fn table_id(&self) -> TableId { + self.table_id + } + + pub(crate) fn is_array(&self) -> bool { + self.database_type.is_array() + } + + pub fn set_nullable(&mut self, value: bool) { + self.nullable = value; + } + + pub fn set_has_default(&mut self, value: bool) { + self.has_default = value; + } + + pub fn identity_generation(&self) -> Option { + self.identity_generation + } + + pub fn set_identity_generation(&mut self, value: IdentityGeneration) { + self.identity_generation = Some(value); + } + + pub fn set_description(&mut self, value: T) { + self.description = Some(value); + } +} + +impl TableColumn { + pub fn new( + table_id: TableId, + database_type: ColumnType, + database_name: String, + client_name: Option, + ) -> Self { + let client_name = client_name.unwrap_or_else(|| database_name.to_camel_case()); + + Self { + table_id, + database_name, + database_type, + client_name, + nullable: false, + has_default: false, + identity_generation: 
None, + description: None, + } + } + + pub(crate) fn database_name(&self) -> &str { + &self.database_name + } + + pub(crate) fn client_name(&self) -> &str { + &self.client_name + } +} + +impl TableColumn { + pub(crate) fn database_name(&self) -> StringId { + self.database_name + } + + pub(crate) fn client_name(&self) -> StringId { + self.client_name + } + + pub fn description(&self) -> Option { + self.description + } +} diff --git a/crates/database-definition/src/type.rs b/crates/database-definition/src/type.rs new file mode 100644 index 00000000..5e194265 --- /dev/null +++ b/crates/database-definition/src/type.rs @@ -0,0 +1,621 @@ +use std::fmt::Debug; + +use grafbase_sdk::host_io::postgres::types::PgType; + +use super::{EnumId, EnumWalker}; + +#[derive(Clone, Copy, PartialEq)] +pub enum DatabaseType<'a> { + Scalar(ScalarType), + Enum(EnumWalker<'a>), +} + +impl Debug for DatabaseType<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DatabaseType::Scalar(scalar) => scalar.fmt(f), + DatabaseType::Enum(_) => f.debug_struct("EnumWalker").finish(), + } + } +} + +impl<'a> DatabaseType<'a> { + pub fn is_enum(self) -> bool { + matches!(self, DatabaseType::Enum(_)) + } + + pub fn is_binary(&self) -> bool { + matches!(self, DatabaseType::Scalar(scalar) if scalar.is_binary()) + } + + pub fn is_json(&self) -> bool { + matches!(self, DatabaseType::Scalar(scalar) if scalar.is_json()) + } + + pub fn is_jsonb(&self) -> bool { + matches!(self, DatabaseType::Scalar(scalar) if scalar.is_jsonb()) + } + + pub fn from_db_to_client_cast(&self) -> Option<&'static str> { + match self { + DatabaseType::Scalar(scalar) => scalar.from_db_to_client_cast(), + DatabaseType::Enum(_) => None, + } + } + + pub fn enum_database_name(&'a self) -> Option<&'a str> { + match self { + DatabaseType::Scalar(_) => None, + DatabaseType::Enum(enum_type) => Some(enum_type.database_name()), + } + } + + pub fn as_str(&self) -> &'static str { + match self { + 
DatabaseType::Scalar(scalar) => match scalar.kind { + ScalarKind::Smallint => "SMALLINT", + ScalarKind::Integer => "INTEGER", + ScalarKind::Int => "INT", + ScalarKind::Bigint => "BIGINT", + ScalarKind::Decimal => "DECIMAL", + ScalarKind::Numeric => "NUMERIC", + ScalarKind::Real => "REAL", + ScalarKind::DoublePrecision => "DOUBLE_PRECISION", + ScalarKind::Smallserial => "SMALLSERIAL", + ScalarKind::Serial => "SERIAL", + ScalarKind::Bigserial => "BIGSERIAL", + ScalarKind::Varchar => "VARCHAR", + ScalarKind::Char => "CHAR", + ScalarKind::Text => "TEXT", + ScalarKind::Bytea => "BYTEA", + ScalarKind::Timestamp => "TIMESTAMP", + ScalarKind::Timestamptz => "TIMESTAMPTZ", + ScalarKind::Date => "DATE", + ScalarKind::Time => "TIME", + ScalarKind::Timetz => "TIMETZ", + ScalarKind::Interval => "INTERVAL", + ScalarKind::Boolean => "BOOLEAN", + ScalarKind::Enum => "ENUM", + ScalarKind::Point => "POINT", + ScalarKind::Line => "LINE", + ScalarKind::Lseg => "LSEG", + ScalarKind::Box => "BOX", + ScalarKind::Path => "PATH", + ScalarKind::Polygon => "POLYGON", + ScalarKind::Circle => "CIRCLE", + ScalarKind::Cidr => "CIDR", + ScalarKind::Inet => "INET", + ScalarKind::Macaddr => "MACADDR", + ScalarKind::Macaddr8 => "MACADDR8", + ScalarKind::Bit => "BIT", + ScalarKind::BitVarying => "BIT VARYING", + ScalarKind::Varbit => "VARBIT", + ScalarKind::Tsvector => "TSVECTOR", + ScalarKind::Tsquery => "TSQUERY", + ScalarKind::Uuid => "UUID", + ScalarKind::Xml => "XML", + ScalarKind::Json => "JSON", + ScalarKind::Jsonb => "JSONB", + ScalarKind::Array => "ARRAY", + ScalarKind::Composite => "COMPOSITE", + ScalarKind::Int4range => "INT4RANGE", + ScalarKind::Int8range => "INT8RANGE", + ScalarKind::Numrange => "NUMRANGE", + ScalarKind::Tsrange => "TSRANGE", + ScalarKind::Tstzrange => "TSTZRANGE", + ScalarKind::Daterange => "DATERANGE", + ScalarKind::Domain => "DOMAIN", + ScalarKind::Oid => "OID", + ScalarKind::Regproc => "REGPROC", + ScalarKind::Regprocedure => "REGPROCEDURE", + ScalarKind::Regoper => 
"REGOPER", + ScalarKind::Regoperator => "REGOPERATOR", + ScalarKind::Regclass => "REGCLASS", + ScalarKind::Regtype => "REGTYPE", + ScalarKind::Regrole => "REGROLE", + ScalarKind::Regnamespace => "REGNAMESPACE", + ScalarKind::Regconfig => "REGCONFIG", + ScalarKind::Regdictionary => "REGDICTIONARY", + ScalarKind::PgLsn => "PG_LSN", + ScalarKind::Money => "MONEY", + }, + DatabaseType::Enum(_) => "ENUM", + } + } +} + +impl From> for PgType { + fn from(value: DatabaseType<'_>) -> Self { + match value { + DatabaseType::Scalar(scalar) => match scalar.kind { + ScalarKind::Smallint => PgType::Int16, + ScalarKind::Integer => PgType::Int32, + ScalarKind::Int => PgType::Int32, + ScalarKind::Bigint => PgType::Int64, + ScalarKind::Decimal => PgType::Decimal, + ScalarKind::Numeric => PgType::Numeric, + ScalarKind::Real => PgType::Float32, + ScalarKind::DoublePrecision => PgType::Float64, + ScalarKind::Smallserial => PgType::Int16, + ScalarKind::Serial => PgType::Int32, + ScalarKind::Bigserial => PgType::Int64, + ScalarKind::Varchar => PgType::String, + ScalarKind::Char => PgType::String, + ScalarKind::Text => PgType::String, + ScalarKind::Bytea => PgType::Bytes, + ScalarKind::Timestamp => PgType::Timestamp, + ScalarKind::Timestamptz => PgType::Timestamptz, + ScalarKind::Date => PgType::Date, + ScalarKind::Time => PgType::Time, + ScalarKind::Timetz => PgType::Timetz, + ScalarKind::Interval => PgType::Interval, + ScalarKind::Boolean => PgType::Boolean, + ScalarKind::Enum => PgType::String, + ScalarKind::Point => PgType::Point, + ScalarKind::Line => PgType::String, + ScalarKind::Lseg => PgType::String, + ScalarKind::Box => PgType::String, + ScalarKind::Path => PgType::String, + ScalarKind::Polygon => PgType::String, + ScalarKind::Circle => PgType::String, + ScalarKind::Cidr => PgType::Cidr, + ScalarKind::Inet => PgType::Inet, + ScalarKind::Macaddr => PgType::Macaddr, + ScalarKind::Macaddr8 => PgType::Macaddr8, + ScalarKind::Bit => PgType::Bit, + ScalarKind::BitVarying => 
PgType::Varbit, + ScalarKind::Varbit => PgType::Varbit, + ScalarKind::Tsvector => PgType::String, + ScalarKind::Tsquery => PgType::String, + ScalarKind::Uuid => PgType::Uuid, + ScalarKind::Xml => PgType::Xml, + ScalarKind::Json => PgType::Json, + ScalarKind::Jsonb => PgType::Jsonb, + ScalarKind::Array => PgType::String, + ScalarKind::Composite => PgType::String, + ScalarKind::Int4range => PgType::String, + ScalarKind::Int8range => PgType::String, + ScalarKind::Numrange => PgType::String, + ScalarKind::Tsrange => PgType::String, + ScalarKind::Tstzrange => PgType::String, + ScalarKind::Daterange => PgType::String, + ScalarKind::Domain => PgType::String, + ScalarKind::Oid => PgType::Oid, + ScalarKind::Regproc => PgType::String, + ScalarKind::Regprocedure => PgType::String, + ScalarKind::Regoper => PgType::String, + ScalarKind::Regoperator => PgType::String, + ScalarKind::Regclass => PgType::String, + ScalarKind::Regtype => PgType::String, + ScalarKind::Regrole => PgType::String, + ScalarKind::Regnamespace => PgType::String, + ScalarKind::Regconfig => PgType::String, + ScalarKind::Regdictionary => PgType::String, + ScalarKind::PgLsn => PgType::String, + ScalarKind::Money => PgType::Money, + }, + DatabaseType::Enum(_) => PgType::String, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum ColumnType { + Scalar(ScalarType), + Enum(EnumType), +} + +impl ColumnType { + pub fn is_array(self) -> bool { + match self { + ColumnType::Scalar(scalar_type) => scalar_type.is_array, + ColumnType::Enum(r#enum) => r#enum.is_array, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct EnumType { + pub id: EnumId, + pub is_array: bool, +} + +impl EnumType { + pub fn new(id: EnumId, is_array: bool) -> Self { + EnumType { id, is_array } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct ScalarType { + pub kind: ScalarKind, + pub is_array: bool, +} + +impl ScalarType { + pub fn new(kind: ScalarKind, is_array: bool) -> Self { + ScalarType { kind, 
is_array } + } +} + +impl ScalarType { + pub fn is_binary(self) -> bool { + matches!(self.kind, ScalarKind::Bytea) + } + + pub fn is_json(&self) -> bool { + matches!(self.kind, ScalarKind::Json) + } + + pub fn is_jsonb(&self) -> bool { + matches!(self.kind, ScalarKind::Jsonb) + } + + pub fn client_type(self) -> Option<&'static str> { + use ScalarKind::*; + + let type_name = match self.kind { + Char | Text | Xml | Cidr | Macaddr8 | Macaddr | Varchar | Bit | Varbit => "String", + Date | Inet | Time | Timetz => "String", + Uuid | Oid => "String", + Bigint | Bigserial => "BigInt", + Timestamp => "String", + Timestamptz => "String", + Interval => "String", + Decimal | Numeric | Money => "Decimal", + + Smallserial | Serial | Smallint | Int | Integer => "Int", + Json | Jsonb => "JSON", + Real | DoublePrecision => "Float", + Boolean => "Boolean", + Bytea => "Bytes", + + _ => return None, + }; + + Some(type_name) + } + + /// Returns the PostgreSQL type name for explicit casting when reading from the database, if necessary. + /// + /// Some PostgreSQL types (like `Int64`) might need to be cast to a string representation (`TEXT`) + /// when being fetched from the database to ensure compatibility with client-side languages + /// (e.g., JavaScript, which has limitations with full 64-bit integers). + /// This method provides the target type name for such casts. + /// + /// # Returns + /// + /// - `Some(&'static str)` containing the target type name string (e.g., "TEXT") if casting is needed. + /// - `None` if the type does not require explicit casting when reading. 
+ pub fn from_db_to_client_cast(&self) -> Option<&'static str> { + match self.kind { + ScalarKind::Bigint | ScalarKind::Oid | ScalarKind::Numeric | ScalarKind::Decimal if self.is_array => { + Some("TEXT[]") + } + ScalarKind::Bigint | ScalarKind::Oid | ScalarKind::Numeric | ScalarKind::Decimal => Some("TEXT"), + _ => None, + } + } +} + +/// Postgres data types supported for column definitions +#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ScalarKind { + /// 2-byte signed integer, range: -32768 to +32767 + Smallint, + /// 4-byte signed integer, range: -2147483648 to +2147483647 + Integer, + /// Alias for INTEGER + Int, + /// 8-byte signed integer, range: -9223372036854775808 to +9223372036854775807 + Bigint, + /// Exact numeric with selectable precision + Decimal, + /// Alias for DECIMAL + Numeric, + /// 4-byte floating-point number + Real, + /// 8-byte floating-point number + DoublePrecision, + /// 2-byte autoincrementing integer + Smallserial, + /// 4-byte autoincrementing integer + Serial, + /// 8-byte autoincrementing integer + Bigserial, + /// Variable-length character string with limit + Varchar, + /// Fixed-length character string, blank padded + Char, + /// Variable unlimited length character string + Text, + /// Binary data ("byte array") + Bytea, + /// Date and time (without time zone) + Timestamp, + /// Date and time with time zone + Timestamptz, + /// Calendar date (year, month, day) + Date, + /// Time of day (without time zone) + Time, + /// Time of day with time zone + Timetz, + /// Time interval + Interval, + /// Logical Boolean (true/false) + Boolean, + /// User-defined enumerated type + Enum, + /// Geometric point on a plane + Point, + /// Infinite geometric line + Line, + /// Geometric line segment + Lseg, + /// Rectangular geometric box + Box, + /// Geometric path + Path, + /// Geometric polygon + Polygon, + /// Geometric circle + Circle, + /// IPv4 or IPv6 network address 
+ Cidr, + /// IPv4 or IPv6 host address + Inet, + /// MAC address (6 bytes) + Macaddr, + /// MAC address (8 bytes, EUI-64 format) + Macaddr8, + /// Fixed-length bit string + Bit, + /// Variable-length bit string + BitVarying, + /// Alias for BIT_VARYING + Varbit, + /// Text search document + Tsvector, + /// Text search query + Tsquery, + /// Universally unique identifier + Uuid, + /// XML data + Xml, + /// Textual JSON data + Json, + /// Binary JSON data, decomposed + Jsonb, + /// Array of data type + Array, + /// User-defined composite type + Composite, + /// Range of integers (4-byte) + Int4range, + /// Range of integers (8-byte) + Int8range, + /// Range of numeric values + Numrange, + /// Range of timestamp without time zone + Tsrange, + /// Range of timestamp with time zone + Tstzrange, + /// Range of dates + Daterange, + /// User-defined domain type + Domain, + /// Object identifier + Oid, + /// Function name + Regproc, + /// Function with argument types + Regprocedure, + /// Operator name + Regoper, + /// Operator with argument types + Regoperator, + /// Relation name + Regclass, + /// Data type name + Regtype, + /// Role name + Regrole, + /// Schema name + Regnamespace, + /// Text search configuration + Regconfig, + /// Text search dictionary + Regdictionary, + /// Postgres Log Sequence Number + PgLsn, + /// Currency amount + Money, +} + +impl From for ScalarType { + fn from(value: u32) -> Self { + let (kind, is_array) = match value { + 16 => (ScalarKind::Boolean, false), + 17 => (ScalarKind::Bytea, false), + 18 => (ScalarKind::Char, false), + 19 => (ScalarKind::Varchar, false), + 20 => (ScalarKind::Bigint, false), + 21 => (ScalarKind::Smallint, false), + 22 => (ScalarKind::Smallint, true), + 23 => (ScalarKind::Int, false), + 25 => (ScalarKind::Text, false), + 26 => (ScalarKind::Oid, false), + 30 => (ScalarKind::Oid, true), + 114 => (ScalarKind::Json, false), + 142 => (ScalarKind::Xml, false), + 143 => (ScalarKind::Xml, true), + 199 => (ScalarKind::Json, 
true), + 600 => (ScalarKind::Point, false), + 601 => (ScalarKind::Lseg, false), + 602 => (ScalarKind::Path, false), + 603 => (ScalarKind::Box, false), + 604 => (ScalarKind::Polygon, false), + 628 => (ScalarKind::Line, false), + 629 => (ScalarKind::Line, true), + 650 => (ScalarKind::Cidr, false), + 651 => (ScalarKind::Cidr, true), + 700 => (ScalarKind::Real, false), + 701 => (ScalarKind::DoublePrecision, false), + 705 => (ScalarKind::Text, false), // Unknown type, default to Text + 718 => (ScalarKind::Circle, false), + 719 => (ScalarKind::Circle, true), + 774 => (ScalarKind::Macaddr8, false), + 775 => (ScalarKind::Macaddr8, true), + 790 => (ScalarKind::Money, false), // Money + 791 => (ScalarKind::Money, true), // Money array + 829 => (ScalarKind::Macaddr, false), + 869 => (ScalarKind::Inet, false), + 1000 => (ScalarKind::Boolean, true), + 1001 => (ScalarKind::Bytea, true), + 1002 => (ScalarKind::Char, true), + 1003 => (ScalarKind::Text, true), // Name array + 1005 => (ScalarKind::Smallint, true), // Int2 array + 1006 => (ScalarKind::Smallint, true), // Int2Vector array + 1007 => (ScalarKind::Int, true), // Int4 array + 1008 => (ScalarKind::Regproc, true), + 1009 => (ScalarKind::Text, true), + 1010 => (ScalarKind::Oid, true), // Tid array + 1011 => (ScalarKind::Oid, true), // Xid array + 1012 => (ScalarKind::Oid, true), // Cid array + 1013 => (ScalarKind::Oid, true), // OidVector array + 1014 => (ScalarKind::Char, true), // Bpchar array + 1015 => (ScalarKind::Varchar, true), + 1016 => (ScalarKind::Bigint, true), // Int8 array + 1017 => (ScalarKind::Point, true), + 1018 => (ScalarKind::Lseg, true), + 1019 => (ScalarKind::Path, true), + 1020 => (ScalarKind::Box, true), + 1021 => (ScalarKind::Real, true), // Float4 array + 1022 => (ScalarKind::DoublePrecision, true), // Float8 array + 1027 => (ScalarKind::Polygon, true), + 1028 => (ScalarKind::Oid, true), + 1033 => (ScalarKind::Oid, false), // Aclitem + 1034 => (ScalarKind::Oid, true), // Aclitem array + 1040 => 
(ScalarKind::Macaddr, true), + 1041 => (ScalarKind::Inet, true), + 1042 => (ScalarKind::Char, false), // Bpchar + 1043 => (ScalarKind::Varchar, false), + 1082 => (ScalarKind::Date, false), + 1083 => (ScalarKind::Time, false), + 1114 => (ScalarKind::Timestamp, false), + 1115 => (ScalarKind::Timestamp, true), + 1182 => (ScalarKind::Date, true), + 1183 => (ScalarKind::Time, true), + 1184 => (ScalarKind::Timestamptz, false), + 1185 => (ScalarKind::Timestamptz, true), + 1186 => (ScalarKind::Interval, false), + 1187 => (ScalarKind::Interval, true), + 1231 => (ScalarKind::Numeric, true), + 1263 => (ScalarKind::Text, true), // Cstring array + 1266 => (ScalarKind::Timetz, false), + 1270 => (ScalarKind::Timetz, true), + 1560 => (ScalarKind::Bit, false), + 1561 => (ScalarKind::Bit, true), + 1562 => (ScalarKind::Varbit, false), + 1563 => (ScalarKind::Varbit, true), + 1700 => (ScalarKind::Numeric, false), + 1790 => (ScalarKind::Text, false), // Refcursor + 2201 => (ScalarKind::Text, true), // Refcursor array + 2202 => (ScalarKind::Regprocedure, false), + 2203 => (ScalarKind::Regoper, false), + 2204 => (ScalarKind::Regoperator, false), + 2205 => (ScalarKind::Regclass, false), + 2206 => (ScalarKind::Regtype, false), + 2207 => (ScalarKind::Regprocedure, true), + 2208 => (ScalarKind::Regoper, true), + 2209 => (ScalarKind::Regoperator, true), + 2210 => (ScalarKind::Regclass, true), + 2211 => (ScalarKind::Regtype, true), + 2249 => (ScalarKind::Text, false), // Record + 2275 => (ScalarKind::Text, false), // Cstring + 2276 => (ScalarKind::Text, false), // Any + 2277 => (ScalarKind::Array, false), // Anyarray + 2278 => (ScalarKind::Text, false), // Void + 2279 => (ScalarKind::Text, false), // Trigger + 2280 => (ScalarKind::Text, false), // Language handler + 2281 => (ScalarKind::Text, false), // Internal + 2283 => (ScalarKind::Text, false), // Anyelement + 2287 => (ScalarKind::Text, true), // Record array + 2776 => (ScalarKind::Text, false), // Anynonarray + 2949 => (ScalarKind::Text, 
true), // TxidSnapshot array + 2950 => (ScalarKind::Uuid, false), + 2951 => (ScalarKind::Uuid, true), + 2970 => (ScalarKind::Text, false), // TxidSnapshot + 3115 => (ScalarKind::Text, false), // FdwHandler + 3220 => (ScalarKind::PgLsn, false), + 3221 => (ScalarKind::PgLsn, true), + 3310 => (ScalarKind::Text, false), // TsmHandler + 3361 => (ScalarKind::Text, false), // PgNdistinct + 3402 => (ScalarKind::Text, false), // PgDependencies + 3500 => (ScalarKind::Enum, false), // Anyenum + 3614 => (ScalarKind::Tsvector, false), + 3615 => (ScalarKind::Tsquery, false), + 3642 => (ScalarKind::Tsvector, false), // GtsVector + 3643 => (ScalarKind::Tsvector, true), + 3644 => (ScalarKind::Tsvector, true), // GtsVector array + 3645 => (ScalarKind::Tsquery, true), + 3734 => (ScalarKind::Regconfig, false), + 3735 => (ScalarKind::Regconfig, true), + 3769 => (ScalarKind::Regdictionary, false), + 3770 => (ScalarKind::Regdictionary, true), + 3802 => (ScalarKind::Jsonb, false), + 3807 => (ScalarKind::Jsonb, true), + 3831 => (ScalarKind::Text, false), // AnyRange + 3838 => (ScalarKind::Text, false), // EventTrigger + 3904 => (ScalarKind::Int4range, false), + 3905 => (ScalarKind::Int4range, true), + 3906 => (ScalarKind::Numrange, false), + 3907 => (ScalarKind::Numrange, true), + 3908 => (ScalarKind::Tsrange, false), + 3909 => (ScalarKind::Tsrange, true), + 3910 => (ScalarKind::Tstzrange, false), + 3911 => (ScalarKind::Tstzrange, true), + 3912 => (ScalarKind::Daterange, false), + 3913 => (ScalarKind::Daterange, true), + 3926 => (ScalarKind::Int8range, false), + 3927 => (ScalarKind::Int8range, true), + 4072 => (ScalarKind::Text, false), // Jsonpath + 4073 => (ScalarKind::Text, true), // Jsonpath array + 4989 => (ScalarKind::Regnamespace, false), + 4090 => (ScalarKind::Regnamespace, true), + 4096 => (ScalarKind::Regrole, false), + 4097 => (ScalarKind::Regrole, true), + 4191 => (ScalarKind::Text, false), // Regcollation + 4192 => (ScalarKind::Text, true), // Regcollation array + 4451 => 
(ScalarKind::Text, false), // Int4multiRange + 4532 => (ScalarKind::Text, false), // NummultiRange + 4533 => (ScalarKind::Text, false), // TsmultiRange + 4534 => (ScalarKind::Text, false), // TstzmultiRange + 4535 => (ScalarKind::Text, false), // DatemultiRange + 4536 => (ScalarKind::Text, false), // Int8multiRange + 4537 => (ScalarKind::Text, false), // AnymultiRange + 4538 => (ScalarKind::Text, false), // AnycompatiblemultiRange + 4600 => (ScalarKind::Text, false), // PgBrinBloomSummary + 4601 => (ScalarKind::Text, false), // PgBrinMinmaxMultiSummary + 5017 => (ScalarKind::Text, false), // PgMcvList + 5038 => (ScalarKind::Text, false), // PgSnapshot + 5039 => (ScalarKind::Text, true), // PgSnapshot array + 5069 => (ScalarKind::Text, false), // Xid8 + 5077 => (ScalarKind::Text, false), // Anycompatible + 5078 => (ScalarKind::Text, false), // Anycompatiblearray + 5079 => (ScalarKind::Text, false), // Anycompatiblenonarray + 5080 => (ScalarKind::Text, false), // AnycompatibleRange + 6150 => (ScalarKind::Text, true), // Int4multiRange array + 6151 => (ScalarKind::Text, true), // NummultiRange array + 6152 => (ScalarKind::Text, true), // TsmultiRange array + 6153 => (ScalarKind::Text, true), // TstzmultiRange array + 6155 => (ScalarKind::Text, true), // DatemultiRange array + 6157 => (ScalarKind::Text, true), // Int8multiRange array + _ => (ScalarKind::Text, false), // Default to Text for unknown types + }; + + Self { kind, is_array } + } +} diff --git a/crates/database-definition/src/walkers.rs b/crates/database-definition/src/walkers.rs new file mode 100644 index 00000000..454cf77e --- /dev/null +++ b/crates/database-definition/src/walkers.rs @@ -0,0 +1,82 @@ +mod back_relation; +mod r#enum; +mod enum_variant; +mod foreign_key; +mod foreign_key_column; +mod forward_relation; +mod key; +mod key_column; +mod relation; +mod table; +mod table_column; + +use super::StringId; +use crate::DatabaseDefinition; + +pub(crate) use foreign_key::ForeignKeyWalker; +pub(crate) use 
foreign_key_column::ForeignKeyColumnWalker; + +pub use r#enum::EnumWalker; +pub use key::KeyWalker; +pub use relation::RelationWalker; +use std::ops::Range; +pub use table::TableWalker; +pub use table_column::TableColumnWalker; + +/// An abstraction to iterate over an introspected PostgreSQL database. +/// +/// The `Id` must be something that points to an object in the database. +#[derive(Clone, Copy)] +pub struct Walker<'a, Id> { + pub(super) id: Id, + pub(super) database_definition: &'a DatabaseDefinition, +} + +impl PartialEq for Walker<'_, Id> +where + Id: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl<'a, Id> Walker<'a, Id> +where + Id: Copy, +{ + pub fn new(id: Id, database_definition: &'a DatabaseDefinition) -> Self { + Self { + id, + database_definition, + } + } + + pub fn id(self) -> Id { + self.id + } + + fn walk(self, id: OtherId) -> Walker<'a, OtherId> { + self.database_definition.walk(id) + } + + fn get_name(self, id: StringId) -> &'a str { + self.database_definition.interner.get(id) + } +} + +/// For a slice sorted by a key K, return the contiguous range of items matching the key. 
+fn range_for_key(slice: &[I], key: K, extract: fn(&I) -> K) -> Range +where + K: Copy + Ord + PartialOrd + PartialEq, +{ + let seed = slice.binary_search_by_key(&key, extract).unwrap_or(0); + let mut iter = slice[..seed].iter(); + let start = match iter.rposition(|i| extract(i) != key) { + None => 0, + Some(other) => other + 1, + }; + let mut iter = slice[seed..].iter(); + let end = seed + iter.position(|i| extract(i) != key).unwrap_or(slice.len() - seed); + start..end +} diff --git a/crates/database-definition/src/walkers/back_relation.rs b/crates/database-definition/src/walkers/back_relation.rs new file mode 100644 index 00000000..b1d0a1bc --- /dev/null +++ b/crates/database-definition/src/walkers/back_relation.rs @@ -0,0 +1,36 @@ +use super::{ForeignKeyWalker, TableColumnWalker, TableWalker, Walker}; +use crate::{ForeignKeyId, TableId, ids::BackRelationId}; + +/// A relation from the referenced side of a foreign key. The constraint +/// is defined on the other side. +pub type BackRelationWalker<'a> = Walker<'a, BackRelationId>; + +impl<'a> BackRelationWalker<'a> { + /// The table this relation starts from, no foreign key on this table. + pub fn referencing_table(self) -> TableWalker<'a> { + self.foreign_key().referenced_table() + } + + /// The opposite table. For back-relations, the table with the foreign key. + pub fn referenced_table(self) -> TableWalker<'a> { + self.foreign_key().constrained_table() + } + + /// The columns on this table that are forming the constraint. + pub fn referencing_columns(self) -> impl ExactSizeIterator> { + self.foreign_key().columns().map(|column| column.referenced_column()) + } + + /// The columns on the other table that are forming the constraint. 
+ pub fn referenced_columns(self) -> impl ExactSizeIterator> { + self.foreign_key().columns().map(|column| column.constrained_column()) + } + + pub(super) fn foreign_key(self) -> ForeignKeyWalker<'a> { + self.walk(self.get().1) + } + + fn get(self) -> (TableId, ForeignKeyId) { + self.database_definition.relations.to[self.id.0 as usize] + } +} diff --git a/crates/database-definition/src/walkers/enum.rs b/crates/database-definition/src/walkers/enum.rs new file mode 100644 index 00000000..ec220588 --- /dev/null +++ b/crates/database-definition/src/walkers/enum.rs @@ -0,0 +1,47 @@ +use super::{Walker, enum_variant::EnumVariantWalker}; +use crate::{Enum, EnumId, EnumVariantId, StringId}; + +/// An enum definition in the database. +pub type EnumWalker<'a> = Walker<'a, EnumId>; + +impl<'a> EnumWalker<'a> { + /// The schema this enum belongs to. + pub fn schema(self) -> &'a str { + &self.database_definition.schemas[self.get().schema_id().0 as usize] + } + + /// The name of the enum in the database. + pub fn database_name(self) -> &'a str { + self.get_name(self.get().database_name()) + } + + /// The name of the enum in the GraphQL APIs. + pub fn client_name(self) -> &'a str { + self.get_name(self.get().client_name()) + } + + /// The variants that are part of the enum. + pub fn variants(self) -> impl ExactSizeIterator> + 'a { + let range = super::range_for_key(&self.database_definition.enum_variants, self.id, |variant| { + variant.enum_id() + }); + + range.map(move |id| self.walk(EnumVariantId(id as u32))) + } + + /// Find a variant with a given client name, returning the database name. + pub fn rename_variant(self, client_name: &str) -> Option<&'a str> { + self.variants() + .find(|variant| variant.client_name() == client_name) + .map(|variant| variant.database_name()) + } + + /// The description of the enum in the GraphQL APIs. 
+ pub fn description(self) -> Option<&'a str> { + self.get().description().map(|id| self.get_name(id)) + } + + fn get(self) -> &'a Enum { + &self.database_definition.enums[self.id.0 as usize] + } +} diff --git a/crates/database-definition/src/walkers/enum_variant.rs b/crates/database-definition/src/walkers/enum_variant.rs new file mode 100644 index 00000000..12e12fbd --- /dev/null +++ b/crates/database-definition/src/walkers/enum_variant.rs @@ -0,0 +1,31 @@ +use super::{Walker, r#enum::EnumWalker}; +use crate::{EnumVariant, EnumVariantId, StringId}; + +/// An enum variant definition. +pub type EnumVariantWalker<'a> = Walker<'a, EnumVariantId>; + +impl<'a> EnumVariantWalker<'a> { + /// The enum this variant belongs to. + pub fn r#enum(self) -> EnumWalker<'a> { + self.walk(self.get().enum_id()) + } + + /// The name of the variant in the database. + pub fn database_name(self) -> &'a str { + self.get_name(self.get().database_name()) + } + + /// The name of the variant in the GraphQL APIs. + pub fn client_name(self) -> &'a str { + self.get_name(self.get().client_name()) + } + + /// The description for the variant. 
    pub fn description(self) -> Option<&'a str> {
        self.get().description().map(|id| self.get_name(id))
    }

    // Fetch the raw EnumVariant record this walker points at.
    fn get(self) -> &'a EnumVariant {
        &self.database_definition.enum_variants[self.id.0 as usize]
    }
}
diff --git a/crates/database-definition/src/walkers/foreign_key.rs b/crates/database-definition/src/walkers/foreign_key.rs
new file mode 100644
index 00000000..677cf3bd
--- /dev/null
+++ b/crates/database-definition/src/walkers/foreign_key.rs
@@ -0,0 +1,38 @@
use super::{ForeignKeyColumnWalker, TableWalker, Walker};
use crate::{ForeignKey, ForeignKeyColumnId, ForeignKeyId, StringId};

/// A foreign key constraint, walkable from either side of the relation.
pub(crate) type ForeignKeyWalker<'a> = Walker<'a, ForeignKeyId>;

impl<'a> ForeignKeyWalker<'a> {
    /// The constraint name as defined in the database.
    pub fn name(self) -> &'a str {
        self.get_name(self.get().constraint_name())
    }

    /// The schema this constraint belongs to.
    pub fn schema(self) -> &'a str {
        &self.database_definition.schemas[self.get().schema_id().0 as usize]
    }

    /// The column pairs forming this constraint.
    pub fn columns(self) -> impl ExactSizeIterator<Item = ForeignKeyColumnWalker<'a>> {
        let range = super::range_for_key(&self.database_definition.foreign_key_columns, self.id, |column| {
            column.foreign_key_id()
        });

        range.map(move |id| self.walk(ForeignKeyColumnId(id as u32)))
    }

    /// The table the foreign key is defined on.
    pub fn constrained_table(self) -> TableWalker<'a> {
        self.walk(self.get().constrained_table_id())
    }

    /// The table the foreign key points to.
    pub fn referenced_table(self) -> TableWalker<'a> {
        self.walk(self.get().referenced_table_id())
    }

    /// The description of the constraint, if any.
    pub fn description(self) -> Option<&'a str> {
        self.get().description().map(|id| self.get_name(id))
    }

    // Fetch the raw ForeignKey record this walker points at.
    fn get(self) -> &'a ForeignKey {
        &self.database_definition.foreign_keys[self.id.0 as usize]
    }
}
diff --git a/crates/database-definition/src/walkers/foreign_key_column.rs b/crates/database-definition/src/walkers/foreign_key_column.rs
new file mode 100644
index 00000000..5129d9c4
--- /dev/null
+++ b/crates/database-definition/src/walkers/foreign_key_column.rs
@@ -0,0 +1,22 @@
use super::{Walker, foreign_key::ForeignKeyWalker, table_column::TableColumnWalker};
use crate::{ForeignKeyColumn,
ForeignKeyColumnId}; + +pub(crate) type ForeignKeyColumnWalker<'a> = Walker<'a, ForeignKeyColumnId>; + +impl<'a> ForeignKeyColumnWalker<'a> { + pub fn constraint(self) -> ForeignKeyWalker<'a> { + self.walk(self.get().foreign_key_id()) + } + + pub fn constrained_column(self) -> TableColumnWalker<'a> { + self.walk(self.get().constrained_column_id()) + } + + pub fn referenced_column(self) -> TableColumnWalker<'a> { + self.walk(self.get().referenced_column_id()) + } + + fn get(self) -> &'a ForeignKeyColumn { + &self.database_definition.foreign_key_columns[self.id.0 as usize] + } +} diff --git a/crates/database-definition/src/walkers/forward_relation.rs b/crates/database-definition/src/walkers/forward_relation.rs new file mode 100644 index 00000000..52ef6100 --- /dev/null +++ b/crates/database-definition/src/walkers/forward_relation.rs @@ -0,0 +1,42 @@ +use super::{ForeignKeyWalker, TableColumnWalker, TableWalker, Walker}; +use crate::{ForeignKeyId, TableId, ids::ForwardRelationId}; + +/// A relation from the side of a foreign key. Foreign key +/// is defined from this table. +pub type ForwardRelationWalker<'a> = Walker<'a, ForwardRelationId>; + +impl<'a> ForwardRelationWalker<'a> { + /// The table this relation starts from. For forward relations, the table with the foreign key. + pub fn referencing_table(self) -> TableWalker<'a> { + self.foreign_key().constrained_table() + } + + /// The opposite table, no foreign key on this table. + pub fn referenced_table(self) -> TableWalker<'a> { + self.foreign_key().referenced_table() + } + + /// The columns on this table that are forming the constraint. + pub fn referencing_columns(self) -> impl ExactSizeIterator> { + self.foreign_key().columns().map(|column| column.constrained_column()) + } + + /// The columns on the other table that are forming the constraint. 
+ pub fn referenced_columns(self) -> impl ExactSizeIterator> { + self.foreign_key().columns().map(|column| column.referenced_column()) + } + + pub(super) fn foreign_key(self) -> ForeignKeyWalker<'a> { + self.walk(self.get().1) + } + + /// True, if we use the referenced table in the client. E.g. it has at least one + /// column of supported type and one unique constraint. + pub fn referenced_table_is_allowed_in_client(self) -> bool { + self.referenced_table().allowed_in_client() + } + + fn get(self) -> (TableId, ForeignKeyId) { + self.database_definition.relations.from[self.id.0 as usize] + } +} diff --git a/crates/database-definition/src/walkers/key.rs b/crates/database-definition/src/walkers/key.rs new file mode 100644 index 00000000..1a52273c --- /dev/null +++ b/crates/database-definition/src/walkers/key.rs @@ -0,0 +1,38 @@ +use super::{TableColumnWalker, TableWalker, Walker, key_column::KeyColumnWalker}; +use crate::{Key, KeyColumnId, KeyId, KeyType, StringId}; + +/// Defines a unique constraint in a table. +pub type KeyWalker<'a> = Walker<'a, KeyId>; + +impl<'a> KeyWalker<'a> { + /// The table of this constraint. + pub fn table(self) -> TableWalker<'a> { + self.walk(self.get().table_id()) + } + + /// The constraint name. + pub fn name(self) -> &'a str { + self.get_name(self.get().name()) + } + + /// The columns defining the unique value. + pub fn columns(self) -> impl ExactSizeIterator> + 'a { + let range = super::range_for_key(&self.database_definition.key_columns, self.id, |column| column.key_id()); + + range.map(move |id| self.walk(KeyColumnId(id as u32))) + } + + /// True, if all the given columns are part of the constraint. + pub fn has_all_the_columns(self, mut columns: impl ExactSizeIterator>) -> bool { + columns.all(|left| self.columns().any(|right| left == right.table_column())) + } + + /// True, if the constraint is the primary key of the table. 
    pub fn is_primary(self) -> bool {
        matches!(self.get().r#type(), KeyType::Primary)
    }

    // Fetch the raw Key record this walker points at.
    fn get(self) -> &'a Key {
        &self.database_definition.keys[self.id.0 as usize]
    }
}
diff --git a/crates/database-definition/src/walkers/key_column.rs b/crates/database-definition/src/walkers/key_column.rs
new file mode 100644
index 00000000..5b078b7a
--- /dev/null
+++ b/crates/database-definition/src/walkers/key_column.rs
@@ -0,0 +1,21 @@
use super::{Walker, key::KeyWalker, table_column::TableColumnWalker};
use crate::{KeyColumn, KeyColumnId};

/// A column that is part of a unique constraint.
pub type KeyColumnWalker<'a> = Walker<'a, KeyColumnId>;

impl<'a> KeyColumnWalker<'a> {
    /// The constraint this column is part of.
    pub fn key(self) -> KeyWalker<'a> {
        self.walk(self.get().key_id())
    }

    /// The column in the table this column refers to.
    pub fn table_column(self) -> TableColumnWalker<'a> {
        self.walk(self.get().column_id())
    }

    // Fetch the raw KeyColumn record this walker points at.
    fn get(self) -> &'a KeyColumn {
        &self.database_definition.key_columns[self.id.0 as usize]
    }
}
diff --git a/crates/database-definition/src/walkers/relation.rs b/crates/database-definition/src/walkers/relation.rs
new file mode 100644
index 00000000..25710446
--- /dev/null
+++ b/crates/database-definition/src/walkers/relation.rs
@@ -0,0 +1,138 @@
use std::borrow::Cow;

use inflector::Inflector;
use itertools::Itertools;

use super::{ForeignKeyWalker, TableColumnWalker, TableWalker, Walker};
use crate::RelationId;

/// Either side of a foreign-key relation; dispatches to the forward or back walker.
pub type RelationWalker<'a> = Walker<'a, RelationId>;

impl<'a> RelationWalker<'a> {
    /// The table this relation starts from.
    pub fn referencing_table(self) -> TableWalker<'a> {
        match self.id() {
            RelationId::Forward(id) => self.walk(id).referencing_table(),
            RelationId::Back(id) => self.walk(id).referencing_table(),
        }
    }

    /// The opposite table.
+ pub fn referenced_table(self) -> TableWalker<'a> { + match self.id() { + RelationId::Forward(id) => self.walk(id).referenced_table(), + RelationId::Back(id) => self.walk(id).referenced_table(), + } + } + + /// The columns on this table that are forming the constraint. + pub fn referencing_columns(self) -> Box> + 'a> { + match self.id() { + RelationId::Forward(id) => Box::new(self.walk(id).referencing_columns()), + RelationId::Back(id) => Box::new(self.walk(id).referencing_columns()), + } + } + + /// The columns on the other table that are forming the constraint. + pub fn referenced_columns(self) -> Box> + 'a> { + match self.id() { + RelationId::Forward(id) => Box::new(self.walk(id).referenced_columns()), + RelationId::Back(id) => Box::new(self.walk(id).referenced_columns()), + } + } + + /// True, if the referenced column(s) is (are) unique, this means there can only be at most one row on the other side of the relation. + pub fn is_other_side_one(self) -> bool { + self.referenced_table() + .keys() + .any(|constraint| constraint.has_all_the_columns(self.referenced_columns())) + } + + /// True, if we use the referenced table in the client. E.g. it has at least one + /// column of supported type and one unique constraint. + pub fn referenced_table_is_allowed_in_client(self) -> bool { + self.referenced_table().allowed_in_client() + } + + /// Returns `true` if this relation is from the side that has the foreign key. + pub fn is_referencing_side(&self) -> bool { + self.id().is_forward() + } + + /// Returns `true` if this relation is from the side that is referenced. + pub fn is_referenced_side(&self) -> bool { + self.id().is_backward() + } + + /// The client type of the relation field. 
+ pub fn client_type(self) -> Cow<'a, str> { + let base_name = self.referenced_table().client_name(); + + if self.is_other_side_one() { + let columns_nullable = self.referencing_columns().any(|c| c.is_nullable()); + + // The side that defines the foreign key is nullable if any of the referencing + // columns are nullable. The referenced side is always nullable. + if columns_nullable || self.is_referenced_side() { + Cow::Borrowed(base_name) + } else { + Cow::Owned(format!("{base_name}!")) + } + } else { + Cow::Owned(format!("{base_name}Connection!")) + } + } + + /// The client-side field name for this relation. + pub fn client_field_name(&self) -> String { + let base_name = if self.is_other_side_one() { + self.referenced_table().client_field_name() + } else { + self.referenced_table().client_field_name_plural() + }; + + let mut is_name_collision = self + .referencing_table() + .columns() + .any(|column| column.client_name() == base_name); + + let fk = self.foreign_key(); + + is_name_collision |= { + let constrained_table = fk.constrained_table(); + let referenced_table = fk.referenced_table(); + constrained_table + .forward_relations() + .any(|relation| relation.foreign_key() != fk && relation.referenced_table() == referenced_table) + }; + + if is_name_collision { + let referencing_columns = fk + .columns() + .map(|column| column.constrained_column().client_name()) + .join("_"); + + format!("{base_name}_by_{referencing_columns}").to_camel_case() + } else { + base_name.to_string() + } + } + + /// Returns the name of the relation. + pub fn name(self) -> &'a str { + self.foreign_key().name() + } + + /// Returns the description of the relation. + pub fn description(self) -> Option<&'a str> { + self.foreign_key().description() + } + + /// The foreign key backing the relation. 
+ fn foreign_key(self) -> ForeignKeyWalker<'a> { + match self.id() { + RelationId::Forward(id) => self.walk(id).foreign_key(), + RelationId::Back(id) => self.walk(id).foreign_key(), + } + } +} diff --git a/crates/database-definition/src/walkers/table.rs b/crates/database-definition/src/walkers/table.rs new file mode 100644 index 00000000..0f891fc1 --- /dev/null +++ b/crates/database-definition/src/walkers/table.rs @@ -0,0 +1,137 @@ +use super::{ + RelationWalker, Walker, forward_relation::ForwardRelationWalker, key::KeyWalker, table_column::TableColumnWalker, +}; +use crate::{ + KeyId, RelationId, StringId, Table, TableColumnId, TableId, + ids::{BackRelationId, ForwardRelationId}, +}; + +/// Definition of a table. +pub type TableWalker<'a> = Walker<'a, TableId>; + +impl<'a> TableWalker<'a> { + /// The name of the schema this table is located. + pub fn schema(self) -> &'a str { + &self.database_definition.schemas[self.get().schema_id().0 as usize] + } + + /// The name of the table in the database. + pub fn database_name(self) -> &'a str { + self.get_name(self.get().database_name()) + } + + /// The name of the table in the GraphQL APIs. + pub fn client_name(self) -> &'a str { + self.get_name(self.get().client_name()) + } + + /// The name of fields relating to the table in the GraphQL APIs. + pub fn client_field_name(self) -> &'a str { + self.get_name(self.get().client_field_name()) + } + + /// The name of plural fields relating to the table in the GraphQL APIs. + pub fn client_field_name_plural(self) -> &'a str { + self.get_name(self.get().client_field_name_plural()) + } + + /// An iterator over all the columns in the table. 
+ pub fn columns(self) -> impl Iterator> + 'a { + let range = super::range_for_key(&self.database_definition.table_columns, self.id, |column| { + column.table_id() + }); + + range.map(move |id| self.walk(TableColumnId(id as u32))) + } + + /// A table can be used in the client, if it has at least one supported column + /// and at least one unique constraint that contains columns of supported type. + pub fn allowed_in_client(self) -> bool { + self.columns().next().is_some() && self.keys().next().is_some() + } + + /// A special unique index that acts as the primary key of the table. + pub fn primary_key(self) -> Option> { + self.keys().find(|constraint| constraint.is_primary()) + } + + /// The key used to implicitly order a result set if no order defined by the user. + pub fn implicit_ordering_key(self) -> Option> { + self.primary_key().or_else(|| self.keys().next()) + } + + /// An iterator over all the unique constraints, including the primary key. + pub fn keys(self) -> impl ExactSizeIterator> + 'a { + let range = super::range_for_key(&self.database_definition.keys, self.id, |constraint| { + constraint.table_id() + }); + + range.map(move |id| self.walk(KeyId(id as u32))) + } + + /// Find a database column matching the given GraphQL field name. + pub fn find_database_column_for_field(self, field_name: &str) -> Option> { + self.database_definition + .get_table_column_id_for_field(self.id, field_name) + .map(|id| self.walk(id)) + } + + /// Find a column by database name. + pub fn find_database_column(self, name: &str) -> Option> { + self.database_definition + .get_table_column_id(self.id, name) + .map(|id| self.walk(id)) + } + + /// Find a unique constraint by name. + pub fn find_unique_constraint(self, constraint_name: &str) -> Option> { + self.database_definition + .get_key_id(self.id, constraint_name) + .map(|id| self.walk(id)) + } + + /// Iterate over all relations stemming from a foreign key on this table. 
+ pub(crate) fn forward_relations(self) -> impl Iterator> { + let range = super::range_for_key(&self.database_definition.relations.from, self.id, |(table_id, _)| { + *table_id + }); + + range + .map(move |id| self.walk(ForwardRelationId(id as u32))) + .filter(|relation| relation.referenced_table_is_allowed_in_client()) + } + + /// An iterator over relations having the foreign key constraint defined from or into this table. + pub fn relations(self) -> impl Iterator> + 'a { + let range = super::range_for_key(&self.database_definition.relations.to, self.id, |(table_id, _)| { + *table_id + }); + + let back = range + .map(move |id| self.walk(RelationId::Back(BackRelationId(id as u32)))) + .filter(|relation| relation.referenced_table_is_allowed_in_client()); + + self.forward_relations() + .map(move |fwd| self.walk(RelationId::Forward(fwd.id))) + .chain(back) + } + + /// The name of the input type used for filtering operations in queries. + pub fn filter_input_name(self) -> String { + format!("{}FilterInput", self.client_name()) + } + + /// The name of the input type used for ordering operations in queries. + pub fn order_by_input_name(self) -> String { + format!("{}OrderByInput", self.client_name()) + } + + /// The description of the table, if any. + pub fn description(self) -> Option<&'a str> { + self.get().description().map(|id| self.get_name(id)) + } + + fn get(self) -> &'a Table { + &self.database_definition.tables[self.id.0 as usize] + } +} diff --git a/crates/database-definition/src/walkers/table_column.rs b/crates/database-definition/src/walkers/table_column.rs new file mode 100644 index 00000000..e5ad4b92 --- /dev/null +++ b/crates/database-definition/src/walkers/table_column.rs @@ -0,0 +1,114 @@ +use std::borrow::Cow; + +use inflector::Inflector; + +use super::{TableWalker, Walker}; +use crate::{ColumnType, DatabaseType, IdentityGeneration, StringId, TableColumn, TableColumnId}; + +/// Definition of a column located in a table. 
+pub type TableColumnWalker<'a> = Walker<'a, TableColumnId>; + +impl<'a> TableColumnWalker<'a> { + /// The table this column is located. + pub fn table(self) -> TableWalker<'a> { + self.walk(self.get().table_id()) + } + + /// The name of the column in the database. + pub fn database_name(self) -> &'a str { + self.get_name(self.get().database_name()) + } + + /// The name of the column in the GraphQL APIs. + pub fn client_name(self) -> &'a str { + self.get_name(self.get().client_name()) + } + + /// The type of the column in the database. + pub fn database_type(self) -> DatabaseType<'a> { + match self.get().database_type() { + ColumnType::Scalar(scalar) => DatabaseType::Scalar(scalar), + ColumnType::Enum(r#enum) => DatabaseType::Enum(self.walk(r#enum.id)), + } + } + + /// If the column is an enum, returns the database type of the column for casting. + pub fn enum_database_name(self) -> Option { + match self.database_type().enum_database_name() { + Some(name) if self.is_array() => Some(format!("{name}[]")), + Some(name) => Some(name.to_string()), + None => None, + } + } + + pub fn client_base_type(self) -> Option<&'a str> { + match self.database_type() { + DatabaseType::Scalar(scalar_type) => scalar_type.client_type(), + DatabaseType::Enum(walker) => Some(walker.client_name()), + } + } + + pub fn client_type(&self, prefix: Option<&str>) -> Option> { + let r#type = match self.database_type() { + DatabaseType::Scalar(scalar) if self.is_array() => { + scalar.client_type().map(|t| format!("[{t}]")).map(Cow::from) + } + DatabaseType::Scalar(scalar) => scalar.client_type().map(Cow::from), + DatabaseType::Enum(r#enum) if self.is_array() => { + let name = match prefix { + Some(prefix) => format!("[{}_{}]", prefix, r#enum.client_name()).to_pascal_case(), + None => format!("[{}]", r#enum.client_name()), + }; + + Some(Cow::from(name)) + } + DatabaseType::Enum(r#enum) => { + let name = match prefix { + Some(prefix) => Cow::from(format!("{}_{}", prefix, 
r#enum.client_name()).to_pascal_case()), + None => Cow::from(r#enum.client_name()), + }; + + Some(name) + } + }; + + if self.is_nullable() { + r#type + } else { + r#type.map(|t| format!("{}!", t).into()) + } + } + + /// The description of the column. + pub fn description(self) -> Option<&'a str> { + self.get().description().map(|id| self.get_name(id)) + } + + /// True, if the column is an array. + pub fn is_array(self) -> bool { + self.get().is_array() + } + + /// True, if the column is nullable. + pub fn is_nullable(self) -> bool { + self.get().nullable + } + + /// True, if the column allows null input. + pub fn allows_null_input(self) -> bool { + self.is_nullable() || self.get().has_default || self.identity_generation().is_some() + } + + /// True, if user can define the column value manually. + pub fn allows_user_input(self) -> bool { + !matches!(self.identity_generation(), Some(IdentityGeneration::Always)) + } + + fn identity_generation(self) -> Option { + self.get().identity_generation() + } + + fn get(self) -> &'a TableColumn { + &self.database_definition.table_columns[self.id.0 as usize] + } +} diff --git a/crates/postgres-introspection/Cargo.toml b/crates/postgres-introspection/Cargo.toml new file mode 100644 index 00000000..a61ab2e9 --- /dev/null +++ b/crates/postgres-introspection/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "grafbase-postgres-introspection" +version = "0.1.0" +edition.workspace = true +license.workspace = true +homepage.workspace = true +keywords.workspace = true +repository.workspace = true + +[dependencies] +grafbase-database-definition.workspace = true +indoc.workspace = true +anyhow.workspace = true +sqlx = { workspace = true, default-features = false, features = [ + "sqlx-postgres", + "json", + "runtime-tokio", + "tls-rustls-aws-lc-rs", + "tls-rustls-ring-native-roots", + "postgres", +] } +itertools.workspace = true +Inflector.workspace = true +indenter = { version = "0.3.3", features = ["std"] } + +[lints] +workspace = true diff 
--git a/crates/postgres-introspection/src/columns.rs b/crates/postgres-introspection/src/columns.rs new file mode 100644 index 00000000..f28abdb0 --- /dev/null +++ b/crates/postgres-introspection/src/columns.rs @@ -0,0 +1,100 @@ +use grafbase_database_definition::DatabaseDefinition; +use sqlx::{PgConnection, Row}; +use std::str::FromStr; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + use grafbase_database_definition::{ColumnType, EnumType, IdentityGeneration, ScalarType, TableColumn}; + + let query = indoc::indoc! {r#" + SELECT columns.table_schema AS schema, + columns.table_name AS table_name, + columns.column_name AS column_name, + CAST(columns.udt_name::regtype::oid AS int4) AS type_id, + columns.udt_name AS type_name, + columns.udt_schema AS type_schema, + columns.data_type = 'ARRAY' AS is_array, + pg_attrdef.adbin IS NOT NULL AS has_default, + columns.is_nullable = 'YES' AS is_nullable, + columns.identity_generation AS identity_generation, + pg_description.description AS description + + FROM information_schema.columns columns + + -- for default values + JOIN pg_attribute ON pg_attribute.attname = columns.column_name + + -- also for defaults + JOIN (SELECT pg_class.oid, + relname, + pg_namespace.nspname AS namespace + FROM pg_class + JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace) AS pg_class + ON pg_class.oid = pg_attribute.attrelid + AND pg_class.relname = columns.table_name + AND pg_class.namespace = columns.table_schema + + -- also for defaults + LEFT OUTER JOIN pg_attrdef + ON pg_attrdef.adrelid = pg_attribute.attrelid + AND pg_attrdef.adnum = pg_attribute.attnum + AND pg_class.namespace = columns.table_schema + + -- for column comments + LEFT OUTER JOIN pg_description + ON pg_description.objoid = pg_class.oid + AND pg_description.objsubid = pg_attribute.attnum + + WHERE table_schema <> ALL ( $1 ) + ORDER BY schema, table_name, 
columns.ordinal_position; + "#}; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + for row in rows { + let Some(schema_id) = database_definition.get_schema_id(row.get(0)) else { + continue; + }; + let Some(table_id) = database_definition.get_table_id(schema_id, row.get(1)) else { + continue; + }; + + // If the type is an array, it's named `_type` in the database. We don't need that info in the type + // name, we store enums without an underscore in our interner. + let type_name = row.get::<&str, _>(4).trim_start_matches('_'); + + let enum_id = database_definition + .get_schema_id(row.get(5)) + .and_then(|enum_schema_id| database_definition.get_enum_id(enum_schema_id, type_name)); + + let database_type = match enum_id { + Some(id) => ColumnType::Enum(EnumType { + id, + is_array: row.get(6), + }), + None => ColumnType::Scalar(ScalarType::from(row.get::(3) as u32)), + }; + + let mut column = TableColumn::new(table_id, database_type, row.get(2), None); + + column.set_nullable(row.get(8)); + column.set_has_default(row.get(7)); + + if let Some(s) = row.get(9) { + column.set_identity_generation(IdentityGeneration::from_str(s)?); + } + + if let Some(description) = row.get(10) { + column.set_description(description); + } + + database_definition.push_table_column(column, None); + } + + Ok(()) +} diff --git a/crates/postgres-introspection/src/enums.rs b/crates/postgres-introspection/src/enums.rs new file mode 100644 index 00000000..c16dee61 --- /dev/null +++ b/crates/postgres-introspection/src/enums.rs @@ -0,0 +1,53 @@ +use grafbase_database_definition::{DatabaseDefinition, Enum, EnumVariant}; +use indoc::indoc; +use sqlx::{PgConnection, Row}; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + let query = indoc! 
{r#" + SELECT + nsp.nspname AS schema_name, -- 0 + t.typname AS enum_name, -- 1 + e.enumlabel AS enum_value, -- 2 + pg_catalog.obj_description(t.oid, 'pg_type') AS enum_comment -- 3 + FROM pg_catalog.pg_type t + JOIN pg_catalog.pg_namespace nsp ON nsp.oid = t.typnamespace + JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid + WHERE nsp.nspname <> ALL ( $1 ) -- Exclude system schemas + AND t.typtype = 'e' -- Ensure it is an enum type + ORDER BY nsp.nspname, t.typname, e.enumsortorder; + "#}; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + for row in rows { + let Some(schema_id) = database_definition.get_schema_id(row.get(0)) else { + continue; + }; + + let enum_name: String = row.get(1); + + let enum_id = match database_definition.get_enum_id(schema_id, &enum_name) { + Some(enum_id) => enum_id, + None => { + let mut r#enum = Enum::new(schema_id, enum_name, None); + + if let Some(description) = row.get(3) { + r#enum.set_description(description); + } + + database_definition.push_enum(r#enum) + } + }; + + let variant = EnumVariant::new(enum_id, row.get(2), None); + database_definition.push_enum_variant(variant); + } + + Ok(()) +} diff --git a/crates/postgres-introspection/src/foreign_keys.rs b/crates/postgres-introspection/src/foreign_keys.rs new file mode 100644 index 00000000..bf5a5b69 --- /dev/null +++ b/crates/postgres-introspection/src/foreign_keys.rs @@ -0,0 +1,113 @@ +use grafbase_database_definition::{DatabaseDefinition, ForeignKey, ForeignKeyColumn}; +use sqlx::{PgConnection, Row}; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + let query = indoc::indoc! 
{r#" + SELECT "constraint".conname AS constraint_name, -- 0 + "constraint".schema AS constrained_schema, -- 1 + "constraint".table_name AS constrained_table_name, -- 2 + child_attribute.attname AS constrained_column_name, -- 3 + relation_namespace.nspname AS referenced_schema, -- 4 + parent_class.relname AS referenced_table_name, -- 5 + parent_attribute.attname AS referenced_column_name, -- 6 + pg_catalog.obj_description("constraint".oid, 'pg_constraint') + AS description -- 7 + + FROM (SELECT pg_namespace.nspname AS schema, + unnest(pg_constraint.conkey) AS child, -- list of constrained columns + unnest(pg_constraint.confkey) AS parent, -- list of referenced columns + pg_class.relname AS table_name, + pg_namespace.nspname AS schema_name, + generate_subscripts(pg_constraint.conkey, 1) AS conkey_idx, + pg_constraint.oid, -- Needed for obj_description + pg_constraint.confrelid, + pg_constraint.conrelid, + pg_constraint.conname + FROM pg_class + JOIN pg_constraint ON pg_constraint.conrelid = pg_class.oid + JOIN pg_namespace ON pg_class.relnamespace = pg_namespace.oid + WHERE pg_constraint.contype = 'f' -- f = foreign key constraint + ORDER BY conkey_idx) "constraint" + + JOIN pg_attribute parent_attribute + ON parent_attribute.attrelid = "constraint".confrelid + AND parent_attribute.attnum = "constraint".parent + JOIN pg_class parent_class + ON parent_class.oid = "constraint".confrelid + JOIN pg_attribute child_attribute + ON child_attribute.attrelid = "constraint".conrelid + AND child_attribute.attnum = "constraint".child + -- Join namespace based on the referenced table's namespace OID. 
+ JOIN pg_namespace relation_namespace + ON parent_class.relnamespace = relation_namespace.oid -- Use parent_class join for clarity + + WHERE "constraint".schema <> ALL ( $1 ) + + -- order matters, be careful if changing + ORDER BY constrained_schema, constrained_table_name, constraint_name, "constraint".conkey_idx; + "#}; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + #[allow(clippy::manual_let_else)] // sorry, but match looks better here + for row in rows { + let constrained_schema_id = match database_definition.get_schema_id(row.get(1)) { + Some(id) => id, + None => continue, + }; + + let constrained_table_id = match database_definition.get_table_id(constrained_schema_id, row.get(2)) { + Some(id) => id, + None => continue, + }; + + let constrained_column_id = match database_definition.get_table_column_id(constrained_table_id, row.get(3)) { + Some(id) => id, + None => continue, + }; + + let referenced_schema_id = match database_definition.get_schema_id(row.get(4)) { + Some(id) => id, + None => continue, + }; + + let referenced_table_id = match database_definition.get_table_id(referenced_schema_id, row.get(5)) { + Some(id) => id, + None => continue, + }; + + let referenced_column_id = match database_definition.get_table_column_id(referenced_table_id, row.get(6)) { + Some(id) => id, + None => continue, + }; + + let foreign_key_id = match database_definition.get_foreign_key_id(constrained_schema_id, row.get(0)) { + Some(id) => id, + None => { + let mut foreign_key = ForeignKey::new( + row.get(0), + constrained_schema_id, + constrained_table_id, + referenced_table_id, + ); + + if let Some(description) = row.get(7) { + foreign_key.set_description(description); + } + + database_definition.push_foreign_key(foreign_key).0 + } + }; + + let column = ForeignKeyColumn::new(foreign_key_id, constrained_column_id, referenced_column_id); + database_definition.push_foreign_key_column(column); + } + + Ok(()) +} diff --git 
a/crates/postgres-introspection/src/keys.rs b/crates/postgres-introspection/src/keys.rs new file mode 100644 index 00000000..076c7421 --- /dev/null +++ b/crates/postgres-introspection/src/keys.rs @@ -0,0 +1,74 @@ +use grafbase_database_definition::{DatabaseDefinition, Key, KeyColumn, KeyType}; +use sqlx::{PgConnection, Row}; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + let query = indoc::indoc! {r#" + WITH rawindex AS (SELECT indrelid, + indexrelid, + indisprimary, + unnest(indkey) AS indkeyid, + generate_subscripts(indkey, 1) AS indkeyidx + FROM pg_index + WHERE indpred IS NULL -- filter out partial indexes + AND NOT indisexclusion -- filter out exclusion constraints + AND (indisunique OR indisprimary) + ) + SELECT schemainfo.nspname AS schema, + indexinfo.relname AS constraint_name, + tableinfo.relname AS table_name, + columninfo.attname AS column_name, + rawindex.indisprimary AS is_primary_key + FROM rawindex + + INNER JOIN pg_class AS tableinfo ON tableinfo.oid = rawindex.indrelid + INNER JOIN pg_class AS indexinfo ON indexinfo.oid = rawindex.indexrelid + INNER JOIN pg_namespace AS schemainfo ON schemainfo.oid = tableinfo.relnamespace + + LEFT JOIN pg_attribute AS columninfo + ON columninfo.attrelid = tableinfo.oid AND columninfo.attnum = rawindex.indkeyid + + WHERE schemainfo.nspname <> ALL ( $1 ) + ORDER BY schema, table_name, constraint_name, rawindex.indkeyidx; + "#}; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + for row in rows { + let Some(schema_id) = database_definition.get_schema_id(row.get(0)) else { + continue; + }; + + let Some(table_id) = database_definition.get_table_id(schema_id, row.get(2)) else { + continue; + }; + + let Some(column_name): Option<&str> = row.get(3) else { + continue; + }; + + let Some(column_id) = database_definition.get_table_column_id(table_id, column_name) else { + 
continue; + }; + + let constraint_id = match database_definition.get_key_id(table_id, row.get(1)) { + Some(id) => id, + None => { + let key_type = if row.get(4) { KeyType::Primary } else { KeyType::Unique }; + let key = Key::new(table_id, row.get(1), key_type); + + database_definition.push_key(key) + } + }; + + let column = KeyColumn::new(constraint_id, column_id); + database_definition.push_key_column(column); + } + + Ok(()) +} diff --git a/crates/postgres-introspection/src/lib.rs b/crates/postgres-introspection/src/lib.rs new file mode 100644 index 00000000..0676e47e --- /dev/null +++ b/crates/postgres-introspection/src/lib.rs @@ -0,0 +1,61 @@ +use grafbase_database_definition::DatabaseDefinition; + +mod columns; +mod enums; +mod foreign_keys; +mod keys; +mod render; +mod schemas; +mod tables; + +/// Options for Postgres introspection. +pub struct IntrospectionOptions<'a> { + /// Name of the database to introspect. + pub database_name: &'a str, + /// URL of the extension to use. + pub extension_url: &'a str, + /// Default schema to use and omit from the SDL output. + pub default_schema: &'a str, +} + +/// Introspects a PostgreSQL database schema. +/// +/// This function connects to a PostgreSQL database and retrieves information about +/// its schema including schemas, enums, tables, columns, foreign keys, and primary keys. +/// It then renders this information as an SDL (Schema Definition Language) string. +/// +/// # Arguments +/// +/// * `conn` - A mutable reference to an active PostgreSQL connection. +/// * `opts` - Options for customizing the introspection process. 
+pub async fn introspect(conn: &mut sqlx::PgConnection, opts: IntrospectionOptions<'_>) -> anyhow::Result { + let mut database_definition = DatabaseDefinition::new(opts.database_name.to_string()); + + schemas::introspect_database(conn, &mut database_definition).await?; + enums::introspect_database(conn, &mut database_definition).await?; + tables::introspect_database(conn, &mut database_definition).await?; + columns::introspect_database(conn, &mut database_definition).await?; + foreign_keys::introspect_database(conn, &mut database_definition).await?; + keys::introspect_database(conn, &mut database_definition).await?; + + database_definition.finalize(); + + Ok(render::to_sdl( + database_definition, + opts.extension_url, + opts.default_schema, + )) +} + +/// A list of schemas to filter out automatically on every introspection. +static BLOCKED_SCHEMAS: &[&str] = &["pg_catalog", "pg_toast", "information_schema"]; + +fn blocked_schemas() -> Vec { + static SCHEMAS: std::sync::OnceLock> = std::sync::OnceLock::new(); + + let result = SCHEMAS + .get_or_init(|| BLOCKED_SCHEMAS.iter().map(|schema| (*schema).to_string()).collect()) + .clone(); + + result +} diff --git a/crates/postgres-introspection/src/render.rs b/crates/postgres-introspection/src/render.rs new file mode 100644 index 00000000..64ca6f9a --- /dev/null +++ b/crates/postgres-introspection/src/render.rs @@ -0,0 +1,27 @@ +mod ast; +mod enums; +mod input_types; +mod mutation; +mod output_types; +mod query; +mod scalars; +mod schema_directives; +mod tables; + +use ast::schema::Schema; +use grafbase_database_definition::DatabaseDefinition; + +pub fn to_sdl(database_definition: DatabaseDefinition, extension_url: &str, default_schema: &str) -> String { + let mut rendered = Schema::new(); + + scalars::render(&mut rendered); + schema_directives::render(&database_definition, extension_url, &mut rendered); + enums::render(&database_definition, default_schema, &mut rendered); + input_types::render(&database_definition, &mut 
rendered); + output_types::render(&database_definition, &mut rendered); + tables::render(&database_definition, default_schema, &mut rendered); + query::render(&database_definition, &mut rendered); + mutation::render(&database_definition, &mut rendered); + + rendered.to_string() +} diff --git a/crates/postgres-introspection/src/render/ast.rs b/crates/postgres-introspection/src/render/ast.rs new file mode 100644 index 00000000..91d81229 --- /dev/null +++ b/crates/postgres-introspection/src/render/ast.rs @@ -0,0 +1,7 @@ +pub mod directive; +pub mod r#enum; +pub mod field; +pub mod input; +pub mod scalar; +pub mod schema; +pub mod r#type; diff --git a/crates/postgres-introspection/src/render/ast/directive.rs b/crates/postgres-introspection/src/render/ast/directive.rs new file mode 100644 index 00000000..b2d53f54 --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/directive.rs @@ -0,0 +1,165 @@ +use std::{ + borrow::Cow, + fmt::{self, Write}, +}; + +pub enum ArgumentValue<'a> { + String(Cow<'a, str>), + Constant(Cow<'a, str>), + Array(Vec>), + MultiLineArray { + indent: &'static str, + values: Vec>, + }, +} + +impl fmt::Display for ArgumentValue<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ArgumentValue::String(s) => write!(f, "\"{}\"", s), + ArgumentValue::Constant(c) => c.fmt(f), + ArgumentValue::Array(arr) => { + f.write_char('[')?; + + let items = arr.len(); + + for (i, argument) in arr.iter().enumerate() { + argument.fmt(f)?; + + if i < items - 1 { + f.write_str(", ")?; + } + } + + f.write_char(']') + } + ArgumentValue::MultiLineArray { indent, values } => { + f.write_str("[\n")?; + + let items = values.len(); + + for (i, argument) in values.iter().enumerate() { + write!(f, "{indent} {argument}")?; + + if i < items - 1 { + f.write_str(",\n")?; + } else { + f.write_char('\n')?; + } + } + + write!(f, "{indent}]") + } + } + } +} + +pub struct Argument<'a> { + name: Cow<'a, str>, + value: ArgumentValue<'a>, + 
description: Option>, +} + +impl<'a> Argument<'a> { + pub fn new(name: impl Into>, value: ArgumentValue<'a>) -> Self { + Argument { + name: name.into(), + value, + description: None, + } + } + + pub fn string(name: impl Into>, value: impl Into>) -> Self { + Argument::new(name.into(), ArgumentValue::String(value.into())) + } + + pub fn constant(name: &'a str, value: impl Into>) -> Self { + Argument::new(name, ArgumentValue::Constant(value.into())) + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } + + pub fn has_description(&self) -> bool { + self.description.is_some() + } +} + +impl fmt::Display for Argument<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = &self.description { + writeln!(f, r#"""""#)?; + writeln!(f, "{description}")?; + writeln!(f, r#"""""#)?; + } + write!(f, "{}: {}", self.name, self.value) + } +} + +pub struct Directive<'a> { + name: &'a str, + arguments: Vec>, + render_multiline: bool, +} + +impl<'a> Directive<'a> { + pub fn new(name: &'a str) -> Self { + Self { + name, + arguments: Vec::new(), + render_multiline: false, + } + } + + pub fn push_argument(&mut self, argument: Argument<'a>) { + if argument.description.is_some() { + self.render_multiline = true; + } + + self.arguments.push(argument); + } + + pub fn render_multiline(&mut self) { + self.render_multiline = true; + } +} + +impl fmt::Display for Directive<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_char('@')?; + f.write_str(self.name)?; + + if !self.arguments.is_empty() { + f.write_char('(')?; + + if self.render_multiline { + writeln!(f)?; + } + + for (i, arg) in self.arguments.iter().enumerate() { + if self.render_multiline { + write!(f, " {}", arg)?; + + if i < self.arguments.len() - 1 { + writeln!(f, ",")?; + } + } else { + arg.fmt(f)?; + + if i < self.arguments.len() - 1 { + f.write_str(", ")?; + } + } + } + + if self.render_multiline { + 
write!(f, "\n )")?; + } else { + f.write_char(')')?; + } + } + + Ok(()) + } +} diff --git a/crates/postgres-introspection/src/render/ast/enum.rs b/crates/postgres-introspection/src/render/ast/enum.rs new file mode 100644 index 00000000..02775ee5 --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/enum.rs @@ -0,0 +1,99 @@ +use std::{borrow::Cow, fmt}; + +use super::directive::Directive; + +pub struct Enum<'a> { + name: &'a str, + directives: Vec>, + variants: Vec>, + description: Option>, +} + +impl<'a> Enum<'a> { + pub fn new(name: &'a str) -> Self { + Self { + name, + directives: Vec::new(), + variants: Vec::new(), + description: None, + } + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn push_variant(&mut self, value: EnumVariant<'a>) { + self.variants.push(value); + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for Enum<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = self.description.as_deref() { + writeln!(f, r#"""""#)?; + writeln!(f, "{description}")?; + writeln!(f, r#"""""#)?; + } + + write!(f, "enum {}", self.name)?; + + for directive in &self.directives { + write!(f, " {}", directive)?; + } + + f.write_str(" {")?; + + for variant in &self.variants { + write!(f, "\n{}", variant)?; + } + + write!(f, "\n}}") + } +} + +pub struct EnumVariant<'a> { + name: &'a str, + directives: Vec>, + description: Option>, +} + +impl<'a> EnumVariant<'a> { + pub fn new(name: &'a str) -> Self { + Self { + name, + directives: Vec::new(), + description: None, + } + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for EnumVariant<'_> { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = self.description.as_deref() { + writeln!(f, r#" """"#)?; + writeln!(f, " {description}")?; + writeln!(f, r#" """"#)?; + } + + write!(f, " {}", self.name)?; + + for directive in &self.directives { + write!(f, " {}", directive)?; + } + + Ok(()) + } +} diff --git a/crates/postgres-introspection/src/render/ast/field.rs b/crates/postgres-introspection/src/render/ast/field.rs new file mode 100644 index 00000000..dd11c75c --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/field.rs @@ -0,0 +1,91 @@ +use std::fmt::Write; +use std::{borrow::Cow, fmt}; + +use indenter::indented; + +use super::directive::{Argument, Directive}; + +pub struct Field<'a> { + name: Cow<'a, str>, + r#type: Cow<'a, str>, + directives: Vec>, + arguments: Vec>, + description: Option>, + render_multiline: bool, +} + +impl<'a> Field<'a> { + pub fn new(name: impl Into>, r#type: impl Into>) -> Self { + Self { + name: name.into(), + r#type: r#type.into(), + directives: Vec::new(), + arguments: Vec::new(), + description: None, + render_multiline: false, + } + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn push_argument(&mut self, argument: Argument<'a>) { + if argument.has_description() { + self.render_multiline = true; + } + + self.arguments.push(argument); + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for Field<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = &self.description { + let indent = " "; + writeln!(indented(f).with_str(indent), r#"""""#)?; + writeln!(indented(f).with_str(indent), "{description}")?; + writeln!(indented(f).with_str(indent), r#"""""#)?; + } + + write!(f, " {}", self.name)?; + + if !self.arguments.is_empty() { + if self.render_multiline { + writeln!(f, "(")?; + } else { + write!(f, "(")?; + } 
+ + for (i, argument) in self.arguments.iter().enumerate() { + if i > 0 && !self.render_multiline { + write!(f, ", ")?; + } + + if self.render_multiline { + writeln!(indented(f).with_str(" "), "{},", argument)?; + } else { + write!(f, "{}", argument)?; + } + } + + if self.render_multiline { + write!(f, " ): {}", self.r#type)?; + } else { + write!(f, "): {}", self.r#type)?; + } + } else { + write!(f, ": {}", self.r#type)?; + } + + for directive in &self.directives { + write!(f, " {}", directive)?; + } + + Ok(()) + } +} diff --git a/crates/postgres-introspection/src/render/ast/input.rs b/crates/postgres-introspection/src/render/ast/input.rs new file mode 100644 index 00000000..cb37b811 --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/input.rs @@ -0,0 +1,72 @@ +use std::{ + borrow::Cow, + fmt::{self}, +}; + +use super::{directive::Directive, field::Field}; + +pub struct InputType<'a> { + name: Cow<'a, str>, + directives: Vec>, + fields: Vec>, + description: Option>, +} + +impl<'a> InputType<'a> { + pub fn new(name: impl Into>) -> InputType<'a> { + InputType { + name: name.into(), + directives: Vec::new(), + fields: Vec::new(), + description: None, + } + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn push_field(&mut self, field: Field<'a>) { + self.fields.push(field); + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for InputType<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = self.description.as_deref() { + writeln!(f, r#"""""#)?; + writeln!(f, "{description}")?; + writeln!(f, r#"""""#)?; + } + + write!(f, "input {}", self.name)?; + + if !self.directives.is_empty() { + for directive in self.directives.iter() { + write!(f, " {directive} ")?; + } + } + + if self.directives.is_empty() { + write!(f, " ")?; + } + + f.write_str("{\n")?; + + if 
self.fields.is_empty() { + writeln!(f, " _: Boolean")?; + } else { + for field in self.fields.iter() { + writeln!(f, "{field}")?; + } + } + + f.write_str("}")?; + + Ok(()) + } +} diff --git a/crates/postgres-introspection/src/render/ast/scalar.rs b/crates/postgres-introspection/src/render/ast/scalar.rs new file mode 100644 index 00000000..fa59e48c --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/scalar.rs @@ -0,0 +1,32 @@ +use std::borrow::Cow; +use std::fmt; + +pub struct Scalar<'a> { + name: &'a str, + description: Option>, +} + +impl<'a> Scalar<'a> { + pub fn new(name: &'a str) -> Self { + Self { + name, + description: None, + } + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for Scalar<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = self.description.as_deref() { + writeln!(f, r#"""""#)?; + writeln!(f, "{description}")?; + writeln!(f, r#"""""#)?; + } + + writeln!(f, "scalar {}", self.name) + } +} diff --git a/crates/postgres-introspection/src/render/ast/schema.rs b/crates/postgres-introspection/src/render/ast/schema.rs new file mode 100644 index 00000000..2a0c591e --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/schema.rs @@ -0,0 +1,77 @@ +use std::fmt; + +use super::{directive::Directive, r#enum::Enum, input::InputType, scalar::Scalar, r#type::Type}; + +#[derive(Default)] +pub struct Schema<'a> { + directives: Vec>, + input_types: Vec>, + types: Vec>, + enums: Vec>, + scalars: Vec>, +} + +impl<'a> Schema<'a> { + pub fn new() -> Self { + Self::default() + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn push_input(&mut self, input_type: InputType<'a>) { + self.input_types.push(input_type); + } + + pub fn push_type(&mut self, r#type: Type<'a>) { + self.types.push(r#type); + } + + pub fn push_enum(&mut self, r#enum: 
Enum<'a>) { + self.enums.push(r#enum); + } + + pub fn push_scalar(&mut self, scalar: Scalar<'a>) { + self.scalars.push(scalar); + } +} + +impl fmt::Display for Schema<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if !self.directives.is_empty() { + writeln!(f, "extend schema")?; + + for directive in &self.directives { + writeln!(f, " {}", directive)?; + } + } + + writeln!(f)?; + + for scalar in &self.scalars { + scalar.fmt(f)?; + writeln!(f)?; + } + + for r#enum in &self.enums { + r#enum.fmt(f)?; + writeln!(f)?; + writeln!(f)?; + } + + for input in &self.input_types { + input.fmt(f)?; + writeln!(f)?; + writeln!(f)?; + } + + for r#type in &self.types { + r#type.fmt(f)?; + writeln!(f)?; + writeln!(f)?; + } + + Ok(()) + } +} diff --git a/crates/postgres-introspection/src/render/ast/type.rs b/crates/postgres-introspection/src/render/ast/type.rs new file mode 100644 index 00000000..ed18f057 --- /dev/null +++ b/crates/postgres-introspection/src/render/ast/type.rs @@ -0,0 +1,70 @@ +use std::{ + borrow::Cow, + fmt::{self, Write}, +}; + +use super::{directive::Directive, field::Field}; + +pub struct Type<'a> { + name: Cow<'a, str>, + directives: Vec>, + fields: Vec>, + description: Option>, +} + +impl<'a> Type<'a> { + pub fn new(name: impl Into>) -> Type<'a> { + Type { + name: name.into(), + directives: Vec::new(), + fields: Vec::new(), + description: None, + } + } + + pub fn push_directive(&mut self, directive: Directive<'a>) { + self.directives.push(directive); + } + + pub fn push_field(&mut self, field: Field<'a>) { + self.fields.push(field); + } + + pub fn set_description(&mut self, description: impl Into>) { + self.description = Some(description.into()); + } +} + +impl fmt::Display for Type<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(description) = &self.description { + writeln!(f, r#"""""#)?; + writeln!(f, "{description}")?; + writeln!(f, r#"""""#)?; + } + + write!(f, "type {}", self.name)?; + + if 
!self.directives.is_empty() { + f.write_char('\n')?; + + for directive in self.directives.iter() { + write!(f, " {directive}")?; + + f.write_char('\n')?; + } + } else { + f.write_char(' ')?; + } + + f.write_str("{\n")?; + + if !self.fields.is_empty() { + for field in self.fields.iter() { + writeln!(f, "{field}")?; + } + } + + f.write_str("}") + } +} diff --git a/crates/postgres-introspection/src/render/enums.rs b/crates/postgres-introspection/src/render/enums.rs new file mode 100644 index 00000000..6738caec --- /dev/null +++ b/crates/postgres-introspection/src/render/enums.rs @@ -0,0 +1,65 @@ +use grafbase_database_definition::DatabaseDefinition; + +use super::ast::{ + directive::{Argument, Directive}, + r#enum::{Enum, EnumVariant}, + schema::Schema, +}; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, default_schema: &'a str, rendered: &mut Schema<'a>) { + rendered.push_enum({ + { + let mut r#enum = Enum::new("OrderDirection"); + r#enum.set_description("Specifies the direction for ordering results."); + + for (variant, description) in [ + ("ASC", "Specifies an ascending order for a given orderBy argument."), + ("DESC", "Specifies a descending order for a given orderBy argument."), + ] { + let mut variant = EnumVariant::new(variant); + variant.set_description(description); + + r#enum.push_variant(variant); + } + + r#enum + } + }); + + for r#enum in database_definition.enums() { + let mut render = Enum::new(r#enum.client_name()); + + if let Some(description) = r#enum.description() { + render.set_description(description); + } + + render.push_directive({ + let mut directive = Directive::new("pgEnum"); + directive.push_argument(Argument::string("name", r#enum.database_name())); + + if r#enum.schema() != default_schema { + directive.push_argument(Argument::string("schema", r#enum.schema())); + } + + directive + }); + + for variant in r#enum.variants() { + let mut variant_render = EnumVariant::new(variant.client_name()); + + 
variant_render.push_directive({ + let mut directive = Directive::new("pgEnumVariant"); + directive.push_argument(Argument::string("name", variant.database_name())); + directive + }); + + if let Some(description) = variant.description() { + variant_render.set_description(description); + } + + render.push_variant(variant_render); + } + + rendered.push_enum(render); + } +} diff --git a/crates/postgres-introspection/src/render/input_types.rs b/crates/postgres-introspection/src/render/input_types.rs new file mode 100644 index 00000000..976459ce --- /dev/null +++ b/crates/postgres-introspection/src/render/input_types.rs @@ -0,0 +1,523 @@ +use std::borrow::Cow; + +use grafbase_database_definition::{DatabaseDefinition, TableWalker}; +use inflector::Inflector; +use itertools::Itertools; + +use crate::render::ast::{directive::Directive, field::Field, input::InputType}; + +use super::ast::schema::Schema; + +const SCALARS: &[&str] = &[ + "String", "BigInt", "Int", "Float", "Boolean", "Decimal", "Bytes", "JSON", +]; + +const FILTERS: &[(&str, &str)] = &[ + ("eq", "The value is exactly the one given"), + ("ne", "The value is not the one given"), + ("gt", "The value is greater than the one given"), + ("lt", "The value is less than the one given"), + ("gte", "The value is greater than, or equal to the one given"), + ("lte", "The value is less than, or equal to the one given"), +]; + +const ARRAY_FILTERS: &[(&str, &str)] = &[ + ("in", "The value is in the given array of values"), + ("nin", "The value is not in the given array of values"), +]; + +const ARRAYS: &[(&str, &str)] = &[ + ("[String]", "String"), + ("[Int]", "Int"), + ("[BigInt]", "BigInt"), + ("[Decimal]", "Decimal"), + ("[Float]", "Float"), + ("[Boolean]", "Boolean"), + ("[Bytes]", "Bytes"), + ("[JSON]", "JSON"), +]; + +static NUMERIC_SCALARS: &[&str] = &["BigInt", "Float", "Decimal", "Int"]; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, rendered: &mut Schema<'a>) { + 
render_scalar_inputs(database_definition, rendered); + + for table in database_definition.tables().filter(|t| t.allowed_in_client()) { + render_order_input(rendered, table); + render_fetch_input(rendered, table); + render_filter_input(rendered, table); + render_create_input(rendered, table); + render_update_input(rendered, table); + } +} + +fn render_update_input<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let mut mutation_input = InputType::new(format!("{}UpdateInput", table.client_name())); + mutation_input.set_description(format!("Input for updating an existing {}", table.client_name())); + + for column in table.columns() { + if !column.allows_user_input() { + continue; + } + + let type_name = if column.is_array() { + format!("{}ArrayUpdateInput", column.client_base_type().unwrap()) + } else { + format!("{}UpdateInput", column.client_base_type().unwrap()) + }; + + mutation_input.push_field({ + let mut field = Field::new(column.client_name(), type_name); + field.set_description(format!("Update field value for {}", column.client_name())); + field + }); + } + + rendered.push_input(mutation_input); +} + +fn render_create_input<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let type_name = format!("{}CreateInput", table.client_name()); + let mut mutation_input = InputType::new(type_name); + + mutation_input.set_description(format!("Input for creating a new {}", table.client_name())); + + for column in table.columns() { + if !column.allows_user_input() { + continue; + } + + let type_name = if column.is_array() { + Cow::Owned(format!("[{}]", column.client_base_type().unwrap())) + } else { + Cow::Borrowed(column.client_base_type().unwrap()) + }; + + let type_name = if column.allows_null_input() { + type_name + } else { + Cow::Owned(format!("{}!", type_name)) + }; + + mutation_input.push_field({ + let mut field = Field::new(column.client_name(), type_name); + field.set_description(format!("Set field value for {}", column.client_name())); + 
field + }); + } + + rendered.push_input(mutation_input); +} + +fn render_filter_input<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let type_name = format!("{}FilterInput", table.client_name()); + let mut filter_input = InputType::new(type_name.clone()); + + filter_input.set_description(format!("Filter input type for {} objects.", table.client_name())); + + filter_input.push_directive(Directive::new("oneOf")); + + for column in table.columns() { + let scalar = column.client_base_type().unwrap(); + + let filter_type = if column.is_array() { + format!("{scalar}ArrayFilterInput") + } else { + format!("{scalar}FilterInput") + }; + + filter_input.push_field({ + let mut field = Field::new(column.client_name(), filter_type); + field.set_description(format!("Filter by the given {}", column.client_name())); + field + }); + } + + let mut collection_input = InputType::new(format!("{}CollectionFilterInput", table.client_name())); + + collection_input.set_description(format!("Filter input type for {} collections", table.client_name())); + + collection_input.push_field({ + let mut field = Field::new("contains", type_name.clone()); + field.set_description("The object is related to an object with the given fields"); + field + }); + + rendered.push_input(collection_input); + + for relation in table.relations() { + if relation.is_other_side_one() { + filter_input.push_field({ + let mut field = Field::new( + relation.client_field_name(), + format!("{}FilterInput", relation.referenced_table().client_name()), + ); + + field.set_description(format!( + "Filter by the related {} object", + relation.referenced_table().client_name() + )); + + field + }); + } else { + filter_input.push_field({ + let mut field = Field::new( + relation.client_field_name(), + format!("{}CollectionFilterInput", relation.referenced_table().client_name()), + ); + + field.set_description(format!( + "Filter by the related {} objects", + relation.referenced_table().client_name() + )); + + field + }); + } 
+ } + + for (op, desc) in &[ + ("ALL", "All of the filters must match"), + ("NONE", "None of the filters must match"), + ("ANY", "At least one of the filters must match"), + ] { + filter_input.push_field({ + let mut field = Field::new(*op, format!("[{}]", type_name.clone())); + field.set_description(*desc); + field + }); + } + + rendered.push_input(filter_input); +} + +fn render_fetch_input<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let mut filter_input = InputType::new(format!("{}LookupInput", table.client_name())); + filter_input.set_description(format!("Input type to select a unique {}", table.client_name())); + + filter_input.push_directive(Directive::new("oneOf")); + + for key in table.keys() { + if key.columns().count() == 1 { + let column = key.columns().next().unwrap().table_column(); + + filter_input.push_field({ + let mut field = Field::new(column.client_name(), column.client_base_type().unwrap()); + field.set_description(format!("Select by the '{}' field", column.client_name())); + field + }); + } else { + let type_name = format!( + "{}_{}_Input", + table.client_name(), + key.columns().map(|c| c.table_column().client_name()).join("_") + ) + .to_pascal_case(); + + let mut composite_input = InputType::new(type_name.clone()); + + composite_input.set_description(format!( + "Input type to select a unique {} with multiple fields", + table.client_name() + )); + + for column in key.columns() { + let column = column.table_column(); + + composite_input.push_field({ + let mut field = Field::new(column.client_name(), column.client_type(None).unwrap()); + + field.set_description(format!("Select by the '{}' field", column.client_name())); + field + }); + } + + rendered.push_input(composite_input); + + filter_input.push_field({ + let field_name = key + .columns() + .map(|c| c.table_column().client_name()) + .join("_") + .to_camel_case(); + + let mut field = Field::new(field_name, type_name); + + field.set_description(format!( + "Select {} by composite 
columns '{}'", + table.client_name(), + key.columns().map(|c| c.table_column().client_name()).join(", ") + )); + + field + }); + } + } + + rendered.push_input(filter_input); +} + +fn render_order_input<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let mut order_input = InputType::new(table.order_by_input_name()); + order_input.push_directive(Directive::new("oneOf")); + order_input.set_description(format!("Specifies the ordering for {} results.", table.client_name())); + + for column in table.columns() { + order_input.push_field({ + let mut field = Field::new(column.client_name(), "OrderDirection"); + + field.set_description(format!( + "Order {} by {}", + table.client_name().to_camel_case().to_plural(), + column.client_name() + )); + + field + }); + } + + for relation in table.relations().filter(|r| r.is_other_side_one()) { + order_input.push_field({ + let mut field = Field::new( + relation.client_field_name(), + relation.referenced_table().order_by_input_name(), + ); + + field.set_description(format!( + "Order {} results by {} fields", + relation.referencing_table().client_name(), + relation.referenced_table().client_name(), + )); + + field + }); + } + + rendered.push_input(order_input); +} + +fn render_scalar_inputs<'a>(database_definition: &'a DatabaseDefinition, rendered: &mut Schema<'a>) { + for scalar in SCALARS { + rendered.push_input(create_scalar_filters(scalar)); + rendered.push_input(create_scalar_update_input(scalar)); + rendered.push_input(create_array_update_type(scalar)); + } + + for (return_type, scalar) in ARRAYS { + rendered.push_input(create_scalar_array_filters(scalar, *return_type)); + } + + for r#enum in database_definition.enums() { + rendered.push_input(create_scalar_filters(r#enum.client_name())); + + let array_type = format!("[{}]", r#enum.client_name()); + rendered.push_input(create_scalar_array_filters(r#enum.client_name(), array_type)); + rendered.push_input(create_scalar_update_input(r#enum.client_name())); + 
rendered.push_input(create_array_update_type(r#enum.client_name())); + } +} + +fn create_array_update_type(scalar: &str) -> InputType<'_> { + let input_type_name = format!("{scalar}ArrayUpdateInput"); + let mut input = InputType::new(input_type_name.clone()); + + input.set_description(format!("Update input for {scalar} array type.")); + input.push_directive(Directive::new("oneOf")); + + input.push_field({ + let mut field = Field::new("set", format!("[{scalar}]")); + field.set_description("Replaces the value of a field with the specified value."); + field + }); + + input.push_field({ + let mut field = Field::new("append", format!("[{scalar}]")); + field.set_description("Append an array value to the column."); + field + }); + + input.push_field({ + let mut field = Field::new("prepend", format!("[{scalar}]")); + field.set_description("Prepend an array value to the column."); + field + }); + + input +} + +fn create_scalar_update_input(scalar: &str) -> InputType<'_> { + let input_type_name = format!("{scalar}UpdateInput"); + let mut input = InputType::new(input_type_name.clone()); + + input.set_description(format!("Update input for {scalar} type.")); + input.push_directive(Directive::new("oneOf")); + + input.push_field({ + let mut field = Field::new("set", scalar); + field.set_description("Replaces the value of a field with the specified value."); + field + }); + + if NUMERIC_SCALARS.contains(&scalar) { + input.push_field({ + let mut field = Field::new("increment", scalar); + field.set_description("Increments the value of a field by the specified value."); + field + }); + + input.push_field({ + let mut field = Field::new("decrement", scalar); + field.set_description("Decrements the value of a field by the specified value."); + field + }); + + input.push_field({ + let mut field = Field::new("multiply", scalar); + field.set_description("Multiplies the value of a field by the specified value."); + field + }); + + input.push_field({ + let mut field = Field::new("divide", 
scalar); + field.set_description("Divides the value of a field by the specified value."); + field + }); + } + + if scalar == "JSON" { + input.push_field({ + let mut field = Field::new("append", scalar); + field.set_description("Append JSON value to the column."); + field + }); + + input.push_field({ + let mut field = Field::new("prepend", scalar); + field.set_description("Prepend JSON value to the column."); + field + }); + + input.push_field({ + let mut field = Field::new("deleteKey", "String"); + + field.set_description( + "Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array.", + ); + + field + }); + + input.push_field({ + let mut field = Field::new("deleteElem", "Int"); + + field.set_description( + "Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array.", + ); + + field + }); + + input.push_field({ + let mut field = Field::new("deleteAtPath", "[String!]"); + + field.set_description( + "Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes.", + ); + + field + }); + } + + input +} + +fn create_scalar_array_filters<'a>(scalar: &'a str, return_type: impl Into>) -> InputType<'a> { + let return_type = return_type.into(); + let input_type_name = format!("{scalar}ArrayFilterInput"); + let mut input = InputType::new(input_type_name.clone()); + + input.set_description(format!("Search filter input for {scalar} array type.")); + input.push_directive(Directive::new("oneOf")); + + for (filter, description) in FILTERS { + let mut field = Field::new(*filter, return_type.clone()); + field.set_description(*description); + + input.push_field(field); + } + + for (filter, description) in ARRAY_FILTERS { + let mut field = Field::new(*filter, format!("[{return_type}!]")); + field.set_description(*description); + + input.push_field(field); + } + + input.push_field({ + let mut field = 
Field::new("contains", return_type.clone()); + field.set_description("Checks if the array contains all elements of the provided array"); + field + }); + + input.push_field({ + let mut field = Field::new("contained", return_type.clone()); + field.set_description("Checks if the array is contained within the provided array"); + field + }); + + input.push_field({ + let mut field = Field::new("overlaps", return_type.clone()); + field.set_description("Checks if the array has any elements in common with the provided array"); + field + }); + + input.push_field({ + let mut field = Field::new("not", input_type_name); + field.set_description("A negation of the given filter"); + field + }); + + input +} + +fn create_scalar_filters(scalar: &str) -> InputType<'_> { + let input_type_name = format!("{scalar}FilterInput"); + let mut input = InputType::new(input_type_name.clone()); + + input.set_description(format!("Search filter input for {scalar} type.")); + input.push_directive(Directive::new("oneOf")); + + for (filter, description) in FILTERS { + let mut field = Field::new(*filter, scalar); + field.set_description(*description); + + input.push_field(field); + } + + if scalar == "String" { + input.push_field({ + let mut field = Field::new("like", scalar); + field.set_description("The given input is part of the column value"); + + field + }); + } + + for (filter, description) in ARRAY_FILTERS { + // never ever allow nulls in array filters. we use any/all instead of IN, + // and a null value in the array will cause very interesting behavior... 
+ let mut field = Field::new(*filter, format!("[{scalar}!]")); + field.set_description(*description); + + input.push_field(field); + } + + input.push_field({ + let mut field = Field::new("not", input_type_name); + field.set_description("A negation of the given filter"); + field + }); + + input +} diff --git a/crates/postgres-introspection/src/render/mutation.rs b/crates/postgres-introspection/src/render/mutation.rs new file mode 100644 index 00000000..e37dbd8c --- /dev/null +++ b/crates/postgres-introspection/src/render/mutation.rs @@ -0,0 +1,157 @@ +use grafbase_database_definition::{DatabaseDefinition, TableWalker}; +use inflector::Inflector; + +use super::ast::{ + directive::{Argument, Directive}, + field::Field, + schema::Schema, + r#type::Type, +}; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, rendered: &mut Schema<'a>) { + let mut mutation = Type::new("Mutation"); + + for table in database_definition.tables().filter(|t| t.allowed_in_client()) { + render_create_mutations(&mut mutation, table); + render_update_mutations(&mut mutation, table); + render_delete_mutations(&mut mutation, table); + } + + rendered.push_type(mutation); +} + +fn render_delete_mutations<'a>(mutation: &mut Type<'a>, table: TableWalker<'a>) { + // delete one + let mut field = Field::new( + format!("{}Delete", table.client_name().to_camel_case()), + format!("{}DeletePayload!", table.client_name()), + ); + + field.set_description(format!("Delete a unique {}", table.client_name())); + field.push_directive(Directive::new("pgDeleteOne")); + + field.push_argument({ + let mut argument = Argument::constant("lookup", format!("{}LookupInput!", table.client_name())); + argument.set_description(format!("Lookup input for unique {} deletion", table.client_name())); + argument + }); + + mutation.push_field(field); + + // delete many + let mut field = Field::new( + format!("{}DeleteMany", table.client_name().to_camel_case()), + format!("{}DeleteManyPayload!", table.client_name()), + ); 
+ + field.set_description(format!( + "Delete multiple {}", + table.client_name().to_camel_case().to_plural() + )); + + field.push_directive(Directive::new("pgDeleteMany")); + + field.push_argument({ + let mut argument = Argument::constant("filter", format!("{}FilterInput", table.client_name())); + argument.set_description(format!("Filter for {} deletion", table.client_name())); + argument + }); + + mutation.push_field(field); +} + +fn render_update_mutations<'a>(mutation: &mut Type<'a>, table: TableWalker<'a>) { + // update one + let mut field = Field::new( + format!("{}Update", table.client_name().to_camel_case()), + format!("{}UpdatePayload!", table.client_name()), + ); + + field.set_description(format!("Update a unique {}", table.client_name())); + field.push_directive(Directive::new("pgUpdateOne")); + + field.push_argument({ + let mut argument = Argument::constant("lookup", format!("{}LookupInput!", table.client_name())); + argument.set_description(format!("Lookup input for unique {} update", table.client_name())); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("input", format!("{}UpdateInput!", table.client_name())); + argument.set_description(format!("Input for updating a {}", table.client_name())); + argument + }); + + mutation.push_field(field); + + // update many + let mut field = Field::new( + format!("{}UpdateMany", table.client_name().to_camel_case()), + format!("{}UpdateManyPayload!", table.client_name()), + ); + + field.set_description(format!( + "Update multiple {}", + table.client_name().to_camel_case().to_plural() + )); + + field.push_directive(Directive::new("pgUpdateMany")); + + field.push_argument({ + let mut argument = Argument::constant("filter", format!("{}FilterInput", table.client_name())); + argument.set_description(format!( + "Filter for updating multiple {} instances", + table.client_name() + )); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("input", 
format!("{}UpdateInput!", table.client_name())); + argument.set_description(format!("Input for updating multiple {} instances", table.client_name())); + argument + }); + + mutation.push_field(field); +} + +fn render_create_mutations<'a>(mutation: &mut Type<'a>, table: TableWalker<'a>) { + // create one + let mut field = Field::new( + format!("{}Create", table.client_name().to_camel_case()), + format!("{}CreatePayload!", table.client_name()), + ); + + field.set_description(format!("Create a single {}", table.client_name())); + + field.push_directive(Directive::new("pgInsertOne")); + + field.push_argument({ + let mut argument = Argument::constant("input", format!("{}CreateInput!", table.client_name())); + argument.set_description(format!("Input for creating a single {}", table.client_name())); + argument + }); + + mutation.push_field(field); + + // create many + let mut field = Field::new( + format!("{}CreateMany", table.client_name().to_camel_case()), + format!("{}CreateManyPayload!", table.client_name()), + ); + + field.set_description(format!( + "Create multiple {}", + table.client_name().to_camel_case().to_plural() + )); + + field.push_directive(Directive::new("pgInsertMany")); + + field.push_argument({ + let mut argument = Argument::constant("input", format!("[{}CreateInput!]!", table.client_name())); + argument.set_description(format!("Input for creating multiple {} instances", table.client_name())); + argument + }); + + mutation.push_field(field); +} diff --git a/crates/postgres-introspection/src/render/output_types.rs b/crates/postgres-introspection/src/render/output_types.rs new file mode 100644 index 00000000..2cc6d29c --- /dev/null +++ b/crates/postgres-introspection/src/render/output_types.rs @@ -0,0 +1,198 @@ +use grafbase_database_definition::{DatabaseDefinition, TableWalker}; +use inflector::Inflector; + +use super::ast::{ + directive::{Argument, Directive}, + field::Field, + schema::Schema, + r#type::Type, +}; + +pub fn render<'a>(database_definition: 
&'a DatabaseDefinition, rendered: &mut Schema<'a>) { + render_page_info(rendered); + + for table in database_definition.tables().filter(|t| t.allowed_in_client()) { + let returning_type = render_returning_type(rendered, table); + + render_mutation_types(rendered, table, returning_type); + render_edge(rendered, table); + render_connection(rendered, table); + } +} + +fn render_connection<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let mut r#type = Type::new(format!("{}Connection", table.client_name())); + r#type.set_description(format!("The connection type for {}", table.client_name())); + + r#type.push_directive({ + let mut directive = Directive::new("pgConnection"); + directive.push_argument(Argument::string("type", table.client_name())); + directive + }); + + r#type.push_field({ + let mut field = Field::new("edges", format!("[{}Edge!]!", table.client_name())); + field.set_description("A list of edges"); + field + }); + + r#type.push_field({ + let mut field = Field::new("pageInfo", "PageInfo!"); + field.set_description("Information to aid in pagination"); + field + }); + + rendered.push_type(r#type); +} + +fn render_edge<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) { + let mut r#type = Type::new(format!("{}Edge", table.client_name())); + r#type.set_description("An edge in a connection. 
Contains the node and its cursor"); + + r#type.push_field({ + let mut field = Field::new("node", format!("{}!", table.client_name())); + field.set_description("The item at the end of the edge"); + field + }); + + r#type.push_field({ + let mut field = Field::new("cursor", "String!"); + field.set_description("A cursor for use in pagination"); + field + }); + + rendered.push_type(r#type); +} + +fn render_mutation_types<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>, returning_type: String) { + let mutations = [ + ( + format!("{}CreatePayload", table.client_name()), + format!("Return type when creating one {}", table.client_name()), + returning_type.clone(), + table.client_name(), + ), + ( + format!("{}CreateManyPayload", table.client_name()), + format!( + "Return type when creating many {}", + table.client_name().to_plural().to_camel_case() + ), + format!("[{returning_type}]!"), + table.client_name(), + ), + ( + format!("{}UpdatePayload", table.client_name()), + format!("Return type when updating one {}", table.client_name()), + returning_type.clone(), + table.client_name(), + ), + ( + format!("{}UpdateManyPayload", table.client_name()), + format!( + "Return type when updating many {}", + table.client_name().to_plural().to_camel_case() + ), + format!("[{returning_type}]!"), + table.client_name(), + ), + ( + format!("{}DeletePayload", table.client_name()), + format!("Return type when deleting one {}", table.client_name()), + returning_type.clone(), + table.client_name(), + ), + ( + format!("{}DeleteManyPayload", table.client_name()), + format!( + "Return type when deleting many {}", + table.client_name().to_plural().to_camel_case() + ), + format!("[{returning_type}]!"), + table.client_name(), + ), + ]; + + for (type_name, type_description, returning_type, target) in mutations { + let mut r#type = Type::new(type_name); + + r#type.set_description(type_description); + + r#type.push_directive({ + let mut directive = Directive::new("pgMutation"); + 
directive.push_argument(Argument::string("type", target)); + directive + }); + + r#type.push_field({ + let mut field = Field::new("returning", returning_type); + field.set_description("Returned item(s) from the mutation"); + field + }); + + r#type.push_field({ + let mut field = Field::new("rowCount", "Int!"); + field.set_description("The number of rows mutated"); + field + }); + + rendered.push_type(r#type); + } +} + +fn render_returning_type<'a>(rendered: &mut Schema<'a>, table: TableWalker<'a>) -> String { + let returning_type = format!("{}Returning", table.client_name()); + let mut r#type = Type::new(returning_type.clone()); + + r#type.set_description(format!( + "Return type containing fields of the mutated or created {} object", + table.client_name() + )); + + r#type.push_directive({ + let mut directive = Directive::new("pgReturning"); + directive.push_argument(Argument::string("type", table.client_name())); + directive + }); + + for column in table.columns() { + let mut field = Field::new(column.client_name(), column.client_type(None).unwrap()); + field.set_description(format!("The value of the {} field", column.client_name())); + r#type.push_field(field); + } + + rendered.push_type(r#type); + returning_type +} + +fn render_page_info(rendered: &mut Schema<'_>) { + let mut r#type = Type::new("PageInfo"); + + r#type.set_description("Information about pagination in a collection of objects"); + + r#type.push_field({ + let mut field = Field::new("hasPreviousPage", "Boolean!"); + field.set_description("When paginating backwards, are there more items?"); + field + }); + + r#type.push_field({ + let mut field = Field::new("hasNextPage", "Boolean!"); + field.set_description("When paginating forwards, are there more items?"); + field + }); + + r#type.push_field({ + let mut field = Field::new("startCursor", "String!"); + field.set_description("The cursor of the first item in the page"); + field + }); + + r#type.push_field({ + let mut field = Field::new("endCursor", 
"String!"); + field.set_description("The cursor of the last item in the page"); + field + }); + + rendered.push_type(r#type); +} diff --git a/crates/postgres-introspection/src/render/query.rs b/crates/postgres-introspection/src/render/query.rs new file mode 100644 index 00000000..916bc7ea --- /dev/null +++ b/crates/postgres-introspection/src/render/query.rs @@ -0,0 +1,82 @@ +use grafbase_database_definition::DatabaseDefinition; +use inflector::Inflector; + +use super::ast::{ + directive::{Argument, Directive}, + field::Field, + schema::Schema, + r#type::Type, +}; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, rendered: &mut Schema<'a>) { + let mut query = Type::new("Query"); + + for table in database_definition.tables().filter(|t| t.allowed_in_client()) { + let mut field = Field::new(table.client_name().to_camel_case(), table.client_name()); + + field.push_directive(Directive::new("pgSelectOne")); + field.set_description(format!("Query a unique {}", table.client_name())); + + field.push_argument({ + let mut argument = Argument::constant("lookup", format!("{}LookupInput!", table.client_name())); + argument.set_description(format!("Input for unique {} lookup", table.client_name())); + argument + }); + + query.push_field(field); + + let mut field = Field::new( + table.client_name().to_plural().to_camel_case(), + format!("{}Connection!", table.client_name()), + ); + + field.set_description(format!( + "Query and paginate multiple {}", + table.client_name().to_camel_case().to_plural() + )); + + field.push_directive(Directive::new("pgSelectMany")); + + field.push_argument({ + let mut argument = Argument::constant("filter", format!("{}FilterInput", table.client_name())); + argument.set_description(format!("Filter for {}", table.client_name())); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("first", "Int"); + argument.set_description("Limit the number of results, from the beginning"); + argument + }); + + 
field.push_argument({ + let mut argument = Argument::constant("last", "Int"); + argument.set_description("Limit the number of results, from the end"); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("before", "String"); + argument + .set_description("Cursor for pagination, select items before the cursor. Use together with `last`."); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("after", "String"); + argument + .set_description("Cursor for pagination, select items after the cursor. Use together with `first`."); + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("orderBy", format!("[{}!]", table.order_by_input_name())); + argument.set_description("Order the results by selected fields"); + argument + }); + + query.push_field(field); + } + + rendered.push_type(query); +} diff --git a/crates/postgres-introspection/src/render/scalars.rs b/crates/postgres-introspection/src/render/scalars.rs new file mode 100644 index 00000000..3ad51bc3 --- /dev/null +++ b/crates/postgres-introspection/src/render/scalars.rs @@ -0,0 +1,27 @@ +use super::ast::{scalar::Scalar, schema::Schema}; + +pub(super) fn render(rendered: &mut Schema) { + rendered.push_scalar({ + let mut scalar = Scalar::new("JSON"); + scalar.set_description("JSON data type"); + scalar + }); + + rendered.push_scalar({ + let mut scalar = Scalar::new("Bytes"); + scalar.set_description("Binary data type"); + scalar + }); + + rendered.push_scalar({ + let mut scalar = Scalar::new("BigInt"); + scalar.set_description("Big integer data type"); + scalar + }); + + rendered.push_scalar({ + let mut scalar = Scalar::new("Decimal"); + scalar.set_description("Decimal data type"); + scalar + }); +} diff --git a/crates/postgres-introspection/src/render/schema_directives.rs b/crates/postgres-introspection/src/render/schema_directives.rs new file mode 100644 index 00000000..51d61e52 --- /dev/null +++ 
b/crates/postgres-introspection/src/render/schema_directives.rs @@ -0,0 +1,55 @@ +use grafbase_database_definition::DatabaseDefinition; + +use super::ast::{ + directive::{Argument, ArgumentValue, Directive}, + schema::Schema, +}; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, extension_url: &'a str, rendered: &mut Schema<'a>) { + rendered.push_directive({ + let mut directive = Directive::new("link"); + + directive.render_multiline(); + directive.push_argument(Argument::string("url", extension_url)); + + let import = vec![ + ArgumentValue::String("@pgDatabase".into()), + ArgumentValue::String("@pgTable".into()), + ArgumentValue::String("@pgColumn".into()), + ArgumentValue::String("@pgEnum".into()), + ArgumentValue::String("@pgEnumVariant".into()), + ArgumentValue::String("@pgRelation".into()), + ArgumentValue::String("@pgKey".into()), + ArgumentValue::String("@pgSelectOne".into()), + ArgumentValue::String("@pgSelectMany".into()), + ArgumentValue::String("@pgInsertOne".into()), + ArgumentValue::String("@pgInsertMany".into()), + ArgumentValue::String("@pgUpdateOne".into()), + ArgumentValue::String("@pgUpdateMany".into()), + ArgumentValue::String("@pgDeleteOne".into()), + ArgumentValue::String("@pgDeleteMany".into()), + ArgumentValue::String("@pgConnection".into()), + ArgumentValue::String("@pgMutation".into()), + ArgumentValue::String("@pgReturning".into()), + ArgumentValue::String("PgKeyType".into()), + ArgumentValue::String("PgColumnType".into()), + ]; + + directive.push_argument(Argument::new( + "import", + ArgumentValue::MultiLineArray { + indent: " ", + values: import, + }, + )); + + directive + }); + + rendered.push_directive({ + let mut directive = Directive::new("pgDatabase"); + directive.push_argument(Argument::string("name", database_definition.name())); + + directive + }); +} diff --git a/crates/postgres-introspection/src/render/tables.rs b/crates/postgres-introspection/src/render/tables.rs new file mode 100644 index 00000000..97ed5853 
--- /dev/null +++ b/crates/postgres-introspection/src/render/tables.rs @@ -0,0 +1,197 @@ +use grafbase_database_definition::{DatabaseDefinition, DatabaseType, RelationWalker, TableColumnWalker, TableWalker}; + +use super::ast::{ + directive::{Argument, ArgumentValue, Directive}, + field::Field, + schema::Schema, + r#type::Type, +}; + +pub fn render<'a>(database_definition: &'a DatabaseDefinition, default_schema: &str, rendered: &mut Schema<'a>) { + for table in database_definition.tables().filter(|t| t.allowed_in_client()) { + let mut render = Type::new(table.client_name()); + render_directives(&mut render, default_schema, table); + + for column in table.columns() { + render_column(&mut render, table, column); + } + + for relation in table.relations() { + render_relation(&mut render, relation); + } + + if let Some(description) = table.description() { + render.set_description(description); + } + + rendered.push_type(render); + } +} + +fn render_relation<'a>(render: &mut Type<'a>, relation: RelationWalker<'a>) { + let mut field = Field::new(relation.client_field_name(), relation.client_type()); + + field.push_directive({ + let mut directive = Directive::new("pgRelation"); + + directive.push_argument(Argument::string("name", relation.name())); + + if relation.is_referencing_side() { + let mut fields = Vec::new(); + let mut references = Vec::new(); + + for column in relation.referencing_columns() { + fields.push(ArgumentValue::String(column.client_name().into())); + } + + for column in relation.referenced_columns() { + references.push(ArgumentValue::String(column.client_name().into())); + } + + directive.push_argument(Argument::new("fields", ArgumentValue::Array(fields))); + directive.push_argument(Argument::new("references", ArgumentValue::Array(references))); + } + + directive + }); + + if !relation.is_other_side_one() { + field.push_argument({ + let mut argument = Argument::constant( + "filter", + format!("{}FilterInput", relation.referenced_table().client_name()), 
+ ); + + argument.set_description(format!( + "Filter the related {} instances", + relation.referenced_table().client_name() + )); + + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("first", "Int"); + + argument.set_description(format!( + "Select the first {} instances", + relation.referenced_table().client_name() + )); + + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("last", "Int"); + + argument.set_description(format!( + "Select the last {} instances", + relation.referenced_table().client_name() + )); + + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("before", "String"); + + argument.set_description(format!( + "Select the {} instances before the given cursor", + relation.referenced_table().client_name() + )); + + argument + }); + + field.push_argument({ + let mut argument = Argument::constant("after", "String"); + + argument.set_description(format!( + "Select the {} instances after the given cursor", + relation.referenced_table().client_name() + )); + + argument + }); + + field.push_argument({ + let mut argument = Argument::constant( + "orderBy", + format!("[{}OrderByInput!]", relation.referenced_table().client_name()), + ); + + argument.set_description(format!( + "Order the {} instances by the given fields", + relation.referenced_table().client_name() + )); + + argument + }); + } + + if let Some(description) = relation.description() { + field.set_description(description); + } + + render.push_field(field); +} + +fn render_column<'a>(render: &mut Type<'a>, table: TableWalker<'a>, column: TableColumnWalker<'a>) { + let Some(client_type) = column.client_type(None) else { + return; + }; + + let mut field = Field::new(column.client_name(), client_type); + + field.push_directive({ + let mut directive = Directive::new("pgColumn"); + + directive.push_argument(Argument::string("name", column.database_name())); + directive.push_argument(Argument::constant("type", 
column.database_type().as_str())); + + if let DatabaseType::Enum(r#enum) = column.database_type() { + if r#enum.schema() != table.schema() { + directive.push_argument(Argument::string("enumSchema", r#enum.schema())); + } + } + + directive + }); + + if let Some(description) = column.description() { + field.set_description(description); + } + + render.push_field(field); +} + +fn render_directives<'a>(render: &mut Type<'a>, default_schema: &str, table: TableWalker<'a>) { + render.push_directive({ + let mut directive = Directive::new("pgTable"); + directive.push_argument(Argument::string("name", table.database_name())); + + if table.schema() != default_schema { + directive.push_argument(Argument::string("schema", table.schema())); + } + + directive + }); + + for key in table.keys() { + let mut directive = Directive::new("pgKey"); + + let fields = key + .columns() + .map(|c| ArgumentValue::String(c.table_column().client_name().into())) + .collect(); + + directive.push_argument(Argument::new("fields", ArgumentValue::Array(fields))); + + directive.push_argument(Argument::constant( + "type", + if key.is_primary() { "PRIMARY" } else { "UNIQUE" }, + )); + + render.push_directive(directive); + } +} diff --git a/crates/postgres-introspection/src/schemas.rs b/crates/postgres-introspection/src/schemas.rs new file mode 100644 index 00000000..d6902ce4 --- /dev/null +++ b/crates/postgres-introspection/src/schemas.rs @@ -0,0 +1,20 @@ +use grafbase_database_definition::DatabaseDefinition; +use sqlx::{PgConnection, Row}; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + let query = "SELECT nspname AS name FROM pg_namespace WHERE nspname <> ALL ($1) ORDER BY name"; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + for row in rows { + database_definition.push_schema(row.get(0)); + } + + Ok(()) +} diff --git 
a/crates/postgres-introspection/src/tables.rs b/crates/postgres-introspection/src/tables.rs new file mode 100644 index 00000000..b0d8089e --- /dev/null +++ b/crates/postgres-introspection/src/tables.rs @@ -0,0 +1,41 @@ +use grafbase_database_definition::{DatabaseDefinition, Table}; +use sqlx::{PgConnection, Row}; + +pub(crate) async fn introspect_database( + conn: &mut PgConnection, + database_definition: &mut DatabaseDefinition, +) -> anyhow::Result<()> { + let query = indoc::indoc! {r#" + SELECT + pg_class.relname AS name, + pg_namespace.nspname AS schema, + pg_description.description AS description + FROM pg_class + INNER JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace + LEFT JOIN pg_description ON pg_description.objoid = pg_class.oid AND pg_description.objsubid = 0 + WHERE pg_class.relkind = 'r' -- r = relation, e.g. a table + AND pg_namespace.nspname <> ALL ( $1 ) + ORDER BY schema, name; + "#}; + + let rows = sqlx::query(query) + .bind(super::blocked_schemas()) + .fetch_all(conn) + .await?; + + for row in rows { + let Some(schema_id) = database_definition.get_schema_id(row.get(1)) else { + continue; + }; + + let mut table = Table::::new(schema_id, row.get(0), None); + + if let Some(description) = row.get(2) { + table.set_description(description); + } + + database_definition.push_table(table); + } + + Ok(()) +} diff --git a/crates/sql-ast/Cargo.toml b/crates/sql-ast/Cargo.toml new file mode 100644 index 00000000..9b2f9799 --- /dev/null +++ b/crates/sql-ast/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "sql-ast" +version = "0.1.0" +edition.workspace = true +license.workspace = true +homepage.workspace = true +keywords.workspace = true +repository.workspace = true + +[dependencies] +grafbase-sdk.workspace = true + +[lints] +workspace = true diff --git a/crates/sql-ast/src/ast.rs b/crates/sql-ast/src/ast.rs new file mode 100644 index 00000000..d9ff6b52 --- /dev/null +++ b/crates/sql-ast/src/ast.rs @@ -0,0 +1,46 @@ +//! 
An abstract syntax tree for SQL queries. + +mod case; +mod column; +mod common_table_expression; +mod compare; +mod conditions; +mod conjunctive; +mod delete; +mod expression; +mod function; +mod grouping; +mod insert; +mod join; +mod ops; +mod ordering; +mod over; +mod query; +mod row; +mod select; +mod table; +mod update; +mod values; + +pub use case::Case; +pub use column::Column; +pub use common_table_expression::CommonTableExpression; +pub use compare::{Comparable, Compare, JsonCompare, JsonType}; +pub use conditions::ConditionTree; +pub use conjunctive::*; +pub use delete::Delete; +pub use expression::*; +pub use function::*; +pub use grouping::*; +pub use insert::*; +pub use join::{Join, JoinData, Joinable}; +pub use ops::*; +pub use ordering::*; +pub use ordering::{IntoOrderDefinition, Order, OrderDefinition, Ordering}; +pub use over::*; +pub use query::Query; +pub use row::Row; +pub use select::Select; +pub use table::*; +pub use update::*; +pub use values::Values; diff --git a/crates/sql-ast/src/ast/case.rs b/crates/sql-ast/src/ast/case.rs new file mode 100644 index 00000000..c3a104bd --- /dev/null +++ b/crates/sql-ast/src/ast/case.rs @@ -0,0 +1,42 @@ +use super::Expression; + +#[derive(Clone, Debug, Default)] +pub struct CaseBuilder<'a> { + when: Vec>, +} + +#[derive(Clone, Debug, PartialEq)] +pub struct Case<'a> { + pub(crate) when: Vec>, + pub(crate) r#else: Box>, +} + +impl<'a> Case<'a> { + pub fn builder() -> CaseBuilder<'a> { + CaseBuilder::default() + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct When<'a> { + pub(crate) condition: Expression<'a>, + pub(crate) result: Expression<'a>, +} + +impl<'a> CaseBuilder<'a> { + pub fn when(mut self, condition: impl Into>, result: impl Into>) -> Self { + self.when.push(When { + condition: condition.into(), + result: result.into(), + }); + + self + } + + pub fn r#else(self, expression: impl Into>) -> Case<'a> { + Case { + when: self.when, + r#else: Box::new(expression.into()), + } + } +} diff --git 
a/crates/sql-ast/src/ast/column.rs b/crates/sql-ast/src/ast/column.rs new file mode 100644 index 00000000..2677c3de --- /dev/null +++ b/crates/sql-ast/src/ast/column.rs @@ -0,0 +1,98 @@ +use super::Aliasable; +use crate::ast::{Expression, ExpressionKind, Table}; +use std::borrow::Cow; + +/// A column definition. +#[derive(Clone, Debug, Default)] +pub struct Column<'a> { + pub name: Cow<'a, str>, + pub(crate) table: Option>, + pub(crate) alias: Option>, +} + +/// Defines a default value for a `Column`. +impl PartialEq for Column<'_> { + fn eq(&self, other: &Column) -> bool { + self.name == other.name && self.table == other.table + } +} + +impl<'a> From> for Expression<'a> { + fn from(col: Column<'a>) -> Self { + Expression { + kind: ExpressionKind::Column(Box::new(col)), + alias: None, + } + } +} + +impl<'a> Column<'a> { + /// Create a column definition. + pub fn new(name: S) -> Self + where + S: Into>, + { + Column { + name: name.into(), + ..Default::default() + } + } + + /// Include the table name in the column expression. 
+ pub fn table(mut self, table: T) -> Self + where + T: Into>, + { + self.table = Some(table.into()); + self + } +} + +impl<'a> Aliasable<'a> for Column<'a> { + type Target = Column<'a>; + + fn alias(mut self, alias: T) -> Self::Target + where + T: Into>, + { + self.alias = Some(alias.into()); + self + } +} + +impl<'a> From<&'a str> for Column<'a> { + fn from(s: &'a str) -> Self { + Column { + name: s.into(), + ..Default::default() + } + } +} + +impl<'a, 'b> From<&'a &'b str> for Column<'b> { + fn from(s: &'a &'b str) -> Self { + Column::from(*s) + } +} + +impl From for Column<'_> { + fn from(s: String) -> Self { + Column { + name: s.into(), + ..Default::default() + } + } +} + +impl<'a, T, C> From<(T, C)> for Column<'a> +where + T: Into>, + C: Into>, +{ + fn from(t: (T, C)) -> Column<'a> { + let mut column: Column<'a> = t.1.into(); + column = column.table(t.0); + + column + } +} diff --git a/crates/sql-ast/src/ast/common_table_expression.rs b/crates/sql-ast/src/ast/common_table_expression.rs new file mode 100644 index 00000000..b5f31447 --- /dev/null +++ b/crates/sql-ast/src/ast/common_table_expression.rs @@ -0,0 +1,18 @@ +use std::borrow::Cow; + +use super::Query; + +#[derive(Debug, PartialEq, Clone)] +pub struct CommonTableExpression<'a> { + pub(crate) name: Cow<'a, str>, + pub(crate) query: Query<'a>, +} + +impl<'a> CommonTableExpression<'a> { + pub fn new(name: impl Into>, query: impl Into>) -> Self { + Self { + name: name.into(), + query: query.into(), + } + } +} diff --git a/crates/sql-ast/src/ast/compare.rs b/crates/sql-ast/src/ast/compare.rs new file mode 100644 index 00000000..4736e855 --- /dev/null +++ b/crates/sql-ast/src/ast/compare.rs @@ -0,0 +1,512 @@ +use super::ExpressionKind; +use crate::ast::{Column, ConditionTree, Expression}; +use std::borrow::Cow; + +/// For modeling comparison expressions. 
+#[derive(Debug, Clone, PartialEq)] +pub enum Compare<'a> { + /// `left = right` + Equals(Box>, Box>), + /// `left <> right` + NotEquals(Box>, Box>), + /// `left < right` + LessThan(Box>, Box>), + /// `left <= right` + LessThanOrEquals(Box>, Box>), + /// `left > right` + GreaterThan(Box>, Box>), + /// `left >= right` + GreaterThanOrEquals(Box>, Box>), + /// `left IN (..)` + In(Box>, Box>), + /// `left NOT IN (..)` + NotIn(Box>, Box>), + /// `left = ANY (..)` + AnySelection(Box>, Box>), + /// `left <> ALL (..)` + NotAllSelection(Box>, Box>), + /// `left LIKE %..%` + Like(Box>, Box>), + /// `left NOT LIKE %..%` + NotLike(Box>, Box>), + /// `value IS NULL` + Null(Box>), + /// `value IS NOT NULL` + NotNull(Box>), + /// `value` BETWEEN `left` AND `right` + Between(Box>, Box>, Box>), + /// `value` NOT BETWEEN `left` AND `right` + NotBetween(Box>, Box>, Box>), + /// Raw comparator, allows to use an operator `left right` as is, + /// without visitor transformation in between. + Raw(Box>, Cow<'a, str>, Box>), + /// All json related comparators + Json(JsonCompare<'a>), + /// ANY (`left`) + Any(Box>), + /// ALL (`left`) + All(Box>), +} + +#[derive(Debug, Clone, PartialEq)] +pub enum JsonCompare<'a> { + ArrayOverlaps(Box>, Box>), + ArrayContains(Box>, Box>), + ArrayContained(Box>, Box>), + ArrayNotContains(Box>, Box>), + TypeEquals(Box>, JsonType<'a>), + TypeNotEquals(Box>, JsonType<'a>), +} + +#[derive(Debug, Clone, PartialEq)] +pub enum JsonType<'a> { + Array, + Object, + String, + Number, + Boolean, + Null, + ColumnRef(Box>), +} + +impl<'a> From> for JsonType<'a> { + fn from(col: Column<'a>) -> Self { + JsonType::ColumnRef(Box::new(col)) + } +} + +impl<'a> From> for ConditionTree<'a> { + fn from(cmp: Compare<'a>) -> Self { + ConditionTree::single(Expression::from(cmp)) + } +} + +impl<'a> From> for Expression<'a> { + fn from(cmp: Compare<'a>) -> Self { + Expression { + kind: ExpressionKind::Compare(cmp), + alias: None, + } + } +} + +/// An item that can be compared against 
other values in the database. +pub trait Comparable<'a> { + /// Tests if both sides are the same value. + fn equals(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if both sides are not the same value. + fn not_equals(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is smaller than the right side. + fn less_than(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is smaller than the right side or the same. + fn less_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is bigger than the right side. + fn greater_than(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is bigger than the right side or the same. + fn greater_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is included in the right side collection. + fn in_selection(self, selection: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is equal to any element on the right side collection. + /// Corresponds to `left = ANY (right)`. + fn any_selection(self, selection: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is not equal to any element on the right side collection. + /// Corresponds to `left <> ALL (right)`. + fn not_all_selection(self, selection: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is not included in the right side collection. + fn not_in_selection(self, selection: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side includes the right side string. + fn like(self, pattern: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side does not include the right side string. + fn not_like(self, pattern: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the left side is `NULL`. 
+ #[allow(clippy::wrong_self_convention)] + fn is_null(self) -> Compare<'a>; + + /// Tests if the left side is not `NULL`. + #[allow(clippy::wrong_self_convention)] + fn is_not_null(self) -> Compare<'a>; + + /// Tests if the value is between two given values. + fn between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>; + + /// Tests if the value is not between two given values. + fn not_between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>; + + /// Tests if the array overlaps with another array. + fn array_overlaps(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the array contains another array. + fn array_contains(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array contains a value. + fn array_contained(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array does not contain a value. + fn json_array_not_contains(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array starts with a value. + fn json_array_begins_with(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array does not start with a value. + fn json_array_not_begins_with(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array ends with a value. + fn json_array_ends_into(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON array does not end with a value. + fn json_array_not_ends_into(self, item: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON value is of a certain type. + fn json_type_equals(self, json_type: T) -> Compare<'a> + where + T: Into>; + + /// Tests if the JSON value is not of a certain type. + fn json_type_not_equals(self, json_type: T) -> Compare<'a> + where + T: Into>; + + /// Matches at least one elem of a list of values. + fn any(self) -> Compare<'a>; + + /// Matches all elem of a list of values. 
+ fn all(self) -> Compare<'a>; + + /// Compares two expressions with a custom operator. + fn compare_raw(self, raw_comparator: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>; +} + +impl<'a, U> Comparable<'a> for U +where + U: Into>, +{ + fn equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.equals(comparison) + } + + fn not_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.not_equals(comparison) + } + + fn less_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.less_than(comparison) + } + + fn less_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.less_than_or_equals(comparison) + } + + fn greater_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.greater_than(comparison) + } + + fn greater_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.greater_than_or_equals(comparison) + } + + fn in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.in_selection(selection) + } + + fn not_in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.not_in_selection(selection) + } + + fn like(self, pattern: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.like(pattern) + } + + fn not_like(self, 
pattern: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.not_like(pattern) + } + + #[allow(clippy::wrong_self_convention)] + fn is_null(self) -> Compare<'a> { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.is_null() + } + + #[allow(clippy::wrong_self_convention)] + fn is_not_null(self) -> Compare<'a> { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.is_not_null() + } + + fn between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.between(left, right) + } + + fn not_between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.not_between(left, right) + } + + fn compare_raw(self, raw_comparator: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let left: Column<'a> = self.into(); + let left: Expression<'a> = left.into(); + let right: Expression<'a> = right.into(); + + left.compare_raw(raw_comparator.into(), right) + } + + fn array_overlaps(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.array_overlaps(item) + } + + fn array_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.array_contains(item) + } + + fn array_contained(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.array_contained(item) + } + + fn json_array_not_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_array_not_contains(item) + } + + fn 
json_array_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_array_begins_with(item) + } + + fn json_array_not_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_array_not_begins_with(item) + } + + fn json_array_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_array_ends_into(item) + } + + fn json_array_not_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_array_not_ends_into(item) + } + + fn json_type_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_type_equals(json_type) + } + + fn json_type_not_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.json_type_not_equals(json_type) + } + + fn any(self) -> Compare<'a> { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.any() + } + + fn all(self) -> Compare<'a> { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + + val.all() + } + + fn any_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.any_selection(selection) + } + + fn not_all_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let col: Column<'a> = self.into(); + let val: Expression<'a> = col.into(); + val.not_all_selection(selection) + } +} diff --git a/crates/sql-ast/src/ast/conditions.rs b/crates/sql-ast/src/ast/conditions.rs new file mode 100644 index 
00000000..b1f01510 --- /dev/null +++ b/crates/sql-ast/src/ast/conditions.rs @@ -0,0 +1,105 @@ +use crate::ast::{Expression, ExpressionKind, Select}; + +use super::Table; + +/// Tree structures and leaves for condition building. +#[derive(Debug, PartialEq, Clone, Default)] +pub enum ConditionTree<'a> { + /// `(left_expression AND right_expression)` + And(Vec>), + /// `(left_expression OR right_expression)` + Or(Vec>), + /// `(NOT expression)` + Not(Box>), + /// A single expression leaf + Single(Box>), + /// A leaf that does nothing to the condition, `1=1` + #[default] + NoCondition, + /// A leaf that cancels the condition, `1=0` + #[allow(dead_code)] // some day... + NegativeCondition, + /// Exists condition + Exists(Box>), +} + +impl<'a> ConditionTree<'a> { + /// An `AND` statement, is true when both sides are true. + pub fn and(mut self, other: E) -> ConditionTree<'a> + where + E: Into>, + { + match self { + Self::And(ref mut conditions) => { + conditions.push(other.into()); + self + } + Self::Single(expr) => Self::And(vec![*expr, other.into()]), + _ => Self::And(vec![Expression::from(self), other.into()]), + } + } + + /// An `OR` statement, is true when one side is true. + pub fn or(mut self, other: E) -> ConditionTree<'a> + where + E: Into>, + { + match self { + Self::Or(ref mut conditions) => { + conditions.push(other.into()); + self + } + Self::Single(expr) => Self::Or(vec![*expr, other.into()]), + _ => Self::Or(vec![Expression::from(self), other.into()]), + } + } + + /// A `NOT` statement, is true when the expression is false. + pub fn not(left: E) -> ConditionTree<'a> + where + E: Into>, + { + ConditionTree::Not(Box::new(left.into())) + } + + /// A single leaf, is true when the expression is true. + pub fn single(left: E) -> ConditionTree<'a> + where + E: Into>, + { + ConditionTree::Single(Box::new(left.into())) + } + + /// True if a nested table has any values. 
+ pub fn exists(select: E) -> ConditionTree<'a> + where + E: Into>, + { + ConditionTree::Exists(Box::new(select.into())) + } + + /// Inverts the entire condition tree if condition is met. + pub fn invert_if(self, invert: bool) -> ConditionTree<'a> { + if invert { Self::not(self) } else { self } + } +} + +impl<'a> From> for Expression<'a> { + fn from(ct: ConditionTree<'a>) -> Self { + Expression { + kind: ExpressionKind::ConditionTree(ct), + alias: None, + } + } +} + +impl<'a> From> for ConditionTree<'a> { + fn from(sel: Select<'a>) -> Self { + let exp = Expression { + kind: ExpressionKind::Value(Box::new(sel.into())), + alias: None, + }; + + ConditionTree::single(exp) + } +} diff --git a/crates/sql-ast/src/ast/conjunctive.rs b/crates/sql-ast/src/ast/conjunctive.rs new file mode 100644 index 00000000..0cef0281 --- /dev/null +++ b/crates/sql-ast/src/ast/conjunctive.rs @@ -0,0 +1,40 @@ +use crate::ast::{ConditionTree, Expression}; + +/// `AND`, `OR` and `NOT` conjunctive implementations. +pub trait Conjunctive<'a> { + /// Builds an `AND` condition having `self` as the left leaf and `other` as the right. + fn and(self, other: E) -> ConditionTree<'a> + where + E: Into>; + + /// Builds an `OR` condition having `self` as the left leaf and `other` as the right. + fn or(self, other: E) -> ConditionTree<'a> + where + E: Into>; + + /// Builds a `NOT` condition having `self` as the condition. 
+ fn not(self) -> ConditionTree<'a>; +} + +impl<'a, T> Conjunctive<'a> for T +where + T: Into>, +{ + fn and(self, other: E) -> ConditionTree<'a> + where + E: Into>, + { + ConditionTree::And(vec![self.into(), other.into()]) + } + + fn or(self, other: E) -> ConditionTree<'a> + where + E: Into>, + { + ConditionTree::Or(vec![self.into(), other.into()]) + } + + fn not(self) -> ConditionTree<'a> { + ConditionTree::not(self.into()) + } +} diff --git a/crates/sql-ast/src/ast/delete.rs b/crates/sql-ast/src/ast/delete.rs new file mode 100644 index 00000000..484f7390 --- /dev/null +++ b/crates/sql-ast/src/ast/delete.rs @@ -0,0 +1,85 @@ +use crate::ast::{ConditionTree, Query, Table}; + +use super::Expression; + +#[derive(Debug, PartialEq, Clone)] +/// A builder for a `DELETE` statement. +pub struct Delete<'a> { + pub(crate) table: Table<'a>, + pub(crate) conditions: Option>, + pub(crate) returning: Option>>, +} + +impl<'a> From> for Query<'a> { + fn from(delete: Delete<'a>) -> Self { + Query::Delete(Box::new(delete)) + } +} + +impl<'a> Delete<'a> { + /// Creates a new `DELETE` statement for the given table. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let query = Delete::from_table("users"); + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"DELETE FROM "users""#, sql); + /// # } + /// ``` + pub fn from_table(table: T) -> Self + where + T: Into>, + { + Self { + table: table.into(), + conditions: None, + returning: None, + } + } + + /// Adds `WHERE` conditions to the query. See + /// [Comparable](trait.Comparable.html#required-methods) for more examples. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Delete::from_table("users"); + /// query.so_that("bar".equals(false)); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"DELETE FROM "users" WHERE "bar" = $1"#, sql); + /// assert_eq!(vec![Value::from(false)], params); + /// # } + /// ``` + pub fn so_that(&mut self, conditions: T) + where + T: Into>, + { + self.conditions = Some(conditions.into()); + } + + /// Adds a `RETURNING` definition to the `DELETE` statement. Defines the return + /// value of the query. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Delete::from_table("users"); + /// query.returning([Column::from("id"), Column::from("name")]); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"DELETE FROM "users" RETURNING "id", "name""#, sql); + /// # } + /// ``` + pub fn returning(&mut self, returning: T) + where + T: IntoIterator, + E: Into>, + { + self.returning = Some(returning.into_iter().map(Into::into).collect()); + } +} diff --git a/crates/sql-ast/src/ast/expression.rs b/crates/sql-ast/src/ast/expression.rs new file mode 100644 index 00000000..8e3d2091 --- /dev/null +++ b/crates/sql-ast/src/ast/expression.rs @@ -0,0 +1,395 @@ +use grafbase_sdk::host_io::postgres::types::DatabaseValue; + +use crate::ast::{Aliasable, Column, Comparable, Compare, ConditionTree, Function, Row, Select, SqlOp, Table, Values}; + +use super::{ + Case, + compare::{JsonCompare, JsonType}, +}; +use std::borrow::Cow; + +/// An expression that can be positioned in a query. Can be a single value or a +/// statement that is evaluated into a value. 
+#[derive(Debug, Clone, PartialEq)] +pub struct Expression<'a> { + pub kind: ExpressionKind<'a>, + pub alias: Option>, +} + +impl<'a> Expression<'a> { + pub fn value(value: DatabaseValue) -> Self { + Self { + kind: ExpressionKind::Parameterized(ParameterizedValue { value, enum_type: None }), + alias: None, + } + } + + pub fn enum_value(value: DatabaseValue, enum_type: impl Into>) -> Self { + Self { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value, + enum_type: Some(enum_type.into()), + }), + alias: None, + } + } +} + +impl<'a> Expression<'a> { + /// The type of the expression, dictates how it's implemented in the query. + pub fn kind(&self) -> &ExpressionKind<'a> { + &self.kind + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ParameterizedValue<'a> { + pub value: DatabaseValue, + pub enum_type: Option>, +} + +/// An expression we can compare and use in database queries. +#[derive(Debug, Clone, PartialEq)] +pub enum ExpressionKind<'a> { + /// Anything that we must parameterize before querying + Parameterized(ParameterizedValue<'a>), + /// Will be rendered as-is to the SQL statement. Carefully escape, if needed. + Raw(&'a str), + /// Will be rendered as-is, quoted, to the SQL statement. Carefully escape, if needed. + RawString(&'a str), + /// A database column + Column(Box>), + /// A database column + Table(Box>), + /// Data in a row form, e.g. (1, 2, 3) + Row(Row<'a>), + /// A nested `SELECT` or `SELECT .. UNION` statement + Selection(Box>), + /// A database function call + Function(Box>), + /// A qualified asterisk to a table + Asterisk(Option>>), + /// An operation: sum, sub, mul or div. + Op(Box>), + /// A tree of expressions to evaluate from the deepest value to up + ConditionTree(ConditionTree<'a>), + /// A comparison expression + Compare(Compare<'a>), + /// A single value, column, row or a nested select + Value(Box>), + /// Multiple values + Values(Values<'a>), + /// A case expression + Case(Case<'a>), + /// DEFAULT keyword, e.g. 
for `INSERT INTO ... VALUES (..., DEFAULT, ...)` + Default, +} + +/// A quick alias to create a raw value expression. +pub fn raw(value: &str) -> Expression<'_> { + Expression { + kind: ExpressionKind::Raw(value), + alias: None, + } +} + +pub fn raw_str(value: &str) -> Expression<'_> { + Expression { + kind: ExpressionKind::RawString(value), + alias: None, + } +} + +/// A quick alias to create an asterisk to a table. +pub fn asterisk() -> Expression<'static> { + Expression { + kind: ExpressionKind::Asterisk(None), + alias: None, + } +} + +/// A quick alias to create a default value expression. +pub fn default_value() -> Expression<'static> { + Expression { + kind: ExpressionKind::Default, + alias: None, + } +} + +impl<'a> From> for Expression<'a> { + fn from(f: Function<'a>) -> Self { + Expression { + kind: ExpressionKind::Function(Box::new(f)), + alias: None, + } + } +} + +impl<'a> From> for Expression<'a> { + fn from(p: SqlOp<'a>) -> Self { + Expression { + kind: ExpressionKind::Op(Box::new(p)), + alias: None, + } + } +} + +impl<'a> From> for Expression<'a> { + fn from(value: Values<'a>) -> Self { + Expression { + kind: ExpressionKind::Values(value), + alias: None, + } + } +} + +impl<'a> From> for Expression<'a> { + fn from(value: Row<'a>) -> Self { + Expression { + kind: ExpressionKind::Row(value), + alias: None, + } + } +} + +impl<'a> From> for Expression<'a> { + fn from(value: Table<'a>) -> Self { + Self { + kind: ExpressionKind::Table(Box::new(value)), + alias: None, + } + } +} + +impl<'a> From> for Expression<'a> { + fn from(kind: ExpressionKind<'a>) -> Self { + Self { kind, alias: None } + } +} + +impl<'a> From> for Expression<'a> { + fn from(value: Case<'a>) -> Self { + Self { + kind: ExpressionKind::Case(value), + alias: None, + } + } +} + +impl<'a> Aliasable<'a> for Expression<'a> { + type Target = Expression<'a>; + + fn alias(mut self, alias: T) -> Self::Target + where + T: Into>, + { + self.alias = Some(alias.into()); + self + } +} + +impl<'a> 
Comparable<'a> for Expression<'a> { + fn equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::Equals(Box::new(self), Box::new(comparison.into())) + } + + fn not_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::NotEquals(Box::new(self), Box::new(comparison.into())) + } + + fn less_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::LessThan(Box::new(self), Box::new(comparison.into())) + } + + fn less_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::LessThanOrEquals(Box::new(self), Box::new(comparison.into())) + } + + fn greater_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::GreaterThan(Box::new(self), Box::new(comparison.into())) + } + + fn greater_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + Compare::GreaterThanOrEquals(Box::new(self), Box::new(comparison.into())) + } + + fn in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + Compare::In(Box::new(self), Box::new(selection.into())) + } + + fn not_in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + Compare::NotIn(Box::new(self), Box::new(selection.into())) + } + + fn any_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + Compare::AnySelection(Box::new(self), Box::new(selection.into())) + } + + fn not_all_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + Compare::NotAllSelection(Box::new(self), Box::new(selection.into())) + } + + fn like(self, pattern: T) -> Compare<'a> + where + T: Into>, + { + Compare::Like(Box::new(self), Box::new(pattern.into())) + } + + fn not_like(self, pattern: T) -> Compare<'a> + where + T: Into>, + { + Compare::NotLike(Box::new(self), Box::new(pattern.into())) + } + + #[allow(clippy::wrong_self_convention)] + fn is_null(self) -> Compare<'a> { + Compare::Null(Box::new(self)) + } + + #[allow(clippy::wrong_self_convention)] + fn 
is_not_null(self) -> Compare<'a> { + Compare::NotNull(Box::new(self)) + } + + fn between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + Compare::Between(Box::new(self), Box::new(left.into()), Box::new(right.into())) + } + + fn not_between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + Compare::NotBetween(Box::new(self), Box::new(left.into()), Box::new(right.into())) + } + + fn compare_raw(self, raw_comparator: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + Compare::Raw(Box::new(self), raw_comparator.into(), Box::new(right.into())) + } + + fn array_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::ArrayContains(Box::new(self), Box::new(item.into()))) + } + + fn array_contained(self, item: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::ArrayContained(Box::new(self), Box::new(item.into()))) + } + + fn array_overlaps(self, item: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::ArrayOverlaps(Box::new(self), Box::new(item.into()))) + } + + fn json_array_not_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::ArrayNotContains(Box::new(self), Box::new(item.into()))) + } + + fn json_array_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let array_starts_with: Expression = super::function::json_extract_first_array_elem(self).into(); + + Compare::Equals(Box::new(array_starts_with), Box::new(item.into())) + } + + fn json_array_not_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let array_starts_with: Expression = super::function::json_extract_first_array_elem(self).into(); + + Compare::NotEquals(Box::new(array_starts_with), Box::new(item.into())) + } + + fn json_array_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let array_ends_into: Expression = super::function::json_extract_last_array_elem(self).into(); + + 
Compare::Equals(Box::new(array_ends_into), Box::new(item.into())) + } + + fn json_array_not_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let array_ends_into: Expression = super::function::json_extract_last_array_elem(self).into(); + + Compare::NotEquals(Box::new(array_ends_into), Box::new(item.into())) + } + + fn json_type_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::TypeEquals(Box::new(self), json_type.into())) + } + + fn json_type_not_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + Compare::Json(JsonCompare::TypeNotEquals(Box::new(self), json_type.into())) + } + + fn any(self) -> Compare<'a> { + Compare::Any(Box::new(self)) + } + + fn all(self) -> Compare<'a> { + Compare::All(Box::new(self)) + } +} diff --git a/crates/sql-ast/src/ast/function.rs b/crates/sql-ast/src/ast/function.rs new file mode 100644 index 00000000..3f54fcf3 --- /dev/null +++ b/crates/sql-ast/src/ast/function.rs @@ -0,0 +1,104 @@ +mod aggregate_to_string; +mod average; +mod cast; +mod coalesce; +mod concat; +mod count; +mod encode; +mod json_agg; +mod json_build_object; +mod json_extract; +mod json_extract_array; +mod json_unquote; +mod lower; +mod maximum; +mod minimum; +mod row_number; +mod row_to_json; +mod sum; +mod to_jsonb; +mod unnest; +mod upper; + +pub use aggregate_to_string::*; +pub use average::*; +pub use cast::*; +pub use coalesce::*; +pub use concat::*; +pub use count::*; +pub use encode::*; +pub use json_agg::*; +pub use json_build_object::*; +pub use json_extract::*; +pub(crate) use json_extract_array::*; +pub use json_unquote::*; +pub use lower::*; +pub use maximum::*; +pub use minimum::*; +pub use row_number::*; +pub use row_to_json::*; +pub use sum::*; +pub use to_jsonb::*; +pub use unnest::*; +pub use upper::*; + +use super::Aliasable; +use std::borrow::Cow; + +/// A database function definition +#[derive(Debug, Clone, PartialEq)] +pub struct Function<'a> { + pub(crate) typ_: 
FunctionType<'a>, + pub(crate) alias: Option>, +} + +impl Function<'_> { + pub fn returns_json(&self) -> bool { + matches!( + self.typ_, + FunctionType::RowToJson(_) + | FunctionType::JsonExtract(_) + | FunctionType::JsonExtractLastArrayElem(_) + | FunctionType::JsonExtractFirstArrayElem(_) + | FunctionType::ToJsonb(_) + ) + } +} + +/// A database function type +#[derive(Debug, Clone, PartialEq)] +pub(crate) enum FunctionType<'a> { + Count(Count<'a>), + Cast(Cast<'a>), + AggregateToString(AggregateToString<'a>), + Average(Average<'a>), + Sum(Sum<'a>), + Lower(Lower<'a>), + Upper(Upper<'a>), + Minimum(Minimum<'a>), + Maximum(Maximum<'a>), + Coalesce(Coalesce<'a>), + Concat(Concat<'a>), + JsonExtract(JsonExtract<'a>), + JsonExtractLastArrayElem(JsonExtractLastArrayElem<'a>), + JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>), + JsonUnquote(JsonUnquote<'a>), + RowToJson(RowToJson<'a>), + ToJsonb(ToJsonb<'a>), + JsonbAgg(JsonbAgg<'a>), + Encode(Encode<'a>), + JsonBuildObject(JsonBuildObject<'a>), + Unnest(Unnest<'a>), +} + +impl<'a> Aliasable<'a> for Function<'a> { + type Target = Function<'a>; + + fn alias(mut self, alias: T) -> Self::Target + where + T: Into>, + { + self.alias = Some(alias.into()); + self + } +} diff --git a/crates/sql-ast/src/ast/function/aggregate_to_string.rs b/crates/sql-ast/src/ast/function/aggregate_to_string.rs new file mode 100644 index 00000000..03cfc1d0 --- /dev/null +++ b/crates/sql-ast/src/ast/function/aggregate_to_string.rs @@ -0,0 +1,30 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +/// An aggregate function that concatenates strings from a group into a single +/// string with various options. +pub struct AggregateToString<'a> { + pub(crate) value: Box>, +} + +/// Aggregates the given field into a string. 
+pub fn aggregate_to_string<'a, T>(expr: T) -> Function<'a> +where + T: Into>, +{ + let fun = AggregateToString { + value: Box::new(expr.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: AggregateToString<'a>) -> Self { + Self { + typ_: FunctionType::AggregateToString(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/average.rs b/crates/sql-ast/src/ast/function/average.rs new file mode 100644 index 00000000..e477b1ce --- /dev/null +++ b/crates/sql-ast/src/ast/function/average.rs @@ -0,0 +1,26 @@ +use super::Function; +use crate::ast::{Column, FunctionType}; + +/// A representation of the `AVG` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Average<'a> { + pub(crate) column: Column<'a>, +} + +/// Calculates the average value of a numeric column. +pub fn avg<'a, C>(col: C) -> Function<'a> +where + C: Into>, +{ + let fun = Average { column: col.into() }; + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Average<'a>) -> Self { + Self { + typ_: FunctionType::Average(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/cast.rs b/crates/sql-ast/src/ast/function/cast.rs new file mode 100644 index 00000000..6bc0c93b --- /dev/null +++ b/crates/sql-ast/src/ast/function/cast.rs @@ -0,0 +1,30 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +pub struct Cast<'a> { + pub(crate) expr: Expression<'a>, + pub(crate) target_type: &'static str, +} + +/// Count of the underlying table where the given expression is not null. 
+pub fn cast<'a, T>(expr: T, target_type: &'static str) -> Function<'a> +where + T: Into>, +{ + let fun = Cast { + expr: expr.into(), + target_type, + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Cast<'a>) -> Self { + Self { + typ_: FunctionType::Cast(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/coalesce.rs b/crates/sql-ast/src/ast/function/coalesce.rs new file mode 100644 index 00000000..e17dc1d2 --- /dev/null +++ b/crates/sql-ast/src/ast/function/coalesce.rs @@ -0,0 +1,30 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +/// Returns the first non-null expression +pub struct Coalesce<'a> { + pub(crate) exprs: Vec>, +} + +/// Returns the first non-null argument +pub fn coalesce<'a, T, V>(exprs: V) -> Function<'a> +where + T: Into>, + V: Into>, +{ + let fun = Coalesce { + exprs: exprs.into().into_iter().map(|e| e.into()).collect(), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Coalesce<'a>) -> Self { + Self { + typ_: FunctionType::Coalesce(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/concat.rs b/crates/sql-ast/src/ast/function/concat.rs new file mode 100644 index 00000000..65ce6ba9 --- /dev/null +++ b/crates/sql-ast/src/ast/function/concat.rs @@ -0,0 +1,29 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +/// A representation of the `Concat` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Concat<'a> { + pub(crate) exprs: Vec>, +} + +/// Concat several expressions. 
+pub fn concat<'a, T>(exprs: Vec) -> Function<'a> +where + T: Into>, +{ + let fun = Concat { + exprs: exprs.into_iter().map(Into::into).collect(), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Concat<'a>) -> Self { + Self { + typ_: FunctionType::Concat(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/count.rs b/crates/sql-ast/src/ast/function/count.rs new file mode 100644 index 00000000..c2af7997 --- /dev/null +++ b/crates/sql-ast/src/ast/function/count.rs @@ -0,0 +1,29 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +/// Returns the number of rows that matches a specified criteria. +pub struct Count<'a> { + pub(crate) exprs: Vec>, +} + +/// Count of the underlying table where the given expression is not null. +pub fn count<'a, T>(expr: T) -> Function<'a> +where + T: Into>, +{ + let fun = Count { + exprs: vec![expr.into()], + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Count<'a>) -> Self { + Self { + typ_: FunctionType::Count(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/encode.rs b/crates/sql-ast/src/ast/function/encode.rs new file mode 100644 index 00000000..b5913255 --- /dev/null +++ b/crates/sql-ast/src/ast/function/encode.rs @@ -0,0 +1,36 @@ +use super::Function; +use crate::ast::Expression; + +/// The encode format. +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum EncodeFormat { + Base64, + Escape, + Hex, +} + +/// A representation of the `encode` function in PostgreSQL. +#[derive(Debug, Clone, PartialEq)] +pub struct Encode<'a> { + pub(crate) expression: Expression<'a>, + pub(crate) format: EncodeFormat, +} + +/// Return the given table as JSONB collection. 
+pub fn encode<'a>(expression: impl Into>, format: EncodeFormat) -> Function<'a> { + let fun = Encode { + expression: expression.into(), + format, + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Encode<'a>) -> Self { + Self { + typ_: super::FunctionType::Encode(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/json_agg.rs b/crates/sql-ast/src/ast/function/json_agg.rs new file mode 100644 index 00000000..ec3857cf --- /dev/null +++ b/crates/sql-ast/src/ast/function/json_agg.rs @@ -0,0 +1,34 @@ +use super::Function; +use crate::ast::{Expression, Ordering}; + +#[derive(Debug, Clone, PartialEq)] +/// A representation of the `json_agg` function in PostgreSQL. +pub struct JsonbAgg<'a> { + pub(crate) expression: Expression<'a>, + pub(crate) distinct: bool, + pub(crate) order_by: Option>, +} + +/// Return the given table as JSONB collection. +pub fn jsonb_agg<'a>( + expression: impl Into>, + order_by: Option>, + distinct: bool, +) -> Function<'a> { + let fun = JsonbAgg { + expression: expression.into(), + distinct, + order_by, + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonbAgg<'a>) -> Self { + Self { + typ_: super::FunctionType::JsonbAgg(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/json_build_object.rs b/crates/sql-ast/src/ast/function/json_build_object.rs new file mode 100644 index 00000000..de300fd2 --- /dev/null +++ b/crates/sql-ast/src/ast/function/json_build_object.rs @@ -0,0 +1,32 @@ +use super::{Function, FunctionType}; +use crate::ast::Expression; +use std::borrow::Cow; + +#[derive(Debug, Clone, PartialEq)] +pub struct JsonBuildObject<'a> { + pub(crate) values: Vec<(Cow<'a, str>, Expression<'a>)>, +} + +pub fn json_build_object<'a, S, E>(values: impl IntoIterator) -> Function<'a> +where + S: Into>, + E: Into>, +{ + let values = values + .into_iter() + .map(|(name, value)| (name.into(), value.into())) + .collect(); + + let function = 
JsonBuildObject { values }; + + function.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonBuildObject<'a>) -> Self { + Self { + typ_: FunctionType::JsonBuildObject(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/json_extract.rs b/crates/sql-ast/src/ast/function/json_extract.rs new file mode 100644 index 00000000..d99919d9 --- /dev/null +++ b/crates/sql-ast/src/ast/function/json_extract.rs @@ -0,0 +1,36 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; +use std::borrow::Cow; + +#[derive(Debug, Clone, PartialEq)] +pub struct JsonExtract<'a> { + pub(crate) column: Box>, + pub(crate) path: Vec>, + pub(crate) extract_as_string: bool, +} + +/// Extracts a subset of a JSON blob given a path. +/// Two types of paths can be used: +/// - `String` paths, referring to JSON paths. This is supported by MySQL only. +/// - `Array` paths, supported by Postgres only. +pub fn json_extract<'a, C>(column: C, path: Vec>, extract_as_string: bool) -> Function<'a> +where + C: Into>, +{ + let fun = JsonExtract { + column: Box::new(column.into()), + path, + extract_as_string, + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonExtract<'a>) -> Self { + Self { + typ_: FunctionType::JsonExtract(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/json_extract_array.rs b/crates/sql-ast/src/ast/function/json_extract_array.rs new file mode 100644 index 00000000..fa234715 --- /dev/null +++ b/crates/sql-ast/src/ast/function/json_extract_array.rs @@ -0,0 +1,53 @@ +use crate::ast::{Expression, Function, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +pub struct JsonExtractLastArrayElem<'a> { + pub(crate) expr: Box>, +} + +/// This is an internal function used to help construct the JsonArrayEndsInto Comparable +pub(crate) fn json_extract_last_array_elem<'a, E>(expr: E) -> Function<'a> +where + E: Into>, +{ + let fun = JsonExtractLastArrayElem { + expr: 
Box::new(expr.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonExtractLastArrayElem<'a>) -> Self { + Self { + typ_: FunctionType::JsonExtractLastArrayElem(value), + alias: None, + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct JsonExtractFirstArrayElem<'a> { + pub(crate) expr: Box>, +} + +/// This is an internal function used to help construct the JsonArrayBeginsWith Comparable +pub(crate) fn json_extract_first_array_elem<'a, E>(expr: E) -> Function<'a> +where + E: Into>, +{ + let fun = JsonExtractFirstArrayElem { + expr: Box::new(expr.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonExtractFirstArrayElem<'a>) -> Self { + Self { + typ_: FunctionType::JsonExtractFirstArrayElem(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/json_unquote.rs b/crates/sql-ast/src/ast/function/json_unquote.rs new file mode 100644 index 00000000..453b9a6f --- /dev/null +++ b/crates/sql-ast/src/ast/function/json_unquote.rs @@ -0,0 +1,28 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +#[derive(Debug, Clone, PartialEq)] +pub struct JsonUnquote<'a> { + pub(crate) expr: Box>, +} + +/// Converts a JSON expression into string and unquotes it. +pub fn json_unquote<'a, E>(expr: E) -> Function<'a> +where + E: Into>, +{ + let fun = JsonUnquote { + expr: Box::new(expr.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: JsonUnquote<'a>) -> Self { + Self { + typ_: FunctionType::JsonUnquote(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/lower.rs b/crates/sql-ast/src/ast/function/lower.rs new file mode 100644 index 00000000..5bf90816 --- /dev/null +++ b/crates/sql-ast/src/ast/function/lower.rs @@ -0,0 +1,29 @@ +use super::Function; +use crate::ast::{Expression, FunctionType}; + +/// A represention of the `LOWER` function in the database. 
+#[derive(Debug, Clone, PartialEq)] +pub struct Lower<'a> { + pub(crate) expression: Box>, +} + +/// Converts the result of the expression into lowercase string. +pub fn lower<'a, E>(expression: E) -> Function<'a> +where + E: Into>, +{ + let fun = Lower { + expression: Box::new(expression.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Lower<'a>) -> Self { + Self { + typ_: FunctionType::Lower(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/maximum.rs b/crates/sql-ast/src/ast/function/maximum.rs new file mode 100644 index 00000000..3ee95047 --- /dev/null +++ b/crates/sql-ast/src/ast/function/maximum.rs @@ -0,0 +1,26 @@ +use super::Function; +use crate::ast::{Column, FunctionType}; + +/// A represention of the `MAX` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Maximum<'a> { + pub(crate) column: Column<'a>, +} + +/// Calculates the maximum value of a numeric column. +pub fn max<'a, C>(col: C) -> Function<'a> +where + C: Into>, +{ + let fun = Maximum { column: col.into() }; + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Maximum<'a>) -> Self { + Self { + typ_: FunctionType::Maximum(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/minimum.rs b/crates/sql-ast/src/ast/function/minimum.rs new file mode 100644 index 00000000..dfecd03f --- /dev/null +++ b/crates/sql-ast/src/ast/function/minimum.rs @@ -0,0 +1,26 @@ +use super::Function; +use crate::ast::{Column, FunctionType}; + +/// A represention of the `MIN` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Minimum<'a> { + pub(crate) column: Column<'a>, +} + +/// Calculates the minimum value of a numeric column. 
+pub fn min<'a, C>(col: C) -> Function<'a> +where + C: Into>, +{ + let fun = Minimum { column: col.into() }; + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Minimum<'a>) -> Self { + Self { + typ_: FunctionType::Minimum(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/row_number.rs b/crates/sql-ast/src/ast/function/row_number.rs new file mode 100644 index 00000000..5e2a59db --- /dev/null +++ b/crates/sql-ast/src/ast/function/row_number.rs @@ -0,0 +1,33 @@ +use crate::ast::{Column, IntoOrderDefinition, Over}; + +#[derive(Debug, Default, Clone, PartialEq)] +/// A window function that assigns a sequential integer +/// number to each row in the query’s result set. +pub struct RowNumber<'a> { + pub(crate) over: Over<'a>, +} + +impl<'a> RowNumber<'a> { + /// Define the order of the row number. Is the row order if not set. + pub fn order_by(mut self, value: T) -> Self + where + T: IntoOrderDefinition<'a>, + { + self.over.ordering.append(value.into_order_definition()); + self + } + + /// Define the partitioning of the row number + pub fn partition_by(mut self, partition: T) -> Self + where + T: Into>, + { + self.over.partitioning.push(partition.into()); + self + } +} + +/// A number from 1 to n in specified order +pub fn row_number<'a>() -> RowNumber<'a> { + RowNumber::default() +} diff --git a/crates/sql-ast/src/ast/function/row_to_json.rs b/crates/sql-ast/src/ast/function/row_to_json.rs new file mode 100644 index 00000000..fb6ae5fd --- /dev/null +++ b/crates/sql-ast/src/ast/function/row_to_json.rs @@ -0,0 +1,32 @@ +use super::Function; +use crate::ast::{FunctionType, Table}; + +#[derive(Debug, Clone, PartialEq)] +/// A representation of the `ROW_TO_JSON` function in the database. +/// Only for `Postgresql` +pub struct RowToJson<'a> { + pub(crate) expr: Table<'a>, + pub(crate) pretty_print: bool, +} + +/// Return the given table in `JSON` format. 
+pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a> +where + T: Into>, +{ + let fun = RowToJson { + expr: expr.into(), + pretty_print, + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: RowToJson<'a>) -> Self { + Self { + typ_: FunctionType::RowToJson(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/sum.rs b/crates/sql-ast/src/ast/function/sum.rs new file mode 100644 index 00000000..c7de39f9 --- /dev/null +++ b/crates/sql-ast/src/ast/function/sum.rs @@ -0,0 +1,28 @@ +use crate::ast::{Expression, Function, FunctionType}; + +/// A represention of the `SUM` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Sum<'a> { + pub(crate) expr: Box>, +} + +/// Calculates the sum value of a numeric column. +pub fn sum<'a, E>(expr: E) -> Function<'a> +where + E: Into>, +{ + let fun = Sum { + expr: Box::new(expr.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Sum<'a>) -> Self { + Self { + typ_: FunctionType::Sum(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/to_jsonb.rs b/crates/sql-ast/src/ast/function/to_jsonb.rs new file mode 100644 index 00000000..2492b530 --- /dev/null +++ b/crates/sql-ast/src/ast/function/to_jsonb.rs @@ -0,0 +1,24 @@ +use super::Function; +use crate::ast::Table; + +#[derive(Debug, Clone, PartialEq)] +/// A representation of the `to_jsonb` function in PostgreSQL. +pub struct ToJsonb<'a> { + pub(crate) table: Table<'a>, +} + +/// Return the given table in JSONB. 
+pub fn to_jsonb<'a>(table: impl Into>) -> Function<'a> { + let fun = ToJsonb { table: table.into() }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: ToJsonb<'a>) -> Self { + Self { + typ_: super::FunctionType::ToJsonb(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/unnest.rs b/crates/sql-ast/src/ast/function/unnest.rs new file mode 100644 index 00000000..9035a645 --- /dev/null +++ b/crates/sql-ast/src/ast/function/unnest.rs @@ -0,0 +1,33 @@ +use crate::ast::{Expression, Function, FunctionType}; + +/// Represents an `UNNEST` function call. +#[derive(Debug, Clone, PartialEq)] +pub struct Unnest<'a> { + pub(crate) expression: Box>, +} + +/// Creates an `UNNEST` function call. +/// +/// `UNNEST` expands an array or map into a relation. Arrays are unnested into a +/// single column, and maps are unnested into two columns (key, value). +/// Used mainly in PostgreSQL, but some other databases might support standard +/// compliant `UNNEST` or similar functions. +pub fn unnest<'a, E>(expression: E) -> Function<'a> +where + E: Into>, +{ + let fun = Unnest { + expression: Box::new(expression.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Unnest<'a>) -> Self { + Self { + typ_: FunctionType::Unnest(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/function/upper.rs b/crates/sql-ast/src/ast/function/upper.rs new file mode 100644 index 00000000..961c3de6 --- /dev/null +++ b/crates/sql-ast/src/ast/function/upper.rs @@ -0,0 +1,28 @@ +use crate::ast::{Expression, Function, FunctionType}; + +/// A represention of the `UPPER` function in the database. +#[derive(Debug, Clone, PartialEq)] +pub struct Upper<'a> { + pub(crate) expression: Box>, +} + +/// Converts the result of the expression into uppercase string. 
+pub fn upper<'a, E>(expression: E) -> Function<'a> +where + E: Into>, +{ + let fun = Upper { + expression: Box::new(expression.into()), + }; + + fun.into() +} + +impl<'a> From> for Function<'a> { + fn from(value: Upper<'a>) -> Self { + Self { + typ_: FunctionType::Upper(value), + alias: None, + } + } +} diff --git a/crates/sql-ast/src/ast/grouping.rs b/crates/sql-ast/src/ast/grouping.rs new file mode 100644 index 00000000..47e197df --- /dev/null +++ b/crates/sql-ast/src/ast/grouping.rs @@ -0,0 +1,91 @@ +use super::Function; +use crate::ast::{Column, Expression, Table}; + +/// Defines a grouping for the `GROUP BY` statement. +pub type GroupByDefinition<'a> = Expression<'a>; + +/// A list of definitions for the `GROUP BY` statement +#[derive(Debug, Default, PartialEq, Clone)] +pub struct Grouping<'a>(pub Vec>); + +impl<'a> Grouping<'a> { + #[doc(hidden)] + pub fn append(&mut self, value: GroupByDefinition<'a>) { + self.0.push(value); + } + + pub fn new(values: Vec>) -> Self { + Self(values) + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +/// An item that can be used in the `GROUP BY` statement +pub trait Groupable<'a> +where + Self: Sized, +{ + /// Group by `self` + fn group(self) -> GroupByDefinition<'a>; +} + +/// Convert the value into a group by definition. 
+pub trait IntoGroupByDefinition<'a> { + fn into_group_by_definition(self) -> GroupByDefinition<'a>; +} + +impl<'a> IntoGroupByDefinition<'a> for &'a str { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + let column: Column = self.into(); + column.into() + } +} + +impl<'a> IntoGroupByDefinition<'a> for (&'a str, &'a str) { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + let column: Column = self.into(); + column.into() + } +} + +impl<'a> IntoGroupByDefinition<'a> for (&'a str, &'a str, &'a str) { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + let table: Table = self.1.into(); + let column: Column = self.2.into(); + + column.table(table.database(self.0)).into() + } +} + +impl<'a> IntoGroupByDefinition<'a> for Column<'a> { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + self.into() + } +} + +impl<'a> IntoGroupByDefinition<'a> for Function<'a> { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + self.into() + } +} + +impl<'a> IntoGroupByDefinition<'a> for GroupByDefinition<'a> { + fn into_group_by_definition(self) -> GroupByDefinition<'a> { + self + } +} + +impl<'a> Groupable<'a> for Column<'a> { + fn group(self) -> GroupByDefinition<'a> { + self.into() + } +} + +impl<'a> Groupable<'a> for &'a str { + fn group(self) -> GroupByDefinition<'a> { + Column::from(self).group() + } +} diff --git a/crates/sql-ast/src/ast/insert.rs b/crates/sql-ast/src/ast/insert.rs new file mode 100644 index 00000000..d96abf7b --- /dev/null +++ b/crates/sql-ast/src/ast/insert.rs @@ -0,0 +1,289 @@ +use std::borrow::Cow; + +use grafbase_sdk::SdkError; + +use crate::ast::{Column, Expression, Query, Row, Table, Update, Values}; + +/// A builder for an `INSERT` statement. 
+#[derive(Clone, Debug, PartialEq)] +pub struct Insert<'a> { + pub(crate) table: Option>, + pub(crate) columns: Vec>, + pub(crate) values: Expression<'a>, + pub(crate) on_conflict: Option>, + pub(crate) returning: Option>>, + pub(crate) comment: Option>, +} + +/// A builder for an `INSERT` statement for a single row. +#[derive(Clone, Debug, PartialEq)] +pub struct SingleRowInsert<'a> { + pub(crate) table: Option>, + pub(crate) columns: Vec>, + pub(crate) values: Row<'a>, +} + +/// A builder for an `INSERT` statement for multiple rows. +#[derive(Clone, Debug, PartialEq)] +pub struct MultiRowInsert<'a> { + pub(crate) table: Option>, + pub(crate) columns: Vec>, + pub(crate) values: Vec>, +} + +/// `INSERT` conflict resolution strategies. +#[allow(clippy::large_enum_variant)] +#[derive(Clone, Debug, PartialEq)] +pub enum OnConflict<'a> { + /// When a row already exists, do nothing. + DoNothing, + /// ON CONFLICT UPDATE is supported for Postgres + Update(Update<'a>, Vec>), +} + +impl<'a> From> for Query<'a> { + fn from(insert: Insert<'a>) -> Self { + Query::Insert(Box::new(insert)) + } +} + +impl<'a> From> for Insert<'a> { + fn from(insert: SingleRowInsert<'a>) -> Self { + let values = if insert.values.is_empty() { + Expression::from(Row::new()) + } else { + Expression::from(insert.values) + }; + + Insert { + table: insert.table, + columns: insert.columns, + values, + on_conflict: None, + returning: None, + comment: None, + } + } +} + +impl<'a> From> for Insert<'a> { + fn from(insert: MultiRowInsert<'a>) -> Self { + let values = Expression::from(Values::new(insert.values)); + + Insert { + table: insert.table, + columns: insert.columns, + values, + on_conflict: None, + returning: None, + comment: None, + } + } +} + +impl<'a> From> for Query<'a> { + fn from(insert: SingleRowInsert<'a>) -> Query<'a> { + Query::from(Insert::from(insert)) + } +} + +impl<'a> From> for Query<'a> { + fn from(insert: MultiRowInsert<'a>) -> Query<'a> { + Query::from(Insert::from(insert)) + } +} + 
+impl<'a> Insert<'a> { + /// Creates a new single row `INSERT` statement for the given table. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let query = Insert::single_into("users"); + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"INSERT INTO "users" DEFAULT VALUES"#, sql); + /// # } + /// ``` + pub fn single_into(table: T) -> SingleRowInsert<'a> + where + T: Into>, + { + SingleRowInsert { + table: Some(table.into()), + columns: Vec::new(), + values: Row::new(), + } + } + + pub fn single() -> SingleRowInsert<'a> { + SingleRowInsert { + table: None, + columns: Vec::new(), + values: Row::new(), + } + } + + /// Creates a new multi row `INSERT` statement for the given table. + pub fn multi_into(table: T, columns: I) -> MultiRowInsert<'a> + where + T: Into>, + K: Into>, + I: IntoIterator, + { + MultiRowInsert { + table: Some(table.into()), + columns: columns.into_iter().map(|c| c.into()).collect(), + values: Vec::new(), + } + } + + pub fn multi(columns: I) -> MultiRowInsert<'a> + where + K: Into>, + I: IntoIterator, + { + MultiRowInsert { + table: None, + columns: columns.into_iter().map(|c| c.into()).collect(), + values: Vec::new(), + } + } + + pub fn expression_into(table: T, columns: I, expression: E) -> Self + where + T: Into>, + I: IntoIterator, + K: Into>, + E: Into>, + { + Insert { + table: Some(table.into()), + columns: columns.into_iter().map(|c| c.into()).collect(), + values: expression.into(), + on_conflict: None, + returning: None, + comment: None, + } + } + + /// Sets the conflict resolution strategy. + pub fn on_conflict(&mut self, on_conflict: OnConflict<'a>) { + self.on_conflict = Some(on_conflict); + } + + /// Adds a comment to the insert. + pub fn comment>>(&mut self, comment: C) { + self.comment = Some(comment.into()); + } + + /// Sets the returned columns. 
+ pub fn returning(&mut self, columns: I) + where + K: Into>, + I: IntoIterator, + { + self.returning = Some(columns.into_iter().map(|k| k.into()).collect()); + } +} + +impl<'a> SingleRowInsert<'a> { + /// Adds a new value to the `INSERT` statement + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Insert::single_into("users"); + /// query.value("foo", 10); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"INSERT INTO "users" ("foo") VALUES ($1)"#, sql); + /// assert_eq!(vec![Value::from(10)], params); + /// # } + /// ``` + pub fn value(&mut self, key: K, val: V) + where + K: Into>, + V: Into>, + { + self.columns.push(key.into()); + self.values.push(val.into()); + } + + /// Merge two single row inserts into a multi row insert. + /// + /// Both inserts must be to the same table and must include the same columns. + pub fn merge(self, other: SingleRowInsert<'a>) -> Result, SdkError> { + if self.table != other.table { + return Err(SdkError::from("Merging inserts must be on the same table.")); + } + + if self.columns != other.columns { + return Err(SdkError::from("All insert items must have the same columns.")); + } + + Ok(MultiRowInsert { + table: self.table, + columns: self.columns, + values: vec![self.values, other.values], + }) + } + + /// Convert into a common `Insert` statement. + pub fn build(self) -> Insert<'a> { + Insert::from(self) + } +} + +impl<'a> MultiRowInsert<'a> { + /// Adds multiple new rows to be inserted. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Insert::multi_into("users", vec!["foo"]); + /// + /// query.values(vec![1]); + /// query.values(vec![2]); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"INSERT INTO "users" ("foo") VALUES ($1), ($2)"#, sql); + /// + /// assert_eq!( + /// vec![ + /// Value::from(1), + /// Value::from(2), + /// ], params); + /// # } + /// ``` + pub fn values(&mut self, values: V) + where + V: Into>, + { + self.values.push(values.into()); + } + + /// Extend the insert statement with a single row insert. + /// + /// Both inserts must be to the same table and must include the same columns. + pub fn extend(&mut self, other: SingleRowInsert<'a>) -> Result<(), SdkError> { + if self.table != other.table { + return Err(SdkError::from("Merging inserts must be on the same table.")); + } + + if self.columns != other.columns { + return Err(SdkError::from("All insert items must have the same columns.")); + } + + self.values.push(other.values); + + Ok(()) + } + + /// Convert into a common `Insert` statement. + pub fn build(self) -> Insert<'a> { + Insert::from(self) + } +} diff --git a/crates/sql-ast/src/ast/join.rs b/crates/sql-ast/src/ast/join.rs new file mode 100644 index 00000000..62d58f46 --- /dev/null +++ b/crates/sql-ast/src/ast/join.rs @@ -0,0 +1,91 @@ +use crate::ast::{ConditionTree, Table}; + +/// The `JOIN` table and conditions. +#[derive(Debug, PartialEq, Clone)] +pub struct JoinData<'a> { + pub(crate) table: Table<'a>, + pub(crate) conditions: ConditionTree<'a>, + pub(crate) lateral: bool, +} + +impl<'a> JoinData<'a> { + /// Implement a join with no conditions. + pub fn all_from(table: impl Into>) -> Self { + Self { + table: table.into(), + conditions: ConditionTree::NoCondition, + lateral: false, + } + } + + /// Join as lateral join. 
+ pub fn lateral(&mut self) { + self.lateral = true; + } +} + +impl<'a, T> From for JoinData<'a> +where + T: Into>, +{ + fn from(table: T) -> Self { + Self::all_from(table) + } +} + +/// A representation of a `JOIN` statement. +#[derive(Debug, PartialEq, Clone)] +pub enum Join<'a> { + /// Implements an `INNER JOIN` with given `JoinData`. + Inner(JoinData<'a>), + /// Implements an `LEFT JOIN` with given `JoinData`. + Left(JoinData<'a>), + /// Implements an `RIGHT JOIN` with given `JoinData`. + Right(JoinData<'a>), + /// Implements an `FULL JOIN` with given `JoinData`. + Full(JoinData<'a>), + /// Implements a `CROSS JOIN` with given `JoinData`. + Cross(JoinData<'a>), +} + +/// An item that can be joined. +pub trait Joinable<'a> { + /// Add the `JOIN` conditions. + fn on(self, conditions: T) -> JoinData<'a> + where + T: Into>; +} + +impl<'a, U> Joinable<'a> for U +where + U: Into>, +{ + fn on(self, conditions: T) -> JoinData<'a> + where + T: Into>, + { + JoinData { + table: self.into(), + conditions: conditions.into(), + lateral: false, + } + } +} + +impl<'a> Joinable<'a> for JoinData<'a> { + fn on(self, conditions: T) -> JoinData<'a> + where + T: Into>, + { + let conditions = match self.conditions { + ConditionTree::NoCondition => conditions.into(), + cond => cond.and(conditions.into()), + }; + + JoinData { + table: self.table, + conditions, + lateral: false, + } + } +} diff --git a/crates/sql-ast/src/ast/ops.rs b/crates/sql-ast/src/ast/ops.rs new file mode 100644 index 00000000..89d25b5b --- /dev/null +++ b/crates/sql-ast/src/ast/ops.rs @@ -0,0 +1,54 @@ +use crate::ast::Expression; +use std::ops::{Add, Div, Mul, Rem, Sub}; + +/// Calculation operations in SQL queries. 
+#[derive(Debug, PartialEq, Clone)] +pub enum SqlOp<'a> { + Add(Expression<'a>, Expression<'a>), + Sub(Expression<'a>, Expression<'a>), + Mul(Expression<'a>, Expression<'a>), + Div(Expression<'a>, Expression<'a>), + Rem(Expression<'a>, Expression<'a>), + Append(Expression<'a>, Expression<'a>), + JsonDeleteAtPath(Expression<'a>, Expression<'a>), +} + +impl<'a> Add for Expression<'a> { + type Output = Expression<'a>; + + fn add(self, other: Self) -> Self { + SqlOp::Add(self, other).into() + } +} + +impl<'a> Sub for Expression<'a> { + type Output = Expression<'a>; + + fn sub(self, other: Self) -> Self { + SqlOp::Sub(self, other).into() + } +} + +impl<'a> Mul for Expression<'a> { + type Output = Expression<'a>; + + fn mul(self, other: Self) -> Self { + SqlOp::Mul(self, other).into() + } +} + +impl<'a> Div for Expression<'a> { + type Output = Expression<'a>; + + fn div(self, other: Self) -> Self { + SqlOp::Div(self, other).into() + } +} + +impl<'a> Rem for Expression<'a> { + type Output = Expression<'a>; + + fn rem(self, other: Self) -> Self { + SqlOp::Rem(self, other).into() + } +} diff --git a/crates/sql-ast/src/ast/ordering.rs b/crates/sql-ast/src/ast/ordering.rs new file mode 100644 index 00000000..a07632df --- /dev/null +++ b/crates/sql-ast/src/ast/ordering.rs @@ -0,0 +1,118 @@ +use crate::ast::{Column, Expression}; + +/// Defines ordering for an `ORDER BY` statement. +pub type OrderDefinition<'a> = (Expression<'a>, Option); + +/// A list of definitions for the `ORDER BY` statement. 
+#[derive(Debug, Default, PartialEq, Clone)] +pub struct Ordering<'a>(pub Vec>); + +impl<'a> Ordering<'a> { + pub fn append(&mut self, value: OrderDefinition<'a>) { + self.0.push(value); + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +/// The ordering direction +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum Order { + /// Ascending + Asc, + /// Descending + Desc, + /// Ascending Nulls First + AscNullsFirst, + /// Ascending Nulls Last + AscNullsLast, + /// Descending Nulls First + DescNullsFirst, + /// Descending Nulls Last + DescNullsLast, +} + +/// An item that can be used in the `ORDER BY` statement +pub trait Orderable<'a> +where + Self: Sized, +{ + /// Order by `self` in the given order + fn order(self, order: Option) -> OrderDefinition<'a>; + + /// Change the order to `ASC` + fn ascend(self) -> OrderDefinition<'a> { + self.order(Some(Order::Asc)) + } + + /// Change the order to `DESC` + fn descend(self) -> OrderDefinition<'a> { + self.order(Some(Order::Desc)) + } + + /// Change the order to `ASC NULLS FIRST` + fn ascend_nulls_first(self) -> OrderDefinition<'a> { + self.order(Some(Order::AscNullsFirst)) + } + + /// Change the order to `ASC NULLS LAST` + fn ascend_nulls_last(self) -> OrderDefinition<'a> { + self.order(Some(Order::AscNullsLast)) + } + + /// Change the order to `DESC NULLS FIRST` + fn descend_nulls_first(self) -> OrderDefinition<'a> { + self.order(Some(Order::DescNullsFirst)) + } + + /// Change the order to `ASC NULLS LAST` + fn descend_nulls_last(self) -> OrderDefinition<'a> { + self.order(Some(Order::DescNullsLast)) + } +} + +/// Convert the value into an order definition with order item and direction +pub trait IntoOrderDefinition<'a> { + fn into_order_definition(self) -> OrderDefinition<'a>; +} + +impl<'a> IntoOrderDefinition<'a> for &'a str { + fn into_order_definition(self) -> OrderDefinition<'a> { + let column: Column<'a> = self.into(); + (column.into(), None) + } +} + +impl<'a> IntoOrderDefinition<'a> for 
Column<'a> { + fn into_order_definition(self) -> OrderDefinition<'a> { + (self.into(), None) + } +} + +impl<'a> IntoOrderDefinition<'a> for OrderDefinition<'a> { + fn into_order_definition(self) -> OrderDefinition<'a> { + self + } +} + +impl<'a> Orderable<'a> for Column<'a> { + fn order(self, order: Option) -> OrderDefinition<'a> { + (self.into(), order) + } +} + +impl<'a> Orderable<'a> for &'a str { + fn order(self, order: Option) -> OrderDefinition<'a> { + let column: Column<'a> = self.into(); + column.order(order) + } +} + +impl<'a> Orderable<'a> for (&'a str, &'a str) { + fn order(self, order: Option) -> OrderDefinition<'a> { + let column: Column<'a> = self.into(); + column.order(order) + } +} diff --git a/crates/sql-ast/src/ast/over.rs b/crates/sql-ast/src/ast/over.rs new file mode 100644 index 00000000..60ab7929 --- /dev/null +++ b/crates/sql-ast/src/ast/over.rs @@ -0,0 +1,15 @@ +use crate::ast::{Column, Ordering}; + +#[derive(Debug, Default, Clone, PartialEq)] +/// Determines the partitioning and ordering of a rowset before the associated +/// window function is applied. 
+pub struct Over<'a> { + pub(crate) ordering: Ordering<'a>, + pub(crate) partitioning: Vec>, +} + +impl Over<'_> { + pub fn is_empty(&self) -> bool { + self.ordering.is_empty() && self.partitioning.is_empty() + } +} diff --git a/crates/sql-ast/src/ast/query.rs b/crates/sql-ast/src/ast/query.rs new file mode 100644 index 00000000..8add7f69 --- /dev/null +++ b/crates/sql-ast/src/ast/query.rs @@ -0,0 +1,10 @@ +use crate::ast::{Delete, Insert, Select, Update}; + +/// A database query +#[derive(Debug, Clone, PartialEq)] +pub enum Query<'a> { + Select(Box>), + Insert(Box>), + Update(Box>), + Delete(Box>), +} diff --git a/crates/sql-ast/src/ast/row.rs b/crates/sql-ast/src/ast/row.rs new file mode 100644 index 00000000..fee162a4 --- /dev/null +++ b/crates/sql-ast/src/ast/row.rs @@ -0,0 +1,387 @@ +use super::compare::JsonType; +use crate::ast::{Comparable, Compare, Expression}; +use std::borrow::Cow; + +/// A collection of values surrounded by parentheses. +#[derive(Debug, Default, PartialEq, Clone)] +pub struct Row<'a> { + pub values: Vec>, +} + +impl<'a> Row<'a> { + pub fn new() -> Self { + Row { values: Vec::new() } + } + + pub fn with_capacity(capacity: usize) -> Self { + Row { + values: Vec::with_capacity(capacity), + } + } + + pub fn pop(&mut self) -> Option> { + self.values.pop() + } + + pub fn push(&mut self, value: T) + where + T: Into>, + { + self.values.push(value.into()); + } + + pub fn is_empty(&self) -> bool { + self.values.is_empty() + } + + pub fn len(&self) -> usize { + self.values.len() + } +} + +impl<'a> IntoIterator for Row<'a> { + type Item = Expression<'a>; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.values.into_iter() + } +} + +impl<'a, T> From> for Row<'a> +where + T: Into>, +{ + fn from(vector: Vec) -> Row<'a> { + let mut row = Row::with_capacity(vector.len()); + + for v in vector.into_iter() { + row.push(v.into()); + } + + row + } +} + +impl<'a, A> From<(A,)> for Row<'a> +where + A: Into>, +{ + fn 
from((val,): (A,)) -> Self { + let mut row = Row::with_capacity(1); + row.push(val); + row + } +} + +impl<'a, A, B> From<(A, B)> for Row<'a> +where + A: Into>, + B: Into>, +{ + fn from(vals: (A, B)) -> Self { + let mut row = Row::with_capacity(2); + + row.push(vals.0); + row.push(vals.1); + + row + } +} + +impl<'a, A, B, C> From<(A, B, C)> for Row<'a> +where + A: Into>, + B: Into>, + C: Into>, +{ + fn from(vals: (A, B, C)) -> Self { + let mut row = Row::with_capacity(3); + + row.push(vals.0); + row.push(vals.1); + row.push(vals.2); + + row + } +} + +impl<'a, A, B, C, D> From<(A, B, C, D)> for Row<'a> +where + A: Into>, + B: Into>, + C: Into>, + D: Into>, +{ + fn from(vals: (A, B, C, D)) -> Self { + let mut row = Row::with_capacity(4); + + row.push(vals.0); + row.push(vals.1); + row.push(vals.2); + row.push(vals.3); + + row + } +} + +impl<'a, A, B, C, D, E> From<(A, B, C, D, E)> for Row<'a> +where + A: Into>, + B: Into>, + C: Into>, + D: Into>, + E: Into>, +{ + fn from(vals: (A, B, C, D, E)) -> Self { + let mut row = Row::with_capacity(5); + + row.push(vals.0); + row.push(vals.1); + row.push(vals.2); + row.push(vals.3); + row.push(vals.4); + + row + } +} + +impl<'a> Comparable<'a> for Row<'a> { + fn equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.equals(comparison) + } + + fn not_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.not_equals(comparison) + } + + fn less_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.less_than(comparison) + } + + fn less_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.less_than_or_equals(comparison) + } + + fn greater_than(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.greater_than(comparison) 
+ } + + fn greater_than_or_equals(self, comparison: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.greater_than_or_equals(comparison) + } + + fn in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.in_selection(selection) + } + + fn not_in_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.not_in_selection(selection) + } + + fn any_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.any_selection(selection) + } + + fn not_all_selection(self, selection: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.not_all_selection(selection) + } + + fn like(self, pattern: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.like(pattern) + } + + fn not_like(self, pattern: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + value.not_like(pattern) + } + + #[allow(clippy::wrong_self_convention)] + fn is_null(self) -> Compare<'a> { + let value: Expression<'a> = self.into(); + value.is_null() + } + + #[allow(clippy::wrong_self_convention)] + fn is_not_null(self) -> Compare<'a> { + let value: Expression<'a> = self.into(); + value.is_not_null() + } + + fn between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let value: Expression<'a> = self.into(); + value.between(left, right) + } + + fn not_between(self, left: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let value: Expression<'a> = self.into(); + value.not_between(left, right) + } + + fn compare_raw(self, raw_comparator: T, right: V) -> Compare<'a> + where + T: Into>, + V: Into>, + { + let value: Expression<'a> = self.into(); + value.compare_raw(raw_comparator, right) + } + + /// Tests if the array overlaps with 
another array. + fn array_overlaps(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.array_overlaps(item) + } + + fn array_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.array_contains(item) + } + + fn array_contained(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.array_contained(item) + } + + fn json_array_not_contains(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_array_not_contains(item) + } + + fn json_array_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_array_begins_with(item) + } + + fn json_array_not_begins_with(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_array_not_begins_with(item) + } + + fn json_array_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_array_ends_into(item) + } + + fn json_array_not_ends_into(self, item: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_array_not_ends_into(item) + } + + fn json_type_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_type_equals(json_type) + } + + fn json_type_not_equals(self, json_type: T) -> Compare<'a> + where + T: Into>, + { + let value: Expression<'a> = self.into(); + + value.json_type_not_equals(json_type) + } + + fn any(self) -> Compare<'a> { + let value: Expression<'a> = self.into(); + + value.any() + } + + fn all(self) -> Compare<'a> { + let value: Expression<'a> = self.into(); + + value.all() + } +} diff --git a/crates/sql-ast/src/ast/select.rs b/crates/sql-ast/src/ast/select.rs new file mode 100644 index 
00000000..e9acd887 --- /dev/null +++ b/crates/sql-ast/src/ast/select.rs @@ -0,0 +1,564 @@ +use std::borrow::Cow; + +use super::{ + Column, CommonTableExpression, ConditionTree, Expression, ExpressionKind, Grouping, IntoGroupByDefinition, + IntoOrderDefinition, Join, JoinData, Ordering, Query, Table, +}; + +type Type<'a> = ConditionTree<'a>; + +/// A builder for a `SELECT` statement. +#[derive(Debug, PartialEq, Clone, Default)] +pub struct Select<'a> { + pub(crate) ctes: Vec>, + pub(crate) distinct: bool, + pub(crate) tables: Vec>, + pub(crate) columns: Vec>, + pub(crate) conditions: Option>, + pub(crate) ordering: Ordering<'a>, + pub(crate) grouping: Grouping<'a>, + pub(crate) having: Option>, + pub(crate) limit: Option, + pub(crate) offset: Option, + pub(crate) joins: Vec>, + pub(crate) comment: Option>, +} + +impl<'a> From> for Expression<'a> { + fn from(sel: Select<'a>) -> Expression<'a> { + Expression { + kind: ExpressionKind::Selection(Box::new(sel)), + alias: None, + } + } +} + +impl<'a> From> for Query<'a> { + fn from(sel: Select<'a>) -> Query<'a> { + Query::Select(Box::new(sel)) + } +} + +impl<'a> Select<'a> { + /// Creates a new `SELECT` statement for the given table. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{self, Renderer}}; + /// # fn main() { + /// let query = Select::from_table("users"); + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users""#, sql); + /// # } + /// ``` + /// + /// The table can be in multiple parts, defining the schema. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{self, Renderer}}; + /// # fn main() { + /// let query = Select::from_table(("crm", "users")); + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "crm"."users".* FROM "crm"."users""#, sql); + /// # } + /// ``` + /// + /// Selecting from a nested `SELECT`. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{self, Renderer}}; + /// # fn main() { + /// let mut inner_select = Select::default(); + /// inner_select.value(1); + /// + /// let select = Table::from(inner_select).alias("num"); + /// let query = Select::from_table(select.alias("num")); + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "num".* FROM (SELECT $1) AS "num""#, sql); + /// assert_eq!(vec![Value::from(1)], params); + /// # } + /// ``` + pub fn from_table(table: T) -> Self + where + T: Into>, + { + Select { + tables: vec![table.into()], + ..Select::default() + } + } + + /// Adds a table to be selected. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// let mut inner_select = Select::default(); + /// inner_select.value(1); + /// + /// query.and_from(Table::from(inner_select).alias("num")); + /// query.column(("users", "name")); + /// query.value(Table::from("num").asterisk()); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users"."name", "num".* FROM "users", (SELECT $1) AS "num""#, sql); + /// # } + /// ``` + pub fn and_from(&mut self, table: T) + where + T: Into>, + { + self.tables.push(table.into()); + } + + /// Selects a static value as the column. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::default(); + /// query.value(1); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!("SELECT $1", sql); + /// assert_eq!(vec![Value::from(1)], params); + /// # } + /// ``` + pub fn value(&mut self, value: T) + where + T: Into>, + { + self.columns.push(value.into()); + } + + /// Adds a column to be selected. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.column("name"); + /// query.column(("users", "id")); + /// query.column((("crm", "users"), "foo")); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "name", "users"."id", "crm"."users"."foo" FROM "users""#, sql); + /// # } + /// ``` + pub fn column(&mut self, column: T) + where + T: Into>, + { + self.columns.push(column.into().into()); + } + + /// A bulk method to select multiple values. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// query.columns(["foo", "bar"]); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "foo", "bar" FROM "users""#, sql); + /// # } + /// ``` + pub fn columns(&mut self, columns: T) + where + T: IntoIterator, + C: Into>, + { + self.columns = columns.into_iter().map(|c| c.into().into()).collect(); + } + + /// Adds `DISTINCT` to the select query. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.column("foo"); + /// query.column("bar"); + /// query.distinct(); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT DISTINCT "foo", "bar" FROM "users""#, sql); + /// # } + /// ``` + pub fn distinct(&mut self) { + self.distinct = true; + } + + /// Adds `WHERE` conditions to the query, replacing the previous conditions. + /// See [Comparable](trait.Comparable.html#required-methods) for more + /// examples. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// query.so_that("foo".equals("bar")); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" WHERE "foo" = $1"#, sql); + /// + /// assert_eq!(vec![ + /// Value::from("bar"), + /// ], params); + /// # } + /// ``` + pub fn so_that(&mut self, conditions: T) + where + T: Into>, + { + self.conditions = Some(conditions.into()); + } + + /// Adds an additional `WHERE` condition to the query combining the possible + /// previous condition with `AND`. See + /// [Comparable](trait.Comparable.html#required-methods) for more examples. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.so_that("foo".equals("bar")); + /// query.and_where("lol".equals("wtf")); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" WHERE ("foo" = $1 AND "lol" = $2)"#, sql); + /// + /// assert_eq!(vec![ + /// Value::from("bar"), + /// Value::from("wtf"), + /// ], params); + /// # } + /// ``` + pub fn and_where(&mut self, conditions: T) + where + T: Into>, + { + match self.conditions.take() { + Some(previous) => { + self.conditions = Some(previous.and(conditions.into())); + } + None => self.so_that(conditions), + } + } + + /// Adds an additional `WHERE` condition to the query combining the possible + /// previous condition with `OR`. See + /// [Comparable](trait.Comparable.html#required-methods) for more examples. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.so_that("foo".equals("bar")); + /// query.or_where("lol".equals("wtf")); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" WHERE ("foo" = $1 OR "lol" = $2)"#, sql); + /// + /// assert_eq!(vec![ + /// Value::from("bar"), + /// Value::from("wtf"), + /// ], params); + /// # } + /// ``` + pub fn or_where(&mut self, conditions: T) + where + T: Into>, + { + match self.conditions.take() { + Some(previous) => { + self.conditions = Some(previous.or(conditions.into())); + } + None => self.so_that(conditions), + } + } + + /// Adds `INNER JOIN` clause to the query. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let join = Table::from("posts") + /// .alias("p") + /// .on(("p", "user_id").equals(Column::from(("users", "id")))); + /// + /// let mut query = Select::from_table("users"); + /// query.inner_join(join); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"SELECT "users".* FROM "users" INNER JOIN "posts" AS "p" ON "p"."user_id" = "users"."id""#, + /// sql + /// ); + /// # } + /// ``` + pub fn inner_join(&mut self, join: J) + where + J: Into>, + { + self.joins.push(Join::Inner(join.into())); + } + + /// Adds `LEFT JOIN` clause to the query. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let join = Table::from("posts") + /// .alias("p") + /// .on(("p", "visible").equals(true)); + /// + /// let mut query = Select::from_table("users"); + /// query.left_join(join); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"SELECT "users".* FROM "users" LEFT JOIN "posts" AS "p" ON "p"."visible" = $1"#, + /// sql + /// ); + /// + /// assert_eq!( + /// vec![ + /// Value::from(true), + /// ], + /// params + /// ); + /// # } + /// ``` + pub fn left_join(&mut self, join: J) + where + J: Into>, + { + self.joins.push(Join::Left(join.into())); + } + + /// Adds `RIGHT JOIN` clause to the query. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let join = Table::from("posts") + /// .alias("p") + /// .on(("p", "visible").equals(true)); + /// + /// + /// let mut query = Select::from_table("users"); + /// query.right_join(join); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"SELECT "users".* FROM "users" RIGHT JOIN "posts" AS "p" ON "p"."visible" = $1"#, + /// sql + /// ); + /// + /// assert_eq!( + /// vec![ + /// Value::from(true), + /// ], + /// params + /// ); + /// # } + /// ``` + pub fn right_join(&mut self, join: J) + where + J: Into>, + { + self.joins.push(Join::Right(join.into())); + } + + /// Adds `FULL JOIN` clause to the query. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let join = Table::from("posts") + /// .alias("p") + /// .on(("p", "visible").equals(true)); + /// + /// let mut query = Select::from_table("users"); + /// query.full_join(join); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"SELECT "users".* FROM "users" FULL JOIN "posts" AS "p" ON "p"."visible" = $1"#, + /// sql + /// ); + /// + /// assert_eq!( + /// vec![ + /// Value::from(true), + /// ], + /// params + /// ); + /// # } + /// ``` + pub fn full_join(&mut self, join: J) + where + J: Into>, + { + self.joins.push(Join::Full(join.into())); + } + + /// Adds `CROSS JOIN` clause to the query. + /// + /// The cross join condition must be `None`. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let join = Table::from("posts").alias("p"); + /// + /// let mut query = Select::from_table("users"); + /// query.cross_join(join); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"SELECT "users".* FROM "users" CROSS JOIN "posts" AS "p""#, + /// sql + /// ); + /// # } + /// ``` + pub fn cross_join(&mut self, join: J) + where + J: Into>, + { + self.joins.push(Join::Cross(join.into())); + } + + /// Adds an ordering to the `ORDER BY` section. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// query.order_by("foo"); + /// query.order_by("baz".ascend()); + /// query.order_by("bar".descend()); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" ORDER BY "foo", "baz" ASC, "bar" DESC"#, sql); + /// # } + pub fn order_by(&mut self, value: T) + where + T: IntoOrderDefinition<'a>, + { + self.ordering.append(value.into_order_definition()); + } + + /// Adds a grouping to the `GROUP BY` section. + /// + /// This does not check if the grouping is actually valid in respect to aggregated columns. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.column("foo"); + /// query.column("bar"); + /// query.group_by("foo"); + /// query.group_by("bar"); + /// + /// let (sql, _) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "foo", "bar" FROM "users" GROUP BY "foo", "bar""#, sql); + /// # } + pub fn group_by(&mut self, value: T) + where + T: IntoGroupByDefinition<'a>, + { + self.grouping.append(value.into_group_by_definition()); + } + + /// Adds group conditions to a query. Should be combined together with a + /// [group_by](struct.Select.html#method.group_by) statement. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// + /// query.column("foo"); + /// query.column("bar"); + /// query.group_by("foo"); + /// query.having("foo".greater_than(100)); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "foo", "bar" FROM "users" GROUP BY "foo" HAVING "foo" > $1"#, sql); + /// assert_eq!(vec![Value::from(100)], params); + /// # } + pub fn having(&mut self, conditions: T) + where + T: Into>, + { + self.having = Some(conditions.into()); + } + + /// Sets the `LIMIT` value. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// query.limit(10); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" LIMIT $1"#, sql); + /// assert_eq!(vec![Value::from(10_i64)], params); + /// # } + pub fn limit(&mut self, limit: u32) { + self.limit = Some(limit); + } + + /// Sets the `OFFSET` value. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Select::from_table("users"); + /// query.offset(10); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"SELECT "users".* FROM "users" OFFSET $1"#, sql); + /// assert_eq!(vec![Value::from(10_i64)], params); + /// # } + pub fn offset(&mut self, offset: u32) { + self.offset = Some(offset); + } + + /// Adds a common table expression to the select. 
+ pub fn with(&mut self, cte: CommonTableExpression<'a>) { + self.ctes.push(cte); + } +} diff --git a/crates/sql-ast/src/ast/table.rs b/crates/sql-ast/src/ast/table.rs new file mode 100644 index 00000000..e29cf65a --- /dev/null +++ b/crates/sql-ast/src/ast/table.rs @@ -0,0 +1,263 @@ +use super::{ExpressionKind, Join, JoinData}; +use crate::ast::{Expression, Select, Values}; +use std::borrow::Cow; + +/// An object that can be aliased. +pub trait Aliasable<'a> { + type Target; + + /// Alias table for usage elsewhere in the query. + fn alias(self, alias: T) -> Self::Target + where + T: Into>; +} + +#[derive(Clone, Debug, PartialEq)] +/// Either an identifier or a nested query. +pub enum TableType<'a> { + Table(Cow<'a, str>), + JoinedTable(Box<(Cow<'a, str>, Vec>)>), + Query(Box>), + Values(Values<'a>), + Expression(Expression<'a>), +} + +/// A table definition +#[derive(Clone, Debug)] +pub struct Table<'a> { + pub typ: TableType<'a>, + pub alias: Option>, + pub database: Option>, +} + +impl PartialEq for Table<'_> { + fn eq(&self, other: &Table) -> bool { + self.typ == other.typ && self.database == other.database + } +} + +impl<'a> Table<'a> { + /// Define in which database the table is located + pub fn database(mut self, database: T) -> Self + where + T: Into>, + { + self.database = Some(database.into()); + self + } + + /// A qualified asterisk to this table + pub fn asterisk(self) -> Expression<'a> { + Expression { + kind: ExpressionKind::Asterisk(Some(Box::new(self))), + alias: None, + } + } + + /// Adds a `LEFT JOIN` clause to the query, specifically for that table. + /// Useful to positionally add a JOIN clause in case you are selecting from multiple tables. 
+ pub fn left_join(mut self, join: J) -> Self + where + J: Into>, + { + match self.typ { + TableType::Table(table_name) => { + self.typ = TableType::JoinedTable(Box::new((table_name, vec![Join::Left(join.into())]))) + } + TableType::JoinedTable(ref mut jt) => jt.1.push(Join::Left(join.into())), + TableType::Expression(_) => { + unreachable!("Please contact Grafbase support +555 420 69 69") + } + TableType::Query(_) => { + unreachable!("You cannot left_join on a table of type Query") + } + TableType::Values(_) => { + unreachable!("You cannot left_join on a table of type Values") + } + } + + self + } + + /// Adds an `INNER JOIN` clause to the query, specifically for that table. + /// Useful to positionally add a JOIN clause in case you are selecting from multiple tables. + pub fn inner_join(mut self, join: J) -> Self + where + J: Into>, + { + match self.typ { + TableType::Table(table_name) => { + self.typ = TableType::JoinedTable(Box::new((table_name, vec![Join::Inner(join.into())]))) + } + TableType::JoinedTable(ref mut jt) => jt.1.push(Join::Inner(join.into())), + TableType::Expression(_) => { + unreachable!("Please contact Grafbase support +555 420 69 69") + } + TableType::Query(_) => { + unreachable!("You cannot inner_join on a table of type Query") + } + TableType::Values(_) => { + unreachable!("You cannot inner_join on a table of type Values") + } + } + + self + } + + /// Adds a `RIGHT JOIN` clause to the query, specifically for that table. + /// Useful to positionally add a JOIN clause in case you are selecting from multiple tables. 
+ pub fn right_join(mut self, join: J) -> Self + where + J: Into>, + { + match self.typ { + TableType::Table(table_name) => { + self.typ = TableType::JoinedTable(Box::new((table_name, vec![Join::Right(join.into())]))) + } + TableType::JoinedTable(ref mut jt) => jt.1.push(Join::Right(join.into())), + TableType::Expression(_) => { + unreachable!("Please contact Grafbase support +555 420 69 69") + } + TableType::Query(_) => { + unreachable!("You cannot right_join on a table of type Query") + } + TableType::Values(_) => { + unreachable!("You cannot right_join on a table of type Values") + } + } + + self + } + + /// Adds a `FULL JOIN` clause to the query, specifically for that table. + /// Useful to positionally add a JOIN clause in case you are selecting from multiple tables. + pub fn full_join(mut self, join: J) -> Self + where + J: Into>, + { + match self.typ { + TableType::Table(table_name) => { + self.typ = TableType::JoinedTable(Box::new((table_name, vec![Join::Full(join.into())]))) + } + TableType::JoinedTable(ref mut jt) => jt.1.push(Join::Full(join.into())), + TableType::Expression(_) => { + unreachable!("Please contact Grafbase support +555 420 69 69") + } + TableType::Query(_) => { + unreachable!("You cannot full_join on a table of type Query") + } + TableType::Values(_) => { + unreachable!("You cannot full_join on a table of type Values") + } + } + + self + } +} + +impl<'a> From> for Table<'a> { + fn from(value: Expression<'a>) -> Self { + Table { + typ: TableType::Expression(value), + alias: None, + database: None, + } + } +} + +impl<'a> From<&'a str> for Table<'a> { + fn from(s: &'a str) -> Table<'a> { + Table { + typ: TableType::Table(s.into()), + alias: None, + database: None, + } + } +} + +impl<'a> From<&'a String> for Table<'a> { + fn from(s: &'a String) -> Table<'a> { + Table { + typ: TableType::Table(s.into()), + alias: None, + database: None, + } + } +} + +impl<'a> From> for Table<'a> { + fn from(s: Cow<'a, str>) -> Table<'a> { + Table { + typ: 
TableType::Table(s), + alias: None, + database: None, + } + } +} + +impl<'a> From<(&'a str, &'a str)> for Table<'a> { + fn from(s: (&'a str, &'a str)) -> Table<'a> { + let table: Table<'a> = s.1.into(); + table.database(s.0) + } +} + +impl<'a> From<(&'a str, &'a String)> for Table<'a> { + fn from(s: (&'a str, &'a String)) -> Table<'a> { + let table: Table<'a> = s.1.into(); + table.database(s.0) + } +} + +impl<'a> From<(&'a String, &'a str)> for Table<'a> { + fn from(s: (&'a String, &'a str)) -> Table<'a> { + let table: Table<'a> = s.1.into(); + table.database(s.0) + } +} + +impl<'a> From<(&'a String, &'a String)> for Table<'a> { + fn from(s: (&'a String, &'a String)) -> Table<'a> { + let table: Table<'a> = s.1.into(); + table.database(s.0) + } +} + +impl From for Table<'_> { + fn from(s: String) -> Self { + Table { + typ: TableType::Table(s.into()), + alias: None, + database: None, + } + } +} + +impl<'a> From<(String, String)> for Table<'a> { + fn from(s: (String, String)) -> Table<'a> { + let table: Table<'a> = s.1.into(); + table.database(s.0) + } +} + +impl<'a> From> for Table<'a> { + fn from(select: Select<'a>) -> Self { + Table { + typ: TableType::Query(Box::new(select)), + alias: None, + database: None, + } + } +} + +impl<'a> Aliasable<'a> for Table<'a> { + type Target = Table<'a>; + + fn alias(mut self, alias: T) -> Self::Target + where + T: Into>, + { + self.alias = Some(alias.into()); + self + } +} diff --git a/crates/sql-ast/src/ast/update.rs b/crates/sql-ast/src/ast/update.rs new file mode 100644 index 00000000..7d675304 --- /dev/null +++ b/crates/sql-ast/src/ast/update.rs @@ -0,0 +1,147 @@ +use crate::ast::{Column, ConditionTree, Expression, Query, Table}; + +type Type<'a> = Column<'a>; + +/// A builder for an `UPDATE` statement. 
+#[derive(Debug, PartialEq, Clone)] +pub struct Update<'a> { + pub(crate) table: Table<'a>, + pub(crate) columns: Vec>, + pub(crate) values: Vec>, + pub(crate) conditions: Option>, + pub(crate) returning: Option>>, +} + +impl<'a> From> for Query<'a> { + fn from(update: Update<'a>) -> Self { + Query::Update(Box::new(update)) + } +} + +impl<'a> Update<'a> { + /// Creates the basis for an `UPDATE` statement to the given table. + pub fn table(table: T) -> Self + where + T: Into>, + { + Self { + table: table.into(), + columns: Vec::new(), + values: Vec::new(), + conditions: None, + returning: None, + } + } + + /// Add another column value assignment to the query + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Update::table("users"); + /// query.set("foo", 10); + /// query.set("bar", false); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// assert_eq!(r#"UPDATE "users" SET "foo" = $1, "bar" = $2"#, sql); + /// + /// assert_eq!( + /// vec![ + /// Value::from(10), + /// Value::from(false), + /// ], + /// params, + /// ); + /// # } + /// ``` + pub fn set(&mut self, column: K, value: V) + where + K: Into>, + V: Into>, + { + self.columns.push(column.into()); + self.values.push(value.into()); + } + + /// Adds `WHERE` conditions to the query. See + /// [Comparable](trait.Comparable.html#required-methods) for more examples. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut query = Update::table("users"); + /// query.set("foo", 1); + /// query.so_that("bar".equals(false)); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!(r#"UPDATE "users" SET "foo" = $1 WHERE "bar" = $2"#, sql); + /// + /// assert_eq!( + /// vec![ + /// Value::from(1), + /// Value::from(false), + /// ], + /// params, + /// ); + /// # } + /// ``` + /// + /// We can also use a nested `SELECT` in the conditions. + /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut select = Select::from_table("bars"); + /// select.column("id"); + /// select.so_that("uniq_val".equals(3)); + /// + /// let mut query = Update::table("users"); + /// query.set("foo", 1); + /// query.so_that("bar".equals(select)); + /// + /// let (sql, params) = renderer::Postgres::build(query); + /// + /// assert_eq!( + /// r#"UPDATE "users" SET "foo" = $1 WHERE "bar" = (SELECT "id" FROM "bars" WHERE "uniq_val" = $2)"#, + /// sql + /// ); + /// + /// assert_eq!( + /// vec![ + /// Value::from(1), + /// Value::from(3), + /// ], + /// params, + /// ); + /// # } + /// ``` + pub fn so_that(&mut self, conditions: T) + where + T: Into>, + { + self.conditions = Some(conditions.into()); + } + + /// Sets the returned columns. 
+ /// + /// ```rust + /// # use grafbase_sql_ast::{ast::*, renderer::{Renderer, self}}; + /// # fn main() { + /// let mut update = Update::table("users"); + /// update.set("foo", 10); + /// update.returning(vec!["id"]); + /// + /// let (sql, _) = renderer::Postgres::build(update); + /// + /// assert_eq!(r#"UPDATE "users" SET "foo" = $1 RETURNING "id""#, sql); + /// # } + /// ``` + pub fn returning(&mut self, columns: I) + where + K: Into>, + I: IntoIterator, + { + self.returning = Some(columns.into_iter().map(|k| k.into()).collect()); + } +} diff --git a/crates/sql-ast/src/ast/values.rs b/crates/sql-ast/src/ast/values.rs new file mode 100644 index 00000000..1831631a --- /dev/null +++ b/crates/sql-ast/src/ast/values.rs @@ -0,0 +1,86 @@ +use crate::ast::Row; + +/// An in-memory temporary table. Can be used in some of the databases in a +/// place of an actual table. Doesn't work in MySQL 5.7. +#[derive(Debug, Clone, Default, PartialEq)] +pub struct Values<'a> { + pub(crate) rows: Vec>, +} + +impl<'a> Values<'a> { + /// Create a new empty in-memory set of values. + pub fn empty() -> Self { + Self { rows: Vec::new() } + } + + /// Create a new in-memory set of values. + pub fn new(rows: Vec>) -> Self { + Self { rows } + } + + /// Create a new in-memory set of values with an allocated capacity. + pub fn with_capacity(capacity: usize) -> Self { + Self { + rows: Vec::with_capacity(capacity), + } + } + + /// Add value to the temporary table. + pub fn push(&mut self, row: T) + where + T: Into>, + { + self.rows.push(row.into()); + } + + /// The number of rows in the in-memory table. + pub fn len(&self) -> usize { + self.rows.len() + } + + /// True if has no rows. 
+ pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn row_len(&self) -> usize { + match self.rows.split_first() { + Some((row, _)) => row.len(), + None => 0, + } + } + + pub fn flatten_row(self) -> Option> { + let mut result = Row::with_capacity(self.len()); + + for mut row in self.rows.into_iter() { + match row.pop() { + Some(value) => result.push(value), + None => return None, + } + } + + Some(result) + } +} + +impl<'a, I, R> From for Values<'a> +where + I: Iterator, + R: Into>, +{ + fn from(rows: I) -> Self { + Self { + rows: rows.map(|r| r.into()).collect(), + } + } +} + +impl<'a> IntoIterator for Values<'a> { + type Item = Row<'a>; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.rows.into_iter() + } +} diff --git a/crates/sql-ast/src/lib.rs b/crates/sql-ast/src/lib.rs new file mode 100644 index 00000000..25e61217 --- /dev/null +++ b/crates/sql-ast/src/lib.rs @@ -0,0 +1,2 @@ +pub mod ast; +pub mod renderer; diff --git a/crates/sql-ast/src/renderer.rs b/crates/sql-ast/src/renderer.rs new file mode 100644 index 00000000..1ef0026f --- /dev/null +++ b/crates/sql-ast/src/renderer.rs @@ -0,0 +1,4 @@ +//! Visitor for reading an abstract SQL syntax tree, generating the query and +//! gathering parameters in the right order. 
+ +pub mod postgres; diff --git a/crates/sql-ast/src/renderer/postgres.rs b/crates/sql-ast/src/renderer/postgres.rs new file mode 100644 index 00000000..93162a13 --- /dev/null +++ b/crates/sql-ast/src/renderer/postgres.rs @@ -0,0 +1,1345 @@ +use core::fmt; +use std::{borrow::Cow, fmt::Write}; + +use grafbase_sdk::host_io::postgres::{self as sdk, types::DatabaseType}; + +use crate::ast::{ + self, Average, Column, CommonTableExpression, Compare, Concat, ConditionTree, Delete, Encode, EncodeFormat, + Expression, ExpressionKind, Function, FunctionType, Grouping, Insert, Join, JoinData, JsonBuildObject, JsonCompare, + JsonExtract, JsonExtractFirstArrayElem, JsonExtractLastArrayElem, JsonType, JsonUnquote, JsonbAgg, OnConflict, + Order, Ordering, ParameterizedValue, Query, Row, Select, SqlOp, Table, TableType, ToJsonb, Update, Values, +}; + +const C_BACKTICK_OPEN: &str = "\""; +const C_BACKTICK_CLOSE: &str = "\""; + +const C_QUOTE: &str = "'"; + +/// Renders an AST node into a Postgres query. +/// +/// Takes an AST node that can be converted into a `Query` and renders it into a Postgres query +/// using the visitor pattern. The resulting query includes both the SQL string and any bound +/// parameters. +/// +/// # Returns +/// +/// A finalized Postgres query that can be executed against a database. 
+pub fn render<'a>(ast: impl Into>) -> sdk::Query { + let mut renderer = Renderer::default(); + renderer.visit_query(ast.into()); + + renderer.builder.finalize() +} + +#[derive(Debug, Default)] +struct Renderer { + builder: sdk::QueryBuilder, +} + +impl Renderer { + fn write(&mut self, s: D) { + self.builder + .write_fmt(format_args!("{s}")) + .expect("we ran out of memory or something else why write failed"); + } + + fn substitute_parameter(&mut self, parameter: impl DatabaseType) { + self.substitute_value(ParameterizedValue { + value: parameter.into_bound_value(0), + enum_type: None, + }); + } + + fn substitute_value(&mut self, value: ParameterizedValue<'_>) { + let cast = { + let cast = value.enum_type.or(value.value.type_cast().map(Cow::Borrowed)); + cast.map(|cast| format!("::{}", cast)) + }; + + self.builder.bind_value(value.value); + self.write("$"); + self.write(self.builder.bound_values()); + + if let Some(ref cast) = cast { + self.write(cast); + } + } + + fn visit_limit_and_offset(&mut self, limit: Option, offset: Option) { + match (limit, offset) { + (Some(limit), Some(offset)) => { + self.write(" LIMIT "); + self.substitute_parameter(limit); + + self.write(" OFFSET "); + self.substitute_parameter(offset); + } + (None, Some(offset)) => { + self.write(" OFFSET "); + self.substitute_parameter(offset); + } + (Some(limit), None) => { + self.write(" LIMIT "); + self.substitute_parameter(limit); + } + (None, None) => (), + } + } + + fn visit_insert(&mut self, insert: Insert<'_>) { + self.write("INSERT "); + + if let Some(table) = insert.table.clone() { + self.write("INTO "); + self.visit_table(table, true); + } + + match insert.values { + Expression { + kind: ExpressionKind::Row(row), + .. 
+ } => { + if row.values.is_empty() { + self.write(" DEFAULT VALUES"); + } else { + let columns = insert.columns.len(); + + self.write(" ("); + for (i, c) in insert.columns.into_iter().enumerate() { + self.visit_column(c.name.into_owned().into()); + + if i < (columns - 1) { + self.write(","); + } + } + + self.write(")"); + self.write(" VALUES "); + self.visit_row(row); + } + } + Expression { + kind: ExpressionKind::Values(values), + .. + } => { + let columns = insert.columns.len(); + + self.write(" ("); + for (i, c) in insert.columns.into_iter().enumerate() { + self.visit_column(c.name.into_owned().into()); + + if i < (columns - 1) { + self.write(","); + } + } + + self.write(")"); + self.write(" VALUES "); + let values_len = values.len(); + + for (i, row) in values.into_iter().enumerate() { + self.visit_row(row); + + if i < (values_len - 1) { + self.write(", "); + } + } + } + expr => self.surround_with("(", ")", |ref mut s| s.visit_expression(expr)), + } + + match insert.on_conflict { + Some(OnConflict::DoNothing) => self.write(" ON CONFLICT DO NOTHING"), + Some(OnConflict::Update(update, constraints)) => { + self.write(" ON CONFLICT"); + self.columns_to_bracket_list(constraints); + self.write(" DO "); + + self.visit_upsert(update); + } + None => (), + } + + if let Some(returning) = insert.returning { + if !returning.is_empty() { + let values = returning.into_iter().map(|r| r.into()).collect(); + self.write(" RETURNING "); + self.visit_columns(values); + } + }; + } + + fn visit_delete(&mut self, delete: Delete<'_>) { + self.write("DELETE FROM "); + self.visit_table(delete.table, true); + + if let Some(conditions) = delete.conditions { + self.write(" WHERE "); + self.visit_conditions(conditions); + } + + if let Some(returning) = delete.returning { + self.write(" RETURNING "); + + let length = returning.len(); + + for (i, expression) in returning.into_iter().enumerate() { + self.visit_expression(expression); + + if i < (length - 1) { + self.write(", "); + } + } + } + 
} + + fn visit_aggregate_to_string(&mut self, value: Expression<'_>) { + self.write("ARRAY_TO_STRING"); + self.write("("); + self.write("ARRAY_AGG"); + self.write("("); + self.visit_expression(value); + self.write(")"); + self.write("','"); + self.write(")") + } + + fn visit_json_extract(&mut self, json_extract: JsonExtract<'_>) { + let json_path = json_extract.path; + + self.write("("); + self.visit_expression(*json_extract.column); + + if json_extract.extract_as_string { + self.write("#>>"); + } else { + self.write("#>"); + } + + // We use the `ARRAY[]::text[]` notation to better handle escaped character + // The text protocol used when sending prepared statement doesn't seem to work well with escaped characters + // when using the '{a, b, c}' string array notation. + self.surround_with("ARRAY[", "]::text[]", |s| { + let len = json_path.len(); + + for (index, path) in json_path.into_iter().enumerate() { + s.substitute_parameter(path.to_string()); + + if index < len - 1 { + s.write(", "); + } + } + }); + + self.write(")"); + + if !json_extract.extract_as_string { + self.write("::jsonb"); + } + } + + fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'_>) { + self.write("("); + self.visit_expression(*json_unquote.expr); + self.write("#>>ARRAY[]::text[]"); + self.write(")"); + } + + fn visit_array_contains(&mut self, left: Expression<'_>, right: Expression<'_>, not: bool) { + if not { + self.write("( NOT "); + } + + self.visit_expression(left); + self.write(" @> "); + self.visit_expression(right); + + if not { + self.write(" )"); + } + } + + fn visit_array_contained(&mut self, left: Expression<'_>, right: Expression<'_>, not: bool) { + if not { + self.write("( NOT "); + } + + self.visit_expression(left); + self.write(" <@ "); + self.visit_expression(right); + + if not { + self.write(" )"); + } + } + + fn visit_array_overlaps(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" && "); + 
self.visit_expression(right); + } + + fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'_>) { + self.write("("); + self.visit_expression(*extract.expr); + self.write("->-1"); + self.write(")"); + } + + fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'_>) { + self.write("("); + self.visit_expression(*extract.expr); + self.write("->0"); + self.write(")"); + } + + fn visit_json_type_equals(&mut self, left: Expression<'_>, json_type: JsonType<'_>, not: bool) { + self.write("JSONB_TYPEOF"); + self.write("("); + self.visit_expression(left); + self.write(")"); + + if not { + self.write(" != "); + } else { + self.write(" = "); + } + + match json_type { + JsonType::Array => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "array".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + JsonType::Boolean => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "boolean".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + JsonType::Number => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "number".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + JsonType::Object => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "object".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + JsonType::String => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "string".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + JsonType::Null => self.visit_expression(Expression { + kind: ExpressionKind::Parameterized(ParameterizedValue { + value: "null".to_string().into_bound_value(0), + enum_type: None, + }), + alias: None, + }), + 
JsonType::ColumnRef(column) => { + self.write("JSONB_TYPEOF"); + self.write("("); + self.visit_column(*column); + self.write("::jsonb)") + } + } + } + + fn visit_like(&mut self, left: Expression<'_>, right: Expression<'_>) { + let need_cast = matches!(&left.kind, ExpressionKind::Column(_)); + self.visit_expression(left); + + // NOTE: Pg is strongly typed, LIKE comparisons are only between strings. + // to avoid problems with types without implicit casting we explicitly cast to text + if need_cast { + self.write("::text"); + } + + self.write(" LIKE "); + self.visit_expression(right); + } + + fn visit_not_like(&mut self, left: Expression<'_>, right: Expression<'_>) { + let need_cast = matches!(&left.kind, ExpressionKind::Column(_)); + self.visit_expression(left); + + // NOTE: Pg is strongly typed, LIKE comparisons are only between strings. + // to avoid problems with types without implicit casting we explicitly cast to text + if need_cast { + self.write("::text"); + } + + self.write(" NOT LIKE "); + self.visit_expression(right); + } + + fn visit_ordering(&mut self, ordering: Ordering<'_>) { + let len = ordering.0.len(); + + for (i, (value, ordering)) in ordering.0.into_iter().enumerate() { + let direction = ordering.map(|dir| match dir { + Order::Asc => " ASC", + Order::Desc => " DESC", + Order::AscNullsFirst => " ASC NULLS FIRST", + Order::AscNullsLast => " ASC NULLS LAST", + Order::DescNullsFirst => " DESC NULLS FIRST", + Order::DescNullsLast => " DESC NULLS LAST", + }); + + self.visit_expression(value); + self.write(direction.unwrap_or("")); + + if i < (len - 1) { + self.write(", "); + } + } + } + + fn visit_concat(&mut self, concat: Concat<'_>) { + let len = concat.exprs.len(); + + self.surround_with("(", ")", |s| { + for (i, expr) in concat.exprs.into_iter().enumerate() { + s.visit_expression(expr); + + if i < (len - 1) { + s.write(" || "); + } + } + }); + } + + fn visit_to_jsonb(&mut self, to_jsonb: ToJsonb<'_>) { + self.write("to_jsonb("); + 
self.visit_table(to_jsonb.table, false); + self.write(".*)"); + } + + fn visit_json_build_object(&mut self, json_build_object: JsonBuildObject<'_>) { + let values_length = json_build_object.values.len(); + self.write("json_build_object("); + + for (i, (name, expression)) in json_build_object.values.into_iter().enumerate() { + self.surround_with("'", "'", |renderer| { + renderer.write(&name); + }); + + self.write(", "); + self.visit_expression(expression); + + if i < (values_length - 1) { + self.write(","); + } + } + + self.write(")"); + } + + fn visit_jsonb_agg(&mut self, json_agg: JsonbAgg<'_>) { + self.write("jsonb_agg("); + + if json_agg.distinct { + self.write("DISTINCT "); + } + + self.visit_expression(json_agg.expression); + + if let Some(ordering) = json_agg.order_by { + self.write(" ORDER BY "); + self.visit_ordering(ordering); + } + + self.write(")"); + } + + fn visit_encode(&mut self, encode: Encode<'_>) { + self.write("encode("); + self.visit_expression(encode.expression); + self.write(", "); + + match encode.format { + EncodeFormat::Base64 => self.write("'base64'"), + EncodeFormat::Escape => self.write("'escape'"), + EncodeFormat::Hex => self.write("'hex'"), + } + + self.write(")"); + } + + fn visit_join_data(&mut self, data: JoinData<'_>, empty_on: bool) { + if data.lateral { + self.write(" LATERAL "); + } + + self.visit_table(data.table, true); + if data.conditions != ConditionTree::NoCondition || empty_on { + self.write(" ON "); + self.visit_conditions(data.conditions) + } + } + + fn surround_with(&mut self, begin: &str, end: &str, f: F) + where + F: FnOnce(&mut Self), + { + self.write(begin); + f(self); + self.write(end) + } + + fn columns_to_bracket_list(&mut self, columns: Vec>) { + let len = columns.len(); + + self.write(" ("); + for (i, c) in columns.into_iter().enumerate() { + self.visit_column(c.name.into_owned().into()); + + if i < (len - 1) { + self.write(","); + } + } + self.write(")"); + } + + /// The join statements in the query + fn 
visit_joins(&mut self, joins: Vec>) { + for join in joins { + match join { + Join::Inner(data) => { + self.write(" INNER JOIN "); + self.visit_join_data(data, true); + } + Join::Left(data) => { + self.write(" LEFT JOIN "); + self.visit_join_data(data, true); + } + Join::Right(data) => { + self.write(" RIGHT JOIN "); + self.visit_join_data(data, true); + } + Join::Full(data) => { + self.write(" FULL JOIN "); + self.visit_join_data(data, true); + } + Join::Cross(data) => { + self.write(" CROSS JOIN "); + self.visit_join_data(data, false); + } + } + } + } + + fn visit_common_table_expression(&mut self, cte: CommonTableExpression<'_>) { + self.visit_table(Table::from(cte.name.into_owned()), false); + self.write(" AS "); + + let query = cte.query; + self.surround_with("(", ")", |ref mut s| s.visit_query(query)); + } + + /// A walk through a `SELECT` statement + fn visit_select(&mut self, select: Select<'_>) { + let number_of_ctes = select.ctes.len(); + + if number_of_ctes > 0 { + self.write("WITH "); + + for (i, cte) in select.ctes.into_iter().enumerate() { + self.visit_common_table_expression(cte); + + if i < (number_of_ctes - 1) { + self.write(", "); + } + } + + self.write(" "); + } + + self.write("SELECT "); + + if select.distinct { + self.write("DISTINCT "); + } + + if !select.tables.is_empty() { + if select.columns.is_empty() { + for (i, table) in select.tables.iter().enumerate() { + if i > 0 { + self.write(", "); + } + + match &table.typ { + TableType::Query(_) | TableType::Values(_) => match table.alias { + Some(ref alias) => { + self.surround_with(C_BACKTICK_OPEN, C_BACKTICK_CLOSE, |ref mut s| s.write(alias)); + self.write(".*"); + } + None => self.write("*"), + }, + TableType::Table(_) => match table.alias.clone() { + Some(ref alias) => { + self.surround_with(C_BACKTICK_OPEN, C_BACKTICK_CLOSE, |ref mut s| s.write(alias)); + self.write(".*"); + } + None => { + self.visit_table(table.clone(), false); + self.write(".*"); + } + }, + TableType::JoinedTable(jt) => 
match table.alias.clone() { + Some(ref alias) => { + self.surround_with(C_BACKTICK_OPEN, C_BACKTICK_CLOSE, |ref mut s| s.write(alias)); + self.write(".*"); + } + None => { + let mut unjoined_table = table.clone(); + // Convert the table typ to a `TableType::Table` for the SELECT statement print + // We only want the join to appear in the FROM clause + unjoined_table.typ = TableType::Table(jt.0.clone()); + + self.visit_table(unjoined_table, false); + self.write(".*"); + } + }, + TableType::Expression(_) => { + todo!("hmm"); + } + } + } + } else { + self.visit_columns(select.columns); + } + + self.write(" FROM "); + + for (i, table) in select.tables.into_iter().enumerate() { + if i > 0 { + self.write(", "); + } + + self.visit_table(table, true); + } + + if !select.joins.is_empty() { + self.visit_joins(select.joins); + } + + if let Some(conditions) = select.conditions { + self.write(" WHERE "); + self.visit_conditions(conditions); + } + if !select.grouping.is_empty() { + self.write(" GROUP BY "); + self.visit_grouping(select.grouping); + } + if let Some(conditions) = select.having { + self.write(" HAVING "); + self.visit_conditions(conditions); + } + if !select.ordering.is_empty() { + self.write(" ORDER BY "); + self.visit_ordering(select.ordering); + } + + self.visit_limit_and_offset(select.limit, select.offset); + } else if select.columns.is_empty() { + self.write(" *"); + } else { + self.visit_columns(select.columns); + } + } + + /// A walk through an `UPDATE` statement + fn visit_update(&mut self, update: Update<'_>) { + self.write("UPDATE "); + self.visit_table(update.table, true); + + { + self.write(" SET "); + let pairs = update.columns.into_iter().zip(update.values); + let len = pairs.len(); + + for (i, (key, value)) in pairs.enumerate() { + self.visit_column(key); + self.write(" = "); + self.visit_expression(value); + + if i < (len - 1) { + self.write(", "); + } + } + } + + if let Some(conditions) = update.conditions { + self.write(" WHERE "); + 
self.visit_conditions(conditions); + } + + if let Some(returning) = update.returning { + if !returning.is_empty() { + let values = returning.into_iter().map(|r| r.into()).collect(); + self.write(" RETURNING "); + self.visit_columns(values); + } + } + } + + fn visit_upsert(&mut self, update: Update<'_>) { + self.write("UPDATE "); + + self.write("SET "); + self.visit_update_set(update.clone()); + + if let Some(conditions) = update.conditions { + self.write(" WHERE "); + self.visit_conditions(conditions); + } + } + + fn visit_update_set(&mut self, update: Update<'_>) { + let pairs = update.columns.into_iter().zip(update.values); + let len = pairs.len(); + + for (i, (key, value)) in pairs.enumerate() { + self.visit_column(key); + self.write(" = "); + self.visit_expression(value); + + if i < (len - 1) { + self.write(", "); + } + } + } + + /// A helper for delimiting an identifier, surrounding every part with `C_BACKTICK` + /// and delimiting the values with a `.` + fn delimited_identifiers(&mut self, parts: &[&str]) { + let len = parts.len(); + + for (i, part) in parts.iter().enumerate() { + self.surround_with_backticks(part); + + if i < (len - 1) { + self.write("."); + } + } + } + + /// A helper for delimiting a part of an identifier, surrounding it with `C_BACKTICK` + fn surround_with_backticks(&mut self, part: &str) { + self.surround_with(C_BACKTICK_OPEN, C_BACKTICK_CLOSE, |ref mut s| s.write(part)); + } + + /// A walk through a complete `Query` statement + fn visit_query(&mut self, query: Query<'_>) { + match query { + Query::Select(select) => self.visit_select(*select), + Query::Insert(insert) => self.visit_insert(*insert), + Query::Update(update) => self.visit_update(*update), + Query::Delete(delete) => self.visit_delete(*delete), + } + } + + /// The selected columns + fn visit_columns(&mut self, columns: Vec>) { + let len = columns.len(); + + for (i, column) in columns.into_iter().enumerate() { + self.visit_expression(column); + + if i < (len - 1) { + 
self.write(", "); + } + } + } + + fn visit_operation(&mut self, op: SqlOp<'_>) { + match op { + SqlOp::Add(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" + "); + se.visit_expression(right) + }), + SqlOp::Sub(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" - "); + se.visit_expression(right) + }), + SqlOp::Mul(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" * "); + se.visit_expression(right) + }), + SqlOp::Div(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" / "); + se.visit_expression(right) + }), + SqlOp::Rem(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" % "); + se.visit_expression(right) + }), + SqlOp::Append(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" || "); + se.visit_expression(right) + }), + SqlOp::JsonDeleteAtPath(left, right) => self.surround_with("(", ")", |ref mut se| { + se.visit_expression(left); + se.write(" #- "); + se.visit_expression(right); + }), + } + } + + /// A visit to a value used in an expression + fn visit_expression(&mut self, value: Expression<'_>) { + match value.kind { + ExpressionKind::Value(value) => self.visit_expression(*value), + ExpressionKind::Raw(value) => self.write(value), + ExpressionKind::RawString(value) => { + self.write(C_QUOTE); + self.write(value); + self.write(C_QUOTE); + } + ExpressionKind::ConditionTree(tree) => self.visit_conditions(tree), + ExpressionKind::Compare(compare) => self.visit_compare(compare), + ExpressionKind::Parameterized(val) => self.substitute_value(val), + ExpressionKind::Column(column) => self.visit_column(*column), + ExpressionKind::Row(row) => self.visit_row(row), + ExpressionKind::Selection(selection) => { + self.surround_with("(", ")", |ref mut s| 
s.visit_select(*selection)) + } + ExpressionKind::Function(function) => self.visit_function(*function), + ExpressionKind::Op(op) => self.visit_operation(*op), + ExpressionKind::Values(values) => self.visit_values(values), + ExpressionKind::Asterisk(table) => match table { + Some(table) => { + self.visit_table(*table, false); + self.write(".*") + } + None => self.write("*"), + }, + ExpressionKind::Default => self.write("DEFAULT"), + ExpressionKind::Table(table) => self.visit_table(*table, false), + ExpressionKind::Case(case) => self.visit_case(case), + } + + if let Some(alias) = value.alias { + self.write(" AS "); + + self.delimited_identifiers(&[&*alias]); + }; + } + + fn visit_multiple_tuple_comparison(&mut self, left: Row<'_>, right: Values<'_>, negate: bool) { + self.visit_row(left); + self.write(if negate { " NOT IN " } else { " IN " }); + self.visit_values(right) + } + + fn visit_values(&mut self, values: Values<'_>) { + self.surround_with("(", ")", |ref mut s| { + let len = values.len(); + for (i, row) in values.into_iter().enumerate() { + s.visit_row(row); + + if i < (len - 1) { + s.write(","); + } + } + }) + } + + /// A database table identifier + fn visit_table(&mut self, table: Table<'_>, include_alias: bool) { + match table.typ { + TableType::Table(table_name) => match table.database { + Some(database) => self.delimited_identifiers(&[&*database, &*table_name]), + None => self.delimited_identifiers(&[&*table_name]), + }, + TableType::Values(values) => self.visit_values(values), + TableType::Query(select) => self.surround_with("(", ")", |ref mut s| s.visit_select(*select)), + TableType::JoinedTable(jt) => { + match table.database { + Some(database) => self.delimited_identifiers(&[&*database, &*jt.0]), + None => self.delimited_identifiers(&[&*jt.0]), + } + self.visit_joins(jt.1) + } + TableType::Expression(expr) => { + self.visit_expression(expr); + } + }; + + if include_alias { + if let Some(alias) = table.alias { + self.write(" AS "); + + 
self.delimited_identifiers(&[&*alias]); + }; + } + } + + /// A database column identifier + fn visit_column(&mut self, column: Column<'_>) { + match column.table { + Some(table) => { + self.visit_table(table, false); + self.write("."); + self.delimited_identifiers(&[&*column.name]); + } + _ => self.delimited_identifiers(&[&*column.name]), + }; + + if let Some(alias) = column.alias { + self.write(" AS "); + self.delimited_identifiers(&[&*alias]); + } + } + + /// A row of data used as an expression + fn visit_row(&mut self, row: Row<'_>) { + self.surround_with("(", ")", |ref mut s| { + let len = row.values.len(); + for (i, value) in row.values.into_iter().enumerate() { + s.visit_expression(value); + + if i < (len - 1) { + s.write(","); + } + } + }) + } + + /// A walk through the query conditions + fn visit_conditions(&mut self, tree: ConditionTree<'_>) { + match tree { + ConditionTree::And(expressions) => self.surround_with("(", ")", |ref mut s| { + let len = expressions.len(); + + for (i, expr) in expressions.into_iter().enumerate() { + s.visit_expression(expr); + + if i < (len - 1) { + s.write(" AND "); + } + } + }), + ConditionTree::Or(expressions) => self.surround_with("(", ")", |ref mut s| { + let len = expressions.len(); + + for (i, expr) in expressions.into_iter().enumerate() { + s.visit_expression(expr); + + if i < (len - 1) { + s.write(" OR "); + } + } + }), + ConditionTree::Not(expression) => self.surround_with("(", ")", |ref mut s| { + s.write("NOT "); + s.visit_expression(*expression) + }), + ConditionTree::Single(expression) => self.visit_expression(*expression), + ConditionTree::NoCondition => self.write("1=1"), + ConditionTree::NegativeCondition => self.write("1=0"), + ConditionTree::Exists(table) => self.surround_with("(", ")", |ref mut s| { + s.write("EXISTS "); + + s.surround_with("(", ")", |ref mut s| { + s.visit_table(*table, false); + }) + }), + } + } + + fn visit_greater_than(&mut self, left: Expression<'_>, right: Expression<'_>) { + 
self.visit_expression(left); + self.write(" > "); + self.visit_expression(right) + } + + fn visit_greater_than_or_equals(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" >= "); + self.visit_expression(right) + } + + fn visit_less_than(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" < "); + self.visit_expression(right) + } + + fn visit_less_than_or_equals(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" <= "); + self.visit_expression(right) + } + + /// A comparison expression + fn visit_compare(&mut self, compare: Compare<'_>) { + match compare { + Compare::Equals(left, right) => self.visit_equals(*left, *right), + Compare::NotEquals(left, right) => self.visit_not_equals(*left, *right), + Compare::LessThan(left, right) => self.visit_less_than(*left, *right), + Compare::LessThanOrEquals(left, right) => self.visit_less_than_or_equals(*left, *right), + Compare::GreaterThan(left, right) => self.visit_greater_than(*left, *right), + Compare::GreaterThanOrEquals(left, right) => self.visit_greater_than_or_equals(*left, *right), + Compare::AnySelection(left, right) => { + self.visit_expression(*left); + self.write(" = ANY "); + + self.surround_with("(", ")", |this| { + this.visit_expression(*right); + }); + } + Compare::NotAllSelection(left, right) => { + self.visit_expression(*left); + self.write(" <> ALL "); + + self.surround_with("(", ")", |this| { + this.visit_expression(*right); + }); + } + Compare::In(left, right) => match (*left, *right) { + // To prevent `x IN ()` from happening. + ( + _, + Expression { + kind: ExpressionKind::Row(ref row), + .. + }, + ) if row.is_empty() => self.write("1=0"), + + // To prevent `x IN ()` from happening. + ( + Expression { + kind: ExpressionKind::Row(_), + .. + }, + Expression { + kind: ExpressionKind::Values(ref vals), + .. 
+ }, + ) if vals.row_len() == 0 => self.write("1=0"), + + // Flattening out a row. + ( + Expression { + kind: ExpressionKind::Row(mut cols), + .. + }, + Expression { + kind: ExpressionKind::Values(vals), + .. + }, + ) if cols.len() == 1 && vals.row_len() == 1 => { + let col = cols.pop().unwrap(); + let vals = vals.flatten_row().unwrap(); + + self.visit_expression(col); + self.write(" IN "); + self.visit_row(vals) + } + + // No need to do `IN` if right side is only one value, + ( + left, + Expression { + kind: ExpressionKind::Parameterized(pv), + .. + }, + ) => { + self.visit_expression(left); + self.write(" = "); + self.substitute_value(pv) + } + + ( + Expression { + kind: ExpressionKind::Row(row), + .. + }, + Expression { + kind: ExpressionKind::Values(values), + .. + }, + ) => self.visit_multiple_tuple_comparison(row, values, false), + + // expr IN (..) + (left, right) => { + self.visit_expression(left); + self.write(" IN "); + self.visit_expression(right) + } + }, + Compare::NotIn(left, right) => match (*left, *right) { + // To prevent `x NOT IN ()` from happening. + ( + _, + Expression { + kind: ExpressionKind::Row(ref row), + .. + }, + ) if row.is_empty() => self.write("1=1"), + + // To prevent `x NOT IN ()` from happening. + ( + Expression { + kind: ExpressionKind::Row(_), + .. + }, + Expression { + kind: ExpressionKind::Values(ref vals), + .. + }, + ) if vals.row_len() == 0 => self.write("1=1"), + + // Flattening out a row. + ( + Expression { + kind: ExpressionKind::Row(mut cols), + .. + }, + Expression { + kind: ExpressionKind::Values(vals), + .. + }, + ) if cols.len() == 1 && vals.row_len() == 1 => { + let col = cols.pop().unwrap(); + let vals = vals.flatten_row().unwrap(); + + self.visit_expression(col); + self.write(" NOT IN "); + self.visit_row(vals) + } + + // No need to do `IN` if right side is only one value, + ( + left, + Expression { + kind: ExpressionKind::Parameterized(pv), + .. 
+ }, + ) => { + self.visit_expression(left); + self.write(" <> "); + self.substitute_value(pv) + } + + ( + Expression { + kind: ExpressionKind::Row(row), + .. + }, + Expression { + kind: ExpressionKind::Values(values), + .. + }, + ) => self.visit_multiple_tuple_comparison(row, values, true), + + // expr IN (..) + (left, right) => { + self.visit_expression(left); + self.write(" NOT IN "); + self.visit_expression(right) + } + }, + Compare::Like(left, right) => self.visit_like(*left, *right), + Compare::NotLike(left, right) => self.visit_not_like(*left, *right), + Compare::Null(column) => { + self.visit_expression(*column); + self.write(" IS NULL") + } + Compare::NotNull(column) => { + self.visit_expression(*column); + self.write(" IS NOT NULL") + } + Compare::Between(val, left, right) => { + self.visit_expression(*val); + self.write(" BETWEEN "); + self.visit_expression(*left); + self.write(" AND "); + self.visit_expression(*right) + } + Compare::NotBetween(val, left, right) => { + self.visit_expression(*val); + self.write(" NOT BETWEEN "); + self.visit_expression(*left); + self.write(" AND "); + self.visit_expression(*right) + } + Compare::Raw(left, comp, right) => { + self.visit_expression(*left); + self.write(" "); + self.write(comp); + self.write(" "); + self.visit_expression(*right) + } + Compare::Json(json_compare) => match json_compare { + JsonCompare::ArrayContains(left, right) => self.visit_array_contains(*left, *right, false), + JsonCompare::ArrayContained(left, right) => self.visit_array_contained(*left, *right, false), + JsonCompare::ArrayOverlaps(left, right) => self.visit_array_overlaps(*left, *right), + JsonCompare::ArrayNotContains(left, right) => self.visit_array_contains(*left, *right, true), + JsonCompare::TypeEquals(left, json_type) => self.visit_json_type_equals(*left, json_type, false), + JsonCompare::TypeNotEquals(left, json_type) => self.visit_json_type_equals(*left, json_type, true), + }, + Compare::Any(left) => { + self.write("ANY"); + 
self.surround_with("(", ")", |s| s.visit_expression(*left)) + } + Compare::All(left) => { + self.write("ALL"); + self.surround_with("(", ")", |s| s.visit_expression(*left)) + } + } + } + + fn visit_equals(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" = "); + self.visit_expression(right); + } + + fn visit_not_equals(&mut self, left: Expression<'_>, right: Expression<'_>) { + self.visit_expression(left); + self.write(" <> "); + self.visit_expression(right); + } + + /// A visit in the `GROUP BY` section of the query + fn visit_grouping(&mut self, grouping: Grouping<'_>) { + let len = grouping.0.len(); + + for (i, value) in grouping.0.into_iter().enumerate() { + self.visit_expression(value); + + if i < (len - 1) { + self.write(", "); + } + } + } + + fn visit_average(&mut self, avg: Average<'_>) { + self.write("AVG"); + self.surround_with("(", ")", |ref mut s| s.visit_column(avg.column)); + } + + fn visit_function(&mut self, fun: Function<'_>) { + match fun.typ_ { + FunctionType::Count(fun_count) => { + if fun_count.exprs.is_empty() { + self.write("COUNT(*)"); + } else { + self.write("COUNT"); + self.surround_with("(", ")", |ref mut s| s.visit_columns(fun_count.exprs)); + } + } + FunctionType::Cast(cast) => { + self.write("CAST"); + self.surround_with("(", ")", |ref mut s| { + s.visit_expression(cast.expr); + s.write(" AS "); + s.write(cast.target_type); + }); + } + FunctionType::AggregateToString(agg) => { + self.visit_aggregate_to_string(agg.value.as_ref().clone()); + } + FunctionType::RowToJson(row_to_json) => { + self.write("ROW_TO_JSON"); + self.surround_with("(", ")", |ref mut s| s.visit_table(row_to_json.expr, false)) + } + FunctionType::Average(avg) => { + self.visit_average(avg); + } + FunctionType::Sum(sum) => { + self.write("SUM"); + self.surround_with("(", ")", |ref mut s| s.visit_expression(*sum.expr)); + } + FunctionType::Lower(lower) => { + self.write("LOWER"); + self.surround_with("(", ")", |ref 
mut s| s.visit_expression(*lower.expression)); + } + FunctionType::Upper(upper) => { + self.write("UPPER"); + self.surround_with("(", ")", |ref mut s| s.visit_expression(*upper.expression)); + } + FunctionType::Minimum(min) => { + self.write("MIN"); + self.surround_with("(", ")", |ref mut s| s.visit_column(min.column)); + } + FunctionType::Maximum(max) => { + self.write("MAX"); + self.surround_with("(", ")", |ref mut s| s.visit_column(max.column)); + } + FunctionType::Coalesce(coalesce) => { + self.write("COALESCE"); + self.surround_with("(", ")", |s| s.visit_columns(coalesce.exprs)); + } + FunctionType::JsonExtract(json_extract) => { + self.visit_json_extract(json_extract); + } + FunctionType::JsonExtractFirstArrayElem(extract) => { + self.visit_json_extract_first_array_item(extract); + } + FunctionType::JsonExtractLastArrayElem(extract) => { + self.visit_json_extract_last_array_item(extract); + } + FunctionType::JsonUnquote(unquote) => { + self.visit_json_unquote(unquote); + } + FunctionType::ToJsonb(to_jsonb) => self.visit_to_jsonb(to_jsonb), + FunctionType::JsonbAgg(json_agg) => self.visit_jsonb_agg(json_agg), + FunctionType::Encode(encode) => self.visit_encode(encode), + FunctionType::JsonBuildObject(encode) => self.visit_json_build_object(encode), + FunctionType::Unnest(unnest) => self.visit_unnest(unnest), + FunctionType::Concat(concat) => { + self.visit_concat(concat); + } + }; + + if let Some(alias) = fun.alias { + self.write(" AS "); + self.delimited_identifiers(&[&*alias]); + } + } + + fn visit_case(&mut self, case: ast::Case<'_>) { + self.write("CASE"); + + for when in case.when { + self.write(" WHEN "); + self.visit_expression(when.condition); + self.write(" THEN "); + self.visit_expression(when.result); + } + + self.write(" ELSE "); + self.visit_expression(*case.r#else); + self.write(" END"); + } + + fn visit_unnest(&mut self, unnest: ast::Unnest<'_>) { + self.write("unnest"); + + self.surround_with("(", ")", |s| { + 
s.visit_expression(*unnest.expression); + }); + } +} diff --git a/extensions/postgres/.gitignore b/extensions/postgres/.gitignore new file mode 100644 index 00000000..377de80d --- /dev/null +++ b/extensions/postgres/.gitignore @@ -0,0 +1,3 @@ +target +build +.build.lock diff --git a/extensions/postgres/Cargo.toml b/extensions/postgres/Cargo.toml new file mode 100644 index 00000000..b5fb4353 --- /dev/null +++ b/extensions/postgres/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "postgres-extension" +version = "0.1.0" +edition = "2024" +license = "Apache-2.0" + +[lib] +name = "postgres_extension" +path = "src/lib.rs" +crate-type = ["cdylib"] + +[features] +default = [] + +# Common dependencies for both WASI and native +[dependencies] +Inflector.workspace = true +chrono.workspace = true +grafbase-database-definition.workspace = true +grafbase-sdk.workspace = true +indexmap = { workspace = true, features = ["serde"] } +itertools.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +sql-ast.workspace = true +tracing.workspace = true +tracing-subscriber = { workspace = true, features = ["json", "env-filter"] } + +[dev-dependencies] +futures.workspace = true +grafbase-postgres-introspection.workspace = true +grafbase-sdk = { workspace = true, features = ["test-utils"] } +indoc.workspace = true +insta = { workspace = true, features = ["json"] } +names.workspace = true +serde_json.workspace = true +sqlx = { workspace = true, features = [ + "sqlx-postgres", + "json", + "runtime-tokio", + "tls-rustls-aws-lc-rs", + "tls-rustls-ring-native-roots", + "postgres", +] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros", "test-util"] } diff --git a/extensions/postgres/README.md b/extensions/postgres/README.md new file mode 100644 index 00000000..c08503cc --- /dev/null +++ b/extensions/postgres/README.md @@ -0,0 +1,299 @@ +# Grafbase Postgres Extension + +This Postgres extension for the Grafbase Gateway acts as a 
complete virtual subgraph that you can compose into your federated schema. + +Use the [`grafbase-postgres`](https://github.com/grafbase/extensions/tree/julius/ywmxqwoumunr/cli/postgres) introspection tool to generate a subgraph schema from your Postgres database. You can then compose this subgraph into your federated schema. Commit the generated schema to your repository and re-introspect the database periodically to keep the schema up-to-date. + +The introspection process generates a complete set of types representing the data in your Postgres database, including entities, relationships, and scalar fields. Use these types to query and mutate data in your Postgres database via the Grafbase Gateway. + +Generally, every operation executed through the extension generates exactly one SQL statement. This includes relationships, which the extension represents as lateral joins. The database handles JSON rendering; the extension manages query generation and data retrieval. + +**Note:** This extension is currently under development. Known missing features include: + +* TLS support for database connections +* Support for types from Postgres extensions (e.g., PostGIS) +* Real pagination cursors and page info (currently returns dummy values) + +## Getting Started + +### Introspection Tool (`grafbase-postgres`) + +Currently, you must compile the `grafbase-postgres` introspection tool from source (refer to its [repository](https://github.com/grafbase/extensions/tree/julius/ywmxqwoumunr/cli/postgres) for instructions). After successful compilation, copy the resulting binary to a directory in your system's `PATH`. + +*Future versions will offer easier installation, likely integrated with the Grafbase CLI.* + +### Building the Extension (Wasm) + +To use the extension with the Grafbase Gateway, you first need to build the Wasm component. + +1. Build the extension: + ```bash + grafbase extension build + ``` +2. 
This command creates a `build` directory containing the Wasm module and its manifest: + ``` + build/ + ├── extension.wasm + └── manifest.json + ``` + +### Gateway Configuration + +Configure your Grafbase Gateway to use the extension in your `grafbase.config.toml`. + +**Using a Published Version (Recommended when available):** + +Specify the desired version (replace `0.1` when newer versions are released): + +```toml +# grafbase.config.toml +[extensions.postgres] +version = "0.1" +``` + +**Using a Local Build:** + +If you built the extension manually, point the gateway to the build directory: + +```toml +# grafbase.config.toml +[extensions.postgres] +path = "/path/to/your/build" # Update this path +``` + +## Building From Source + +As mentioned above, you currently need to build the Wasm module locally. + +1. Clone the repository containing this extension. +2. Navigate to the extension's directory. +3. Run the build command: + ```bash + grafbase extension build + ``` +4. Configure your gateway to use the generated artifacts in the `build` directory as shown in the [Gateway Configuration](#gateway-configuration) section. + +## Testing + +To run the tests: + +1. Start the test database using Docker Compose: + ```bash + docker compose up -d + ``` +2. Execute the test suite using Cargo: + ```bash + cargo test + ``` + +### Faster Test Execution + +To speed up a full test run, you can pre-compile the extension: + +```bash +grafbase extension build +``` + +Then, run the tests with the `PREBUILT_EXTENSION` environment variable set: + +```bash +PREBUILT_EXTENSION=1 cargo test +``` + +## Configuration + +Configure the Postgres extension within your `grafbase.config.toml` under the `[extensions.postgres.config]` section. See the [Grafbase Gateway configuration documentation](https://grafbase.com/docs/reference/gateway/configuration/extensions) for general extension configuration details. 
+ +```toml +# Example configuration within grafbase.config.toml +[extensions.postgres] + # version = "0.1" # Or path = "..." + +[extensions.postgres.config] +[[extensions.postgres.config.databases]] +# Optional: Specify a name if connecting multiple databases. +# This name links the database connection to a specific subgraph. +name = "default" +# Use environment variables for sensitive parts like passwords +url = "postgres://user:password@host:port/database" + +[extensions.postgres.config.databases.pool] +# Maximum number of connections (default: 10) +max_connections = 10 +# Minimum number of idle connections maintained (default: 0) +min_connections = 0 +# Maximum idle time before closing a connection (ms, default: 600000 / 10 min) +idle_timeout_ms = 600000 +# Maximum time to wait for a connection from the pool (ms, default: 30000 / 30 sec) +acquire_timeout_ms = 30000 +# Maximum lifetime of a connection (ms, default: 1800000 / 30 min) +max_lifetime_ms = 1800000 +``` + +## Introspection + +Use the `grafbase-postgres introspect` command to generate a GraphQL schema definition (SDL) from your database: + +```bash +# Ensure grafbase-postgres is in your PATH +# Replace connection details and version as needed +grafbase-postgres \ + -d "postgres://postgres:grafbase@localhost:5432/postgres" \ + introspect \ + -v 0.1.0 > subgraph.graphql +``` + +This command generates a GraphQL schema (`subgraph.graphql` in this example). You can then publish this schema as a virtual subgraph to your federated graph using the [Grafbase CLI `publish` command](https://grafbase.com/docs/reference/grafbase-cli/publish): + +```bash +# Replace with your graph details +grafbase publish \ + --name postgres \ + my-org/my-graph@main \ + -m "Add postgres subgraph" \ + --virtual \ + --schema subgraph.graphql +``` + +## Generated Schema (Queries and Types) + +The introspection process reads tables and enums from the specified database schema to generate a GraphQL SDL (view support is not yet implemented — TODO).
+ +Consider this example PostgreSQL schema: + +```sql +CREATE TABLE "users" ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + username VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + metadata JSONB DEFAULT '{}' +); + +CREATE TABLE profiles ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id BIGINT NOT NULL REFERENCES "users"(id) ON DELETE CASCADE, + first_name VARCHAR(100), + last_name VARCHAR(100) +); +``` + +The introspection tool generates the following GraphQL types based on the tables above: + +```graphql +type Profile + @pgTable(name: "profiles") + @pgKey(fields: ["id"], type: PRIMARY) +{ + id: BigInt! @pgColumn(name: "id", type: BIGINT) + userId: BigInt! @pgColumn(name: "user_id", type: BIGINT) + firstName: String @pgColumn(name: "first_name", type: VARCHAR) + lastName: String @pgColumn(name: "last_name", type: VARCHAR) + user: User! @pgRelation(name: "profiles_user_id_fkey", fields: ["userId"], references: ["id"]) +} + +type User + @pgTable(name: "users") + @pgKey(fields: ["id"], type: PRIMARY) +{ + id: BigInt! @pgColumn(name: "id", type: BIGINT) + username: String! @pgColumn(name: "username", type: VARCHAR) + email: String! @pgColumn(name: "email", type: VARCHAR) + metadata: JSON @pgColumn(name: "metadata", type: JSONB) + profiles( + filter: ProfileFilterInput, + first: Int, + last: Int, + before: String, + after: String, + orderBy: [ProfileOrderByInput!], + ): ProfileConnection! @pgRelation(name: "profiles_user_id_fkey") +} +``` + +**Key Generation Principles:** + +* **Naming:** Field names default to camelCase, and type names default to PascalCase. The original database names are preserved in the `@pgTable` and `@pgColumn` directives. +* **Schemas:** If your database uses multiple PostgreSQL schemas, the directives (e.g., `@pgTable(name: "users", schema: "public")`) will include the schema name. +* **Relationships:** The tool generates fields for foreign key relationships using the `@pgRelation` directive. 
The side defining the foreign key constraint includes `fields` and `references` arguments; the other side represents the inverse relationship. +* **JSON Types:** Columns with `JSON` or `JSONB` types map to the `JSON` scalar type in the SDL. + * If your JSON data has a consistent structure, you can replace the `JSON` scalar with a custom GraphQL object type *after* introspection. **Note:** Queries will fail if the database returns JSON that doesn't match your custom type definition. +* **Customization:** You can rename generated types and fields after introspection. However, you **must** keep the original database object names within the `@pgTable`, `@pgColumn`, and `@pgRelation` directives. Ensure you also update any corresponding input types if you rename elements. +* **Pruning:** You can safely remove unused queries, mutations, and their associated input/output types from the generated schema if they are not needed in your API. + +### Queries + +The introspection generates queries for fetching single records and collections. + +```graphql +# Example generated queries +type Query { + # Fetch a single user by primary/unique key + user( + lookup: UserLookupInput!, + ): User @pgSelectOne + + # Fetch a collection of users with filtering, ordering, and pagination + users( + filter: UserFilterInput, + first: Int, + last: Int, + before: String, + after: String, + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany +} +``` + +* **Single Record (`@pgSelectOne`):** Fetches a unique row (e.g., `user`). Its `lookup` argument accepts fields corresponding to the table's primary key or unique constraints. For composite keys, the tool generates specific input types. +* **Collections (`@pgSelectMany`):** Fetches multiple rows (e.g., `users`). It supports filtering (`filter`), ordering (`orderBy`), and cursor-based pagination (`first`, `last`, `before`, `after`). 
+* **Performance:** When you query fields representing relationships, the extension generates efficient SQL joins (specifically lateral joins). The extension guarantees execution of exactly one SQL query per incoming GraphQL request, preventing the N+1 query problem. +* **Pagination:** Queries returning multiple items (including nested one-to-many relations) expose standard GraphQL connection types with pagination arguments (`first`, `last`, `before`, `after`) and `pageInfo`. (**Note:** Cursors and `pageInfo` currently return dummy values, see Missing Features). + +### Mutations + +The introspection also generates standard CRUD mutations. + +```graphql +# Example generated mutations +type Mutation { + # Create a single user + userCreate( + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + + # Create multiple users + userCreateMany( + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + + # Update a single user (identified by lookup) + userUpdate( + lookup: UserLookupInput!, + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + + # Update multiple users (identified by filter) + userUpdateMany( + filter: UserFilterInput, + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + + # Delete a single user (identified by lookup) + userDelete( + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + + # Delete multiple users (identified by filter) + userDeleteMany( + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany +} +``` + +* **Operations:** The tool generates mutations for single-row (`@pgInsertOne`, `@pgUpdateOne`, `@pgDeleteOne`) and multi-row (`@pgInsertMany`, `@pgUpdateMany`, `@pgDeleteMany`) operations. +* **Returning Data:** All mutations support a `returning` selection set, allowing you to fetch data about the affected rows within the same database transaction. +* **Performance:** Each mutation executes as a single SQL statement. 
+ +### Supported PostgreSQL Versions + +We primarily test against the latest stable PostgreSQL version. The extension relies on SQL features, particularly JSON/JSONB functions, available in PostgreSQL. Therefore, the minimum supported version is **PostgreSQL 9.4**. diff --git a/extensions/postgres/definitions.graphql b/extensions/postgres/definitions.graphql new file mode 100644 index 00000000..83f655a1 --- /dev/null +++ b/extensions/postgres/definitions.graphql @@ -0,0 +1,539 @@ +extend schema + @link(url: "https://specs.grafbase.com/grafbase", import: ["InputValueSet"]) + +""" +Defines a Postgres database connection to be used in the subgraph. + +A subgraph can only have one Postgres database connection. If needing +multiple connections, create multiple subgraphs. +""" +directive @pgDatabase( + """ + The name of the Postgres database + """ + name: String! +) on SCHEMA + +""" +Defines a Postgres table mapping for a GraphQL object type +""" +directive @pgTable( + """ + The name of the Postgres table + """ + name: String! + + """ + The schema where the table is located, defaults to 'public' + """ + schema: String! = "public" +) on OBJECT + +""" +Defines a Postgres key constraint (PRIMARY or UNIQUE) for a table +""" +directive @pgKey( + """ + List of field names that comprise this key + """ + fields: [String]! + + """ + Type of key constraint (PRIMARY or UNIQUE) + """ + type: PgKeyType! +) repeatable on OBJECT + +""" +Defines a Postgres column mapping for a GraphQL field +""" +directive @pgColumn( + """ + The name of the Postgres column + """ + name: String! + + """ + The Postgres data type for this column + """ + type: PgColumnType! + + """ + If the type is ENUM, this must be defined to an enum type + which is available in the database. + """ + enumType: String + + """ + If the type is ENUM, and if the enum is in a different schema + than the column, this must be defined to point to a correct schema. 
+ """ + enumSchema: String +) on FIELD_DEFINITION + +""" +Types of Postgres key constraints +""" +enum PgKeyType { + """ + Primary key constraint - uniquely identifies each record in the table + """ + PRIMARY + + """ + Unique constraint - ensures all values in a column or combination of columns are distinct + """ + UNIQUE +} + +""" +Defines a Postgres foreign key relationship between two tables +""" +directive @pgRelation( + """ + The name of the relation - used to identify the relationship in the database + """ + name: String! + + """ + Array of field names from the source table that form the foreign key. Must be defined exactly + on one side of the relation. + """ + fields: [String!] + + """ + Array of field names from the target table being referenced. Must be defined exactly on + one side of the relation. + """ + references: [String!] +) on FIELD_DEFINITION + +""" +Defines a Postgres enum type mapping for a GraphQL enum +""" +directive @pgEnum( + """ + The name of the Postgres enum type + """ + name: String! + + """ + The schema where the enum type is located, defaults to 'public' + """ + schema: String! = "public" +) on ENUM + +""" +Defines a Postgres enum variant mapping for a GraphQL enum value +""" +directive @pgEnumVariant( + """ + The name of the Postgres enum variant value + """ + name: String! +) on ENUM_VALUE + +""" +Defines the type as a connection type, to be used with 1-n relations. +""" +directive @pgConnection( + """ + The name of the type this mutation targets + """ + type: String! +) on OBJECT + +""" +Defines the type as a mutation type, to be used as a mutation return value. +""" +directive @pgMutation( + """ + The name of the type this mutation targets + """ + type: String! +) on OBJECT + +""" +Defines the type to be an output for postgres RETURNING mutation. +""" +directive @pgReturning( + """ + The name of the type to be returned after the mutation + """ + type: String! 
+) on OBJECT + +""" +Directive to run a Postgres SELECT query for a field, returning at most one row +""" +directive @pgSelectOne on FIELD_DEFINITION + +""" +Directive to run a Postgres SELECT query for a field, returning an array of rows +""" +directive @pgSelectMany on FIELD_DEFINITION + +""" +Directive to run a Postgres INSERT query for a field, returning the inserted row +""" +directive @pgInsertOne on FIELD_DEFINITION + +""" +Directive to run a Postgres INSERT query for a field, returning the inserted rows +""" +directive @pgInsertMany on FIELD_DEFINITION + +""" +Directive to run a Postgres UPDATE query for a field, returning the updated row +""" +directive @pgUpdateOne on FIELD_DEFINITION + +""" +Directive to run a Postgres UPDATE query for a field, returning the updated rows +""" +directive @pgUpdateMany on FIELD_DEFINITION + +""" +Directive to run a Postgres DELETE query for a field, returning the deleted row +""" +directive @pgDeleteOne on FIELD_DEFINITION + +""" +Directive to run a Postgres DELETE query for a field, returning the deleted rows +""" +directive @pgDeleteMany on FIELD_DEFINITION + +""" +Postgres data types supported for column definitions +""" +enum PgColumnType { + """ + 2-byte signed integer, range: -32768 to +32767 + """ + SMALLINT + + """ + 4-byte signed integer, range: -2147483648 to +2147483647 + """ + INTEGER + + """ + Alias for INTEGER + """ + INT + + """ + 8-byte signed integer, range: -9223372036854775808 to +9223372036854775807 + """ + BIGINT + + """ + Exact numeric with selectable precision + """ + DECIMAL + + """ + Alias for DECIMAL + """ + NUMERIC + + """ + 4-byte floating-point number + """ + REAL + + """ + 8-byte floating-point number + """ + DOUBLE_PRECISION + + """ + 2-byte autoincrementing integer + """ + SMALLSERIAL + + """ + 4-byte autoincrementing integer + """ + SERIAL + + """ + 8-byte autoincrementing integer + """ + BIGSERIAL + + """ + Variable-length character string with limit + """ + VARCHAR + + """ + Fixed-length 
character string, blank padded + """ + CHAR + + """ + Variable unlimited length character string + """ + TEXT + + """ + Binary data ("byte array") + """ + BYTEA + + """ + Date and time (without time zone) + """ + TIMESTAMP + + """ + Date and time with time zone + """ + TIMESTAMPTZ + + """ + Calendar date (year, month, day) + """ + DATE + + """ + Time of day (without time zone) + """ + TIME + + """ + Time of day with time zone + """ + TIMETZ + + """ + Time interval + """ + INTERVAL + + """ + Logical Boolean (true/false) + """ + BOOLEAN + + """ + User-defined enumerated type + """ + ENUM + + """ + Geometric point on a plane + """ + POINT + + """ + Infinite geometric line + """ + LINE + + """ + Geometric line segment + """ + LSEG + + """ + Rectangular geometric box + """ + BOX + + """ + Geometric path + """ + PATH + + """ + Geometric polygon + """ + POLYGON + + """ + Geometric circle + """ + CIRCLE + + """ + IPv4 or IPv6 network address + """ + CIDR + + """ + IPv4 or IPv6 host address + """ + INET + + """ + MAC address (6 bytes) + """ + MACADDR + + """ + MAC address (8 bytes, EUI-64 format) + """ + MACADDR8 + + """ + Currency amount + """ + MONEY + + """ + Fixed-length bit string + """ + BIT + + """ + Variable-length bit string + """ + BIT_VARYING + + """ + Alias for BIT_VARYING + """ + VARBIT + + """ + Text search document + """ + TSVECTOR + + """ + Text search query + """ + TSQUERY + + """ + Universally unique identifier + """ + UUID + + """ + XML data + """ + XML + + """ + Textual JSON data + """ + JSON + + """ + Binary JSON data, decomposed + """ + JSONB + + """ + Array of data type + """ + ARRAY + + """ + User-defined composite type + """ + COMPOSITE + + """ + Range of integers (4-byte) + """ + INT4RANGE + + """ + Range of integers (8-byte) + """ + INT8RANGE + + """ + Range of numeric values + """ + NUMRANGE + + """ + Range of timestamp without time zone + """ + TSRANGE + + """ + Range of timestamp with time zone + """ + TSTZRANGE + + """ + Range of dates + """ + 
 DATERANGE + + """ + User-defined domain type + """ + DOMAIN + + """ + Object identifier + """ + OID + + """ + Function name + """ + REGPROC + + """ + Function with argument types + """ + REGPROCEDURE + + """ + Operator name + """ + REGOPER + + """ + Operator with argument types + """ + REGOPERATOR + + """ + Relation name + """ + REGCLASS + + """ + Data type name + """ + REGTYPE + + """ + Role name + """ + REGROLE + + """ + Schema name + """ + REGNAMESPACE + + """ + Text search configuration + """ + REGCONFIG + + """ + Text search dictionary + """ + REGDICTIONARY + + """ + Postgres Log Sequence Number + """ + PG_LSN + + """ + An enum type. You must define the name of the enum. + """ + ENUM +} diff --git a/extensions/postgres/extension.toml b/extensions/postgres/extension.toml new file mode 100644 index 00000000..f6a66821 --- /dev/null +++ b/extensions/postgres/extension.toml @@ -0,0 +1,17 @@ +[extension] +name = "postgres" +version = "0.1.0" +description = "Query and mutate data in a PostgreSQL database from your federated graph" +homepage_url = "https://grafbase.com/extensions/postgres" +repository_url = "https://github.com/grafbase/extensions/tree/main/extensions/postgres" +license = "Apache-2.0" +type = "selection_set_resolver" + +# These are the default permissions for the extension. +# The user can enable or disable them as needed in the gateway +# configuration file. 
+[permissions] +network = true +stdout = true +stderr = true +environment_variables = true diff --git a/extensions/postgres/src/config.rs b/extensions/postgres/src/config.rs new file mode 100644 index 00000000..dc557d50 --- /dev/null +++ b/extensions/postgres/src/config.rs @@ -0,0 +1,32 @@ +#[derive(Debug, Clone, serde::Deserialize)] +pub struct PostgresConfig { + pub databases: Vec, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(default)] +pub struct DatabaseConfig { + pub name: String, + pub url: String, + pub pool: Option, +} + +impl Default for DatabaseConfig { + fn default() -> Self { + Self { + name: "default".to_string(), + url: "postgres://localhost:5432/postgres".to_string(), + pool: None, + } + } +} + +#[derive(Debug, Clone, Default, serde::Deserialize)] +#[serde(default)] +pub struct PoolConfig { + pub max_connections: Option, + pub min_connections: Option, + pub idle_timeout_ms: Option, + pub acquire_timeout_ms: Option, + pub max_lifetime_ms: Option, +} diff --git a/extensions/postgres/src/context.rs b/extensions/postgres/src/context.rs new file mode 100644 index 00000000..70e65b46 --- /dev/null +++ b/extensions/postgres/src/context.rs @@ -0,0 +1,190 @@ +pub mod create_input; +pub mod filter; +pub mod selection_iterator; +pub mod update_input; + +use create_input::{CreateInputIterator, CreateInputParameters, CreateManyInputParameters}; +use filter::{FilterIterator, MultipleFilterIterator, UniqueFilterIterator}; +use grafbase_database_definition::{ + DatabaseDefinition, DatabaseType, EnumWalker, Operation, TableColumnWalker, TableWalker, +}; +use grafbase_sdk::{ + SdkError, + host_io::postgres::{ + self, + types::{DatabaseType as _, DatabaseValue}, + }, + types::{ArgumentValues, Field}, +}; +use selection_iterator::SelectionIterator; +use serde_json::Value; +use update_input::UpdateInputIterator; + +#[derive(Clone, Copy)] +pub struct Context<'a> { + pub(super) arguments: ArgumentValues<'a>, + pub(super) database_definition: &'a 
DatabaseDefinition, + pub(super) pool: &'a postgres::Pool, + pub(super) operation: Operation, + pub(super) field: Field<'a>, +} + +#[derive(Debug, Clone, serde::Deserialize)] +struct Filter { + filter: Option>, +} + +#[derive(Debug, Clone, serde::Deserialize)] +struct Lookup { + lookup: serde_json::Map, +} + +impl<'a> Context<'a> { + pub fn operation(self) -> Operation { + self.operation + } + + pub(crate) fn selection(self, table: TableWalker<'a>) -> Result, SdkError> { + SelectionIterator::new(self, table, self.field, self.field.selection_set()) + } + + pub fn collection_selection(self, table: TableWalker<'a>) -> Result, SdkError> { + let field = self + .field + .selection_set() + .fields() + .find(|f| { + self.database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("edges") + }) + .ok_or_else(|| SdkError::from("edges field not defined in selection"))? + .selection_set() + .fields() + .find(|f| { + self.database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("node") + }) + .ok_or_else(|| SdkError::from("node field not defined in edges selection"))?; + + let selection = field.selection_set(); + + SelectionIterator::new(self, table, field, selection) + } + + pub(crate) fn create_input(&'a self, table: TableWalker<'a>) -> Result, SdkError> { + let args = self.field.arguments::(self.arguments)?; + let iterator = CreateInputIterator::new(self, table, args.input)?; + Ok(iterator) + } + + pub(crate) fn create_many_input( + &'a self, + table: TableWalker<'a>, + ) -> Result>, SdkError> { + let args = self.field.arguments::(self.arguments)?; + let mut result = Vec::with_capacity(args.input.len()); + + for args in args.input { + result.push(CreateInputIterator::new(self, table, args)?); + } + + Ok(result) + } + + pub(crate) fn update_input(&'a self, table: TableWalker<'a>) -> Result, SdkError> { + let iterator = UpdateInputIterator::new(self, table)?; + Ok(iterator) + } + + pub(crate) fn unique_filter(self, table: 
TableWalker<'a>) -> Result, SdkError> { + let filter = self.field.arguments::(self.arguments)?; + let iterator = UniqueFilterIterator::new(self, table, filter.lookup); + + Ok(FilterIterator::Unique(iterator)) + } + + pub fn mutation_is_returning(self) -> bool { + self.field.selection_set().fields().any(|f| { + self.database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("returning") + }) + } + + pub fn returning_selection(self, table: TableWalker<'a>) -> Result>, SdkError> { + let Some(returning) = self.field.selection_set().fields().find(|f| { + self.database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("returning") + }) else { + return Ok(None); + }; + + let iterator = SelectionIterator::new(self, table, returning, returning.selection_set())?; + + Ok(Some(iterator)) + } + + /// A complex `user(filter: { id: { eq: 1 } })` filter. + pub fn filter(&'a self, table: TableWalker<'a>) -> Result, SdkError> { + let filter_map = self + .field + .arguments::(self.arguments)? 
+ .filter + .unwrap_or_default(); + + let iterator = MultipleFilterIterator::new(self, table, filter_map); + + Ok(FilterIterator::Multiple(iterator)) + } +} + +fn rename_enum_variants(column: TableColumnWalker<'_>, value: DatabaseValue) -> Result { + let value = match column.database_type() { + DatabaseType::Scalar(_) => value, + DatabaseType::Enum(r#enum) => { + if let Some(variant) = value.as_str() { + match rename_enum_variant(r#enum, variant) { + Ok(new_variant) => new_variant.into_bound_value(0), + Err(err) => return Err(err), + } + } else if let Some(values) = value.to_list() { + let mut new_values = Vec::with_capacity(values.len()); + + for value in values { + let Some(variant) = value.as_str() else { + return Err(SdkError::from(format!( + "got non-string value for enum field {}", + column.client_name(), + ))); + }; + + match rename_enum_variant(r#enum, variant) { + Ok(new_variant) => new_values.push(new_variant), + Err(err) => return Err(err), + } + } + + new_values.into_bound_value(0) + } else { + return Err(SdkError::from(format!( + "got non-string value for enum field {}", + column.client_name(), + ))); + } + } + }; + + Ok(value) +} + +fn rename_enum_variant(r#enum: EnumWalker<'_>, variant: &str) -> Result { + let result = r#enum + .rename_variant(variant) + .ok_or_else(|| SdkError::from(format!("invalid enum variant {}", variant)))? + .to_string(); + + Ok(result) +} diff --git a/extensions/postgres/src/context/create_input.rs b/extensions/postgres/src/context/create_input.rs new file mode 100644 index 00000000..58e2bb4b --- /dev/null +++ b/extensions/postgres/src/context/create_input.rs @@ -0,0 +1,89 @@ +use std::collections::VecDeque; + +use grafbase_database_definition::{TableColumnWalker, TableWalker}; +use grafbase_sdk::{SdkError, host_io::postgres::types::DatabaseValue}; + +use super::Context; + +pub enum CreateInputItem<'a> { + /// Inserts a single column value. 
+ Column(TableColumnWalker<'a>, DatabaseValue), + DefaultValue(TableColumnWalker<'a>), +} + +enum IteratorInput<'a> { + FromUser(VecDeque<(TableColumnWalker<'a>, DatabaseValue)>), + Default(VecDeque>), +} + +pub struct CreateInputIterator<'a> { + input: IteratorInput<'a>, +} + +#[derive(Debug, Clone, serde::Deserialize)] +pub struct CreateInputParameters { + pub input: serde_json::Map, +} + +#[derive(Debug, Clone, serde::Deserialize)] +pub struct CreateManyInputParameters { + pub input: Vec>, +} + +impl<'a> CreateInputIterator<'a> { + pub fn new( + ctx: &'a Context<'a>, + table: TableWalker<'a>, + args: serde_json::Map, + ) -> Result { + let mut input = VecDeque::new(); + + for (ref field_name, value) in args { + let Some(column) = ctx + .database_definition + .find_column_for_client_field(field_name, table.id()) + else { + return Err(SdkError::from(format!("field {field_name} not found"))); + }; + + let value = DatabaseValue::from_json_input(value, column.database_type(), column.is_array())?; + + input.push_back((column, value)); + } + + let input = match input { + input if input.is_empty() => { + let mut input = VecDeque::new(); + + for column in table.columns() { + input.push_back(column); + } + + IteratorInput::Default(input) + } + input => IteratorInput::FromUser(input), + }; + + Ok(Self { input }) + } +} + +impl<'a> Iterator for CreateInputIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + match self.input { + IteratorInput::FromUser(ref mut input) => { + let (column, value) = input.pop_front()?; + + let value = match super::rename_enum_variants(column, value) { + Ok(value) => value, + Err(err) => return Some(Err(err)), + }; + + Some(Ok(CreateInputItem::Column(column, value))) + } + IteratorInput::Default(ref mut input) => Some(Ok(CreateInputItem::DefaultValue(input.pop_front()?))), + } + } +} diff --git a/extensions/postgres/src/context/filter.rs b/extensions/postgres/src/context/filter.rs new file mode 100644 index 
00000000..776a0f32 --- /dev/null +++ b/extensions/postgres/src/context/filter.rs @@ -0,0 +1,29 @@ +mod complex; +mod simple; + +pub(super) use complex::MultipleFilterIterator; +use grafbase_sdk::SdkError; +pub(super) use simple::UniqueFilterIterator; + +use sql_ast::ast::ConditionTree; + +#[derive(Clone)] +pub enum FilterIterator<'a> { + Unique(UniqueFilterIterator<'a>), + Multiple(MultipleFilterIterator<'a>), +} + +impl<'a> Iterator for FilterIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + match self { + FilterIterator::Unique(iterator) => match iterator.next() { + Some(Ok(condition)) => Some(Ok(ConditionTree::from(condition))), + Some(Err(err)) => Some(Err(err)), + None => None, + }, + FilterIterator::Multiple(iterator) => iterator.next(), + } + } +} diff --git a/extensions/postgres/src/context/filter/complex.rs b/extensions/postgres/src/context/filter/complex.rs new file mode 100644 index 00000000..b21f5006 --- /dev/null +++ b/extensions/postgres/src/context/filter/complex.rs @@ -0,0 +1,197 @@ +use crate::context::Context; +use grafbase_database_definition::{TableColumnWalker, TableWalker}; +use grafbase_sdk::{ + SdkError, + host_io::postgres::types::{DatabaseType, DatabaseValue}, +}; +use serde_json::{Map, Value}; +use sql_ast::ast::{Column, Comparable, ConditionTree, Expression, Select}; +use std::collections::VecDeque; + +#[derive(Clone)] +pub struct MultipleFilterIterator<'a> { + context: &'a Context<'a>, + table: TableWalker<'a>, + filter: VecDeque<(String, Value)>, +} + +impl<'a> MultipleFilterIterator<'a> { + pub fn new( + context: &'a Context<'a>, + table: TableWalker<'a>, + filter: impl IntoIterator, + ) -> Self { + Self { + context, + table, + filter: VecDeque::from_iter(filter), + } + } +} + +impl<'a> Iterator for MultipleFilterIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + let (field, value) = self.filter.pop_front()?; + + // filtering from a related table. 
+ if let Some(relation) = self + .context + .database_definition + .get_relation_for_client_name(self.table.id(), &field) + { + let object = if !relation.is_other_side_one() { + match value { + Value::Object(mut object) => match object.remove("contains") { + Some(Value::Object(object)) => object, + _ => unreachable!("nested filters must be objects"), + }, + _ => unreachable!("nested filters must be objects"), + } + } else { + match value { + Value::Object(object) => object, + _ => unreachable!("nested filters must be objects"), + } + }; + + let mut conditions = Vec::new(); + + for (referenced, referencing) in relation.referenced_columns().zip(relation.referencing_columns()) { + let referencing = Column::from((referencing.table().database_name(), referencing.database_name())); + conditions.push(Expression::from(referenced.database_name().equals(referencing))); + } + + let nested = Self::new(self.context, relation.referenced_table(), object); + + for condition in nested { + match condition { + Ok(condition) => { + conditions.push(Expression::from(condition)); + } + Err(e) => return Some(Err(e)), + } + } + + let table = relation.referenced_table(); + + let mut select = Select::from_table((table.schema(), table.database_name())); + select.value(Expression::value(1.into_bound_value(0))); + select.so_that(ConditionTree::And(conditions)); + + return Some(Ok(ConditionTree::exists(select))); + } + + let operations = match value { + Value::Object(operations) => operations, + Value::Array(values) => { + let mut operations = Vec::with_capacity(values.len()); + + for operation in values.into_iter().filter_map(|operation| match operation { + Value::Object(obj) => Some(obj), + _ => None, + }) { + let nested = Self::new(self.context, self.table, operation); + + for operation in nested { + match operation { + Ok(operation) => { + operations.push(Expression::from(operation)); + } + Err(e) => return Some(Err(e)), + } + } + } + + let tree = match field.as_str() { + "ALL" => 
ConditionTree::And(operations), + "ANY" => ConditionTree::Or(operations), + "NONE" => ConditionTree::not(ConditionTree::Or(operations)), + _ => unreachable!(), + }; + + return Some(Ok(tree)); + } + _ => return None, + }; + + let column = self + .context + .database_definition + .find_column_for_client_field(&field, self.table.id()) + .expect("column for input field not found"); + + match generate_conditions(operations, column) { + Ok(conditions) => Some(Ok(conditions)), + Err(err) => Some(Err(err)), + } + } +} + +fn generate_conditions( + operations: Map, + column: TableColumnWalker<'_>, +) -> Result, SdkError> { + let mut compares = Vec::with_capacity(operations.len()); + + for (key, value) in operations { + let table_column = (column.table().database_name(), column.database_name()); + + let expression = |value| { + let db_value = DatabaseValue::from_json_input(value, column.database_type(), column.is_array())?; + + let expr = match column.enum_database_name() { + Some(enum_type) => Expression::enum_value(db_value, enum_type), + None => Expression::value(db_value), + }; + + Result::<_, SdkError>::Ok(expr) + }; + + let compare = match key.as_str() { + "eq" => { + if value.is_null() { + table_column.is_null() + } else { + table_column.equals(expression(value)?) + } + } + "ne" => { + if value.is_null() { + table_column.is_not_null() + } else { + table_column.not_equals(expression(value)?) 
+ } + } + "gt" => table_column.greater_than(expression(value)?), + "lt" => table_column.less_than(expression(value)?), + "gte" => table_column.greater_than_or_equals(expression(value)?), + "lte" => table_column.less_than_or_equals(expression(value)?), + "in" => table_column.any_selection(expression(value)?), + "nin" => table_column.not_all_selection(expression(value)?), + "contains" => table_column.array_contains(expression(value)?), + "contained" => table_column.array_contained(expression(value)?), + "overlaps" => table_column.array_overlaps(expression(value)?), + "not" => { + let operations = match value { + Value::Object(obj) => obj, + _ => unreachable!("non-object not filter"), + }; + + let condition = ConditionTree::not(generate_conditions(operations, column)?); + let expression = Expression::from(condition); + + compares.push(expression); + + continue; + } + "like" => table_column.like(expression(value)?), + _ => todo!(), + }; + + compares.push(Expression::from(compare)); + } + + Ok(ConditionTree::And(compares)) +} diff --git a/extensions/postgres/src/context/filter/simple.rs b/extensions/postgres/src/context/filter/simple.rs new file mode 100644 index 00000000..bc450b0f --- /dev/null +++ b/extensions/postgres/src/context/filter/simple.rs @@ -0,0 +1,115 @@ +use crate::context::Context; +use grafbase_database_definition::{TableColumnId, TableWalker}; +use grafbase_sdk::{SdkError, host_io::postgres::types::DatabaseValue}; +use indexmap::IndexSet; +use serde_json::Value; +use sql_ast::ast::{Column, Comparable, Compare, Expression}; +use std::{collections::VecDeque, iter::Iterator}; + +/// An iterator for a "simple" filter, e.g. a filter that's defined +/// as `by` argument from the client, and has at most one unique equality +/// check. 
+#[derive(Clone)] +pub struct UniqueFilterIterator<'a> { + context: Context<'a>, + table: TableWalker<'a>, + filter: VecDeque<(String, Value)>, + nested: Option>>, + constrained_columns: IndexSet, +} + +impl<'a> UniqueFilterIterator<'a> { + pub fn new( + context: Context<'a>, + table: TableWalker<'a>, + filter: impl IntoIterator, + ) -> Self { + Self { + context, + table, + filter: VecDeque::from_iter(filter), + nested: None, + constrained_columns: IndexSet::new(), + } + } + + fn push_constrained_column(&mut self, column_id: TableColumnId) { + self.constrained_columns.insert(column_id); + } +} + +impl<'a> Iterator for UniqueFilterIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + // We are having a nested input type, which we iterate over. + if let Some(item) = self.nested.as_mut().and_then(Iterator::next) { + return Some(item); + } + + let Some((field, value)) = self.filter.pop_front() else { + // solves the issue where user emits a value for a nullable composite unique. + return self + .constrained_columns + .pop() + .map(|column_id| { + let column = self.context.database_definition.walk(column_id); + (self.table.database_name(), column.database_name()).is_null() + }) + .map(Ok); + }; + + // If selecting an object, we don't care about the name of the object, but selecting the + // fields defined in the input. + // + // E.g. in `user(by: { nameEmail: { name: "foo", email: "bar" }})`, we do not care about `nameEmail`, + // but the nested values `name` and `email` are used in the query filters. 
+ if let Value::Object(map) = value { + let mut nested = UniqueFilterIterator::new(self.context, self.table, map); + + let constraint = self + .context + .database_definition + .find_unique_constraint_for_client_field(&field, self.table.id()) + .expect("constraint for input field not found"); + + for column in constraint.columns() { + nested.push_constrained_column(column.table_column().id()); + } + + let item = nested.next(); + self.nested = Some(Box::new(nested)); + + return item; + }; + + let column = self + .context + .database_definition + .find_column_for_client_field(&field, self.table.id()) + .expect("column for input field not found"); + + self.constrained_columns.shift_remove(&column.id()); + + match value { + Value::Null => Some(Ok(Column::new(column.database_name()) + .table(self.table.database_name()) + .is_null())), + _ => { + let value = DatabaseValue::from_json_input(value, column.database_type(), column.is_array()); + + let expression = match value { + Ok(value) => match column.enum_database_name() { + Some(name) => Expression::enum_value(value, name), + None => Expression::value(value), + }, + Err(e) => return Some(Err(e)), + }; + + let column = Column::new(column.database_name()).table(self.table.database_name()); + + Some(Ok(column.equals(expression))) + } + } + } +} diff --git a/extensions/postgres/src/context/selection_iterator.rs b/extensions/postgres/src/context/selection_iterator.rs new file mode 100644 index 00000000..ab706c68 --- /dev/null +++ b/extensions/postgres/src/context/selection_iterator.rs @@ -0,0 +1,266 @@ +pub mod collection_args; + +use collection_args::{CollectionArgs, CollectionParameters}; +use grafbase_database_definition::{DatabaseType, EnumWalker, RelationWalker, TableColumnWalker, TableWalker}; +use grafbase_sdk::{ + SdkError, + types::{Field, SelectionSet}, +}; +use sql_ast::ast::{self, Aliasable, Case, Column, Comparable, Expression, Select, jsonb_agg, raw_str}; +use std::{borrow::Cow, collections::HashMap}; + +use 
super::Context; + +pub struct SelectColumn<'a>(TableColumnWalker<'a>); + +impl<'a> SelectColumn<'a> { + pub fn into_expression(self, table_name: Option>) -> (TableColumnWalker<'a>, Expression<'a>) { + let table_name = match table_name { + Some(name) => name, + None => Cow::Borrowed(self.0.table().database_name()), + }; + + let sql_col = Column::new(self.0.database_name()).table(table_name); + + let r#enum = match self.0.database_type() { + DatabaseType::Scalar(scalar_type) => { + let expr = match scalar_type.from_db_to_client_cast() { + Some(cast) => Expression::from(ast::cast(sql_col, cast)), + None => Expression::from(sql_col), + }; + + return (self.0, expr); + } + DatabaseType::Enum(walker) => walker, + }; + + let col = ast::cast(sql_col.clone(), "text"); + + let builder = r#enum.variants().fold(Case::builder(), |builder, variant| { + let when = Expression::from(col.clone()).equals(raw_str(variant.database_name())); + let then = raw_str(variant.client_name()); + + builder.when(when, then) + }); + + let expr = builder.r#else(Expression::from(col)).into(); + + (self.0, expr) + } +} + +pub struct Unnest<'a>(TableColumnWalker<'a>, EnumWalker<'a>); + +impl<'a> Unnest<'a> { + pub fn into_select(self, table_name: Option>) -> (TableColumnWalker<'a>, Select<'a>) { + let unnest_col = Column::new(format!("unnest_{}", self.0.database_name())); + let unnest_col = ast::cast(unnest_col, "text"); + + let builder = self.1.variants().fold(Case::builder(), |acc, variant| { + let when = Expression::from(unnest_col.clone()).equals(raw_str(variant.database_name())); + let then = raw_str(variant.client_name()); + + acc.when(when, then) + }); + + let case = builder.r#else(Expression::from(unnest_col)); + let aggregate = jsonb_agg(Expression::from(case), None, false).alias("json_array"); + + let mut column = Column::new(self.0.database_name()); + + if let Some(table_name) = table_name { + column = column.table(table_name); + } + + let expr = 
Expression::from(ast::unnest(column)).alias(format!("unnest_{}", self.0.database_name())); + + let mut select = Select::from_table(expr); + select.value(aggregate); + + (self.0, select) + } +} + +pub enum TableSelection<'a> { + /// Selects a single column. + Column(SelectColumn<'a>), + /// Returns a selection which transforms an array of enum values into an array of + /// GraphQL enum values, renamed. + ColumnUnnest(Unnest<'a>), + /// Joins a unique row with a nested selection. + JoinUnique(RelationWalker<'a>, SelectionIterator<'a>), + /// Joins a collection of rows with a nested selection. + JoinMany(RelationWalker<'a>, SelectionIterator<'a>, CollectionArgs), +} + +/// An iterator over a GraphQL selection. Returns either a column or a +/// join, which should be handled accordingly when generating an SQL query. +#[derive(Clone)] +pub struct SelectionIterator<'a> { + ctx: Context<'a>, + selection: SelectionSet<'a>, + extra_columns: Vec>, + index: usize, + extra_column_index: usize, +} + +impl<'a> SelectionIterator<'a> { + pub fn new( + ctx: Context<'a>, + table: TableWalker<'a>, + field: Field<'a>, + selection: SelectionSet<'a>, + ) -> Result { + let mut extra_columns = Vec::new(); + + let selection_columns: HashMap<_, _> = selection + .fields() + .flat_map(|f| ctx.database_definition.column_for_field_definition(f.definition_id())) + .map(|c| (c.client_name(), c)) + .collect(); + + if let Ok(params) = field.arguments::(ctx.arguments) { + for order_input in ¶ms.order_by { + for field_name in order_input.field.keys() { + if selection_columns.contains_key(field_name.as_str()) { + continue; + } + + let column = ctx + .database_definition + .find_column_for_client_field(field_name, table.id()) + .ok_or_else(|| { + SdkError::from(format!( + "ordering type {} with non-existing field {}", + table.client_name(), + field_name + )) + })?; + + extra_columns.push(column); + } + } + }; + + for column in table.implicit_ordering_key().unwrap().columns() { + if 
selection_columns.contains_key(column.table_column().client_name()) { + continue; + } + + if extra_columns.contains(&column.table_column()) { + continue; + } + + extra_columns.push(column.table_column()); + } + + Ok(Self { + ctx, + selection, + extra_columns, + index: 0, + extra_column_index: 0, + }) + } +} + +impl<'a> Iterator for SelectionIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + let Some(selection_field) = self.selection.fields().nth(self.index) else { + let extra = self.extra_columns.get(self.extra_column_index); + self.extra_column_index += 1; + + return extra.map(|column| Ok(TableSelection::Column(SelectColumn(*column)))); + }; + + self.index += 1; + + // Selecting a column. + if let Some(column) = self + .ctx + .database_definition + .column_for_field_definition(selection_field.definition_id()) + { + match column.database_type() { + DatabaseType::Enum(r#enum) if column.is_array() => { + return Some(Ok(TableSelection::ColumnUnnest(Unnest(column, r#enum)))); + } + _ => { + return Some(Ok(TableSelection::Column(SelectColumn(column)))); + } + } + } + + // Joining a table with the current one, selecting from the joined table. + let relation = match self + .ctx + .database_definition + .get_relation_id_for_client_field_id(selection_field.definition_id()) + .map(|id| self.ctx.database_definition.walk(id)) + { + Some(relation) => relation, + None => { + return self.next(); + } + }; + + if relation.is_other_side_one() { + // The other side has a unique constraint, so our join must return at most one row. 
+ let selection_set = selection_field.selection_set(); + + let iterator = match Self::new(self.ctx, relation.referenced_table(), selection_field, selection_set) { + Ok(iterator) => iterator, + Err(err) => return Some(Err(err)), + }; + + Some(Ok(TableSelection::JoinUnique(relation, iterator))) + } else { + let params = selection_field + .arguments::(self.ctx.arguments) + .ok() + .unwrap_or_default(); + + // The other side has not a unique constraint that matches with the foreign key, + // meaning the resulting set is a collection. + + // `userCollection { edges { node { field } } }`, the selection part. + // + let selection_field = selection_field + .selection_set() + .fields() + .find(|f| { + self.ctx + .database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("edges") + }) + .unwrap() + .selection_set() + .fields() + .find(|f| { + self.ctx + .database_definition + .get_name_for_field_definition(f.definition_id()) + == Some("node") + }) + .unwrap(); + + let selection_set = selection_field.selection_set(); + + let iterator = match Self::new(self.ctx, relation.referenced_table(), selection_field, selection_set) { + Ok(iterator) => iterator, + Err(error) => return Some(Err(error)), + }; + + // By defining this, we mark the next select to return a collecton. 
+ let args = CollectionArgs::new(self.ctx.database_definition, relation.referenced_table(), params); + + match args { + Ok(args) => Some(Ok(TableSelection::JoinMany(relation, iterator, args))), + Err(error) => Some(Err(error)), + } + } + } +} diff --git a/extensions/postgres/src/context/selection_iterator/collection_args.rs b/extensions/postgres/src/context/selection_iterator/collection_args.rs new file mode 100644 index 00000000..3399c8d7 --- /dev/null +++ b/extensions/postgres/src/context/selection_iterator/collection_args.rs @@ -0,0 +1,239 @@ +use std::collections::BTreeMap; + +use grafbase_database_definition::{DatabaseDefinition, TableColumnWalker, TableWalker}; +use grafbase_sdk::{SdkError, host_io::postgres::types::DatabaseValue}; + +use sql_ast::ast::{Aliasable, Column, Comparable, ConditionTree, Expression, Order, OrderDefinition}; + +#[derive(Debug, Clone, Default)] +pub struct CollectionOrdering { + inner: Vec<((String, String), Option)>, + outer: Vec<(String, Option)>, +} + +impl CollectionOrdering { + pub fn inner(&self) -> impl ExactSizeIterator> + '_ { + self.inner + .iter() + .map(|((table, column), order)| (Column::from((table.clone(), column.clone())).into(), *order)) + } + + pub fn outer(&self) -> impl ExactSizeIterator> + '_ { + self.outer.iter().map(|(column, order)| { + let column = Column::from(column.clone()); + (column.into(), *order) + }) + } +} + +/// Argument defining a relay-style GraphQL collection. 
+#[derive(Debug, Clone)] +pub struct CollectionArgs { + first: Option, + last: Option, + order_by: CollectionOrdering, + extra_columns: Vec>, +} + +#[derive(Default, Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CollectionParameters { + pub first: Option, + pub last: Option, + #[serde(default)] + pub order_by: Vec, +} + +#[derive(Debug, Default, Clone, serde::Deserialize)] +pub struct OrderParameter { + #[serde(flatten)] + pub field: BTreeMap, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum OrderDirection { + Asc, + Desc, +} + +impl CollectionArgs { + pub(crate) fn new( + database_definition: &DatabaseDefinition, + table: TableWalker<'_>, + mut params: CollectionParameters, + ) -> Result { + if let (Some(_), Some(_)) = (params.first, params.last) { + return Err(SdkError::from("first and last parameters can't be both defined")); + } + + let constraint = table + .implicit_ordering_key() + .expect("tables at this point must have at least one unique constraint"); + + for column in constraint.columns() { + if params + .order_by + .iter() + .any(|v| v.field.contains_key(column.table_column().client_name())) + { + continue; + } + + params.order_by.push({ + let mut map = BTreeMap::new(); + map.insert(column.table_column().client_name().to_string(), OrderDirection::Asc); + + OrderParameter { field: map } + }); + } + + // ordering the innermost query + let mut order_by = CollectionOrdering::default(); + + // extra columns we have to select (based on ordering) + let mut extra_columns = Vec::new(); + + for mut value in params.order_by { + let Some((field, direction)) = value.field.pop_first() else { + continue; + }; + + // For `last` to work, we must reverse the order of the inner query. 
+ let inner_direction = match direction { + OrderDirection::Desc if params.last.is_some() => Order::AscNullsFirst, + OrderDirection::Desc => Order::DescNullsFirst, + _ if params.last.is_some() => Order::DescNullsFirst, + _ => Order::AscNullsFirst, + }; + + // and then reverse the order again for the outer query. + let outer_direction = match inner_direction { + Order::DescNullsFirst if params.last.is_some() => Order::AscNullsFirst, + Order::AscNullsFirst if params.last.is_some() => Order::DescNullsFirst, + _ => inner_direction, + }; + + let column = database_definition + .find_column_for_client_field(&field, table.id()) + .ok_or_else(|| { + SdkError::from(format!( + "ordering type {} with field{}, which does not exist", + table.client_name(), + &field + )) + })?; + + let sql_column = Column::from((table.database_name().to_string(), column.client_name().to_string())); + + // We must name our order columns for them to be visible in the order by statement of the + // outer queries. + let alias = format!("{}_{}", table.database_name(), column.database_name()); + extra_columns.push(sql_column.clone().alias(alias.clone())); + + order_by.inner.push(( + (table.database_name().to_string(), column.database_name().to_string()), + Some(inner_direction), + )); + + order_by.outer.push((alias, Some(outer_direction))); + } + + Ok(Self { + first: params.first, + last: params.last, + order_by, + extra_columns, + }) + } + + /// Select the first N items. An example GraphQL definition: `userCollection(first: N)`. + pub(crate) fn first(&self) -> Option { + self.first + } + + /// Select the last N items. An example GraphQL definition: `userCollection(last: N)`. + pub(crate) fn last(&self) -> Option { + self.last + } + + /// Defines the ordering of the collection. The first item in a tuple is the ordering for the innermost + /// query, and the second one of all the outer queries. An example GraphQL definition: + /// `userCollection(orderBy: [{ name: DESC }])`. 
+ pub(crate) fn order_by(&self) -> &CollectionOrdering { + &self.order_by + } + + /// A set of extra columns needing to select in the collecting query. Needed to handle the ordering of the outer + /// layers. + pub(crate) fn extra_columns(&self) -> impl ExactSizeIterator> + '_ { + self.extra_columns.clone().into_iter() + } +} + +// sigh, this is for pagination +fn _generate_filter( + table_column: TableColumnWalker<'_>, + fields: &[(&str, &serde_json::Value, OrderDirection)], +) -> Result>, SdkError> { + let mut filters: Vec> = Vec::new(); + let max_id = fields.len() - 1; + + for (i, (column, value, direction)) in fields.iter().enumerate() { + let column = Column::from((*column).to_string()); + + if i == max_id { + if value.is_null() { + if let OrderDirection::Asc = direction { + filters.push(column.is_not_null().into()); + } + } else { + let value = DatabaseValue::from_json_input( + (*value).clone(), + table_column.database_type(), + table_column.is_array(), + )?; + + let expression = match table_column.enum_database_name() { + Some(enum_name) => Expression::enum_value(value, enum_name), + None => Expression::value(value), + }; + + match direction { + OrderDirection::Asc => { + filters.push(column.greater_than(expression).into()); + } + OrderDirection::Desc => { + let tree = ConditionTree::Or(vec![ + column.clone().less_than(expression).into(), + column.is_null().into(), + ]); + + filters.push(tree.into()); + } + } + } + } else { + let value = DatabaseValue::from_json_input( + (*value).clone(), + table_column.database_type(), + table_column.is_array(), + )?; + + let expression = match table_column.enum_database_name() { + Some(enum_name) => Expression::enum_value(value, enum_name), + None => Expression::value(value), + }; + + filters.push(column.equals(expression).into()); + } + } + + if filters.is_empty() { + Ok(None) + } else if filters.len() == 1 { + Ok(Some(filters.pop().unwrap())) + } else { + Ok(Some(ConditionTree::And(filters).into())) + } +} diff --git 
a/extensions/postgres/src/context/update_input.rs b/extensions/postgres/src/context/update_input.rs new file mode 100644 index 00000000..566b1b85 --- /dev/null +++ b/extensions/postgres/src/context/update_input.rs @@ -0,0 +1,164 @@ +use grafbase_database_definition::{TableColumnWalker, TableWalker}; +use grafbase_sdk::{ + SdkError, + host_io::postgres::types::{DatabaseType as _, DatabaseValue}, +}; +use indexmap::IndexMap; +use serde_json::Value; +use sql_ast::ast::{Column, Expression, SqlOp}; +use std::{collections::VecDeque, fmt::Debug}; + +use super::Context; + +pub struct UpdateInputItem<'a> { + pub column: TableColumnWalker<'a>, + pub expression: Expression<'a>, +} + +impl Debug for UpdateInputItem<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("UpdateInputItem") + .field("column", &self.column.client_name()) + .field("expression", &self.expression) + .finish() + } +} + +pub struct UpdateInputIterator<'a> { + input: VecDeque<(TableColumnWalker<'a>, UpdateOperation)>, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(untagged)] +pub enum UpdateOperation { + /// Set the column to a specific value + Set { set: Value }, + + /// Increment the column by the specified value + Increment { increment: Value }, + + /// Decrement the column by the specified value + Decrement { decrement: Value }, + + /// Delete a key from a JSON object + DeleteKey { + #[serde(rename = "deleteKey")] + delete_key: String, + }, + + /// Multiply the column by the specified value + Multiply { multiply: Value }, + + /// Divide the column by the specified value + Divide { divide: Value }, + + /// Append a value to an array or concatenate with a string + Append { append: Value }, + + /// Prepend a value to an array or concatenate with a string + Prepend { prepend: Value }, + + /// Delete a key path from a JSON object + DeleteAtPath { + #[serde(rename = "deleteAtPath")] + delete_at_path: Vec, + }, +} + +#[derive(Debug, Clone, 
serde::Deserialize)] +pub struct UpdateInputParameters { + pub input: IndexMap, +} + +impl<'a> UpdateInputIterator<'a> { + pub fn new(ctx: &'a Context<'a>, table: TableWalker<'a>) -> Result { + let mut input = VecDeque::new(); + + for (ref field_name, op) in ctx.field.arguments::(ctx.arguments)?.input { + let Some(column) = ctx + .database_definition + .find_column_for_client_field(field_name, table.id()) + else { + return Err(SdkError::from(format!("field {field_name} not found"))); + }; + + input.push_back((column, op)); + } + + Ok(Self { + input: input.into_iter().collect(), + }) + } +} + +impl<'a> Iterator for UpdateInputIterator<'a> { + type Item = Result, SdkError>; + + fn next(&mut self) -> Option { + use UpdateOperation::*; + + let (column, value) = self.input.pop_front()?; + let sql_column = Column::from(column.database_name()); + + let as_value = |value: Value| { + let value = DatabaseValue::from_json_input(value, column.database_type(), column.is_array())?; + + super::rename_enum_variants(column, value) + }; + + let value_expression = |value: DatabaseValue| match column.enum_database_name() { + Some(enum_type) => Expression::enum_value(value, enum_type), + None => Expression::value(value), + }; + + let expression = match value { + Set { set } => match as_value(set) { + Ok(value) => value_expression(value), + Err(err) => return Some(Err(err)), + }, + Increment { increment } => match as_value(increment) { + Ok(value) => Expression::from(sql_column) + value_expression(value), + Err(err) => return Some(Err(err)), + }, + Decrement { decrement } => match as_value(decrement) { + Ok(value) => Expression::from(sql_column) - value_expression(value), + Err(err) => return Some(Err(err)), + }, + DeleteKey { delete_key } => { + Expression::from(sql_column) - Expression::value(delete_key.into_bound_value(0)) + } + UpdateOperation::Multiply { multiply } => match as_value(multiply) { + Ok(value) => Expression::from(sql_column) * value_expression(value), + Err(err) => 
return Some(Err(err)), + }, + UpdateOperation::Divide { divide } => match as_value(divide) { + Ok(value) => Expression::from(sql_column) / value_expression(value), + Err(err) => return Some(Err(err)), + }, + UpdateOperation::Append { append } => match as_value(append) { + Ok(value) => { + let op = SqlOp::Append(Expression::from(sql_column), value_expression(value)); + Expression::from(op) + } + Err(err) => return Some(Err(err)), + }, + UpdateOperation::Prepend { prepend } => match as_value(prepend) { + Ok(value) => { + let op = SqlOp::Append(value_expression(value), Expression::from(sql_column)); + Expression::from(op) + } + Err(err) => return Some(Err(err)), + }, + UpdateOperation::DeleteAtPath { delete_at_path } => { + let op = SqlOp::JsonDeleteAtPath( + Expression::from(sql_column), + Expression::value(delete_at_path.into_bound_value(0)), + ); + + Expression::from(op) + } + }; + + Some(Ok(UpdateInputItem { column, expression })) + } +} diff --git a/extensions/postgres/src/context/update_operation.rs b/extensions/postgres/src/context/update_operation.rs new file mode 100644 index 00000000..af8f1da3 --- /dev/null +++ b/extensions/postgres/src/context/update_operation.rs @@ -0,0 +1,84 @@ +use serde::Deserialize; +use serde_json::Value; + +/// Represents all possible update operations for a column in Postgres +/// +/// The operation comes in the format: `column_name: { operation: value }` +/// Example: `{ "name": { "set": "John" } }` +#[derive(Debug, Clone, Deserialize)] +#[serde(untagged)] +pub enum UpdateOperation { + /// Set the column to a specific value + Set { set: Value }, + + /// Increment the column by the specified value + Increment { increment: Value }, + + /// Decrement the column by the specified value + Decrement { decrement: Value }, + + /// Delete a key from a JSON object + DeleteKey { deleteKey: Value }, + + /// Multiply the column by the specified value + Multiply { multiply: Value }, + + /// Divide the column by the specified value + Divide { 
divide: Value }, + + /// Append a value to an array or concatenate with a string + Append { append: Value }, + + /// Prepend a value to an array or concatenate with a string + Prepend { prepend: Value }, + + /// Delete a key path from a JSON object + DeleteAtPath { deleteAtPath: Value }, +} + +impl UpdateOperation { + /// Returns the operation type as a string + pub fn operation_type(&self) -> &'static str { + match self { + Self::Set { .. } => "set", + Self::Increment { .. } => "increment", + Self::Decrement { .. } => "decrement", + Self::DeleteKey { .. } => "deleteKey", + Self::Multiply { .. } => "multiply", + Self::Divide { .. } => "divide", + Self::Append { .. } => "append", + Self::Prepend { .. } => "prepend", + Self::DeleteAtPath { .. } => "deleteAtPath", + } + } + + /// Extract the value from any variant + pub fn value(&self) -> &Value { + match self { + Self::Set { set } => set, + Self::Increment { increment } => increment, + Self::Decrement { decrement } => decrement, + Self::DeleteKey { deleteKey } => deleteKey, + Self::Multiply { multiply } => multiply, + Self::Divide { divide } => divide, + Self::Append { append } => append, + Self::Prepend { prepend } => prepend, + Self::DeleteAtPath { deleteAtPath } => deleteAtPath, + } + } + + /// Consumes the operation and returns the contained value + pub fn into_value(self) -> Value { + match self { + Self::Set { set } => set, + Self::Increment { increment } => increment, + Self::Decrement { decrement } => decrement, + Self::DeleteKey { deleteKey } => deleteKey, + Self::Multiply { multiply } => multiply, + Self::Divide { divide } => divide, + Self::Append { append } => append, + Self::Prepend { prepend } => prepend, + Self::DeleteAtPath { deleteAtPath } => deleteAtPath, + } + } +} \ No newline at end of file diff --git a/extensions/postgres/src/introspect.rs b/extensions/postgres/src/introspect.rs new file mode 100644 index 00000000..712983fd --- /dev/null +++ b/extensions/postgres/src/introspect.rs @@ -0,0 +1,89 
@@ +mod enums; +mod field_mapping; +mod foreign_keys; +mod keys; +mod schemas; +mod tables; + +use std::collections::HashMap; + +use grafbase_database_definition::{DatabaseDefinition, KeyType, ScalarKind}; +use grafbase_sdk::types::SubgraphSchema; + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgDatabase { + pub name: String, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgTable { + pub name: String, + pub schema: String, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgColumn { + pub name: String, + pub r#type: ScalarKind, + pub enum_schema: Option, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgEnum { + pub name: String, + pub schema: String, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgEnumVariant { + pub name: String, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgRelation { + pub name: String, + #[serde(default)] + pub fields: Vec, + #[serde(default)] + pub references: Vec, +} + +#[derive(Debug, Clone, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PgKey { + pub fields: Vec, + pub r#type: KeyType, +} + +pub fn from_sdl(subgraph_schemas: Vec>) -> HashMap { + let mut result = HashMap::new(); + + for schema in subgraph_schemas { + let Some(pg_database) = schema + .directives() + .find(|d| d.name() == "pgDatabase") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + let mut database_definition = DatabaseDefinition::new(pg_database.name); + + schemas::introspect_sdl(&schema, &mut database_definition); + enums::introspect_sdl(&schema, &mut database_definition); + tables::introspect_sdl(&schema, &mut database_definition); + foreign_keys::introspect_sdl(&schema, &mut database_definition); + keys::introspect_sdl(&schema, &mut 
database_definition); + field_mapping::introspect(&schema, &mut database_definition); + + result.insert(schema.name().to_string(), database_definition); + } + + result +} diff --git a/extensions/postgres/src/introspect/enums.rs b/extensions/postgres/src/introspect/enums.rs new file mode 100644 index 00000000..7f822fb9 --- /dev/null +++ b/extensions/postgres/src/introspect/enums.rs @@ -0,0 +1,43 @@ +use super::PgEnumVariant; +use crate::introspect::PgEnum; +use grafbase_database_definition::{DatabaseDefinition, Enum, EnumVariant}; +use grafbase_sdk::types::{SubgraphSchema, TypeDefinition}; + +pub(crate) fn introspect_sdl(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + for r#type in schema.type_definitions() { + let TypeDefinition::Enum(definition) = r#type else { + continue; + }; + + let Some(pg_enum) = definition + .directives() + .find(|directive| directive.name() == "pgEnum") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + let Some(schema_id) = database_definition.get_schema_id(&pg_enum.schema) else { + continue; + }; + + let r#enum = Enum::new(schema_id, pg_enum.name, Some(r#type.name().to_string())); + let enum_id = database_definition.push_enum(r#enum); + + for variant in definition.values() { + let Some(pg_enum_variant) = variant + .directives() + .find(|directive| directive.name() == "pgEnumVariant") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + database_definition.push_enum_variant(EnumVariant::new( + enum_id, + pg_enum_variant.name, + Some(variant.name().to_string()), + )); + } + } +} diff --git a/extensions/postgres/src/introspect/field_mapping.rs b/extensions/postgres/src/introspect/field_mapping.rs new file mode 100644 index 00000000..8666bcc3 --- /dev/null +++ b/extensions/postgres/src/introspect/field_mapping.rs @@ -0,0 +1,111 @@ +use grafbase_database_definition::{DatabaseDefinition, Operation}; +use grafbase_sdk::types::{ObjectDefinition, SubgraphSchema, TypeDefinition}; + 
+#[derive(serde::Deserialize)] +struct PgTypePointer<'a> { + r#type: &'a str, +} + +pub(super) fn introspect(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + for r#type in schema.type_definitions() { + let TypeDefinition::Object(object_definition) = r#type else { + continue; + }; + + for field in object_definition.fields() { + database_definition.push_client_definition_to_name(field.name(), field.id()); + + database_definition.push_field_definition_to_type_definition(field.id(), field.ty().definition().id()); + } + + if Some(r#type.id()) == schema.query().map(|q| q.id()) || Some(r#type.id()) == schema.mutation().map(|m| m.id()) + { + map_operations(schema, &object_definition, database_definition); + continue; + } + + if let Some(returning) = object_definition + .directives() + .find(|d| d.name() == "pgReturning" || d.name() == "pgConnection") + .and_then(|d| d.arguments::().ok()) + { + map_pointer_type(&object_definition, returning, database_definition); + } + } +} + +fn map_pointer_type( + object_definition: &ObjectDefinition<'_>, + returning: PgTypePointer<'_>, + database_definition: &mut DatabaseDefinition, +) { + let Some(table_id) = database_definition + .find_table_for_client_type(returning.r#type) + .map(|t| t.id()) + else { + return; + }; + + for field in object_definition.fields() { + let Some(column_id) = database_definition.get_table_column_id_for_field(table_id, field.name()) else { + continue; + }; + + database_definition.push_column_to_definition(field.id(), column_id); + } +} + +fn map_operations( + schema: &SubgraphSchema<'_>, + object_definition: &ObjectDefinition<'_>, + database_definition: &mut DatabaseDefinition, +) { + for field in object_definition.fields() { + let Some(TypeDefinition::Object(r#type)) = schema.type_definition(&field.ty().definition().id()) else { + continue; + }; + + let r#type = match r#type + .directives() + .find(|d| d.name() == "pgMutation" || d.name() == "pgConnection") + .and_then(|d| 
d.arguments::>().ok()) + { + Some(args) => args.r#type, + None => r#type.name(), + }; + + let Some(table_id) = database_definition.find_table_for_client_type(r#type).map(|t| t.id()) else { + continue; + }; + + for directive in field.directives() { + match directive.name() { + "pgSelectMany" => { + database_definition.push_operation(field.id(), Operation::FindMany(table_id)); + } + "pgSelectOne" => { + database_definition.push_operation(field.id(), Operation::FindOne(table_id)); + } + "pgInsertMany" => { + database_definition.push_operation(field.id(), Operation::CreateMany(table_id)); + } + "pgInsertOne" => { + database_definition.push_operation(field.id(), Operation::CreateOne(table_id)); + } + "pgUpdateMany" => { + database_definition.push_operation(field.id(), Operation::UpdateMany(table_id)); + } + "pgUpdateOne" => { + database_definition.push_operation(field.id(), Operation::UpdateOne(table_id)); + } + "pgDeleteMany" => { + database_definition.push_operation(field.id(), Operation::DeleteMany(table_id)); + } + "pgDeleteOne" => { + database_definition.push_operation(field.id(), Operation::DeleteOne(table_id)); + } + _ => {} + } + } + } +} diff --git a/extensions/postgres/src/introspect/foreign_keys.rs b/extensions/postgres/src/introspect/foreign_keys.rs new file mode 100644 index 00000000..c097fe0f --- /dev/null +++ b/extensions/postgres/src/introspect/foreign_keys.rs @@ -0,0 +1,133 @@ +use grafbase_database_definition::{DatabaseDefinition, ForeignKey, ForeignKeyColumn, RelationId}; +use grafbase_sdk::types::{SubgraphSchema, TypeDefinition}; + +use super::{PgRelation, PgTable}; + +pub(super) fn introspect_sdl(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + for r#type in schema.type_definitions() { + let TypeDefinition::Object(constrained_object) = r#type else { + continue; + }; + + let Some(pg_table) = constrained_object + .directives() + .find(|directive| directive.name() == "pgTable") + .and_then(|d| d.arguments::().ok()) + else 
{ + continue; + }; + + let Some(constrained_schema_id) = database_definition.get_schema_id(&pg_table.schema) else { + continue; + }; + + let Some(constrained_table_id) = database_definition.get_table_id(constrained_schema_id, &pg_table.name) else { + continue; + }; + + for constrained_field in constrained_object.fields() { + let TypeDefinition::Object(referenced_object) = constrained_field.ty().definition() else { + continue; + }; + + let referenced_object = if referenced_object.directives().any(|d| d.name() == "pgConnection") { + let Some(edges) = referenced_object.fields().find(|f| f.name() == "edges") else { + continue; + }; + + let TypeDefinition::Object(edges) = edges.ty().definition() else { + continue; + }; + + let Some(node) = edges.fields().find(|f| f.name() == "node") else { + continue; + }; + + let TypeDefinition::Object(node) = node.ty().definition() else { + continue; + }; + + node + } else { + referenced_object + }; + + let Some(referenced_table_id) = database_definition + .find_table_for_client_type(referenced_object.name()) + .map(|t| t.id()) + else { + continue; + }; + + if constrained_field + .directives() + .any(|directive| directive.name() == "pgColumn") + { + continue; + }; + + let Some(pg_relation) = constrained_field + .directives() + .find(|directive| directive.name() == "pgRelation") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + if pg_relation.fields.is_empty() || pg_relation.references.is_empty() { + continue; + } + + let referenced_field = referenced_object + .fields() + .find(|f| { + f.directives() + .any(|d| d.arguments::().map(|a| a.name).ok().as_ref() == Some(&pg_relation.name)) + }) + .expect("yolo"); + + let foreign_key = ForeignKey::new( + pg_relation.name, + constrained_schema_id, + constrained_table_id, + referenced_table_id, + ); + + let (fk_id, forward, back) = database_definition.push_foreign_key(foreign_key); + + let columns = pg_relation.fields.into_iter().zip(pg_relation.references.into_iter()); + + 
database_definition.push_client_id_relation_mapping(constrained_field.id(), RelationId::Forward(forward)); + database_definition.push_client_id_relation_mapping(referenced_field.id(), RelationId::Back(back)); + + database_definition.push_client_name_relation_mapping( + constrained_table_id, + constrained_field.name(), + RelationId::Forward(forward), + ); + + database_definition.push_client_name_relation_mapping( + referenced_table_id, + referenced_field.name(), + RelationId::Back(back), + ); + + for (constrained_field_name, referenced_field_name) in columns { + let Some(constrained_column_id) = + database_definition.get_table_column_id(constrained_table_id, &constrained_field_name) + else { + continue; + }; + + let Some(referenced_column_id) = + database_definition.get_table_column_id(referenced_table_id, &referenced_field_name) + else { + continue; + }; + + let fk_column = ForeignKeyColumn::new(fk_id, constrained_column_id, referenced_column_id); + database_definition.push_foreign_key_column(fk_column); + } + } + } +} diff --git a/extensions/postgres/src/introspect/keys.rs b/extensions/postgres/src/introspect/keys.rs new file mode 100644 index 00000000..9a30a202 --- /dev/null +++ b/extensions/postgres/src/introspect/keys.rs @@ -0,0 +1,62 @@ +use grafbase_database_definition::{DatabaseDefinition, Key, KeyColumn, KeyType}; +use grafbase_sdk::types::{SubgraphSchema, TypeDefinition}; +use inflector::Inflector; + +use super::{PgKey, PgTable}; + +pub(super) fn introspect_sdl(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + 'main: for r#type in schema.type_definitions() { + let TypeDefinition::Object(object) = r#type else { + continue; + }; + + let Some(pg_table) = object + .directives() + .find(|directive| directive.name() == "pgTable") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + for pg_key in object.directives().filter(|d| d.name() == "pgKey") { + let Some(pg_key) = pg_key.arguments::().ok() else { + continue; + }; 
+ + let Some(schema_id) = database_definition.get_schema_id(&pg_table.schema) else { + continue; + }; + + let Some(table_id) = database_definition.get_table_id(schema_id, &pg_table.name) else { + continue; + }; + + // TODO: revisit when everything's done. we might not need this. + let constraint_name = match pg_key.r#type { + KeyType::Primary => format!("pk_{}", object.name()), + KeyType::Unique => format!("unique_{}", object.name()), + }; + + let key = Key::new(table_id, constraint_name, pg_key.r#type); + let key_id = database_definition.push_key(key); + + for field_name in &pg_key.fields { + let Some(column_id) = database_definition + .find_column_for_client_field(field_name, table_id) + .map(|c| c.id()) + else { + continue 'main; + }; + + let key_column = KeyColumn::new(key_id, column_id); + database_definition.push_key_column(key_column); + database_definition.push_client_field_key_mapping(field_name, table_id, key_id); + } + + if pg_key.fields.len() > 1 { + let field_name = pg_key.fields.join("_").to_camel_case(); + database_definition.push_client_field_key_mapping(&field_name, table_id, key_id); + } + } + } +} diff --git a/extensions/postgres/src/introspect/schemas.rs b/extensions/postgres/src/introspect/schemas.rs new file mode 100644 index 00000000..56beba63 --- /dev/null +++ b/extensions/postgres/src/introspect/schemas.rs @@ -0,0 +1,43 @@ +use grafbase_database_definition::DatabaseDefinition; +use grafbase_sdk::types::{SubgraphSchema, TypeDefinition}; +use std::collections::BTreeSet; + +use super::{PgEnum, PgTable}; + +pub(crate) fn introspect_sdl(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + use TypeDefinition::*; + + let mut schemas = BTreeSet::new(); + + for r#type in schema.type_definitions() { + match r#type { + Object(definition) => { + let Some(pg_table) = definition + .directives() + .find(|directive| directive.name() == "pgTable") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + 
schemas.insert(pg_table.schema); + } + Enum(definition) => { + let Some(pg_enum) = definition + .directives() + .find(|directive| directive.name() == "pgEnum") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + schemas.insert(pg_enum.schema); + } + Scalar(_) | Interface(_) | Union(_) | InputObject(_) => continue, + } + } + + for schema in schemas { + database_definition.push_schema(schema); + } +} diff --git a/extensions/postgres/src/introspect/tables.rs b/extensions/postgres/src/introspect/tables.rs new file mode 100644 index 00000000..988e7d1d --- /dev/null +++ b/extensions/postgres/src/introspect/tables.rs @@ -0,0 +1,75 @@ +use grafbase_database_definition::{ + ColumnType, DatabaseDefinition, EnumType, ScalarKind, ScalarType, SchemaId, Table, TableColumn, +}; +use grafbase_sdk::types::{FieldDefinition, SubgraphSchema, TypeDefinition}; + +use super::{PgColumn, PgTable}; + +pub(crate) fn introspect_sdl(schema: &SubgraphSchema<'_>, database_definition: &mut DatabaseDefinition) { + for r#type in schema.type_definitions() { + let TypeDefinition::Object(definition) = r#type else { + continue; + }; + + let Some(pg_table) = definition + .directives() + .find(|directive| directive.name() == "pgTable") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + let Some(schema_id) = database_definition.get_schema_id(&pg_table.schema) else { + continue; + }; + + let table = Table::::new(schema_id, pg_table.name, Some(definition.name().to_string())); + let table_id = database_definition.push_table(table); + + for field in definition.fields() { + let Some(pg_column) = field + .directives() + .find(|directive| directive.name() == "pgColumn") + .and_then(|d| d.arguments::().ok()) + else { + continue; + }; + + let Some(column_type) = introspect_type(database_definition, schema_id, field, &pg_column) else { + continue; + }; + + let column = + TableColumn::::new(table_id, column_type, pg_column.name, Some(field.name().to_string())); + + 
database_definition.push_table_column(column, Some(field.id())); + } + } +} + +fn introspect_type( + database_definition: &DatabaseDefinition, + schema_id: SchemaId, + field: FieldDefinition<'_>, + pg_column: &PgColumn, +) -> Option { + match pg_column.r#type { + ScalarKind::Enum => { + let schema_id = match pg_column.enum_schema { + Some(ref schema) => database_definition.get_schema_id(schema)?, + None => schema_id, + }; + + let enum_name = field.ty().definition().name(); + let enum_id = database_definition.get_enum_id(schema_id, enum_name)?; + let is_array = field.ty().is_list(); + + Some(ColumnType::Enum(EnumType::new(enum_id, is_array))) + } + kind => { + let is_array = field.ty().is_list(); + + Some(ColumnType::Scalar(ScalarType::new(kind, is_array))) + } + } +} diff --git a/extensions/postgres/src/lib.rs b/extensions/postgres/src/lib.rs new file mode 100644 index 00000000..4fc5dbe1 --- /dev/null +++ b/extensions/postgres/src/lib.rs @@ -0,0 +1,121 @@ +mod config; +mod context; +mod introspect; +mod logger; +mod resolve; + +use std::{collections::HashMap, time::Duration}; + +use config::PostgresConfig; +use context::Context; +use grafbase_database_definition::DatabaseDefinition; +use grafbase_sdk::{ + SdkError, SelectionSetResolverExtension, + host_io::postgres, + types::{ArgumentValues, Configuration, Data, Error, Field, SubgraphHeaders, SubgraphSchema}, +}; + +#[derive(SelectionSetResolverExtension)] +struct PostgresExtension { + // from database name to pool + pools: HashMap, + // from subgraph name to definition + database_definitions: HashMap, +} + +impl SelectionSetResolverExtension for PostgresExtension { + fn new(subgraph_schemas: Vec>, config: Configuration) -> Result { + logger::init(); + + let mut pools = HashMap::new(); + let config: PostgresConfig = config.deserialize()?; + + for database in config.databases { + let pool = create_pool(&database)?; + pools.insert(database.name, pool); + } + + let database_definitions = 
introspect::from_sdl(subgraph_schemas); + + Ok(Self { + pools, + database_definitions, + }) + } + + fn prepare(&mut self, _: &str, field: Field<'_>) -> Result, Error> { + Ok(field.into_bytes()) + } + + fn resolve( + &mut self, + _: SubgraphHeaders, + subgraph_name: &str, + prepared: &[u8], + arguments: ArgumentValues<'_>, + ) -> Result { + let Some(database_definition) = self.database_definitions.get(subgraph_name) else { + return Err(Error::new(format!( + "Subgraph {subgraph_name} is not a Postgres subgraph" + ))); + }; + + let Some(pool) = self.pools.get(database_definition.name()) else { + return Err(Error::new(format!( + "Database {} is not configured", + database_definition.name() + ))); + }; + + let data = Field::with_bytes(prepared, |field| { + let Some(operation) = database_definition.get_operation(field.definition_id()) else { + return Err(SdkError::from("operation not found")); + }; + + let ctx = Context { + operation, + arguments, + database_definition, + pool, + field, + }; + + resolve::execute(ctx) + })??; + + Ok(data) + } +} + +fn create_pool(database: &config::DatabaseConfig) -> Result { + let pool = match database.pool { + Some(ref pool_config) => { + let mut pool_opts = postgres::PoolOptions::new(); + + if let Some(max_connections) = pool_config.max_connections { + pool_opts = pool_opts.max_connections(max_connections); + } + + if let Some(min_connections) = pool_config.min_connections { + pool_opts = pool_opts.min_connections(min_connections); + } + + if let Some(idle_timeout_ms) = pool_config.idle_timeout_ms { + pool_opts = pool_opts.idle_timeout(Duration::from_millis(idle_timeout_ms)); + } + + if let Some(acquire_timeout_ms) = pool_config.acquire_timeout_ms { + pool_opts = pool_opts.acquire_timeout(Duration::from_millis(acquire_timeout_ms)); + } + + if let Some(max_lifetime_ms) = pool_config.max_lifetime_ms { + pool_opts = pool_opts.max_lifetime(Duration::from_millis(max_lifetime_ms)); + } + + postgres::Pool::connect_with_options(&database.name, 
&database.url, pool_opts)? + } + None => postgres::Pool::connect(&database.name, &database.url)?, + }; + + Ok(pool) +} diff --git a/extensions/postgres/src/logger.rs b/extensions/postgres/src/logger.rs new file mode 100644 index 00000000..f8679d4e --- /dev/null +++ b/extensions/postgres/src/logger.rs @@ -0,0 +1,65 @@ +use std::{env, io::IsTerminal}; +use tracing_subscriber::{EnvFilter, Registry, prelude::*}; + +// Define the LogStyle enum +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum LogStyle { + Pretty, + Text, + Json, +} + +impl LogStyle { + fn from_env() -> Self { + match env::var("GRAFBASE_LOG_STYLE").ok().as_deref() { + Some("pretty") => LogStyle::Pretty, + Some("text") => LogStyle::Text, + Some("json") => LogStyle::Json, + _ => { + // Default logic + let log_level = env::var("GRAFBASE_LOG").unwrap_or_else(|_| "info".to_string()); + let is_terminal = std::io::stdout().is_terminal(); + if is_terminal && (log_level.contains("debug") || log_level.contains("trace")) { + LogStyle::Pretty + } else { + LogStyle::Text + } + } + } + } +} + +pub(super) fn init() { + let env_filter = EnvFilter::builder() + .with_env_var("GRAFBASE_LOG") + .try_from_env() + .unwrap_or_else(|_| EnvFilter::from("info")); // Default to "info" level + + let log_style = LogStyle::from_env(); + let is_terminal = std::io::stdout().is_terminal(); + let registry = Registry::default(); + + match log_style { + LogStyle::Pretty => registry + .with( + tracing_subscriber::fmt::layer() + .pretty() + .with_ansi(is_terminal) + .with_target(false), + ) + .with(env_filter) + .init(), + LogStyle::Text => registry + .with( + tracing_subscriber::fmt::layer() + .with_ansi(is_terminal) + .with_target(false), + ) + .with(env_filter) + .init(), + LogStyle::Json => registry + .with(tracing_subscriber::fmt::layer().json()) // Use JSON formatting + .with(env_filter) // Apply the filter + .init(), // Set this subscriber as the global default + }; +} diff --git a/extensions/postgres/src/resolve.rs 
b/extensions/postgres/src/resolve.rs new file mode 100644 index 00000000..cadc3299 --- /dev/null +++ b/extensions/postgres/src/resolve.rs @@ -0,0 +1,26 @@ +mod builder; +mod create_many; +mod create_one; +mod delete_many; +mod delete_one; +mod find_many; +mod find_one; +mod query; +mod update_many; +mod update_one; + +use grafbase_database_definition::Operation; +use grafbase_sdk::{SdkError, types::Data}; + +pub(super) fn execute(ctx: super::Context<'_>) -> Result { + match ctx.operation() { + Operation::FindOne(table_id) => find_one::execute(ctx, table_id), + Operation::FindMany(table_id) => find_many::execute(ctx, table_id), + Operation::DeleteOne(table_id) => delete_one::execute(ctx, table_id), + Operation::DeleteMany(table_id) => delete_many::execute(ctx, table_id), + Operation::CreateOne(table_id) => create_one::execute(ctx, table_id), + Operation::CreateMany(table_id) => create_many::execute(ctx, table_id), + Operation::UpdateOne(table_id) => update_one::execute(ctx, table_id), + Operation::UpdateMany(table_id) => update_many::execute(ctx, table_id), + } +} diff --git a/extensions/postgres/src/resolve/builder.rs b/extensions/postgres/src/resolve/builder.rs new file mode 100644 index 00000000..75d1d1b5 --- /dev/null +++ b/extensions/postgres/src/resolve/builder.rs @@ -0,0 +1,86 @@ +use std::borrow::Cow; + +use crate::context::{ + filter::FilterIterator, + selection_iterator::{SelectionIterator, collection_args::CollectionArgs}, +}; +use grafbase_database_definition::{RelationWalker, TableWalker}; + +/// A builder for building a PostgreSQL `SELECT` statement. +#[derive(Clone)] +pub struct SelectBuilder<'a> { + table: TableWalker<'a>, + selection: SelectionIterator<'a>, + filter: Option>, + collection_args: Option, + field_name: Cow<'static, str>, + relation: Option>, +} + +impl<'a> SelectBuilder<'a> { + /// Starting from the given table, select the fields in the iterator + /// and name the selection with `field_name`. 
+ pub fn new( + table: TableWalker<'a>, + selection: SelectionIterator<'a>, + field_name: impl Into>, + ) -> Self { + Self { + table, + selection, + filter: None, + collection_args: None, + field_name: field_name.into(), + relation: None, + } + } + + /// Adds a `WHERE` clause to the statement. + pub fn set_filter(&mut self, filter: FilterIterator<'a>) { + self.filter = Some(filter); + } + + /// If defining collection arguments to the query, it sets the + /// result to be an array of rows, and allows defining the relay + /// arguments with first/last/before/after and orderBy. + pub fn set_collection_args(&mut self, args: CollectionArgs) { + self.collection_args = Some(args); + } + + /// Marks the query as a selection for a relation. + pub fn set_relation(&mut self, relation: RelationWalker<'a>) { + self.relation = Some(relation); + } + + /// The name of the table we're selecting from. + pub fn table(&self) -> TableWalker<'a> { + self.table + } + + /// The selected fields from the user. + pub fn selection(&self) -> SelectionIterator<'a> { + self.selection.clone() + } + + /// How we name the result of this query. Set to `root` if generating the main query, + /// and to the name of the relation field if creating a select for a join. + pub fn field_name(&self) -> &str { + &self.field_name + } + + /// The arguments to define how multiple rows should be fetched. + pub fn collection_args(&self) -> Option<&CollectionArgs> { + self.collection_args.as_ref() + } + + /// The `WHERE` statement for this select. + pub fn filter(&self) -> Option> { + self.filter.clone() + } + + /// If selecting for a join, this should have the definition of the relation we're + /// currently on. 
+ pub fn relation(&self) -> Option> { + self.relation + } +} diff --git a/extensions/postgres/src/resolve/create_many.rs b/extensions/postgres/src/resolve/create_many.rs new file mode 100644 index 00000000..83e7d053 --- /dev/null +++ b/extensions/postgres/src/resolve/create_many.rs @@ -0,0 +1,47 @@ +use grafbase_database_definition::TableId; +use grafbase_sdk::{SdkError, types::Data}; +use sql_ast::renderer; + +use crate::context::Context; + +use super::query; + +pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { + let table = ctx.database_definition.walk(table_id); + let ast = query::insert::build(&ctx, table, ctx.create_many_input(table)?)?; + let query = renderer::postgres::render(ast); + + tracing::debug!("Executing query: {}", query); + + let connection = ctx.pool.acquire()?; + + if ctx.mutation_is_returning() { + let rows = query.fetch(&connection)?; + let mut result = Vec::with_capacity(rows.size_hint().0); + + for mut row in rows { + if let Some(col) = row.next() { + result.push(col?.as_json::()?); + } + } + + let row_count = result.len(); + + let data = serde_json::to_vec(&serde_json::json!({ + "returning": result, + "rowCount": row_count, + })) + .unwrap(); + + Ok(Data::Json(data)) + } else { + let row_count = query.execute(&connection)?; + + let data = serde_json::to_vec(&serde_json::json!({ + "rowCount": row_count, + })) + .unwrap(); + + Ok(Data::Json(data)) + } +} diff --git a/extensions/postgres/src/resolve/create_one.rs b/extensions/postgres/src/resolve/create_one.rs new file mode 100644 index 00000000..b0eb78e1 --- /dev/null +++ b/extensions/postgres/src/resolve/create_one.rs @@ -0,0 +1,49 @@ +use grafbase_database_definition::TableId; +use grafbase_sdk::{SdkError, types::Data}; +use sql_ast::renderer; + +use crate::context::Context; + +use super::query; + +pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { + let table = ctx.database_definition.walk(table_id); + let ast = query::insert::build(&ctx, table, 
[ctx.create_input(table)?])?; + let query = renderer::postgres::render(ast); + + tracing::debug!("Executing query: {}", query); + + let connection = ctx.pool.acquire()?; + + if ctx.mutation_is_returning() { + let mut rows = query.fetch(&connection)?; + + let mut row = match rows.next() { + Some(row) => row, + None => return Ok(Data::Json(serde_json::to_vec(&serde_json::Value::Null).unwrap())), + }; + + let result = match row.next() { + Some(Ok(col)) => col.as_json()?.unwrap_or(serde_json::Value::Null), + Some(Err(err)) => return Err(SdkError::from(format!("query error: {err}"))), + None => serde_json::Value::Null, + }; + + let row_count = if result.is_null() { 0 } else { 1 }; + + let data = serde_json::json!({ + "returning": result, + "rowCount": row_count, + }); + + Ok(Data::Json(serde_json::to_vec(&data).unwrap())) + } else { + let row_count = query.execute(&connection)?; + + let data = serde_json::json!({ + "rowCount": row_count, + }); + + Ok(Data::Json(serde_json::to_vec(&data).unwrap())) + } +} diff --git a/extensions/postgres/src/resolve/delete_many.rs b/extensions/postgres/src/resolve/delete_many.rs new file mode 100644 index 00000000..90b90ace --- /dev/null +++ b/extensions/postgres/src/resolve/delete_many.rs @@ -0,0 +1,46 @@ +use grafbase_database_definition::TableId; +use grafbase_sdk::{SdkError, types::Data}; +use sql_ast::renderer; + +use crate::context::Context; + +use super::query; + +pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { + let table = ctx.database_definition.walk(table_id); + let ast = query::delete::build(&ctx, ctx.filter(table)?, table)?; + let query = renderer::postgres::render(ast); + + tracing::debug!("Executing query: {}", query); + + let connection = ctx.pool.acquire()?; + + if ctx.mutation_is_returning() { + let rows = query.fetch(&connection)?; + let mut result = Vec::with_capacity(rows.size_hint().0); + + for mut row in rows { + if let Some(col) = row.next() { + result.push(col?.as_json::()?); + } + } + + 
let row_count = result.len(); + + let data = serde_json::to_vec(&serde_json::json!({ + "returning": result, + "rowCount": row_count, + })) + .unwrap(); + + Ok(Data::Json(data)) + } else { + let row_count = query.execute(&connection)?; + + let data = serde_json::json!({ + "rowCount": row_count, + }); + + Ok(Data::Json(serde_json::to_vec(&data).unwrap())) + } +} diff --git a/extensions/postgres/src/resolve/delete_one.rs b/extensions/postgres/src/resolve/delete_one.rs new file mode 100644 index 00000000..0fe7ec14 --- /dev/null +++ b/extensions/postgres/src/resolve/delete_one.rs @@ -0,0 +1,45 @@ +use super::query; +use crate::context::Context; +use grafbase_database_definition::TableId; +use grafbase_sdk::{SdkError, types::Data}; +use sql_ast::renderer; + +pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { + let table = ctx.database_definition.walk(table_id); + let ast = query::delete::build(&ctx, ctx.unique_filter(table)?, table)?; + let query = renderer::postgres::render(ast); + + tracing::debug!("Executing query: {}", query); + + let connection = ctx.pool.acquire()?; + + if ctx.mutation_is_returning() { + let mut rows = query.fetch(&connection)?; + + let result = match rows.next() { + Some(mut row) => match row.next() { + Some(Ok(col)) => col.as_json()?.unwrap_or(serde_json::Value::Null), + Some(Err(err)) => return Err(SdkError::from(format!("query error: {err}"))), + None => serde_json::Value::Null, + }, + None => serde_json::Value::Null, + }; + + let row_count = if result.is_null() { 0 } else { 1 }; + + let data = serde_json::json!({ + "returning": result, + "rowCount": row_count, + }); + + Ok(Data::Json(serde_json::to_vec(&data).unwrap())) + } else { + let row_count = query.execute(&connection)?; + + let data = serde_json::json!({ + "rowCount": row_count, + }); + + Ok(Data::Json(serde_json::to_vec(&data).unwrap())) + } +} diff --git a/extensions/postgres/src/resolve/find_many.rs b/extensions/postgres/src/resolve/find_many.rs new file mode 
100644
index 00000000..0402b8c0
--- /dev/null
+++ b/extensions/postgres/src/resolve/find_many.rs
@@ -0,0 +1,50 @@
+use grafbase_database_definition::TableId;
+use grafbase_sdk::{SdkError, types::Data};
+use sql_ast::renderer;
+
+use crate::context::{
+    Context,
+    selection_iterator::collection_args::{CollectionArgs, CollectionParameters},
+};
+
+use super::{builder::SelectBuilder, query};
+
+// Empty-result payload: a JSON `[]`, returned when the query yields no rows.
+fn empty() -> Data {
+    Data::Json(serde_json::to_vec(&serde_json::Value::Array(Vec::new())).unwrap())
+}
+
+// Resolves a findMany operation: builds a SELECT for `table_id` with relay-style
+// collection arguments, renders it to SQL, and returns the JSON the database
+// serialized for us (the whole result is one JSON column in one row).
+pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { // NOTE(review): return generics (`<Data, SdkError>`) appear garbled in transit — confirm against original
+    let table = ctx.database_definition.walk(table_id);
+    let mut builder = SelectBuilder::new(table, ctx.collection_selection(table)?, "root");
+    let collection_params = ctx.field.arguments::(ctx.arguments)?; // NOTE(review): turbofish type parameter looks lost here — presumably `::<CollectionParameters>`; verify
+
+    let args = CollectionArgs::new(ctx.database_definition, table, collection_params)?;
+    builder.set_collection_args(args);
+
+    if let Ok(filter) = ctx.filter(table) { // a failing/absent filter silently falls back to an unfiltered select
+        builder.set_filter(filter);
+    }
+
+    let ast = query::select::build(builder, false)?;
+    let query = renderer::postgres::render(ast);
+
+    tracing::debug!("Executing query: {}", query);
+
+    let connection = ctx.pool.acquire()?;
+    let mut rows = query.fetch(&connection)?;
+
+    // The generated query returns at most one row with one JSON column.
+    let mut row = match rows.next() {
+        Some(row) => row,
+        None => return Ok(empty()),
+    };
+
+    let col = match row.next() {
+        Some(Ok(col)) => col,
+        Some(Err(e)) => return Err(SdkError::from(format!("query error: {e}"))),
+        None => return Ok(empty()),
+    };
+
+    let data = col.into_bytes().map(Data::Json).unwrap_or_else(empty);
+
+    Ok(data)
+}
diff --git a/extensions/postgres/src/resolve/find_one.rs b/extensions/postgres/src/resolve/find_one.rs
new file mode 100644
index 00000000..b054bc61
--- /dev/null
+++ b/extensions/postgres/src/resolve/find_one.rs
@@ -0,0 +1,45 @@
+use grafbase_database_definition::TableId;
+use grafbase_sdk::{SdkError, types::Data};
+use sql_ast::renderer;
+
+use crate::{
+    context::Context,
+    resolve::{builder::SelectBuilder, query},
+};
+
+// Empty-result payload: JSON `null`, returned when no row matches the unique filter.
+fn null() -> Data {
+    Data::Json(serde_json::to_vec(&serde_json::Value::Null).unwrap())
+}
+
+// Resolves a findOne operation: a SELECT constrained by the unique filter,
+// with the database serializing the single result object to JSON.
+pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { // NOTE(review): return generics (`<Data, SdkError>`) appear garbled in transit — confirm against original
+    let table = ctx.database_definition.walk(table_id);
+
+    let mut builder = SelectBuilder::new(table, ctx.selection(table)?, "root");
+
+    if let Ok(filter) = ctx.unique_filter(table) { // a failing unique filter silently falls back to an unfiltered select — intentional? verify
+        builder.set_filter(filter);
+    }
+
+    let ast = query::select::build(builder, false)?;
+    let query = renderer::postgres::render(ast);
+
+    tracing::debug!("Executing query: {}", query);
+
+    let connection = ctx.pool.acquire()?;
+    let mut rows = query.fetch(&connection)?;
+
+    // At most one row with one JSON column is expected from the generated query.
+    let mut row = match rows.next() {
+        Some(row) => row,
+        None => return Ok(null()),
+    };
+
+    let col = match row.next() {
+        Some(Ok(col)) => col,
+        Some(Err(e)) => return Err(SdkError::from(format!("query error: {e}"))),
+        None => return Ok(null()),
+    };
+
+    let data = col.into_bytes().map(Data::Json).unwrap_or_else(null);
+
+    Ok(data)
+}
diff --git a/extensions/postgres/src/resolve/query.rs b/extensions/postgres/src/resolve/query.rs
new file mode 100644
index 00000000..dbba723f
--- /dev/null
+++ b/extensions/postgres/src/resolve/query.rs
@@ -0,0 +1,4 @@
+pub mod delete;
+pub mod insert;
+pub mod select;
+pub mod update;
diff --git a/extensions/postgres/src/resolve/query/delete.rs b/extensions/postgres/src/resolve/query/delete.rs
new file mode 100644
index 00000000..eddcdbf7
--- /dev/null
+++ b/extensions/postgres/src/resolve/query/delete.rs
@@ -0,0 +1,53 @@
+use grafbase_database_definition::TableWalker;
+use grafbase_sdk::SdkError;
+use sql_ast::ast::{Aliasable, ConditionTree, Delete, Expression, Table, json_build_object};
+
+use crate::context::{Context, filter::FilterIterator, selection_iterator::TableSelection};
+
+// Builds a DELETE statement for `table`, applying `filter` as the WHERE clause
+// and, when a returning selection is requested, a RETURNING clause serialized
+// as a JSON object aliased `root`.
+pub fn build<'a>(
+    ctx: &'a Context<'a>,
+    filter: FilterIterator<'a>,
+    table: TableWalker<'a>,
+) -> Result, SdkError> { // NOTE(review): return generics (likely `<Delete<'a>, SdkError>`) appear garbled in transit — confirm
+    let sql_table = Table::from((table.schema(), table.database_name())).alias(table.database_name());
+
+    let mut
query = Delete::from_table(sql_table); + let mut conditions = Vec::new(); + + for condition in filter { + conditions.push(Expression::from(condition?)); + } + + let condition = if conditions.is_empty() { + ConditionTree::NoCondition + } else { + ConditionTree::And(conditions) + }; + + query.so_that(condition); + + if let Some(selection) = ctx.returning_selection(table)? { + let mut returning = Vec::new(); + + for selection in selection { + match selection? { + TableSelection::Column(select) => { + let (column, expr) = select.into_expression(None); + returning.push((column.client_name(), expr)); + } + TableSelection::ColumnUnnest(unnest) => { + let (column, nested) = unnest.into_select(None); + returning.push((column.client_name(), Expression::from(nested))); + } + // our output type doesn't have relations, so this is never reachable + TableSelection::JoinMany(..) | TableSelection::JoinUnique(..) => { + unreachable!("we cannot join in a delete statement") + } + } + } + + query.returning([json_build_object(returning).alias("root")]); + } + + Ok(query) +} diff --git a/extensions/postgres/src/resolve/query/insert.rs b/extensions/postgres/src/resolve/query/insert.rs new file mode 100644 index 00000000..417c4309 --- /dev/null +++ b/extensions/postgres/src/resolve/query/insert.rs @@ -0,0 +1,121 @@ +use grafbase_database_definition::TableWalker; +use grafbase_sdk::SdkError; +use sql_ast::ast::{ + Aliasable, Column, CommonTableExpression, Expression, Insert, JoinData, MultiRowInsert, Query, Select, + SingleRowInsert, default_value, json_build_object, +}; + +use crate::context::{ + Context, + create_input::{CreateInputItem, CreateInputIterator}, + selection_iterator::TableSelection, +}; + +enum InsertType<'a> { + Single(SingleRowInsert<'a>), + Multi(MultiRowInsert<'a>), +} + +pub fn build<'a>( + ctx: &'a Context<'a>, + table: TableWalker<'a>, + input: impl IntoIterator>, +) -> Result, SdkError> { + let mut query = None; + + for input in input { + match query.take() { + None 
=> { + query = Some(InsertType::Single(create_insert(table, input)?)); + } + Some(InsertType::Single(previous_insert)) => { + let combined = previous_insert + .merge(create_insert(table, input)?) + .map_err(|error| SdkError::from(error.to_string()))?; + + query = Some(InsertType::Multi(combined)); + } + Some(InsertType::Multi(mut previous_insert)) => { + previous_insert + .extend(create_insert(table, input)?) + .map_err(|error| SdkError::from(error.to_string()))?; + } + } + } + + let insert_name = format!("{}_{}_insert", table.schema(), table.database_name()); + + let mut insert = match query.expect("we must have at least one input document") { + InsertType::Single(insert) => insert.build(), + InsertType::Multi(insert) => insert.build(), + }; + + if let Some(selection) = ctx.returning_selection(table)? { + let mut select = Select::from_table(insert_name.clone()); + let mut returning = Vec::new(); + let mut selected_data = Vec::new(); + + for selection in selection { + match selection? { + TableSelection::Column(select) => { + let (column, expr) = select.into_expression(Some(insert_name.clone().into())); + + selected_data.push((column.client_name(), expr)); + returning.push(column.database_name()); + } + TableSelection::ColumnUnnest(unnest) => { + let (column, nested) = unnest.into_select(Some(insert_name.clone().into())); + let alias = format!("transformed_{}", column.database_name()); + + selected_data.push(( + column.client_name(), + Column::new("json_array").table(alias.clone()).into(), + )); + + returning.push(column.database_name()); + + select.cross_join({ + let expr = Expression::from(nested).alias(alias); + + let mut join_data = JoinData::all_from(expr); + join_data.lateral(); + + join_data + }); + } + // we will not have relations in the first phase + TableSelection::JoinUnique(..) | TableSelection::JoinMany(..) 
=> { + todo!("we'll get back to this with nested inserts") + } + } + } + + insert.returning(returning); + select.value(json_build_object(selected_data).alias("root")); + select.with(CommonTableExpression::new(insert_name, insert)); + + Ok(Query::from(select)) + } else { + Ok(Query::from(insert)) + } +} + +fn create_insert<'a>(table: TableWalker<'a>, input: CreateInputIterator<'a>) -> Result, SdkError> { + let mut insert = Insert::single_into(table.database_name()); + + for input in input { + match input? { + CreateInputItem::Column(column, value) => { + let expr = match column.enum_database_name() { + Some(enum_type) => Expression::enum_value(value, enum_type), + None => Expression::value(value), + }; + + insert.value(column.database_name(), expr) + } + CreateInputItem::DefaultValue(column) => insert.value(column.database_name(), default_value()), + } + } + + Ok(insert) +} diff --git a/extensions/postgres/src/resolve/query/select.rs b/extensions/postgres/src/resolve/query/select.rs new file mode 100644 index 00000000..a133c667 --- /dev/null +++ b/extensions/postgres/src/resolve/query/select.rs @@ -0,0 +1,159 @@ +use grafbase_sdk::SdkError; +use sql_ast::ast::{ + Aliasable, Column, Comparable, ConditionTree, Expression, Joinable, Ordering, Select, Table, coalesce, + json_build_object, jsonb_agg, raw, raw_str, row_to_json, +}; + +use crate::{context::selection_iterator::TableSelection, resolve::builder::SelectBuilder}; + +/// Builds the outermost query of the selection. Gathers all the data from the nested +/// queries into a JSON array, which is serialized in the database. +/// +/// [example query](https://gist.github.com/pimeys/a7535acb0922fa432562539f5d8123c3) +pub fn build(builder: SelectBuilder<'_>, is_nested: bool) -> Result, SdkError> { + // The innermost query of the select. All filters, ordering, limits etc. are defined here. 
+ let sql_table = + Table::from((builder.table().schema(), builder.table().database_name())).alias(builder.table().database_name()); + + let mut inner_nested = Select::from_table(sql_table); + + if let Some(filters) = builder.filter() { + for filter in filters { + inner_nested.and_where(filter?); + } + } + + if let Some(args) = builder.collection_args() { + for ordering in args.order_by().inner() { + inner_nested.order_by(ordering.clone()); + } + + if let Some(limit) = args.first() { + inner_nested.limit(limit as u32); // we load one extra for pagination + } + + // There's no `LAST` in PostgreSQL, so we limit the inner selection which is ordered in an opposite way, + // and re-order it in the outer query. + if let Some(limit) = args.last() { + inner_nested.limit(limit as u32); // we load one extra for pagination + } + } + + if let Some(relation) = builder.relation() { + for (left, right) in relation.referencing_columns().zip(relation.referenced_columns()) { + let left_column = Column::from((left.table().client_name(), left.database_name())); + let right_column = Column::from((right.table().database_name(), right.database_name())); + + inner_nested.and_where(left_column.equals(right_column)); + } + } + + // The middle query of the selection. Collects nested data from joins, and combines it with the main + // query. Returns all rows as JSON objects. + let mut collecting_select = Select::from_table(Table::from(inner_nested).alias(builder.table().client_name())); + + for selection in builder.selection() { + match selection? 
{ + TableSelection::Column(select) => { + let (column, expr) = select.into_expression(Some(builder.table().client_name().into())); + collecting_select.value(expr.alias(column.client_name())); + } + TableSelection::ColumnUnnest(unnest) => { + let (column, nested) = unnest.into_select(None); + collecting_select.value(Expression::from(nested).alias(column.client_name())); + } + // m:1, 1:1 + TableSelection::JoinUnique(relation, selection) => { + let client_field_name = relation.client_field_name(); + collecting_select.column(client_field_name.clone()); + + let mut builder = SelectBuilder::new(relation.referenced_table(), selection, client_field_name.clone()); + builder.set_relation(relation); + + // recurse + let mut join_data = Table::from(build(builder, true)?) + .alias(client_field_name) + .on(ConditionTree::single(raw("true"))); + + join_data.lateral(); + collecting_select.left_join(join_data); + } + // 1:m + TableSelection::JoinMany(relation, selection, args) => { + let client_field_name = relation.client_field_name(); + collecting_select.column(client_field_name.clone()); + + let mut builder = SelectBuilder::new(relation.referenced_table(), selection, client_field_name.clone()); + builder.set_collection_args(args); + builder.set_relation(relation); + + // recurse + let mut join_data = Table::from(build(builder, true)?) 
+ .alias(client_field_name) + .on(ConditionTree::single(raw("true"))); + + join_data.lateral(); + collecting_select.left_join(join_data); + } + } + } + + let mut json_select = Select::from_table(Table::from(collecting_select).alias(builder.table().database_name())); + json_select.value(row_to_json(builder.table().database_name(), false).alias(builder.field_name().to_string())); + + if is_nested { + json_select.value(raw_str("todo").alias("cursor")); + json_select.value(raw_str("todo").alias("start_cursor")); + json_select.value(raw_str("todo").alias("end_cursor")); + } + + match builder.collection_args() { + Some(args) => { + for column in args.extra_columns() { + json_select.column(column); + } + + // SQL doesn't guarantee ordering if it's not defined in the query. + // we'll reuse the nested ordering here. + for ordering in args.order_by().outer() { + json_select.order_by(ordering); + } + + let mut json_aggregation = + Select::from_table(Table::from(json_select).alias(builder.table().database_name().to_string())); + + let column = Column::from((builder.table().database_name(), builder.field_name().to_string())); + + // SQL doesn't guarantee ordering if it's not defined in the query. + // we'll reuse the nested ordering here. 
use grafbase_database_definition::TableWalker;
use grafbase_sdk::SdkError;
use sql_ast::ast::{
    Aliasable, Column, CommonTableExpression, ConditionTree, Expression, JoinData, Query, Select, Update,
    json_build_object,
};

use crate::context::{Context, filter::FilterIterator, selection_iterator::TableSelection};

/// Builds the SQL AST for an `UPDATE` mutation against `table`.
///
/// Every filter in `filter` is folded into one `AND`-combined condition that
/// selects the rows to update, and the new column values come from the
/// context's update input.
///
/// If the client selected returning data, the update (with a `RETURNING` list)
/// is wrapped into a common table expression named `<schema>_<table>_update`,
/// and the returned query is a `SELECT` over that CTE rendering the selected
/// columns as a single JSON object aliased `root`. Otherwise the bare update
/// statement is returned.
pub fn build<'a>(
    ctx: &'a Context<'a>,
    table: TableWalker<'a>,
    mut filter: FilterIterator<'a>,
) -> Result<Query<'a>, SdkError> {
    let mut update = Update::table(table.database_name());

    // AND all filters together; the first filter error short-circuits the fold.
    let condition = filter.try_fold(ConditionTree::NoCondition, |acc, filter| {
        Result::<_, SdkError>::Ok(ConditionTree::and(acc, filter?))
    })?;

    update.so_that(condition);

    // SET <column> = <expression> for every input item of the mutation.
    for item in ctx.update_input(table)? {
        let item = item?;
        update.set(item.column.database_name(), item.expression);
    }

    if let Some(selection) = ctx.returning_selection(table)? {
        // Name of the CTE wrapping the update when returning data is selected.
        let update_name = format!("{}_{}_update", table.schema(), table.database_name());
        let mut select = Select::from_table(update_name.clone());

        // Columns listed in the update's RETURNING clause.
        let mut returning = Vec::new();
        // (name, expression) pairs rendered into the final `root` JSON object.
        let mut selected_data = Vec::new();

        for selection in selection {
            match selection? {
                TableSelection::Column(select) => {
                    let (column, expr) = select.into_expression(Some(update_name.clone().into()));

                    selected_data.push((column.database_name(), expr));
                    returning.push(column.database_name());
                }
                TableSelection::ColumnUnnest(unnest) => {
                    // NOTE(review): array columns appear to be projected through a
                    // lateral cross join over `unnest.into_select(..)`, with the
                    // transformed value read back via the `json_array` column of the
                    // aliased join — confirm against `into_select`'s output shape.
                    let (column, nested) = unnest.into_select(Some(update_name.clone().into()));
                    let alias = format!("transformed_{}", column.database_name());

                    selected_data.push((
                        column.client_name(),
                        Column::new("json_array").table(alias.clone()).into(),
                    ));

                    returning.push(column.database_name());

                    select.cross_join({
                        let expr = Expression::from(nested).alias(alias);

                        let mut join_data = JoinData::all_from(expr);
                        join_data.lateral();

                        join_data
                    });
                }
                // we will not have relations in the first phase
                TableSelection::JoinUnique(..) | TableSelection::JoinMany(..) => {
                    todo!("we'll get back to this with nested updates")
                }
            }
        }

        update.returning(returning);

        select.with(CommonTableExpression::new(update_name, update));
        select.value(json_build_object(selected_data).alias("root"));

        Ok(Query::from(select))
    } else {
        Ok(Query::from(update))
    }
}
+use crate::context::Context; + +use super::query; + +pub(crate) fn execute(ctx: Context<'_>, table_id: TableId) -> Result { + let table = ctx.database_definition.walk(table_id); + let ast = query::update::build(&ctx, table, ctx.unique_filter(table)?)?; + let query = renderer::postgres::render(ast); + + tracing::debug!("Executing query: {}", query); + + let connection = ctx.pool.acquire()?; + + if ctx.mutation_is_returning() { + let mut rows = query.fetch(&connection)?; + + let result = match rows.next() { + Some(mut row) => match row.next() { + Some(Ok(col)) => col.as_json()?.unwrap_or(serde_json::Value::Null), + Some(Err(err)) => return Err(SdkError::from(format!("query error: {err}"))), + None => serde_json::Value::Null, + }, + None => serde_json::Value::Null, + }; + + println!("{result}"); + + let row_count = if result.is_null() { 0 } else { 1 }; + + let data = serde_json::json!({ + "returning": result, + "rowCount": row_count, + }); + + let data = Data::Json(serde_json::to_vec(&data).unwrap()); + + Ok(data) + } else { + let row_count = query.execute(&connection)?; + + let data = serde_json::json!({ + "rowCount": row_count, + }); + + let data = Data::Json(serde_json::to_vec(&data).unwrap()); + + Ok(data) + } +} diff --git a/extensions/postgres/tests/create_many/mod.rs b/extensions/postgres/tests/create_many/mod.rs new file mode 100644 index 00000000..2ef05dd7 --- /dev/null +++ b/extensions/postgres/tests/create_many/mod.rs @@ -0,0 +1,307 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn two_identity_by_default() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userCreateMany(input: [{ id: 7 }, { id: 8 }]) { + returning { id } + rowCount + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreateMany": { + "returning": [ + { + "id": 7 + }, + { + "id": 8 + } + ], + "rowCount": 2 + } + } + } + "#); +} + +#[tokio::test] +async fn two_identity_always() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreateMany(input: [{}, {}]) { + returning { id } + rowCount + } + } + "}; + + println!("{mutation}"); + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreateMany": { + "returning": [ + { + "id": 1 + }, + { + "id": 2 + } + ], + "rowCount": 2 + } + } + } + "#); +} + +#[tokio::test] +async fn two_pk_ids() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreateMany(input: [{ id: 1 }, { id: 2 }]) { + returning { id } + rowCount + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreateMany": { + "returning": [ + { + "id": 1 + }, + { + "id": 2 + } + ], + "rowCount": 2 + } + } + } + "#); +} + +#[tokio::test] +async fn two_pk_ids_no_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
// Create-many inputs whose objects do not share an identical column set are
// rejected with an `EXTENSION_ERROR` ("All insert items must have the same
// columns.") instead of inserting partially-defaulted rows.
#[tokio::test]
async fn wrong_keys() {
    let api = PgTestApi::new("", |api| async move {
        let schema = indoc! {r#"
            CREATE TABLE "User" (
                id INT PRIMARY KEY,
                name VARCHAR(5) NULL
            )
        "#};

        api.execute_sql(schema).await;
    })
    .await;

    let runner = api.runner_spawn().await;

    // The first item omits `name`, the second provides it — a mismatch.
    let mutation = indoc! {r#"
        mutation {
          userCreateMany(input: [{ id: 1 }, { id: 2, name: "Musti" }]) {
            returning { id }
          }
        }
    "#};

    let response = runner
        .graphql_query::<serde_json::Value>(mutation)
        .send()
        .await
        .unwrap();

    insta::assert_json_snapshot!(response, @r#"
    {
      "data": null,
      "errors": [
        {
          "message": "All insert items must have the same columns.",
          "locations": [
            {
              "line": 2,
              "column": 3
            }
          ],
          "path": [
            "userCreateMany"
          ],
          "extensions": {
            "code": "EXTENSION_ERROR"
          }
        }
      ]
    }
    "#);
}
// A plain `INT PRIMARY KEY` (no identity/serial) requires the client to supply
// the id; the mutation echoes it back in `returning` together with `rowCount`.
#[tokio::test]
async fn pk_explicit_int() {
    let api = PgTestApi::new("", |api| async move {
        let schema = indoc! {r#"
            CREATE TABLE "User" (
                id INT PRIMARY KEY
            )
        "#};

        api.execute_sql(schema).await;
    })
    .await;

    let runner = api.runner_spawn().await;

    let mutation = indoc! {r"
        mutation {
          userCreate(input: { id: 1 }) {
            returning {
              id
            }
            rowCount
          }
        }
    "};

    let response = runner
        .graphql_query::<serde_json::Value>(mutation)
        .send()
        .await
        .unwrap();

    insta::assert_json_snapshot!(response, @r#"
    {
      "data": {
        "userCreate": {
          "returning": {
            "id": 1
          },
          "rowCount": 1
        }
      }
    }
    "#);
}
// A `SERIAL` primary key is database-generated, so the input object is empty
// and the first generated value (1 on a fresh table) comes back. The snake_case
// column `id_field` is exposed to GraphQL in camelCase as `idField`.
#[tokio::test]
async fn serial_id() {
    let api = PgTestApi::new("", |api| async move {
        let schema = indoc! {r#"
            CREATE TABLE "User" (
                id_field SERIAL PRIMARY KEY
            )
        "#};

        api.execute_sql(schema).await;
    })
    .await;

    let runner = api.runner_spawn().await;

    let mutation = indoc! {r"
        mutation {
          userCreate(input: {}) {
            returning { idField }
          }
        }
    "};

    let response = runner
        .graphql_query::<serde_json::Value>(mutation)
        .send()
        .await
        .unwrap();

    insta::assert_json_snapshot!(response, @r#"
    {
      "data": {
        "userCreate": {
          "returning": {
            "idField": 1
          }
        }
      }
    }
    "#);
}
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val CHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "Musti" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn char_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val CHAR(6)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["Musti", "Naukio"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "Musti ", + "Naukio" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn name() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val NAME NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "Musti" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn name_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val NAME[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["Musti", "Naukio"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "Musti", + "Naukio" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn text() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TEXT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "Musti" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn text_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TEXT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["Musti", "Naukio"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "Musti", + "Naukio" + ] + } + } + } + } + "#); +} + +// TODO: fix +#[tokio::test] +async fn xml() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val XML NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "" + } + } + } + } + "#); +} + +#[tokio::test] +async fn xml_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val XML[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["", ""] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "", + "" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn cidr() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val CIDR NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "0.0.0.0/0" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "0.0.0.0/0" + } + } + } + } + "#); +} + +#[tokio::test] +async fn cidr_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val CIDR[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["0.0.0.0/0", "192.168.0.0/32"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "0.0.0.0/0", + "192.168.0.0/32" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr8() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MACADDR8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + + userCreate(input: { val: "08:00:2b:01:02:03:04:05" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "08:00:2b:01:02:03:04:05" + } + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr8_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MACADDR8[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userCreate(input: { val: ["08:00:2b:01:02:03:04:05", "08002b:0102030405"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "08:00:2b:01:02:03:04:05", + "08:00:2b:01:02:03:04:05" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MACADDR NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "08:00:2b:01:02:03" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "08:00:2b:01:02:03" + } + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MACADDR[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["08:00:2b:01:02:03", "08:00:2b:01:02:04"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "08:00:2b:01:02:03", + "08:00:2b:01:02:04" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn bpchar() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BPCHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "Musti" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn bpchar_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BPCHAR(6)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["Musti", "Naukio"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "Musti ", + "Naukio" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn varchar() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val VARCHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "Musti" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn varchar_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val VARCHAR(6)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["Musti", "Naukio"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "Musti", + "Naukio" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn bit() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BIT(3) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "010" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "010" + } + } + } + } + "#); +} + +#[tokio::test] +async fn bit_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BIT(3)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["010", "101"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "010", + "101" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn varbit() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val VARBIT(3) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "010" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "010" + } + } + } + } + "#); +} + +#[tokio::test] +async fn varbit_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val VARBIT(3)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["010", "101"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "010", + "101" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn int2() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT2 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: 420 }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": 420 + } + } + } + } + "#); +} + +#[tokio::test] +async fn int2_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT2[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [1, 2] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + 1, + 2 + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn int4() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT4 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: 420 }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": 420 + } + } + } + } + "#); +} + +#[tokio::test] +async fn int4_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT4[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [1, 2] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + 1, + 2 + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn int8() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "420" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "420" + } + } + } + } + "#); +} + +#[tokio::test] +async fn int8_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INT8[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["1", "2"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "1", + "2" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn oid() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val OID NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "420" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "420" + } + } + } + } + "#); +} + +#[tokio::test] +async fn oid_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val OID[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["1", "2"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "1", + "2" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn json() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val JSON NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: { foo: 1 } }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": { + "foo": 1 + } + } + } + } + } + "#); +} + +#[tokio::test] +async fn json_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val JSON[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [{ foo: 1 }, { bar: 2 }] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + { + "foo": 1 + }, + { + "bar": 2 + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: { foo: 1 } }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": { + "foo": 1 + } + } + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val JSONB[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [{ foo: 1 }, { bar: 2 }] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + { + "foo": 1 + }, + { + "bar": 2 + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn money() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MONEY NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "1.23" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "$1.23" + } + } + } + } + "#); +} + +#[tokio::test] +async fn money_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val MONEY[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["1.23", "3.14"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "$1.23", + "$3.14" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn numeric() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val NUMERIC NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "1.23" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "1.23" + } + } + } + } + "#); +} + +#[tokio::test] +async fn numeric_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val NUMERIC[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["1.23", "3.14"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "1.23", + "3.14" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn float4() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val FLOAT4 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: 3.14 }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": 3.14 + } + } + } + } + "#); +} + +#[tokio::test] +async fn float4_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val FLOAT4[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [3.14, 1.23] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + 3.14, + 1.23 + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn float8() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val FLOAT8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: 3.14 }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": 3.14 + } + } + } + } + "#); +} + +#[tokio::test] +async fn float8_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val FLOAT8[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [3.14, 1.23] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + 3.14, + 1.23 + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn time() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIME NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "16:20:00" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "16:20:00" + } + } + } + } + "#); +} + +#[tokio::test] +async fn time_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIME[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["16:20:00", "04:20:00"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "16:20:00", + "04:20:00" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn timetz() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMETZ NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "16:20:00+00" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "16:20:00+00" + } + } + } + } + "#); +} + +#[tokio::test] +async fn timetz_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMETZ[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["16:20:00+00", "04:20:00Z"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "16:20:00+00", + "04:20:00+00" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn bool() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BOOL NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: true }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": true + } + } + } + } + "#); +} + +#[tokio::test] +async fn bool_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BOOL[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: [true, false] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + true, + false + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn bytea() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BYTEA NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "\\xdeadbeef" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "\\xdeadbeef" + } + } + } + } + "#); +} + +#[tokio::test] +async fn bytea_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val BYTEA[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userCreate(input: { val: ["\\xdeadbeef", "\\xdeadbeee"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "\\xdeadbeef", + "\\xdeadbeee" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn inet() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INET NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "192.168.0.1" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "192.168.0.1" + } + } + } + } + "#); +} + +#[tokio::test] +async fn inet_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val INET[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["192.168.0.1", "10.0.0.1"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "192.168.0.1", + "10.0.0.1" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn date() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val DATE NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "1999-01-08" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "1999-01-08" + } + } + } + } + "#); +} + +#[tokio::test] +async fn date_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val DATE[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["1999-01-08", "1999-01-09"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "1999-01-08", + "1999-01-09" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn timestamp() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMESTAMP NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userCreate(input: { val: "2004-10-19T10:23:54" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "2004-10-19T10:23:54" + } + } + } + } + "#); +} + +#[tokio::test] +async fn timestamp_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMESTAMP[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["2004-10-19T10:23:54", "2004-10-19T10:23:55"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "2004-10-19T10:23:54", + "2004-10-19T10:23:55" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn timestamptz() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMESTAMPTZ NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "2004-10-19T10:23:54+00:00" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "2004-10-19T10:23:54+00:00" + } + } + } + } + "#); +} + +#[tokio::test] +async fn timestamptz_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val TIMESTAMPTZ[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: ["2004-10-19T10:23:54+00:00", "2004-10-19T10:23:55+00:00"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "2004-10-19T10:23:54+00:00", + "2004-10-19T10:23:55+00:00" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn uuid() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val UUID NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userCreate(input: { val: "d89bd15d-ac64-4c71-895c-adba9c35a132" }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "d89bd15d-ac64-4c71-895c-adba9c35a132" + } + } + } + } + "#); +} + +#[tokio::test] +async fn uuid_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val UUID[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userCreate(input: { val: ["d89bd15d-ac64-4c71-895c-adba9c35a132", "d89bd15d-ac64-4c71-895c-adba9c35a133"] }) { + returning { val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "d89bd15d-ac64-4c71-895c-adba9c35a132", + "d89bd15d-ac64-4c71-895c-adba9c35a133" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn r#enum() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val street_light NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreate(input: { val: YELLOW }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": "YELLOW" + } + } + } + } + "#); +} + +#[tokio::test] +async fn enum_array() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val street_light[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userCreate(input: { val: [YELLOW, GREEN] }) { + returning { val } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreate": { + "returning": { + "val": [ + "YELLOW", + "GREEN" + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn multiple_enum_arrays() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val1 street_light[] NOT NULL, + val2 street_light[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userCreateMany(input: [ + { val1: [RED], val2: [YELLOW] }, + { val1: [GREEN], val2: [RED, YELLOW] } + ]) { + returning { val1 val2 } + } + } + "}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userCreateMany": { + "returning": [ + { + "val1": [ + "RED" + ], + "val2": [ + "YELLOW" + ] + }, + { + "val1": [ + "GREEN" + ], + "val2": [ + "RED", + "YELLOW" + ] + } + ] + } + } + } + "#); +} diff --git a/extensions/postgres/tests/delete_many/mod.rs b/extensions/postgres/tests/delete_many/mod.rs new file mode 100644 index 00000000..4dcc87e6 --- /dev/null +++ b/extensions/postgres/tests/delete_many/mod.rs @@ -0,0 +1,2007 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { eq: "Musti" } }) { + returning { id name } + rowCount + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti" + }, + { + "id": 3, + "name": "Musti" + } + ], + "rowCount": 2 + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn eq_not_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { eq: "Musti" } }) { + rowCount + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userDeleteMany": { + "rowCount": 2 + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn missing() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { eq: "Pertti" } }) { + returning { id name } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [], + "rowCount": 0 + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 2, + "name": "Naukio" + } + }, + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn eq_null() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, null), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userDeleteMany(filter: { name: { eq: null } }) { + returning { id name } + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": null + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn ne_null() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, null), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userDeleteMany(filter: { name: { ne: null } }) { + returning { id name } + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti" + }, + { + "id": 3, + "name": "Musti" + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": null + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn eq_two_fields() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Naukio', 11), (3, 'Musti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { eq: "Musti" }, age: { eq: 12 } }) { + returning { id name age } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 3, + "name": "Musti", + "age": 12 + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name age } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "age": 11 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn eq_rename() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name_game VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name_game) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { nameGame: { eq: "Musti" } }) { + returning { id nameGame } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "nameGame": "Musti" + }, + { + "id": 3, + "nameGame": "Musti" + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id nameGame } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "nameGame": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn ne() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { ne: "Musti" } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio" + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn gt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { gt: 1 } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio" + }, + { + "id": 3, + "name": "Musti" + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn lt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { lt: 3 } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti" + }, + { + "id": 2, + "name": "Naukio" + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn gte() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { gte: 2 } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio" + }, + { + "id": 3, + "name": "Musti" + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn lte() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { lte: 2 } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti" + }, + { + "id": 2, + "name": "Naukio" + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn r#in() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { in: [1, 3] } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti" + }, + { + "id": 3, + "name": "Musti" + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn nin() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { id: { nin: [1, 3] } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio" + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn all() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Naukio', 11), (3, 'Musti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { ALL: [ + { name: { eq: "Musti" } }, + { age: { eq: 11 } } + ]}) { + returning { id name age } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "name": "Musti", + "age": 11 + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name age } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio", + "age": 11 + } + }, + { + "node": { + "id": 3, + "name": "Musti", + "age": 12 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn any() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Naukio', 11), (3, 'Musti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { ANY: [ + { name: { eq: "Naukio" } }, + { age: { eq: 12 } } + ]}) { + returning { id name age } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio", + "age": 11 + }, + { + "id": 3, + "name": "Musti", + "age": 12 + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name age } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn none() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Naukio', 12), (3, 'Pentti', 13) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userDeleteMany(filter: { NONE: [ + { name: { eq: "Musti" } }, + { age: { eq: 13 } } + ]}) { + returning { id name age } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio", + "age": 12 + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { edges { node { id name age } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + }, + { + "node": { + "id": 3, + "name": "Pentti", + "age": 13 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn not() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio'), (3, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { name: { not: { eq: "Musti" } } }) { + returning { id name } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "name": "Naukio" + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { edges { node { id name } } } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 3, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { eq: [3, 4] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_ne() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { ne: [3, 4] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_gt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { gt: [1, 2] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_contains() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { contains: [1, 2, 2, 1] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_contained() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { contained: [3, 6, 4, 7] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + ] + } + } + } + "#); + + let query = indoc! {r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_overlaps() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDeleteMany(filter: { numbers: { overlaps: [1, 5, 5, 6] } }) { + returning { id numbers } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDeleteMany": { + "returning": [ + { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + ] + } + } + } + "#); + + let query = indoc! 
{r" + query { + users(first: 10) { + edges { node { id numbers } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} diff --git a/extensions/postgres/tests/delete_one/mod.rs b/extensions/postgres/tests/delete_one/mod.rs new file mode 100644 index 00000000..447d8371 --- /dev/null +++ b/extensions/postgres/tests/delete_one/mod.rs @@ -0,0 +1,458 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn single_pk() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userDelete(lookup: { id: 1 }) { + returning { id name } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "returning": { + "id": 1, + "name": "Musti" + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": null + } + } + "#); +} + +#[tokio::test] +async fn single_pk_not_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userDelete(lookup: { id: 1 }) { + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "rowCount": 1 + } + } + } + "#); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": null + } + } + "#); +} + +#[tokio::test] +async fn missing() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userDelete(lookup: { id: 3 }) { returning { id name } rowCount } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id name } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "returning": null, + "rowCount": 0 + } + } + } + "#); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn single_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL UNIQUE + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDelete(lookup: { name: "Musti" }) { returning { id name } } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + let query = indoc! {r#" + query { + user(lookup: { name: "Musti" }) { id name } + } + "#}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "returning": { + "id": 1, + "name": "Musti" + } + } + } + } + "#); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": null + } + } + "#); +} + +#[tokio::test] +async fn composite_pk() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + CONSTRAINT "User_pkey" PRIMARY KEY (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (name, email) VALUES ('Musti', 'meow@example.com'), ('Musti', 'purr@example.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDelete(lookup: { nameEmail: { name: "Musti", email: "purr@example.com" } }) { + returning { name email } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + let query = indoc! 
{r#" + query { + user(lookup: { nameEmail: { name: "Musti", email: "meow@example.com" } }) { name email } + } + "#}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "returning": { + "name": "Musti", + "email": "purr@example.com" + } + } + } + } + "#); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "name": "Musti", + "email": "meow@example.com" + } + } + } + "#); +} + +#[tokio::test] +async fn composite_key_with_nulls() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NULL, + CONSTRAINT "User_key" UNIQUE (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (name, email) VALUES ('Musti', 'meow@example.com'), ('Musti', NULL) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDelete(lookup: { nameEmail: { name: "Musti" } }) { + returning { name email } + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + let query = indoc! {r#" + query { + user(lookup: { nameEmail: { name: "Musti" }}) { name email } + } + "#}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userDelete": { + "returning": { + "name": "Musti", + "email": null + } + } + } + } + "#); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": null + } + } + "#); +} + +#[tokio::test] +async fn enum_array() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val street_light[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, ARRAY['red', 'green']::street_light[]) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userDelete(lookup: { id: 1 }) { + returning { id val } + } + } + "#}; + + let response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "userDelete": { + "returning": { + "id": 1, + "val": [ + "RED", + "GREEN" + ] + } + } + } + } + "#); +} diff --git a/extensions/postgres/tests/find_many/mod.rs b/extensions/postgres/tests/find_many/mod.rs new file mode 100644 index 00000000..0d2c8859 --- /dev/null +++ b/extensions/postgres/tests/find_many/mod.rs @@ -0,0 +1,2191 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn eq_pk() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, filter: { id: { eq: 1 } }) { + edges { node { id name } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn first() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 1) { + edges { node { id name } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn last() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(last: 1) { + edges { node { id name } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn order_by() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + users(first: 10, orderBy: [{ name: DESC }]) { + edges { node { id name } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + }, + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn order_by_without_selecting_id() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, orderBy: [{ name: DESC }]) { + edges { node { name } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "name": "Naukio" + } + }, + { + "node": { + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn eq_pk_rename() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id_field INT PRIMARY KEY, + name_field VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id_field, name_field) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + users(first: 10, filter: { idField: { eq: 1 } }) { + edges { node { idField nameField } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "idField": 1, + "nameField": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { name: { eq: "Musti" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_like() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { like: "%us%" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn bytea_eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val BYTEA NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '\xdeadbeef'::bytea), (2, '\xbeefdead'::bytea) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query Pg { + users(first: 10, filter: { val: { eq: "\\xdeadbeef" }}) { edges { node { id val }} } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "val": "\\xdeadbeef" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + users(first: 10, filter: { numbers: { eq: [3, 4] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_ne() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, filter: { numbers: { ne: [3, 4] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_gt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + users(first: 10, filter: { numbers: { gt: [1, 2] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_contains() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, filter: { numbers: { contains: [1, 2, 2, 1] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_contained() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + users(first: 10, filter: { numbers: { contained: [3, 6, 4, 7] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "numbers": [ + 3, + 4 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_overlaps() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + numbers INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, numbers) VALUES (1, '{1, 2}'), (2, '{3, 4}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, filter: { numbers: { overlaps: [1, 5, 5, 6] } }) { + edges { node { id numbers } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "numbers": [ + 1, + 2 + ] + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn two_field_eq() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Musti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { eq: "Musti" }, age: { eq: 11 } }) { + edges { node { id name age } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_ne() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { name: { ne: "Musti" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_gt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { gt: "Musti" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_lt() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { name: { lt: "Naukio" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_gte() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { gte: "Musti" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_lte() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { name: { lte: "Naukio" } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_in() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { in: ["Musti", "Naukio"] } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_nin() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { name: { nin: ["Musti", "Naukio"] } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [] + } + } + } + "#); +} + +#[tokio::test] +async fn inet_in() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name inet NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, '0.0.0.0'), (2, '127.0.0.1') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { in: ["0.0.0.0", "127.0.0.1"] } }) { + edges { node { id name } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "0.0.0.0" + } + }, + { + "node": { + "id": 2, + "name": "127.0.0.1" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn all() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 11), (2, 'Musti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { ALL: [ + { name: { eq: "Musti" } }, + { age: { eq: 11 } } + ]}) { + edges { node { id name age } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn any() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES (1, 'Musti', 12), (2, 'Naukio', 11) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { ANY: [ + { name: { eq: "Musti" } }, + { age: { eq: 11 } } + ]}) { + edges { node { id name age } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 12 + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "age": 11 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn none() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES + (1, 'Musti', 11), + (2, 'Naukio', 12), + (3, 'Pentti', 13) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + users(first: 10, filter: { NONE: [ + { name: { eq: "Musti" } }, + { age: { eq: 13 } } + ]}) { + edges { node { id name age } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio", + "age": 12 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn not() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES + (1, 'Musti', 11), + (2, 'Naukio', 12), + (3, 'Pentti', 13) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { name: { not: { eq: "Pentti" } } }) { + edges { node { id name age } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 11 + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "age": 12 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meowmeowmeow'), + (2, 2, 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10) { + edges { + node { + id + name + profile { description } + } + } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "profile": { + "description": "meowmeowmeow" + } + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "profile": { + "description": "purrpurrpurr" + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_relation_filter() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meowmeowmeow'), + (2, 2, 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + users(first: 10, filter: { profile: { description: { eq: "purrpurrpurr" } } }) { + edges { + node { + id + name + profile { description } + } + } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio", + "profile": { + "description": "purrpurrpurr" + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_child_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?'), + (4, 2, 'Purr purr!') + "#}; + + api.execute_sql(insert_blogs).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + blogs(first: 10) { + edges { + node { + id + title + user { id name } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!", + "user": { + "id": 1, + "name": "Musti" + } + } + }, + { + "node": { + "id": 2, + "title": "Sayonara...", + "user": { + "id": 1, + "name": "Musti" + } + } + }, + { + "node": { + "id": 3, + "title": "Meow meow?", + "user": { + "id": 2, + "name": "Naukio" + } + } + }, + { + "node": { + "id": 4, + "title": "Purr purr!", + "user": { + "id": 2, + "name": "Naukio" + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_relation_filter_child_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_blogs).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + blogs(first: 10, filter: { user: { id: { eq: 1 } } }) { + edges { + node { + id + title + user { id name } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!", + "user": { + "id": 1, + "name": "Musti" + } + } + }, + { + "node": { + "id": 2, + "title": "Sayonara...", + "user": { + "id": 1, + "name": "Musti" + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?'), + (4, 2, 'Purr purr!') + "#}; + + api.execute_sql(insert_blogs).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10) { + edges { + node { + id + name + blogs(first: 10) { edges { node { id title } } } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" 
+ } + }, + { + "node": { + "id": 2, + "title": "Sayonara..." + } + } + ] + } + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "blogs": { + "edges": [ + { + "node": { + "id": 3, + "title": "Meow meow?" + } + }, + { + "node": { + "id": 4, + "title": "Purr purr!" + } + } + ] + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_relation_filter_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_blogs).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + users(first: 10, filter: { blogs: { contains: { id: { eq: 1 } } } }) { + edges { + node { + id + name + blogs(first: 10) { edges { node { id title } } } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + }, + { + "node": { + "id": 2, + "title": "Sayonara..." 
+ } + } + ] + } + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn first_as_parameter() { + use serde_json::json; + + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query Pg($first: Int) { + users(first: $first) { + edges { node { id name } } + } + } + "}; + + let variables = json!({ + "first": 1 + }); + + let response = runner + .graphql_query::(query) + .with_variables(variables) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn last_as_parameter() { + use serde_json::json; + + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query Pg($last: Int) { + users(last: $last) { + edges { node { id name } } + } + } + "}; + + let variables = json!({ + "last": 1 + }); + + let response = runner + .graphql_query::(query) + .with_variables(variables) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + "#); +} diff --git a/extensions/postgres/tests/find_one/joins.rs b/extensions/postgres/tests/find_one/joins.rs new file mode 100644 index 00000000..6ddeb583 --- /dev/null +++ b/extensions/postgres/tests/find_one/joins.rs @@ -0,0 +1,1645 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn one_to_one_join_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meowmeowmeow'), + (2, 2, 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 2 }) { + id + name + profile { description } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "name": "Naukio", + "profile": { + "description": "purrpurrpurr" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_join_parent_side_null() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 2 }) { + id + name + profile { description } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "name": "Naukio", + "profile": null + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_join_between_schemas() { + let api = PgTestApi::new("", |api| async move { + let private_schema = indoc! {r#" + CREATE SCHEMA "private"; + "#}; + + api.execute_sql(private_schema).await; + + let public_table = indoc! {r#" + CREATE TABLE "public"."User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(public_table).await; + + let private_table = indoc! 
{r#" + CREATE TABLE "private"."Secret" ( + id INT PRIMARY KEY, + secret_name VARCHAR(255) NOT NULL, + user_id INT NULL UNIQUE, + CONSTRAINT User_User_fkey FOREIGN KEY (user_id) REFERENCES "public"."User" (id) + ); + "#}; + + api.execute_sql(private_table).await; + + let insert_public = indoc! {r#" + INSERT INTO "public"."User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_public).await; + + let insert_private = indoc! {r#" + INSERT INTO "private"."Secret" (id, user_id, secret_name) VALUES + (1, 1, 'Naukio'), + (2, 2, 'Musti') + "#}; + + api.execute_sql(insert_private).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + name + secret { secretName } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Musti", + "secret": { + "secretName": "Naukio" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_join_between_schemas_using_duplicate_table_names() { + let api = PgTestApi::new("", |api| async move { + let private_schema = indoc! {r#" + CREATE SCHEMA "private"; + "#}; + + api.execute_sql(private_schema).await; + + let public_table = indoc! {r#" + CREATE TABLE "public"."User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(public_table).await; + + let private_table = indoc! {r#" + CREATE TABLE "private"."User" ( + id INT PRIMARY KEY, + secret_name VARCHAR(255) NOT NULL, + user_id INT NULL UNIQUE, + CONSTRAINT User_User_fkey FOREIGN KEY (user_id) REFERENCES "public"."User" (id) + ); + "#}; + + api.execute_sql(private_table).await; + + let insert_public = indoc! {r#" + INSERT INTO "public"."User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_public).await; + + let insert_private = indoc! 
{r#" + INSERT INTO "private"."User" (id, user_id, secret_name) VALUES + (1, 1, 'Naukio'), + (2, 2, 'Musti') + "#}; + + api.execute_sql(insert_private).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + publicUser(lookup: { id: 1 }) { + id + name + privateUser { secretName } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "publicUser": { + "id": 1, + "name": "Musti", + "privateUser": { + "secretName": "Naukio" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_between_schemas() { + let api = PgTestApi::new("", |api| async move { + let private_schema = indoc! {r#" + CREATE SCHEMA "private"; + "#}; + + api.execute_sql(private_schema).await; + + let public_table = indoc! {r#" + CREATE TABLE "public"."User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(public_table).await; + + let private_table = indoc! {r#" + CREATE TABLE "private"."User" ( + id INT PRIMARY KEY, + secret_name VARCHAR(255) NOT NULL, + user_id INT NULL, + CONSTRAINT User_User_fkey FOREIGN KEY (user_id) REFERENCES "public"."User" (id) + ); + "#}; + + api.execute_sql(private_table).await; + + let insert_public = indoc! {r#" + INSERT INTO "public"."User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_public).await; + + let insert_private = indoc! {r#" + INSERT INTO "private"."User" (id, user_id, secret_name) VALUES + (1, 1, 'Naukio'), + (2, 1, 'Musti'), + (3, 2, 'Pertti'), + (4, 2, 'Matti') + "#}; + + api.execute_sql(insert_private).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + publicUser(lookup: { id: 1 }) { + id + name + privateUsers(first: 1000) { edges { node { secretName } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "publicUser": { + "id": 1, + "name": "Musti", + "privateUsers": { + "edges": [ + { + "node": { + "secretName": "Naukio" + } + }, + { + "node": { + "secretName": "Musti" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_join_parent_side_compound_fk() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + CONSTRAINT User_name_email_pk PRIMARY KEY (name, email) + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + user_name VARCHAR(255) NULL, + user_email VARCHAR(255) NULL, + description TEXT NOT NULL, + CONSTRAINT Profile_name_email_key UNIQUE (user_name, user_email), + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_name, user_email) REFERENCES "User" (name, email) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (name, email) VALUES + ('Musti', 'meow1@hotmail.com'), + ('Musti', 'meow2@hotmail.com') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (user_name, user_email, description) VALUES + ('Musti', 'meow1@hotmail.com', 'meowmeowmeow'), + ('Musti', 'meow2@hotmail.com', 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + user(lookup: { nameEmail: { name: "Musti", email: "meow2@hotmail.com" } }) { + name + email + profile { description } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "email": "meow2@hotmail.com", + "profile": { + "description": "purrpurrpurr" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_join_child_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meowmeowmeow'), + (2, 2, 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + profile(lookup: { id: 2 }) { + description + user { id name } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "profile": { + "description": "purrpurrpurr", + "user": { + "id": 2, + "name": "Naukio" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_to_one_join() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let extra_table = indoc! {r#" + CREATE TABLE "Extra" ( + id INT PRIMARY KEY, + profile_id INT NULL UNIQUE, + number int NOT NULL, + CONSTRAINT Extra_Profile_fkey FOREIGN KEY (profile_id) REFERENCES "Profile" (id) + ) + "#}; + + api.execute_sql(extra_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meowmeowmeow'), + (2, 2, 'purrpurrpurr') + "#}; + + api.execute_sql(insert_profiles).await; + + let insert_extras = indoc! {r#" + INSERT INTO "Extra" (id, profile_id, number) VALUES + (1, 1, 420), + (2, 2, 666) + "#}; + + api.execute_sql(insert_extras).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 2 }) { + id + name + profile { description extra { number } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "name": "Naukio", + "profile": { + "description": "purrpurrpurr", + "extra": { + "number": 666 + } + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_child_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! 
{r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + blog(lookup: { id: 2 }) { + title + user { id name } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "blog": { + "title": "Sayonara...", + "user": { + "id": 1, + "name": "Musti" + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 10000) { edges { node { id title } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + }, + { + "node": { + "id": 2, + "title": "Sayonara..." + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn nested_one_to_many_joins_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let post_table = indoc! {r#" + CREATE TABLE "Post" ( + id INT PRIMARY KEY, + blog_id INT NOT NULL, + content TEXT NOT NULL, + CONSTRAINT Post_Blog_fkey FOREIGN KEY (blog_id) REFERENCES "Blog" (id) + ) + "#}; + + api.execute_sql(post_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_blogs).await; + + let insert_posts = indoc! {r#" + INSERT INTO "Post" (id, blog_id, content) VALUES + (1, 1, 'meowmeow'), + (2, 2, 'uwuwuwuwu'), + (3, 3, 'Meow meow?') + "#}; + + api.execute_sql(insert_posts).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 1000) { + edges { + node { + id + title + posts(first: 1000) { + edges { + node { + id + content + } + } + } + } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!", + "posts": { + "edges": [ + { + "node": { + "id": 1, + "content": "meowmeow" + } + } + ] + } + } + }, + { + "node": { + "id": 2, + "title": "Sayonara...", + "posts": { + "edges": [ + { + "node": { + "id": 2, + "content": "uwuwuwuwu" + } + } + ] + } + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side_with_first() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 1) { edges { node { id title } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side_with_last() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + name + blogs(last: 1) { edges { node { id title } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 2, + "title": "Sayonara..." + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side_with_single_column_descending_order() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 10, orderBy: [{ id: DESC }]) { edges { node { id title } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 2, + "title": "Sayonara..." + } + }, + { + "node": { + "id": 1, + "title": "Hello, world!" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side_with_compound_column_ordering_with_last() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + description VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, description, title) VALUES + (1, 1, 'a', 'a'), + (2, 1, 'a', 'b'), + (3, 1, 'b', 'c') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + name + blogs(last: 2, orderBy: [{ description: DESC }, { title: DESC }]) { + edges { + node { + id + description + title + } + } + } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 2, + "description": "a", + "title": "b" + } + }, + { + "node": { + "id": 1, + "description": "a", + "title": "a" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_many_join_parent_side_with_single_column_descending_order_with_last() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_profiles = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + name + blogs(last: 1, orderBy: [{ id: DESC }]) { edges { node { id title } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn two_one_to_many_joins_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let cat_table = indoc! {r#" + CREATE TABLE "Cat" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + name VARCHAR(255) NOT NULL, + CONSTRAINT Cat_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(cat_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_blogs).await; + + let insert_cats = indoc! {r#" + INSERT INTO "Cat" (id, user_id, name) VALUES + (1, 1, 'Musti'), + (2, 1, 'Naukio'), + (3, 2, 'Pertti') + "#}; + + api.execute_sql(insert_cats).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 1000) { edges { node { id title } } } + cats(first: 100) { edges { node { id name } } } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + }, + { + "node": { + "id": 2, + "title": "Sayonara..." + } + } + ] + }, + "cats": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti" + } + }, + { + "node": { + "id": 2, + "name": "Naukio" + } + } + ] + } + } + } + } + "#); +} + +#[tokio::test] +async fn one_to_one_with_one_to_many_joins_parent_side() { + let api = PgTestApi::new("", |api| async move { + let user_table = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(user_table).await; + + let blog_table = indoc! {r#" + CREATE TABLE "Blog" ( + id INT PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(255) NOT NULL, + CONSTRAINT Blog_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(blog_table).await; + + let profile_table = indoc! {r#" + CREATE TABLE "Profile" ( + id INT PRIMARY KEY, + user_id INT NULL UNIQUE, + description TEXT NOT NULL, + CONSTRAINT Profile_User_fkey FOREIGN KEY (user_id) REFERENCES "User" (id) + ) + "#}; + + api.execute_sql(profile_table).await; + + let insert_users = indoc! {r#" + INSERT INTO "User" (id, name) VALUES + (1, 'Musti'), + (2, 'Naukio') + "#}; + + api.execute_sql(insert_users).await; + + let insert_blogs = indoc! {r#" + INSERT INTO "Blog" (id, user_id, title) VALUES + (1, 1, 'Hello, world!'), + (2, 1, 'Sayonara...'), + (3, 2, 'Meow meow?') + "#}; + + api.execute_sql(insert_blogs).await; + + let insert_profiles = indoc! 
{r#" + INSERT INTO "Profile" (id, user_id, description) VALUES + (1, 1, 'meow'), + (2, 2, 'uwu') + "#}; + + api.execute_sql(insert_profiles).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + name + blogs(first: 10) { edges { node { id title } } } + profile { description } + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Musti", + "blogs": { + "edges": [ + { + "node": { + "id": 1, + "title": "Hello, world!" + } + }, + { + "node": { + "id": 2, + "title": "Sayonara..." + } + } + ] + }, + "profile": { + "description": "meow" + } + } + } + } + "#); +} + +#[tokio::test] +async fn two_foreign_keys_to_same_table() { + let api = PgTestApi::new("", |api| async move { + let setup = [ + r#"CREATE TABLE public.colors ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name VARCHAR(255) NOT NULL UNIQUE, + rgb INT NOT NULL + );"#, + r#"CREATE TABLE public.users ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name VARCHAR(255) NOT NULL UNIQUE, + favorite_color_id INT NOT NULL REFERENCES colors(id), + least_favorite_color_id INT REFERENCES colors(id) + );"#, + r#"INSERT INTO public.colors (name, rgb) VALUES + ('rebeccapurple', 0x663399), + ('tomato', 0xff6347), + ('skyblue', 0x87ceeb), + ('maroon', 0x800000);"#, + r#"INSERT INTO public.users (name, favorite_color_id, least_favorite_color_id) VALUES + ('Guignol', 1, 2), + ('Gnafron', 2, NULL), + ('Flageolet', 3, 2), + ('Canezou', 3, 4); + "#, + ]; + + for statement in setup { + api.execute_sql(statement).await; + } + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + gnafron: user(lookup: { name: "Gnafron" }) { + name + colorByFavoriteColorId { id name rgb } + colorByLeastFavoriteColorId { id name rgb } + } + guignol: user(lookup: { name: "Guignol" }) { + name + colorByFavoriteColorId { id name rgb } + colorByLeastFavoriteColorId { id name rgb } + } + color(lookup: { name: "tomato" }) { + id + usersByFavoriteColorId(first: 10) { edges { node { id name favoriteColorId leastFavoriteColorId } } } + usersByLeastFavoriteColorId(first: 10) { edges { node { id name favoriteColorId leastFavoriteColorId } } } + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "gnafron": { + "name": "Gnafron", + "colorByFavoriteColorId": { + "id": 2, + "name": "tomato", + "rgb": 16737095 + }, + "colorByLeastFavoriteColorId": null + }, + "guignol": { + "name": "Guignol", + "colorByFavoriteColorId": { + "id": 1, + "name": "rebeccapurple", + "rgb": 6697881 + }, + "colorByLeastFavoriteColorId": { + "id": 2, + "name": "tomato", + "rgb": 16737095 + } + }, + "color": { + "id": 2, + "usersByFavoriteColorId": { + "edges": [ + { + "node": { + "id": 2, + "name": "Gnafron", + "favoriteColorId": 2, + "leastFavoriteColorId": null + } + } + ] + }, + "usersByLeastFavoriteColorId": { + "edges": [ + { + "node": { + "id": 1, + "name": "Guignol", + "favoriteColorId": 1, + "leastFavoriteColorId": 2 + } + }, + { + "node": { + "id": 3, + "name": "Flageolet", + "favoriteColorId": 3, + "leastFavoriteColorId": 2 + } + } + ] + } + } + } + } + "#); +} diff --git a/extensions/postgres/tests/find_one/mod.rs b/extensions/postgres/tests/find_one/mod.rs new file mode 100644 index 00000000..8fa46c6f --- /dev/null +++ b/extensions/postgres/tests/find_one/mod.rs @@ -0,0 +1,381 @@ +mod joins; +mod types; + +use indoc::indoc; + +use crate::PgTestApi; + +#[tokio::test] +async fn by_pk_no_rename() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "users" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "users" (id, name) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id name } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn by_pk_with_rename() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "users" ( + id_field INT PRIMARY KEY, + name_field VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "users" (id_field, name_field) VALUES (1, 'Musti'), (2, 'Naukio') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { idField: 1 }) { idField nameField } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "idField": 1, + "nameField": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn by_compound_pk() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + CONSTRAINT "User_pkey" PRIMARY KEY (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (name, email) VALUES + ('Musti', 'meow@meow.com'), + ('Naukio', 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + user(lookup: { nameEmail: { name: "Naukio", email: "purr@meow.com" } }) { + name + email + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Naukio", + "email": "purr@meow.com" + } + } + } + "#); +} + +#[tokio::test] +async fn by_compound_unique_with_nullable_column() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "users" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NULL, + CONSTRAINT "User_pkey" UNIQUE (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "users" (name, email) VALUES + ('Musti', 'meow@meow.com'), + ('Naukio', NULL), + ('Naukio', 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + user(lookup: { nameEmail: { name: "Naukio", email: null } }) { + name + email + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Naukio", + "email": null + } + } + } + "#); +} + +#[tokio::test] +async fn by_compound_unique_with_nullable_column_emitting_field() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NULL, + CONSTRAINT "User_pkey" UNIQUE (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (name, email) VALUES + ('Musti', 'meow@meow.com'), + ('Naukio', NULL), + ('Naukio', 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r#" + query { + user(lookup: { nameEmail: { name: "Naukio" } }) { + name + email + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "name": "Naukio", + "email": null + } + } + } + "#); +} + +#[tokio::test] +async fn by_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + email VARCHAR(255) NOT NULL UNIQUE + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, email) VALUES + (1, 'meow@meow.com'), + (2, 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + user(lookup: { email: "purr@meow.com" }) { + id + email + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "email": "purr@meow.com" + } + } + } + "#); +} + +#[tokio::test] +async fn by_id_when_having_another_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + email VARCHAR(255) NOT NULL UNIQUE + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, email) VALUES + (1, 'meow@meow.com'), + (2, 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 2 }) { + id + email + } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "email": "purr@meow.com" + } + } + } + "#); +} + +#[tokio::test] +async fn by_compound_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + CONSTRAINT User_name_email_key UNIQUE (name, email) + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, email) VALUES + (1, 'Musti', 'meow@meow.com'), + (2, 'Naukio', 'purr@meow.com') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r#" + query { + user(lookup: { nameEmail: { name: "Naukio", email: "purr@meow.com" } }) { + id + name + email + } + } + "#}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 2, + "name": "Naukio", + "email": "purr@meow.com" + } + } + } + "#); +} diff --git a/extensions/postgres/tests/find_one/types.rs b/extensions/postgres/tests/find_one/types.rs new file mode 100644 index 00000000..af5de2d5 --- /dev/null +++ b/extensions/postgres/tests/find_one/types.rs @@ -0,0 +1,879 @@ +use indoc::indoc; + +use crate::PgTestApi; + +#[tokio::test] +async fn char() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val CHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn name() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val NAME NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn text() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val TEXT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn xml() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val XML NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "" + } + } + } + "#); +} + +#[tokio::test] +async fn cidr() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val CIDR NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '0.0.0.0/0') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "0.0.0.0/0" + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr8() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val MACADDR8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '08:00:2b:01:02:03:04:05') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "08:00:2b:01:02:03:04:05" + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val MACADDR NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "A" (id, val) VALUES (1, '08:00:2b:01:02:03') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "08:00:2b:01:02:03" + } + } + } + "#); +} + +#[tokio::test] +async fn bpchar() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val BPCHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn varchar() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val VARCHAR(5) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "Musti" + } + } + } + "#); +} + +#[tokio::test] +async fn bit() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val BIT(3) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, B'010') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "010" + } + } + } + "#); +} + +#[tokio::test] +async fn varbit() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val VARBIT(3) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, B'010') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": "010" + } + } + } + "#); +} + +#[tokio::test] +async fn xml_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val XML[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{, }') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "", + "" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn cidr_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val CIDR[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{0.0.0.0/0, 192.168.0.0/32}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "0.0.0.0/0", + "192.168.0.0/32" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr8_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val MACADDR8[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{08:00:2b:01:02:03:04:05, 08002b:0102030405}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "08:00:2b:01:02:03:04:05", + "08:00:2b:01:02:03:04:05" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn macaddr_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val MACADDR8[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{08:00:2b:01:02:03:04:05, 08002b:0102030405}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "08:00:2b:01:02:03:04:05", + "08:00:2b:01:02:03:04:05" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn char_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val char(6)[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{Musti, Naukio}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "Musti ", + "Naukio" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn name_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val NAME[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{Musti, Naukio}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! 
{r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "Musti", + "Naukio" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn text_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val TEXT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "A" (id, val) VALUES (1, '{Musti, Naukio}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + a(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "a": { + "id": 1, + "val": [ + "Musti", + "Naukio" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn r#enum() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val street_light NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 'red'); + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 1, + "val": "RED" + } + } + } + "#); +} + +#[tokio::test] +async fn enum_array() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! 
{r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + val street_light[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, ARRAY['red', 'yellow']::street_light[]); + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { id val } + } + "}; + + let response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + "RED", + "YELLOW" + ] + } + } + } + "#); +} diff --git a/extensions/postgres/tests/integration_tests.rs b/extensions/postgres/tests/integration_tests.rs new file mode 100644 index 00000000..9c8567e5 --- /dev/null +++ b/extensions/postgres/tests/integration_tests.rs @@ -0,0 +1,159 @@ +mod create_many; +mod create_one; +mod delete_many; +mod delete_one; +mod find_many; +mod find_one; +mod introspection; +mod update_many; +mod update_one; + +use std::{cell::RefCell, fmt::Display, path::Path, sync::Arc}; + +use grafbase_postgres_introspection::IntrospectionOptions; +use grafbase_sdk::test::{DynamicSchema, TestConfig, TestRunner}; +use indoc::formatdoc; +use names::{Generator, Name}; +use sqlx::PgPool; +use tokio::sync::OnceCell; + +thread_local! { + static NAMES: RefCell>> = const { RefCell::new(None) }; +} + +pub async fn admin_pool() -> &'static PgPool { + // this is for creating/dropping databases, which _should not be done_ over pgbouncer. 
+ static ADMIN_CONNECTION_STRING: &str = "postgres://postgres:grafbase@localhost:5432/postgres"; + static POOL: OnceCell = OnceCell::const_new(); + + POOL.get_or_init(|| async { PgPool::connect(ADMIN_CONNECTION_STRING).await.unwrap() }) + .await +} + +fn random_name() -> String { + NAMES.with(|maybe_generator| { + maybe_generator + .borrow_mut() + .get_or_insert_with(|| Generator::with_naming(Name::Plain)) + .next() + .unwrap() + .replace('-', "") + }) +} + +// url for the engine for introspecting, querying and mutating the database. +static BASE_CONNECTION_STRING: &str = "postgres://postgres:grafbase@localhost:5432/"; + +struct Inner { + pool: PgPool, + config: String, +} + +#[derive(Clone)] +struct PgTestApi { + inner: Arc, +} + +impl PgTestApi { + async fn new(config: impl Display, init: F) -> Self + where + F: FnOnce(PgTestApi) -> U, + U: Future, + { + let database_name = random_name(); + let admin = admin_pool().await; + + sqlx::query(&format!("DROP DATABASE IF EXISTS {database_name}")) + .execute(admin) + .await + .unwrap(); + + sqlx::query(&format!("CREATE DATABASE {database_name}")) + .execute(admin) + .await + .unwrap(); + + let database_url = format!("{BASE_CONNECTION_STRING}{database_name}"); + + let config = formatdoc! 
{r#" + [graph] + introspection = true + + [[extensions.postgres.config.databases]] + name = "default" + default_schema = "public" + url = "{database_url}" + + {config} + "#}; + + let pool = PgPool::connect(&database_url).await.unwrap(); + let inner = Arc::new(Inner { pool, config }); + let this = Self { inner }; + + init(this.clone()).await; + + this + } + + async fn runner_spawn(&self) -> TestRunner { + let extension_path = std::env::current_dir().unwrap().join("build"); + let schema = self.introspect_local_extension(&extension_path).await; + let schema = DynamicSchema::builder(schema) + .into_extension_only_subgraph("test", &extension_path) + .unwrap(); + + let mut config = TestConfig::builder().with_subgraph(schema); + + if std::env::var("PREBUILT_EXTENSION").is_ok() { + config = config.with_extension("./build"); + } + + let config = config + .enable_networking() + .enable_stderr() + .enable_stdout() + .enable_environment_variables() + .log_level(grafbase_sdk::test::LogLevel::WasiDebug) + .build(&self.inner.config) + .unwrap(); + + TestRunner::new(config).await.unwrap() + } + + async fn execute_sql(&self, sql: &str) { + sqlx::query(sql).execute(&self.inner.pool).await.unwrap(); + } + + async fn introspect(&self) -> String { + let mut conn = self.inner.pool.acquire().await.unwrap(); + + grafbase_postgres_introspection::introspect( + &mut conn, + IntrospectionOptions { + database_name: "default", + extension_url: "https://grafbase.com/extensions/postgres/0.1.0", + default_schema: "public", + }, + ) + .await + .unwrap() + } + + async fn introspect_local_extension(&self, extension_path: &Path) -> String { + let mut conn = self.inner.pool.acquire().await.unwrap(); + + let extension_url = format!("file://{}", extension_path.display()); + + grafbase_postgres_introspection::introspect( + &mut conn, + IntrospectionOptions { + database_name: "default", + extension_url: &extension_url, + default_schema: "public", + }, + ) + .await + .unwrap() + } +} diff --git 
a/extensions/postgres/tests/introspection/mod.rs b/extensions/postgres/tests/introspection/mod.rs new file mode 100644 index 00000000..07ce6188 --- /dev/null +++ b/extensions/postgres/tests/introspection/mod.rs @@ -0,0 +1,30966 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn table_with_generated_always_identity_primary_key() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "users" ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + _: Boolean + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + _: Boolean + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! 
+ } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "users") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_generated_by_default_identity_primary_key() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "users" ( + id INT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! 
+ } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "users") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_serial_primary_key() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! 
+ } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_enum_field() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let table = indoc! {r#" + CREATE TABLE "A" ( + id INT PRIMARY KEY, + val street_light NOT NULL + ); + "#}; + + api.execute_sql(table).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + enum StreetLight @pgEnum(name: "street_light") { + RED @pgEnumVariant(name: "red") + YELLOW @pgEnumVariant(name: "yellow") + GREEN @pgEnumVariant(name: "green") + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Search filter input for StreetLight type. + """ + input StreetLightFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: StreetLight + """ + The value is not the one given + """ + ne: StreetLight + """ + The value is greater than the one given + """ + gt: StreetLight + """ + The value is less than the one given + """ + lt: StreetLight + """ + The value is greater than, or equal to the one given + """ + gte: StreetLight + """ + The value is less than, or equal to the one given + """ + lte: StreetLight + """ + The value is in the given array of values + """ + in: [StreetLight!] + """ + The value is not in the given array of values + """ + nin: [StreetLight!] + """ + A negation of the given filter + """ + not: StreetLightFilterInput + } + + """ + Search filter input for StreetLight array type. + """ + input StreetLightArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [StreetLight] + """ + The value is not the one given + """ + ne: [StreetLight] + """ + The value is greater than the one given + """ + gt: [StreetLight] + """ + The value is less than the one given + """ + lt: [StreetLight] + """ + The value is greater than, or equal to the one given + """ + gte: [StreetLight] + """ + The value is less than, or equal to the one given + """ + lte: [StreetLight] + """ + The value is in the given array of values + """ + in: [[StreetLight]!] + """ + The value is not in the given array of values + """ + nin: [[StreetLight]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [StreetLight] + """ + Checks if the array is contained within the provided array + """ + contained: [StreetLight] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [StreetLight] + """ + A negation of the given filter + """ + not: StreetLightArrayFilterInput + } + + """ + Update input for StreetLight type. + """ + input StreetLightUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: StreetLight + } + + """ + Update input for StreetLight array type. + """ + input StreetLightArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [StreetLight] + """ + Append an array value to the column. + """ + append: [StreetLight] + """ + Prepend an array value to the column. + """ + prepend: [StreetLight] + } + + """ + Specifies the ordering for A results. + """ + input AOrderByInput @oneOf { + """ + Order as by id + """ + id: OrderDirection + """ + Order as by val + """ + val: OrderDirection + } + + """ + Input type to select a unique A + """ + input ALookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for A collections + """ + input ACollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: AFilterInput + } + + """ + Filter input type for A objects. + """ + input AFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given val + """ + val: StreetLightFilterInput + """ + All of the filters must match + """ + ALL: [AFilterInput] + """ + None of the filters must match + """ + NONE: [AFilterInput] + """ + At least one of the filters must match + """ + ANY: [AFilterInput] + } + + """ + Input for creating a new A + """ + input ACreateInput { + """ + Set field value for id + """ + id: Int! 
+ """ + Set field value for val + """ + val: StreetLight! + } + + """ + Input for updating an existing A + """ + input AUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for val + """ + val: StreetLightUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created A object + """ + type AReturning + @pgReturning(type: "A") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the val field + """ + val: StreetLight! + } + + """ + Return type when creating one A + """ + type ACreatePayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: AReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many as + """ + type ACreateManyPayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: [AReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one A + """ + type AUpdatePayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: AReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many as + """ + type AUpdateManyPayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: [AReturning]! + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when deleting one A + """ + type ADeletePayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: AReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many as + """ + type ADeleteManyPayload + @pgMutation(type: "A") + { + """ + Returned item(s) from the mutation + """ + returning: [AReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type AEdge { + """ + The item at the end of the edge + """ + node: A! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for A + """ + type AConnection + @pgConnection(type: "A") + { + """ + A list of edges + """ + edges: [AEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type A + @pgTable(name: "A") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + val: StreetLight! @pgColumn(name: "val", type: ENUM) + } + + type Query { + """ + Query a unique A + """ + a( + """ + Input for unique A lookup + """ + lookup: ALookupInput!, + ): A @pgSelectOne + """ + Query and paginate multiple as + """ + as( + """ + Filter for A + """ + filter: AFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [AOrderByInput!], + ): AConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single A + """ + aCreate( + """ + Input for creating a single A + """ + input: ACreateInput!, + ): ACreatePayload! 
@pgInsertOne + """ + Create multiple as + """ + aCreateMany( + """ + Input for creating multiple A instances + """ + input: [ACreateInput!]!, + ): ACreateManyPayload! @pgInsertMany + """ + Update a unique A + """ + aUpdate( + """ + Lookup input for unique A update + """ + lookup: ALookupInput!, + """ + Input for updating a A + """ + input: AUpdateInput!, + ): AUpdatePayload! @pgUpdateOne + """ + Update multiple as + """ + aUpdateMany( + """ + Filter for updating multiple A instances + """ + filter: AFilterInput, + """ + Input for updating multiple A instances + """ + input: AUpdateInput!, + ): AUpdateManyPayload! @pgUpdateMany + """ + Delete a unique A + """ + aDelete( + """ + Lookup input for unique A deletion + """ + lookup: ALookupInput!, + ): ADeletePayload! @pgDeleteOne + """ + Delete multiple as + """ + aDeleteMany( + """ + Filter for A deletion + """ + filter: AFilterInput, + ): ADeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_int_primary_key() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. 
+ """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. 
+ """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! 
+ } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_int_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT UNIQUE NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! 
+ } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: UNIQUE) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_serial_primary_key_string_unique() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL UNIQUE + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + """ + Order users by email + """ + email: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'email' field + """ + email: String + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given email + """ + email: StringFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for email + """ + email: String! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for email + """ + email: StringUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! 
+ """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the email field + """ + email: String! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["email"], type: UNIQUE) + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + email: String! @pgColumn(name: "email", type: VARCHAR) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! 
@pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_composite_primary_key() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL, + CONSTRAINT "User_pkey" PRIMARY KEY (name, email) + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering 
results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. 
+ """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. 
+ """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by name + """ + name: OrderDirection + """ + Order users by email + """ + email: OrderDirection + } + + """ + Input type to select a unique User with multiple fields + """ + input UserNameEmailInput { + """ + Select by the 'name' field + """ + name: String! + """ + Select by the 'email' field + """ + email: String! + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select User by composite columns 'name, email' + """ + nameEmail: UserNameEmailInput + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given name + """ + name: StringFilterInput + """ + Filter by the given email + """ + email: StringFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for name + """ + name: String! + """ + Set field value for email + """ + email: String! 
+ } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for name + """ + name: StringUpdateInput + """ + Update field value for email + """ + email: StringUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the name field + """ + name: String! + """ + The value of the email field + """ + email: String! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["name", "email"], type: PRIMARY) + { + name: String! @pgColumn(name: "name", type: VARCHAR) + email: String! @pgColumn(name: "email", type: VARCHAR) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! 
@pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! @pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn two_schemas_same_table_name() { + let api = PgTestApi::new("", |api| async move { + api.execute_sql(r"CREATE SCHEMA private").await; + + let schema = indoc! {r#" + CREATE TABLE private."User" ( + id SERIAL PRIMARY KEY + ) + "#}; + + api.execute_sql(schema).await; + + let schema = indoc! 
{r#" + CREATE TABLE public."User" ( + id SERIAL PRIMARY KEY + ) + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. 
+ """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. 
+ """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for PrivateUser results. + """ + input PrivateUserOrderByInput @oneOf { + """ + Order privateUsers by id + """ + id: OrderDirection + } + + """ + Input type to select a unique PrivateUser + """ + input PrivateUserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for PrivateUser collections + """ + input PrivateUserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: PrivateUserFilterInput + } + + """ + Filter input type for PrivateUser objects. + """ + input PrivateUserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [PrivateUserFilterInput] + """ + None of the filters must match + """ + NONE: [PrivateUserFilterInput] + """ + At least one of the filters must match + """ + ANY: [PrivateUserFilterInput] + } + + """ + Input for creating a new PrivateUser + """ + input PrivateUserCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing PrivateUser + """ + input PrivateUserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Specifies the ordering for PublicUser results. 
+ """ + input PublicUserOrderByInput @oneOf { + """ + Order publicUsers by id + """ + id: OrderDirection + } + + """ + Input type to select a unique PublicUser + """ + input PublicUserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for PublicUser collections + """ + input PublicUserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: PublicUserFilterInput + } + + """ + Filter input type for PublicUser objects. + """ + input PublicUserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [PublicUserFilterInput] + """ + None of the filters must match + """ + NONE: [PublicUserFilterInput] + """ + At least one of the filters must match + """ + ANY: [PublicUserFilterInput] + } + + """ + Input for creating a new PublicUser + """ + input PublicUserCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing PublicUser + """ + input PublicUserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created PrivateUser object + """ + type PrivateUserReturning + @pgReturning(type: "PrivateUser") + { + """ + The value of the id field + """ + id: Int! 
+ } + + """ + Return type when creating one PrivateUser + """ + type PrivateUserCreatePayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: PrivateUserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many privateUsers + """ + type PrivateUserCreateManyPayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PrivateUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one PrivateUser + """ + type PrivateUserUpdatePayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: PrivateUserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many privateUsers + """ + type PrivateUserUpdateManyPayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PrivateUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one PrivateUser + """ + type PrivateUserDeletePayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: PrivateUserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many privateUsers + """ + type PrivateUserDeleteManyPayload + @pgMutation(type: "PrivateUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PrivateUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type PrivateUserEdge { + """ + The item at the end of the edge + """ + node: PrivateUser! + """ + A cursor for use in pagination + """ + cursor: String! 
+ } + + """ + The connection type for PrivateUser + """ + type PrivateUserConnection + @pgConnection(type: "PrivateUser") + { + """ + A list of edges + """ + edges: [PrivateUserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + """ + Return type containing fields of the mutated or created PublicUser object + """ + type PublicUserReturning + @pgReturning(type: "PublicUser") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one PublicUser + """ + type PublicUserCreatePayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: PublicUserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many publicUsers + """ + type PublicUserCreateManyPayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PublicUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one PublicUser + """ + type PublicUserUpdatePayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: PublicUserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many publicUsers + """ + type PublicUserUpdateManyPayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PublicUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one PublicUser + """ + type PublicUserDeletePayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: PublicUserReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when deleting many publicUsers + """ + type PublicUserDeleteManyPayload + @pgMutation(type: "PublicUser") + { + """ + Returned item(s) from the mutation + """ + returning: [PublicUserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type PublicUserEdge { + """ + The item at the end of the edge + """ + node: PublicUser! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for PublicUser + """ + type PublicUserConnection + @pgConnection(type: "PublicUser") + { + """ + A list of edges + """ + edges: [PublicUserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type PrivateUser + @pgTable(name: "User", schema: "private") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type PublicUser + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique PrivateUser + """ + privateUser( + """ + Input for unique PrivateUser lookup + """ + lookup: PrivateUserLookupInput!, + ): PrivateUser @pgSelectOne + """ + Query and paginate multiple privateUsers + """ + privateUsers( + """ + Filter for PrivateUser + """ + filter: PrivateUserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [PrivateUserOrderByInput!], + ): PrivateUserConnection! 
@pgSelectMany + """ + Query a unique PublicUser + """ + publicUser( + """ + Input for unique PublicUser lookup + """ + lookup: PublicUserLookupInput!, + ): PublicUser @pgSelectOne + """ + Query and paginate multiple publicUsers + """ + publicUsers( + """ + Filter for PublicUser + """ + filter: PublicUserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [PublicUserOrderByInput!], + ): PublicUserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single PrivateUser + """ + privateUserCreate( + """ + Input for creating a single PrivateUser + """ + input: PrivateUserCreateInput!, + ): PrivateUserCreatePayload! @pgInsertOne + """ + Create multiple privateUsers + """ + privateUserCreateMany( + """ + Input for creating multiple PrivateUser instances + """ + input: [PrivateUserCreateInput!]!, + ): PrivateUserCreateManyPayload! @pgInsertMany + """ + Update a unique PrivateUser + """ + privateUserUpdate( + """ + Lookup input for unique PrivateUser update + """ + lookup: PrivateUserLookupInput!, + """ + Input for updating a PrivateUser + """ + input: PrivateUserUpdateInput!, + ): PrivateUserUpdatePayload! @pgUpdateOne + """ + Update multiple privateUsers + """ + privateUserUpdateMany( + """ + Filter for updating multiple PrivateUser instances + """ + filter: PrivateUserFilterInput, + """ + Input for updating multiple PrivateUser instances + """ + input: PrivateUserUpdateInput!, + ): PrivateUserUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique PrivateUser + """ + privateUserDelete( + """ + Lookup input for unique PrivateUser deletion + """ + lookup: PrivateUserLookupInput!, + ): PrivateUserDeletePayload! @pgDeleteOne + """ + Delete multiple privateUsers + """ + privateUserDeleteMany( + """ + Filter for PrivateUser deletion + """ + filter: PrivateUserFilterInput, + ): PrivateUserDeleteManyPayload! @pgDeleteMany + """ + Create a single PublicUser + """ + publicUserCreate( + """ + Input for creating a single PublicUser + """ + input: PublicUserCreateInput!, + ): PublicUserCreatePayload! @pgInsertOne + """ + Create multiple publicUsers + """ + publicUserCreateMany( + """ + Input for creating multiple PublicUser instances + """ + input: [PublicUserCreateInput!]!, + ): PublicUserCreateManyPayload! @pgInsertMany + """ + Update a unique PublicUser + """ + publicUserUpdate( + """ + Lookup input for unique PublicUser update + """ + lookup: PublicUserLookupInput!, + """ + Input for updating a PublicUser + """ + input: PublicUserUpdateInput!, + ): PublicUserUpdatePayload! @pgUpdateOne + """ + Update multiple publicUsers + """ + publicUserUpdateMany( + """ + Filter for updating multiple PublicUser instances + """ + filter: PublicUserFilterInput, + """ + Input for updating multiple PublicUser instances + """ + input: PublicUserUpdateInput!, + ): PublicUserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique PublicUser + """ + publicUserDelete( + """ + Lookup input for unique PublicUser deletion + """ + lookup: PublicUserLookupInput!, + ): PublicUserDeletePayload! @pgDeleteOne + """ + Delete multiple publicUsers + """ + publicUserDeleteMany( + """ + Filter for PublicUser deletion + """ + filter: PublicUserFilterInput, + ): PublicUserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_an_array_column() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + name INT[] NOT NULL + ); + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. 
+ """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. 
+ """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + """ + Order users by name + """ + name: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given name + """ + name: IntArrayFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for name + """ + name: [Int]! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for name + """ + name: IntArrayUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! 
+ """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the name field + """ + name: [Int]! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. 
Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + name: [Int]! @pgColumn(name: "name", type: INT) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! 
@pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_jsonb_column() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + name JSONB NOT NULL + ); + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + """ + Order users by name + """ + name: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given name + """ + name: JSONFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for name + """ + name: JSON! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for name + """ + name: JSONUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! 
+ """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the name field + """ + name: JSON! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. 
Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + name: JSON! @pgColumn(name: "name", type: JSONB) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! 
@pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_json_column() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + name JSON NOT NULL + ); + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + """ + Order users by name + """ + name: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given name + """ + name: JSONFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for name + """ + name: JSON! + } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for name + """ + name: JSONUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! 
+ """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the name field + """ + name: JSON! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. 
Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + name: JSON! @pgColumn(name: "name", type: JSON) + } + + type Query { + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! 
@pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn two_tables_with_single_column_foreign_key() { + let api = PgTestApi::new("", |api| async move { + let create_user = indoc! {r#" + CREATE TABLE "User" ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + "#}; + + api.execute_sql(create_user).await; + + let create_blog = indoc! {r#" + CREATE TABLE "Blog" ( + id SERIAL PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INT NOT NULL, + CONSTRAINT "Blog_User" FOREIGN KEY (user_id) REFERENCES "User"(id) + ) + "#}; + + api.execute_sql(create_blog).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering 
results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. 
+ """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. 
+ """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for Blog results. + """ + input BlogOrderByInput @oneOf { + """ + Order blogs by id + """ + id: OrderDirection + """ + Order blogs by title + """ + title: OrderDirection + """ + Order blogs by content + """ + content: OrderDirection + """ + Order blogs by userId + """ + userId: OrderDirection + """ + Order Blog results by User fields + """ + user: UserOrderByInput + } + + """ + Input type to select a unique Blog + """ + input BlogLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for Blog collections + """ + input BlogCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: BlogFilterInput + } + + """ + Filter input type for Blog objects. + """ + input BlogFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given title + """ + title: StringFilterInput + """ + Filter by the given content + """ + content: StringFilterInput + """ + Filter by the given userId + """ + userId: IntFilterInput + """ + Filter by the related User object + """ + user: UserFilterInput + """ + All of the filters must match + """ + ALL: [BlogFilterInput] + """ + None of the filters must match + """ + NONE: [BlogFilterInput] + """ + At least one of the filters must match + """ + ANY: [BlogFilterInput] + } + + """ + Input for creating a new Blog + """ + input BlogCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for title + """ + title: String! 
+ """ + Set field value for content + """ + content: String + """ + Set field value for userId + """ + userId: Int! + } + + """ + Input for updating an existing Blog + """ + input BlogUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for title + """ + title: StringUpdateInput + """ + Update field value for content + """ + content: StringUpdateInput + """ + Update field value for userId + """ + userId: IntUpdateInput + } + + """ + Specifies the ordering for User results. + """ + input UserOrderByInput @oneOf { + """ + Order users by id + """ + id: OrderDirection + """ + Order users by name + """ + name: OrderDirection + } + + """ + Input type to select a unique User + """ + input UserLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for User collections + """ + input UserCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFilterInput + } + + """ + Filter input type for User objects. + """ + input UserFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given name + """ + name: StringFilterInput + """ + Filter by the related Blog objects + """ + blogs: BlogCollectionFilterInput + """ + All of the filters must match + """ + ALL: [UserFilterInput] + """ + None of the filters must match + """ + NONE: [UserFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFilterInput] + } + + """ + Input for creating a new User + """ + input UserCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for name + """ + name: String! 
+ } + + """ + Input for updating an existing User + """ + input UserUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for name + """ + name: StringUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created Blog object + """ + type BlogReturning + @pgReturning(type: "Blog") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the title field + """ + title: String! + """ + The value of the content field + """ + content: String + """ + The value of the userId field + """ + userId: Int! + } + + """ + Return type when creating one Blog + """ + type BlogCreatePayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: BlogReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many blogs + """ + type BlogCreateManyPayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: [BlogReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one Blog + """ + type BlogUpdatePayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: BlogReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many blogs + """ + type BlogUpdateManyPayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: [BlogReturning]! 
+ """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one Blog + """ + type BlogDeletePayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: BlogReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many blogs + """ + type BlogDeleteManyPayload + @pgMutation(type: "Blog") + { + """ + Returned item(s) from the mutation + """ + returning: [BlogReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type BlogEdge { + """ + The item at the end of the edge + """ + node: Blog! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for Blog + """ + type BlogConnection + @pgConnection(type: "Blog") + { + """ + A list of edges + """ + edges: [BlogEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + """ + Return type containing fields of the mutated or created User object + """ + type UserReturning + @pgReturning(type: "User") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the name field + """ + name: String! + } + + """ + Return type when creating one User + """ + type UserCreatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many users + """ + type UserCreateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when updating one User + """ + type UserUpdatePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many users + """ + type UserUpdateManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one User + """ + type UserDeletePayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: UserReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many users + """ + type UserDeleteManyPayload + @pgMutation(type: "User") + { + """ + Returned item(s) from the mutation + """ + returning: [UserReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for User + """ + type UserConnection + @pgConnection(type: "User") + { + """ + A list of edges + """ + edges: [UserEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type Blog + @pgTable(name: "Blog") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + title: String! @pgColumn(name: "title", type: VARCHAR) + content: String @pgColumn(name: "content", type: TEXT) + userId: Int! @pgColumn(name: "user_id", type: INT) + user: User! @pgRelation(name: "Blog_User", fields: ["userId"], references: ["id"]) + } + + type User + @pgTable(name: "User") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + name: String! 
@pgColumn(name: "name", type: VARCHAR) + blogs( + """ + Filter the related Blog instances + """ + filter: BlogFilterInput, + """ + Select the first Blog instances + """ + first: Int, + """ + Select the last Blog instances + """ + last: Int, + """ + Select the Blog instances before the given cursor + """ + before: String, + """ + Select the Blog instances after the given cursor + """ + after: String, + """ + Order the Blog instances by the given fields + """ + orderBy: [BlogOrderByInput!], + ): BlogConnection! @pgRelation(name: "Blog_User") + } + + type Query { + """ + Query a unique Blog + """ + blog( + """ + Input for unique Blog lookup + """ + lookup: BlogLookupInput!, + ): Blog @pgSelectOne + """ + Query and paginate multiple blogs + """ + blogs( + """ + Filter for Blog + """ + filter: BlogFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [BlogOrderByInput!], + ): BlogConnection! @pgSelectMany + """ + Query a unique User + """ + user( + """ + Input for unique User lookup + """ + lookup: UserLookupInput!, + ): User @pgSelectOne + """ + Query and paginate multiple users + """ + users( + """ + Filter for User + """ + filter: UserFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. 
+ """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserOrderByInput!], + ): UserConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single Blog + """ + blogCreate( + """ + Input for creating a single Blog + """ + input: BlogCreateInput!, + ): BlogCreatePayload! @pgInsertOne + """ + Create multiple blogs + """ + blogCreateMany( + """ + Input for creating multiple Blog instances + """ + input: [BlogCreateInput!]!, + ): BlogCreateManyPayload! @pgInsertMany + """ + Update a unique Blog + """ + blogUpdate( + """ + Lookup input for unique Blog update + """ + lookup: BlogLookupInput!, + """ + Input for updating a Blog + """ + input: BlogUpdateInput!, + ): BlogUpdatePayload! @pgUpdateOne + """ + Update multiple blogs + """ + blogUpdateMany( + """ + Filter for updating multiple Blog instances + """ + filter: BlogFilterInput, + """ + Input for updating multiple Blog instances + """ + input: BlogUpdateInput!, + ): BlogUpdateManyPayload! @pgUpdateMany + """ + Delete a unique Blog + """ + blogDelete( + """ + Lookup input for unique Blog deletion + """ + lookup: BlogLookupInput!, + ): BlogDeletePayload! @pgDeleteOne + """ + Delete multiple blogs + """ + blogDeleteMany( + """ + Filter for Blog deletion + """ + filter: BlogFilterInput, + ): BlogDeleteManyPayload! @pgDeleteMany + """ + Create a single User + """ + userCreate( + """ + Input for creating a single User + """ + input: UserCreateInput!, + ): UserCreatePayload! @pgInsertOne + """ + Create multiple users + """ + userCreateMany( + """ + Input for creating multiple User instances + """ + input: [UserCreateInput!]!, + ): UserCreateManyPayload! @pgInsertMany + """ + Update a unique User + """ + userUpdate( + """ + Lookup input for unique User update + """ + lookup: UserLookupInput!, + """ + Input for updating a User + """ + input: UserUpdateInput!, + ): UserUpdatePayload! 
@pgUpdateOne + """ + Update multiple users + """ + userUpdateMany( + """ + Filter for updating multiple User instances + """ + filter: UserFilterInput, + """ + Input for updating multiple User instances + """ + input: UserUpdateInput!, + ): UserUpdateManyPayload! @pgUpdateMany + """ + Delete a unique User + """ + userDelete( + """ + Lookup input for unique User deletion + """ + lookup: UserLookupInput!, + ): UserDeletePayload! @pgDeleteOne + """ + Delete multiple users + """ + userDeleteMany( + """ + Filter for User deletion + """ + filter: UserFilterInput, + ): UserDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn foreign_key_to_a_table_without_a_key_should_not_create_a_relation() { + let api = PgTestApi::new("pg", |api| async move { + api.execute_sql(r#"CREATE TABLE visible_table (id TEXT PRIMARY KEY)"#) + .await; + + api.execute_sql(r#"CREATE TABLE hidden_table (visible_table TEXT NOT NULL REFERENCES visible_table(id))"#) + .await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. 
+ """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for VisibleTable results. + """ + input VisibleTableOrderByInput @oneOf { + """ + Order visibleTables by id + """ + id: OrderDirection + } + + """ + Input type to select a unique VisibleTable + """ + input VisibleTableLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: String + } + + """ + Filter input type for VisibleTable collections + """ + input VisibleTableCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: VisibleTableFilterInput + } + + """ + Filter input type for VisibleTable objects. + """ + input VisibleTableFilterInput @oneOf { + """ + Filter by the given id + """ + id: StringFilterInput + """ + All of the filters must match + """ + ALL: [VisibleTableFilterInput] + """ + None of the filters must match + """ + NONE: [VisibleTableFilterInput] + """ + At least one of the filters must match + """ + ANY: [VisibleTableFilterInput] + } + + """ + Input for creating a new VisibleTable + """ + input VisibleTableCreateInput { + """ + Set field value for id + """ + id: String! + } + + """ + Input for updating an existing VisibleTable + """ + input VisibleTableUpdateInput { + """ + Update field value for id + """ + id: StringUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! 
+ """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created VisibleTable object + """ + type VisibleTableReturning + @pgReturning(type: "VisibleTable") + { + """ + The value of the id field + """ + id: String! + } + + """ + Return type when creating one VisibleTable + """ + type VisibleTableCreatePayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: VisibleTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many visibleTables + """ + type VisibleTableCreateManyPayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: [VisibleTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one VisibleTable + """ + type VisibleTableUpdatePayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: VisibleTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many visibleTables + """ + type VisibleTableUpdateManyPayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: [VisibleTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one VisibleTable + """ + type VisibleTableDeletePayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: VisibleTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many visibleTables + """ + type VisibleTableDeleteManyPayload + @pgMutation(type: "VisibleTable") + { + """ + Returned item(s) from the mutation + """ + returning: [VisibleTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + An edge in a connection. Contains the node and its cursor + """ + type VisibleTableEdge { + """ + The item at the end of the edge + """ + node: VisibleTable! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for VisibleTable + """ + type VisibleTableConnection + @pgConnection(type: "VisibleTable") + { + """ + A list of edges + """ + edges: [VisibleTableEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type VisibleTable + @pgTable(name: "visible_table") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: String! @pgColumn(name: "id", type: TEXT) + } + + type Query { + """ + Query a unique VisibleTable + """ + visibleTable( + """ + Input for unique VisibleTable lookup + """ + lookup: VisibleTableLookupInput!, + ): VisibleTable @pgSelectOne + """ + Query and paginate multiple visibleTables + """ + visibleTables( + """ + Filter for VisibleTable + """ + filter: VisibleTableFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [VisibleTableOrderByInput!], + ): VisibleTableConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single VisibleTable + """ + visibleTableCreate( + """ + Input for creating a single VisibleTable + """ + input: VisibleTableCreateInput!, + ): VisibleTableCreatePayload! @pgInsertOne + """ + Create multiple visibleTables + """ + visibleTableCreateMany( + """ + Input for creating multiple VisibleTable instances + """ + input: [VisibleTableCreateInput!]!, + ): VisibleTableCreateManyPayload! 
@pgInsertMany + """ + Update a unique VisibleTable + """ + visibleTableUpdate( + """ + Lookup input for unique VisibleTable update + """ + lookup: VisibleTableLookupInput!, + """ + Input for updating a VisibleTable + """ + input: VisibleTableUpdateInput!, + ): VisibleTableUpdatePayload! @pgUpdateOne + """ + Update multiple visibleTables + """ + visibleTableUpdateMany( + """ + Filter for updating multiple VisibleTable instances + """ + filter: VisibleTableFilterInput, + """ + Input for updating multiple VisibleTable instances + """ + input: VisibleTableUpdateInput!, + ): VisibleTableUpdateManyPayload! @pgUpdateMany + """ + Delete a unique VisibleTable + """ + visibleTableDelete( + """ + Lookup input for unique VisibleTable deletion + """ + lookup: VisibleTableLookupInput!, + ): VisibleTableDeletePayload! @pgDeleteOne + """ + Delete multiple visibleTables + """ + visibleTableDeleteMany( + """ + Filter for VisibleTable deletion + """ + filter: VisibleTableFilterInput, + ): VisibleTableDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn issue_november_2023() { + let api = PgTestApi::new("pg", |api| async move { + let create = indoc! {r" + CREATE TYPE access_mode AS ENUM ('PUBLIC', 'PUBLIC_READ', 'PRIVATE'); + "}; + + api.execute_sql(create).await; + + let create = indoc! {r" + CREATE TYPE project_status AS ENUM ('CREATED', 'READY', 'FAILED'); + "}; + + api.execute_sql(create).await; + + let create = indoc! {r" + CREATE TABLE networks ( + id SERIAL PRIMARY KEY + ); + "}; + + api.execute_sql(create).await; + + let create = indoc! 
{r" + CREATE TABLE projects ( + id SERIAL PRIMARY KEY, + access_mode access_mode NOT NULL, + status project_status DEFAULT 'CREATED' NOT NULL, + network_id INT REFERENCES networks(id) + ); + "}; + + api.execute_sql(create).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + enum AccessMode @pgEnum(name: "access_mode") { + PUBLIC @pgEnumVariant(name: "PUBLIC") + PUBLIC_READ @pgEnumVariant(name: "PUBLIC_READ") + PRIVATE @pgEnumVariant(name: "PRIVATE") + } + + enum ProjectStatus @pgEnum(name: "project_status") { + CREATED @pgEnumVariant(name: "CREATED") + READY @pgEnumVariant(name: "READY") + FAILED @pgEnumVariant(name: "FAILED") + } + + """ + Search filter input for String type. 
+ """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] 
+ """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. 
+ """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. + """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. 
+ """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. + """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] 
+ """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. + """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. + """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Search filter input for AccessMode type. + """ + input AccessModeFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: AccessMode + """ + The value is not the one given + """ + ne: AccessMode + """ + The value is greater than the one given + """ + gt: AccessMode + """ + The value is less than the one given + """ + lt: AccessMode + """ + The value is greater than, or equal to the one given + """ + gte: AccessMode + """ + The value is less than, or equal to the one given + """ + lte: AccessMode + """ + The value is in the given array of values + """ + in: [AccessMode!] + """ + The value is not in the given array of values + """ + nin: [AccessMode!] + """ + A negation of the given filter + """ + not: AccessModeFilterInput + } + + """ + Search filter input for AccessMode array type. + """ + input AccessModeArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [AccessMode] + """ + The value is not the one given + """ + ne: [AccessMode] + """ + The value is greater than the one given + """ + gt: [AccessMode] + """ + The value is less than the one given + """ + lt: [AccessMode] + """ + The value is greater than, or equal to the one given + """ + gte: [AccessMode] + """ + The value is less than, or equal to the one given + """ + lte: [AccessMode] + """ + The value is in the given array of values + """ + in: [[AccessMode]!] + """ + The value is not in the given array of values + """ + nin: [[AccessMode]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [AccessMode] + """ + Checks if the array is contained within the provided array + """ + contained: [AccessMode] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [AccessMode] + """ + A negation of the given filter + """ + not: AccessModeArrayFilterInput + } + + """ + Update input for AccessMode type. + """ + input AccessModeUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: AccessMode + } + + """ + Update input for AccessMode array type. + """ + input AccessModeArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [AccessMode] + """ + Append an array value to the column. + """ + append: [AccessMode] + """ + Prepend an array value to the column. + """ + prepend: [AccessMode] + } + + """ + Search filter input for ProjectStatus type. + """ + input ProjectStatusFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: ProjectStatus + """ + The value is not the one given + """ + ne: ProjectStatus + """ + The value is greater than the one given + """ + gt: ProjectStatus + """ + The value is less than the one given + """ + lt: ProjectStatus + """ + The value is greater than, or equal to the one given + """ + gte: ProjectStatus + """ + The value is less than, or equal to the one given + """ + lte: ProjectStatus + """ + The value is in the given array of values + """ + in: [ProjectStatus!] + """ + The value is not in the given array of values + """ + nin: [ProjectStatus!] + """ + A negation of the given filter + """ + not: ProjectStatusFilterInput + } + + """ + Search filter input for ProjectStatus array type. 
+ """ + input ProjectStatusArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [ProjectStatus] + """ + The value is not the one given + """ + ne: [ProjectStatus] + """ + The value is greater than the one given + """ + gt: [ProjectStatus] + """ + The value is less than the one given + """ + lt: [ProjectStatus] + """ + The value is greater than, or equal to the one given + """ + gte: [ProjectStatus] + """ + The value is less than, or equal to the one given + """ + lte: [ProjectStatus] + """ + The value is in the given array of values + """ + in: [[ProjectStatus]!] + """ + The value is not in the given array of values + """ + nin: [[ProjectStatus]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [ProjectStatus] + """ + Checks if the array is contained within the provided array + """ + contained: [ProjectStatus] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [ProjectStatus] + """ + A negation of the given filter + """ + not: ProjectStatusArrayFilterInput + } + + """ + Update input for ProjectStatus type. + """ + input ProjectStatusUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: ProjectStatus + } + + """ + Update input for ProjectStatus array type. + """ + input ProjectStatusArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [ProjectStatus] + """ + Append an array value to the column. + """ + append: [ProjectStatus] + """ + Prepend an array value to the column. + """ + prepend: [ProjectStatus] + } + + """ + Specifies the ordering for Network results. 
+ """ + input NetworkOrderByInput @oneOf { + """ + Order networks by id + """ + id: OrderDirection + } + + """ + Input type to select a unique Network + """ + input NetworkLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for Network collections + """ + input NetworkCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: NetworkFilterInput + } + + """ + Filter input type for Network objects. + """ + input NetworkFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the related Project objects + """ + projects: ProjectCollectionFilterInput + """ + All of the filters must match + """ + ALL: [NetworkFilterInput] + """ + None of the filters must match + """ + NONE: [NetworkFilterInput] + """ + At least one of the filters must match + """ + ANY: [NetworkFilterInput] + } + + """ + Input for creating a new Network + """ + input NetworkCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing Network + """ + input NetworkUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Specifies the ordering for Project results. 
+ """ + input ProjectOrderByInput @oneOf { + """ + Order projects by id + """ + id: OrderDirection + """ + Order projects by accessMode + """ + accessMode: OrderDirection + """ + Order projects by status + """ + status: OrderDirection + """ + Order projects by networkId + """ + networkId: OrderDirection + """ + Order Project results by Network fields + """ + network: NetworkOrderByInput + } + + """ + Input type to select a unique Project + """ + input ProjectLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for Project collections + """ + input ProjectCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: ProjectFilterInput + } + + """ + Filter input type for Project objects. + """ + input ProjectFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given accessMode + """ + accessMode: AccessModeFilterInput + """ + Filter by the given status + """ + status: ProjectStatusFilterInput + """ + Filter by the given networkId + """ + networkId: IntFilterInput + """ + Filter by the related Network object + """ + network: NetworkFilterInput + """ + All of the filters must match + """ + ALL: [ProjectFilterInput] + """ + None of the filters must match + """ + NONE: [ProjectFilterInput] + """ + At least one of the filters must match + """ + ANY: [ProjectFilterInput] + } + + """ + Input for creating a new Project + """ + input ProjectCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for accessMode + """ + accessMode: AccessMode! 
+ """ + Set field value for status + """ + status: ProjectStatus + """ + Set field value for networkId + """ + networkId: Int + } + + """ + Input for updating an existing Project + """ + input ProjectUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for accessMode + """ + accessMode: AccessModeUpdateInput + """ + Update field value for status + """ + status: ProjectStatusUpdateInput + """ + Update field value for networkId + """ + networkId: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created Network object + """ + type NetworkReturning + @pgReturning(type: "Network") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one Network + """ + type NetworkCreatePayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: NetworkReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many networks + """ + type NetworkCreateManyPayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: [NetworkReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one Network + """ + type NetworkUpdatePayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: NetworkReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when updating many networks + """ + type NetworkUpdateManyPayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: [NetworkReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one Network + """ + type NetworkDeletePayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: NetworkReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many networks + """ + type NetworkDeleteManyPayload + @pgMutation(type: "Network") + { + """ + Returned item(s) from the mutation + """ + returning: [NetworkReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type NetworkEdge { + """ + The item at the end of the edge + """ + node: Network! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for Network + """ + type NetworkConnection + @pgConnection(type: "Network") + { + """ + A list of edges + """ + edges: [NetworkEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + """ + Return type containing fields of the mutated or created Project object + """ + type ProjectReturning + @pgReturning(type: "Project") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the accessMode field + """ + accessMode: AccessMode! + """ + The value of the status field + """ + status: ProjectStatus! + """ + The value of the networkId field + """ + networkId: Int + } + + """ + Return type when creating one Project + """ + type ProjectCreatePayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: ProjectReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when creating many projects + """ + type ProjectCreateManyPayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: [ProjectReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one Project + """ + type ProjectUpdatePayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: ProjectReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many projects + """ + type ProjectUpdateManyPayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: [ProjectReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one Project + """ + type ProjectDeletePayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: ProjectReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many projects + """ + type ProjectDeleteManyPayload + @pgMutation(type: "Project") + { + """ + Returned item(s) from the mutation + """ + returning: [ProjectReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type ProjectEdge { + """ + The item at the end of the edge + """ + node: Project! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for Project + """ + type ProjectConnection + @pgConnection(type: "Project") + { + """ + A list of edges + """ + edges: [ProjectEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type Network + @pgTable(name: "networks") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! 
@pgColumn(name: "id", type: INT) + projects( + """ + Filter the related Project instances + """ + filter: ProjectFilterInput, + """ + Select the first Project instances + """ + first: Int, + """ + Select the last Project instances + """ + last: Int, + """ + Select the Project instances before the given cursor + """ + before: String, + """ + Select the Project instances after the given cursor + """ + after: String, + """ + Order the Project instances by the given fields + """ + orderBy: [ProjectOrderByInput!], + ): ProjectConnection! @pgRelation(name: "projects_network_id_fkey") + } + + type Project + @pgTable(name: "projects") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + accessMode: AccessMode! @pgColumn(name: "access_mode", type: ENUM) + status: ProjectStatus! @pgColumn(name: "status", type: ENUM) + networkId: Int @pgColumn(name: "network_id", type: INT) + network: Network @pgRelation(name: "projects_network_id_fkey", fields: ["networkId"], references: ["id"]) + } + + type Query { + """ + Query a unique Network + """ + network( + """ + Input for unique Network lookup + """ + lookup: NetworkLookupInput!, + ): Network @pgSelectOne + """ + Query and paginate multiple networks + """ + networks( + """ + Filter for Network + """ + filter: NetworkFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [NetworkOrderByInput!], + ): NetworkConnection! 
@pgSelectMany + """ + Query a unique Project + """ + project( + """ + Input for unique Project lookup + """ + lookup: ProjectLookupInput!, + ): Project @pgSelectOne + """ + Query and paginate multiple projects + """ + projects( + """ + Filter for Project + """ + filter: ProjectFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [ProjectOrderByInput!], + ): ProjectConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single Network + """ + networkCreate( + """ + Input for creating a single Network + """ + input: NetworkCreateInput!, + ): NetworkCreatePayload! @pgInsertOne + """ + Create multiple networks + """ + networkCreateMany( + """ + Input for creating multiple Network instances + """ + input: [NetworkCreateInput!]!, + ): NetworkCreateManyPayload! @pgInsertMany + """ + Update a unique Network + """ + networkUpdate( + """ + Lookup input for unique Network update + """ + lookup: NetworkLookupInput!, + """ + Input for updating a Network + """ + input: NetworkUpdateInput!, + ): NetworkUpdatePayload! @pgUpdateOne + """ + Update multiple networks + """ + networkUpdateMany( + """ + Filter for updating multiple Network instances + """ + filter: NetworkFilterInput, + """ + Input for updating multiple Network instances + """ + input: NetworkUpdateInput!, + ): NetworkUpdateManyPayload! @pgUpdateMany + """ + Delete a unique Network + """ + networkDelete( + """ + Lookup input for unique Network deletion + """ + lookup: NetworkLookupInput!, + ): NetworkDeletePayload! 
@pgDeleteOne + """ + Delete multiple networks + """ + networkDeleteMany( + """ + Filter for Network deletion + """ + filter: NetworkFilterInput, + ): NetworkDeleteManyPayload! @pgDeleteMany + """ + Create a single Project + """ + projectCreate( + """ + Input for creating a single Project + """ + input: ProjectCreateInput!, + ): ProjectCreatePayload! @pgInsertOne + """ + Create multiple projects + """ + projectCreateMany( + """ + Input for creating multiple Project instances + """ + input: [ProjectCreateInput!]!, + ): ProjectCreateManyPayload! @pgInsertMany + """ + Update a unique Project + """ + projectUpdate( + """ + Lookup input for unique Project update + """ + lookup: ProjectLookupInput!, + """ + Input for updating a Project + """ + input: ProjectUpdateInput!, + ): ProjectUpdatePayload! @pgUpdateOne + """ + Update multiple projects + """ + projectUpdateMany( + """ + Filter for updating multiple Project instances + """ + filter: ProjectFilterInput, + """ + Input for updating multiple Project instances + """ + input: ProjectUpdateInput!, + ): ProjectUpdateManyPayload! @pgUpdateMany + """ + Delete a unique Project + """ + projectDelete( + """ + Lookup input for unique Project deletion + """ + lookup: ProjectLookupInput!, + ): ProjectDeletePayload! @pgDeleteOne + """ + Delete multiple projects + """ + projectDeleteMany( + """ + Filter for Project deletion + """ + filter: ProjectFilterInput, + ): ProjectDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_comment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "commented_table" ( + id INT PRIMARY KEY + ); + "#}; + + api.execute_sql(schema).await; + + let schema = indoc! 
{r#" + COMMENT ON TABLE "commented_table" IS 'This is a table comment.'; + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. 
+ """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. 
+ """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for CommentedTable results. + """ + input CommentedTableOrderByInput @oneOf { + """ + Order commentedTables by id + """ + id: OrderDirection + } + + """ + Input type to select a unique CommentedTable + """ + input CommentedTableLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for CommentedTable collections + """ + input CommentedTableCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: CommentedTableFilterInput + } + + """ + Filter input type for CommentedTable objects. + """ + input CommentedTableFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + All of the filters must match + """ + ALL: [CommentedTableFilterInput] + """ + None of the filters must match + """ + NONE: [CommentedTableFilterInput] + """ + At least one of the filters must match + """ + ANY: [CommentedTableFilterInput] + } + + """ + Input for creating a new CommentedTable + """ + input CommentedTableCreateInput { + """ + Set field value for id + """ + id: Int! + } + + """ + Input for updating an existing CommentedTable + """ + input CommentedTableUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! 
+ """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created CommentedTable object + """ + type CommentedTableReturning + @pgReturning(type: "CommentedTable") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one CommentedTable + """ + type CommentedTableCreatePayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many commentedTables + """ + type CommentedTableCreateManyPayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one CommentedTable + """ + type CommentedTableUpdatePayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many commentedTables + """ + type CommentedTableUpdateManyPayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one CommentedTable + """ + type CommentedTableDeletePayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many commentedTables + """ + type CommentedTableDeleteManyPayload + @pgMutation(type: "CommentedTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedTableReturning]! 
+ """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type CommentedTableEdge { + """ + The item at the end of the edge + """ + node: CommentedTable! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for CommentedTable + """ + type CommentedTableConnection + @pgConnection(type: "CommentedTable") + { + """ + A list of edges + """ + edges: [CommentedTableEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + """ + This is a table comment. + """ + type CommentedTable + @pgTable(name: "commented_table") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + } + + type Query { + """ + Query a unique CommentedTable + """ + commentedTable( + """ + Input for unique CommentedTable lookup + """ + lookup: CommentedTableLookupInput!, + ): CommentedTable @pgSelectOne + """ + Query and paginate multiple commentedTables + """ + commentedTables( + """ + Filter for CommentedTable + """ + filter: CommentedTableFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [CommentedTableOrderByInput!], + ): CommentedTableConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single CommentedTable + """ + commentedTableCreate( + """ + Input for creating a single CommentedTable + """ + input: CommentedTableCreateInput!, + ): CommentedTableCreatePayload! 
@pgInsertOne + """ + Create multiple commentedTables + """ + commentedTableCreateMany( + """ + Input for creating multiple CommentedTable instances + """ + input: [CommentedTableCreateInput!]!, + ): CommentedTableCreateManyPayload! @pgInsertMany + """ + Update a unique CommentedTable + """ + commentedTableUpdate( + """ + Lookup input for unique CommentedTable update + """ + lookup: CommentedTableLookupInput!, + """ + Input for updating a CommentedTable + """ + input: CommentedTableUpdateInput!, + ): CommentedTableUpdatePayload! @pgUpdateOne + """ + Update multiple commentedTables + """ + commentedTableUpdateMany( + """ + Filter for updating multiple CommentedTable instances + """ + filter: CommentedTableFilterInput, + """ + Input for updating multiple CommentedTable instances + """ + input: CommentedTableUpdateInput!, + ): CommentedTableUpdateManyPayload! @pgUpdateMany + """ + Delete a unique CommentedTable + """ + commentedTableDelete( + """ + Lookup input for unique CommentedTable deletion + """ + lookup: CommentedTableLookupInput!, + ): CommentedTableDeletePayload! @pgDeleteOne + """ + Delete multiple commentedTables + """ + commentedTableDeleteMany( + """ + Filter for CommentedTable deletion + """ + filter: CommentedTableFilterInput, + ): CommentedTableDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_commented_column() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "commented_column_table" ( + id INT PRIMARY KEY, + "data" TEXT + ); + "#}; + + api.execute_sql(schema).await; + + let schema = indoc! 
{r#" + COMMENT ON COLUMN "commented_column_table"."data" IS 'This is a column comment.'; + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. 
+ """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. 
+ """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for CommentedColumnTable results. + """ + input CommentedColumnTableOrderByInput @oneOf { + """ + Order commentedColumnTables by id + """ + id: OrderDirection + """ + Order commentedColumnTables by data + """ + data: OrderDirection + } + + """ + Input type to select a unique CommentedColumnTable + """ + input CommentedColumnTableLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for CommentedColumnTable collections + """ + input CommentedColumnTableCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: CommentedColumnTableFilterInput + } + + """ + Filter input type for CommentedColumnTable objects. + """ + input CommentedColumnTableFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given data + """ + data: StringFilterInput + """ + All of the filters must match + """ + ALL: [CommentedColumnTableFilterInput] + """ + None of the filters must match + """ + NONE: [CommentedColumnTableFilterInput] + """ + At least one of the filters must match + """ + ANY: [CommentedColumnTableFilterInput] + } + + """ + Input for creating a new CommentedColumnTable + """ + input CommentedColumnTableCreateInput { + """ + Set field value for id + """ + id: Int! 
+ """ + Set field value for data + """ + data: String + } + + """ + Input for updating an existing CommentedColumnTable + """ + input CommentedColumnTableUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for data + """ + data: StringUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created CommentedColumnTable object + """ + type CommentedColumnTableReturning + @pgReturning(type: "CommentedColumnTable") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the data field + """ + data: String + } + + """ + Return type when creating one CommentedColumnTable + """ + type CommentedColumnTableCreatePayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedColumnTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many commentedColumnTables + """ + type CommentedColumnTableCreateManyPayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedColumnTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one CommentedColumnTable + """ + type CommentedColumnTableUpdatePayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedColumnTableReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when updating many commentedColumnTables + """ + type CommentedColumnTableUpdateManyPayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedColumnTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one CommentedColumnTable + """ + type CommentedColumnTableDeletePayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: CommentedColumnTableReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many commentedColumnTables + """ + type CommentedColumnTableDeleteManyPayload + @pgMutation(type: "CommentedColumnTable") + { + """ + Returned item(s) from the mutation + """ + returning: [CommentedColumnTableReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type CommentedColumnTableEdge { + """ + The item at the end of the edge + """ + node: CommentedColumnTable! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for CommentedColumnTable + """ + type CommentedColumnTableConnection + @pgConnection(type: "CommentedColumnTable") + { + """ + A list of edges + """ + edges: [CommentedColumnTableEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type CommentedColumnTable + @pgTable(name: "commented_column_table") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + """ + This is a column comment. 
+ """ + data: String @pgColumn(name: "data", type: TEXT) + } + + type Query { + """ + Query a unique CommentedColumnTable + """ + commentedColumnTable( + """ + Input for unique CommentedColumnTable lookup + """ + lookup: CommentedColumnTableLookupInput!, + ): CommentedColumnTable @pgSelectOne + """ + Query and paginate multiple commentedColumnTables + """ + commentedColumnTables( + """ + Filter for CommentedColumnTable + """ + filter: CommentedColumnTableFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [CommentedColumnTableOrderByInput!], + ): CommentedColumnTableConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single CommentedColumnTable + """ + commentedColumnTableCreate( + """ + Input for creating a single CommentedColumnTable + """ + input: CommentedColumnTableCreateInput!, + ): CommentedColumnTableCreatePayload! @pgInsertOne + """ + Create multiple commentedColumnTables + """ + commentedColumnTableCreateMany( + """ + Input for creating multiple CommentedColumnTable instances + """ + input: [CommentedColumnTableCreateInput!]!, + ): CommentedColumnTableCreateManyPayload! @pgInsertMany + """ + Update a unique CommentedColumnTable + """ + commentedColumnTableUpdate( + """ + Lookup input for unique CommentedColumnTable update + """ + lookup: CommentedColumnTableLookupInput!, + """ + Input for updating a CommentedColumnTable + """ + input: CommentedColumnTableUpdateInput!, + ): CommentedColumnTableUpdatePayload! 
@pgUpdateOne + """ + Update multiple commentedColumnTables + """ + commentedColumnTableUpdateMany( + """ + Filter for updating multiple CommentedColumnTable instances + """ + filter: CommentedColumnTableFilterInput, + """ + Input for updating multiple CommentedColumnTable instances + """ + input: CommentedColumnTableUpdateInput!, + ): CommentedColumnTableUpdateManyPayload! @pgUpdateMany + """ + Delete a unique CommentedColumnTable + """ + commentedColumnTableDelete( + """ + Lookup input for unique CommentedColumnTable deletion + """ + lookup: CommentedColumnTableLookupInput!, + ): CommentedColumnTableDeletePayload! @pgDeleteOne + """ + Delete multiple commentedColumnTables + """ + commentedColumnTableDeleteMany( + """ + Filter for CommentedColumnTable deletion + """ + filter: CommentedColumnTableFilterInput, + ): CommentedColumnTableDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn enum_with_comment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TYPE "commented_enum" AS ENUM ('ONE', 'TWO'); + "#}; + + api.execute_sql(schema).await; + + let schema = indoc! {r#" + COMMENT ON TYPE "commented_enum" IS 'This is an enum comment.'; + "#}; + + api.execute_sql(schema).await; + + let schema = indoc! 
{r#" + CREATE TABLE "uses_commented_enum" ( + id INT PRIMARY KEY, + val "commented_enum" NOT NULL + ); + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + This is an enum comment. + """ + enum CommentedEnum @pgEnum(name: "commented_enum") { + ONE @pgEnumVariant(name: "ONE") + TWO @pgEnumVariant(name: "TWO") + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] 
+ """ + The value is not in the given array of values + """ + nin: [String!] + """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. 
+ """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Search filter input for CommentedEnum type. + """ + input CommentedEnumFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: CommentedEnum + """ + The value is not the one given + """ + ne: CommentedEnum + """ + The value is greater than the one given + """ + gt: CommentedEnum + """ + The value is less than the one given + """ + lt: CommentedEnum + """ + The value is greater than, or equal to the one given + """ + gte: CommentedEnum + """ + The value is less than, or equal to the one given + """ + lte: CommentedEnum + """ + The value is in the given array of values + """ + in: [CommentedEnum!] + """ + The value is not in the given array of values + """ + nin: [CommentedEnum!] + """ + A negation of the given filter + """ + not: CommentedEnumFilterInput + } + + """ + Search filter input for CommentedEnum array type. + """ + input CommentedEnumArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [CommentedEnum] + """ + The value is not the one given + """ + ne: [CommentedEnum] + """ + The value is greater than the one given + """ + gt: [CommentedEnum] + """ + The value is less than the one given + """ + lt: [CommentedEnum] + """ + The value is greater than, or equal to the one given + """ + gte: [CommentedEnum] + """ + The value is less than, or equal to the one given + """ + lte: [CommentedEnum] + """ + The value is in the given array of values + """ + in: [[CommentedEnum]!] + """ + The value is not in the given array of values + """ + nin: [[CommentedEnum]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [CommentedEnum] + """ + Checks if the array is contained within the provided array + """ + contained: [CommentedEnum] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [CommentedEnum] + """ + A negation of the given filter + """ + not: CommentedEnumArrayFilterInput + } + + """ + Update input for CommentedEnum type. + """ + input CommentedEnumUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: CommentedEnum + } + + """ + Update input for CommentedEnum array type. + """ + input CommentedEnumArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [CommentedEnum] + """ + Append an array value to the column. + """ + append: [CommentedEnum] + """ + Prepend an array value to the column. + """ + prepend: [CommentedEnum] + } + + """ + Specifies the ordering for UsesCommentedEnum results. + """ + input UsesCommentedEnumOrderByInput @oneOf { + """ + Order usesCommentedEnums by id + """ + id: OrderDirection + """ + Order usesCommentedEnums by val + """ + val: OrderDirection + } + + """ + Input type to select a unique UsesCommentedEnum + """ + input UsesCommentedEnumLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for UsesCommentedEnum collections + """ + input UsesCommentedEnumCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UsesCommentedEnumFilterInput + } + + """ + Filter input type for UsesCommentedEnum objects. 
+ """ + input UsesCommentedEnumFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given val + """ + val: CommentedEnumFilterInput + """ + All of the filters must match + """ + ALL: [UsesCommentedEnumFilterInput] + """ + None of the filters must match + """ + NONE: [UsesCommentedEnumFilterInput] + """ + At least one of the filters must match + """ + ANY: [UsesCommentedEnumFilterInput] + } + + """ + Input for creating a new UsesCommentedEnum + """ + input UsesCommentedEnumCreateInput { + """ + Set field value for id + """ + id: Int! + """ + Set field value for val + """ + val: CommentedEnum! + } + + """ + Input for updating an existing UsesCommentedEnum + """ + input UsesCommentedEnumUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for val + """ + val: CommentedEnumUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! + """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created UsesCommentedEnum object + """ + type UsesCommentedEnumReturning + @pgReturning(type: "UsesCommentedEnum") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the val field + """ + val: CommentedEnum! + } + + """ + Return type when creating one UsesCommentedEnum + """ + type UsesCommentedEnumCreatePayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: UsesCommentedEnumReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when creating many usesCommentedEnums + """ + type UsesCommentedEnumCreateManyPayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: [UsesCommentedEnumReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one UsesCommentedEnum + """ + type UsesCommentedEnumUpdatePayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: UsesCommentedEnumReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many usesCommentedEnums + """ + type UsesCommentedEnumUpdateManyPayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: [UsesCommentedEnumReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one UsesCommentedEnum + """ + type UsesCommentedEnumDeletePayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: UsesCommentedEnumReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many usesCommentedEnums + """ + type UsesCommentedEnumDeleteManyPayload + @pgMutation(type: "UsesCommentedEnum") + { + """ + Returned item(s) from the mutation + """ + returning: [UsesCommentedEnumReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UsesCommentedEnumEdge { + """ + The item at the end of the edge + """ + node: UsesCommentedEnum! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for UsesCommentedEnum + """ + type UsesCommentedEnumConnection + @pgConnection(type: "UsesCommentedEnum") + { + """ + A list of edges + """ + edges: [UsesCommentedEnumEdge!]! 
+ """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type UsesCommentedEnum + @pgTable(name: "uses_commented_enum") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + val: CommentedEnum! @pgColumn(name: "val", type: ENUM) + } + + type Query { + """ + Query a unique UsesCommentedEnum + """ + usesCommentedEnum( + """ + Input for unique UsesCommentedEnum lookup + """ + lookup: UsesCommentedEnumLookupInput!, + ): UsesCommentedEnum @pgSelectOne + """ + Query and paginate multiple usesCommentedEnums + """ + usesCommentedEnums( + """ + Filter for UsesCommentedEnum + """ + filter: UsesCommentedEnumFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UsesCommentedEnumOrderByInput!], + ): UsesCommentedEnumConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single UsesCommentedEnum + """ + usesCommentedEnumCreate( + """ + Input for creating a single UsesCommentedEnum + """ + input: UsesCommentedEnumCreateInput!, + ): UsesCommentedEnumCreatePayload! @pgInsertOne + """ + Create multiple usesCommentedEnums + """ + usesCommentedEnumCreateMany( + """ + Input for creating multiple UsesCommentedEnum instances + """ + input: [UsesCommentedEnumCreateInput!]!, + ): UsesCommentedEnumCreateManyPayload! @pgInsertMany + """ + Update a unique UsesCommentedEnum + """ + usesCommentedEnumUpdate( + """ + Lookup input for unique UsesCommentedEnum update + """ + lookup: UsesCommentedEnumLookupInput!, + """ + Input for updating a UsesCommentedEnum + """ + input: UsesCommentedEnumUpdateInput!, + ): UsesCommentedEnumUpdatePayload! 
@pgUpdateOne + """ + Update multiple usesCommentedEnums + """ + usesCommentedEnumUpdateMany( + """ + Filter for updating multiple UsesCommentedEnum instances + """ + filter: UsesCommentedEnumFilterInput, + """ + Input for updating multiple UsesCommentedEnum instances + """ + input: UsesCommentedEnumUpdateInput!, + ): UsesCommentedEnumUpdateManyPayload! @pgUpdateMany + """ + Delete a unique UsesCommentedEnum + """ + usesCommentedEnumDelete( + """ + Lookup input for unique UsesCommentedEnum deletion + """ + lookup: UsesCommentedEnumLookupInput!, + ): UsesCommentedEnumDeletePayload! @pgDeleteOne + """ + Delete multiple usesCommentedEnums + """ + usesCommentedEnumDeleteMany( + """ + Filter for UsesCommentedEnum deletion + """ + filter: UsesCommentedEnumFilterInput, + ): UsesCommentedEnumDeleteManyPayload! @pgDeleteMany + } + "#); +} + +#[tokio::test] +async fn table_with_commented_foreign_key() { + let api = PgTestApi::new("", |api| async move { + let create_user = indoc! {r#" + CREATE TABLE "User_fk_comment" ( + id SERIAL PRIMARY KEY + ); + "#}; + + api.execute_sql(create_user).await; + + let create_post = indoc! {r#" + CREATE TABLE "Post_fk_comment" ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + CONSTRAINT "Post_User_FK_Comment" FOREIGN KEY (user_id) REFERENCES "User_fk_comment"(id) + ); + "#}; + + api.execute_sql(create_post).await; + + let schema = indoc! 
{r#" + COMMENT ON CONSTRAINT "Post_User_FK_Comment" + ON "Post_fk_comment" + IS 'Links post to its author.'; + "#}; + + api.execute_sql(schema).await; + }) + .await; + + let result = api.introspect().await; + + insta::assert_snapshot!(&result, @r#" + extend schema + @link( + url: "https://grafbase.com/extensions/postgres/0.1.0", + import: [ + "@pgDatabase", + "@pgTable", + "@pgColumn", + "@pgEnum", + "@pgEnumVariant", + "@pgRelation", + "@pgKey", + "@pgSelectOne", + "@pgSelectMany", + "@pgInsertOne", + "@pgInsertMany", + "@pgUpdateOne", + "@pgUpdateMany", + "@pgDeleteOne", + "@pgDeleteMany", + "@pgConnection", + "@pgMutation", + "@pgReturning", + "PgKeyType", + "PgColumnType" + ] + ) + @pgDatabase(name: "default") + + """ + JSON data type + """ + scalar JSON + + """ + Binary data type + """ + scalar Bytes + + """ + Big integer data type + """ + scalar BigInt + + """ + Decimal data type + """ + scalar Decimal + + """ + Specifies the direction for ordering results. + """ + enum OrderDirection { + """ + Specifies an ascending order for a given orderBy argument. + """ + ASC + """ + Specifies a descending order for a given orderBy argument. + """ + DESC + } + + """ + Search filter input for String type. + """ + input StringFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: String + """ + The value is not the one given + """ + ne: String + """ + The value is greater than the one given + """ + gt: String + """ + The value is less than the one given + """ + lt: String + """ + The value is greater than, or equal to the one given + """ + gte: String + """ + The value is less than, or equal to the one given + """ + lte: String + """ + The given input is part of the column value + """ + like: String + """ + The value is in the given array of values + """ + in: [String!] + """ + The value is not in the given array of values + """ + nin: [String!] 
+ """ + A negation of the given filter + """ + not: StringFilterInput + } + + """ + Update input for String type. + """ + input StringUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: String + } + + """ + Update input for String array type. + """ + input StringArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [String] + """ + Append an array value to the column. + """ + append: [String] + """ + Prepend an array value to the column. + """ + prepend: [String] + } + + """ + Search filter input for BigInt type. + """ + input BigIntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: BigInt + """ + The value is not the one given + """ + ne: BigInt + """ + The value is greater than the one given + """ + gt: BigInt + """ + The value is less than the one given + """ + lt: BigInt + """ + The value is greater than, or equal to the one given + """ + gte: BigInt + """ + The value is less than, or equal to the one given + """ + lte: BigInt + """ + The value is in the given array of values + """ + in: [BigInt!] + """ + The value is not in the given array of values + """ + nin: [BigInt!] + """ + A negation of the given filter + """ + not: BigIntFilterInput + } + + """ + Update input for BigInt type. + """ + input BigIntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: BigInt + """ + Increments the value of a field by the specified value. + """ + increment: BigInt + """ + Decrements the value of a field by the specified value. + """ + decrement: BigInt + """ + Multiplies the value of a field by the specified value. + """ + multiply: BigInt + """ + Divides the value of a field by the specified value. + """ + divide: BigInt + } + + """ + Update input for BigInt array type. + """ + input BigIntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. 
+ """ + set: [BigInt] + """ + Append an array value to the column. + """ + append: [BigInt] + """ + Prepend an array value to the column. + """ + prepend: [BigInt] + } + + """ + Search filter input for Int type. + """ + input IntFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Int + """ + The value is not the one given + """ + ne: Int + """ + The value is greater than the one given + """ + gt: Int + """ + The value is less than the one given + """ + lt: Int + """ + The value is greater than, or equal to the one given + """ + gte: Int + """ + The value is less than, or equal to the one given + """ + lte: Int + """ + The value is in the given array of values + """ + in: [Int!] + """ + The value is not in the given array of values + """ + nin: [Int!] + """ + A negation of the given filter + """ + not: IntFilterInput + } + + """ + Update input for Int type. + """ + input IntUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Int + """ + Increments the value of a field by the specified value. + """ + increment: Int + """ + Decrements the value of a field by the specified value. + """ + decrement: Int + """ + Multiplies the value of a field by the specified value. + """ + multiply: Int + """ + Divides the value of a field by the specified value. + """ + divide: Int + } + + """ + Update input for Int array type. + """ + input IntArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Int] + """ + Append an array value to the column. + """ + append: [Int] + """ + Prepend an array value to the column. + """ + prepend: [Int] + } + + """ + Search filter input for Float type. 
+ """ + input FloatFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Float + """ + The value is not the one given + """ + ne: Float + """ + The value is greater than the one given + """ + gt: Float + """ + The value is less than the one given + """ + lt: Float + """ + The value is greater than, or equal to the one given + """ + gte: Float + """ + The value is less than, or equal to the one given + """ + lte: Float + """ + The value is in the given array of values + """ + in: [Float!] + """ + The value is not in the given array of values + """ + nin: [Float!] + """ + A negation of the given filter + """ + not: FloatFilterInput + } + + """ + Update input for Float type. + """ + input FloatUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Float + """ + Increments the value of a field by the specified value. + """ + increment: Float + """ + Decrements the value of a field by the specified value. + """ + decrement: Float + """ + Multiplies the value of a field by the specified value. + """ + multiply: Float + """ + Divides the value of a field by the specified value. + """ + divide: Float + } + + """ + Update input for Float array type. + """ + input FloatArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Float] + """ + Append an array value to the column. + """ + append: [Float] + """ + Prepend an array value to the column. + """ + prepend: [Float] + } + + """ + Search filter input for Boolean type. 
+ """ + input BooleanFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Boolean + """ + The value is not the one given + """ + ne: Boolean + """ + The value is greater than the one given + """ + gt: Boolean + """ + The value is less than the one given + """ + lt: Boolean + """ + The value is greater than, or equal to the one given + """ + gte: Boolean + """ + The value is less than, or equal to the one given + """ + lte: Boolean + """ + The value is in the given array of values + """ + in: [Boolean!] + """ + The value is not in the given array of values + """ + nin: [Boolean!] + """ + A negation of the given filter + """ + not: BooleanFilterInput + } + + """ + Update input for Boolean type. + """ + input BooleanUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Boolean + } + + """ + Update input for Boolean array type. + """ + input BooleanArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Boolean] + """ + Append an array value to the column. + """ + append: [Boolean] + """ + Prepend an array value to the column. + """ + prepend: [Boolean] + } + + """ + Search filter input for Decimal type. + """ + input DecimalFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Decimal + """ + The value is not the one given + """ + ne: Decimal + """ + The value is greater than the one given + """ + gt: Decimal + """ + The value is less than the one given + """ + lt: Decimal + """ + The value is greater than, or equal to the one given + """ + gte: Decimal + """ + The value is less than, or equal to the one given + """ + lte: Decimal + """ + The value is in the given array of values + """ + in: [Decimal!] + """ + The value is not in the given array of values + """ + nin: [Decimal!] + """ + A negation of the given filter + """ + not: DecimalFilterInput + } + + """ + Update input for Decimal type. 
+ """ + input DecimalUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Decimal + """ + Increments the value of a field by the specified value. + """ + increment: Decimal + """ + Decrements the value of a field by the specified value. + """ + decrement: Decimal + """ + Multiplies the value of a field by the specified value. + """ + multiply: Decimal + """ + Divides the value of a field by the specified value. + """ + divide: Decimal + } + + """ + Update input for Decimal array type. + """ + input DecimalArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Decimal] + """ + Append an array value to the column. + """ + append: [Decimal] + """ + Prepend an array value to the column. + """ + prepend: [Decimal] + } + + """ + Search filter input for Bytes type. + """ + input BytesFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: Bytes + """ + The value is not the one given + """ + ne: Bytes + """ + The value is greater than the one given + """ + gt: Bytes + """ + The value is less than the one given + """ + lt: Bytes + """ + The value is greater than, or equal to the one given + """ + gte: Bytes + """ + The value is less than, or equal to the one given + """ + lte: Bytes + """ + The value is in the given array of values + """ + in: [Bytes!] + """ + The value is not in the given array of values + """ + nin: [Bytes!] + """ + A negation of the given filter + """ + not: BytesFilterInput + } + + """ + Update input for Bytes type. + """ + input BytesUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: Bytes + } + + """ + Update input for Bytes array type. + """ + input BytesArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [Bytes] + """ + Append an array value to the column. + """ + append: [Bytes] + """ + Prepend an array value to the column. 
+ """ + prepend: [Bytes] + } + + """ + Search filter input for JSON type. + """ + input JSONFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: JSON + """ + The value is not the one given + """ + ne: JSON + """ + The value is greater than the one given + """ + gt: JSON + """ + The value is less than the one given + """ + lt: JSON + """ + The value is greater than, or equal to the one given + """ + gte: JSON + """ + The value is less than, or equal to the one given + """ + lte: JSON + """ + The value is in the given array of values + """ + in: [JSON!] + """ + The value is not in the given array of values + """ + nin: [JSON!] + """ + A negation of the given filter + """ + not: JSONFilterInput + } + + """ + Update input for JSON type. + """ + input JSONUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: JSON + """ + Append JSON value to the column. + """ + append: JSON + """ + Prepend JSON value to the column. + """ + prepend: JSON + """ + Deletes a key (and its value) from a JSON object, or matching string value(s) from a JSON array. + """ + deleteKey: String + """ + Deletes the array element with specified index (negative integers count from the end). Throws an error if JSON value is not an array. + """ + deleteElem: Int + """ + Deletes the field or array element at the specified path, where path elements can be either field keys or array indexes. + """ + deleteAtPath: [String!] + } + + """ + Update input for JSON array type. + """ + input JSONArrayUpdateInput @oneOf { + """ + Replaces the value of a field with the specified value. + """ + set: [JSON] + """ + Append an array value to the column. + """ + append: [JSON] + """ + Prepend an array value to the column. + """ + prepend: [JSON] + } + + """ + Search filter input for String array type. 
+ """ + input StringArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [String] + """ + The value is not the one given + """ + ne: [String] + """ + The value is greater than the one given + """ + gt: [String] + """ + The value is less than the one given + """ + lt: [String] + """ + The value is greater than, or equal to the one given + """ + gte: [String] + """ + The value is less than, or equal to the one given + """ + lte: [String] + """ + The value is in the given array of values + """ + in: [[String]!] + """ + The value is not in the given array of values + """ + nin: [[String]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [String] + """ + Checks if the array is contained within the provided array + """ + contained: [String] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [String] + """ + A negation of the given filter + """ + not: StringArrayFilterInput + } + + """ + Search filter input for Int array type. + """ + input IntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Int] + """ + The value is not the one given + """ + ne: [Int] + """ + The value is greater than the one given + """ + gt: [Int] + """ + The value is less than the one given + """ + lt: [Int] + """ + The value is greater than, or equal to the one given + """ + gte: [Int] + """ + The value is less than, or equal to the one given + """ + lte: [Int] + """ + The value is in the given array of values + """ + in: [[Int]!] + """ + The value is not in the given array of values + """ + nin: [[Int]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Int] + """ + Checks if the array is contained within the provided array + """ + contained: [Int] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Int] + """ + A negation of the given filter + """ + not: IntArrayFilterInput + } + + """ + Search filter input for BigInt array type. + """ + input BigIntArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [BigInt] + """ + The value is not the one given + """ + ne: [BigInt] + """ + The value is greater than the one given + """ + gt: [BigInt] + """ + The value is less than the one given + """ + lt: [BigInt] + """ + The value is greater than, or equal to the one given + """ + gte: [BigInt] + """ + The value is less than, or equal to the one given + """ + lte: [BigInt] + """ + The value is in the given array of values + """ + in: [[BigInt]!] + """ + The value is not in the given array of values + """ + nin: [[BigInt]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [BigInt] + """ + Checks if the array is contained within the provided array + """ + contained: [BigInt] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [BigInt] + """ + A negation of the given filter + """ + not: BigIntArrayFilterInput + } + + """ + Search filter input for Decimal array type. 
+ """ + input DecimalArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Decimal] + """ + The value is not the one given + """ + ne: [Decimal] + """ + The value is greater than the one given + """ + gt: [Decimal] + """ + The value is less than the one given + """ + lt: [Decimal] + """ + The value is greater than, or equal to the one given + """ + gte: [Decimal] + """ + The value is less than, or equal to the one given + """ + lte: [Decimal] + """ + The value is in the given array of values + """ + in: [[Decimal]!] + """ + The value is not in the given array of values + """ + nin: [[Decimal]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Decimal] + """ + Checks if the array is contained within the provided array + """ + contained: [Decimal] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Decimal] + """ + A negation of the given filter + """ + not: DecimalArrayFilterInput + } + + """ + Search filter input for Float array type. + """ + input FloatArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Float] + """ + The value is not the one given + """ + ne: [Float] + """ + The value is greater than the one given + """ + gt: [Float] + """ + The value is less than the one given + """ + lt: [Float] + """ + The value is greater than, or equal to the one given + """ + gte: [Float] + """ + The value is less than, or equal to the one given + """ + lte: [Float] + """ + The value is in the given array of values + """ + in: [[Float]!] + """ + The value is not in the given array of values + """ + nin: [[Float]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [Float] + """ + Checks if the array is contained within the provided array + """ + contained: [Float] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Float] + """ + A negation of the given filter + """ + not: FloatArrayFilterInput + } + + """ + Search filter input for Boolean array type. + """ + input BooleanArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Boolean] + """ + The value is not the one given + """ + ne: [Boolean] + """ + The value is greater than the one given + """ + gt: [Boolean] + """ + The value is less than the one given + """ + lt: [Boolean] + """ + The value is greater than, or equal to the one given + """ + gte: [Boolean] + """ + The value is less than, or equal to the one given + """ + lte: [Boolean] + """ + The value is in the given array of values + """ + in: [[Boolean]!] + """ + The value is not in the given array of values + """ + nin: [[Boolean]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Boolean] + """ + Checks if the array is contained within the provided array + """ + contained: [Boolean] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Boolean] + """ + A negation of the given filter + """ + not: BooleanArrayFilterInput + } + + """ + Search filter input for Bytes array type. 
+ """ + input BytesArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [Bytes] + """ + The value is not the one given + """ + ne: [Bytes] + """ + The value is greater than the one given + """ + gt: [Bytes] + """ + The value is less than the one given + """ + lt: [Bytes] + """ + The value is greater than, or equal to the one given + """ + gte: [Bytes] + """ + The value is less than, or equal to the one given + """ + lte: [Bytes] + """ + The value is in the given array of values + """ + in: [[Bytes]!] + """ + The value is not in the given array of values + """ + nin: [[Bytes]!] + """ + Checks if the array contains all elements of the provided array + """ + contains: [Bytes] + """ + Checks if the array is contained within the provided array + """ + contained: [Bytes] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [Bytes] + """ + A negation of the given filter + """ + not: BytesArrayFilterInput + } + + """ + Search filter input for JSON array type. + """ + input JSONArrayFilterInput @oneOf { + """ + The value is exactly the one given + """ + eq: [JSON] + """ + The value is not the one given + """ + ne: [JSON] + """ + The value is greater than the one given + """ + gt: [JSON] + """ + The value is less than the one given + """ + lt: [JSON] + """ + The value is greater than, or equal to the one given + """ + gte: [JSON] + """ + The value is less than, or equal to the one given + """ + lte: [JSON] + """ + The value is in the given array of values + """ + in: [[JSON]!] + """ + The value is not in the given array of values + """ + nin: [[JSON]!] 
+ """ + Checks if the array contains all elements of the provided array + """ + contains: [JSON] + """ + Checks if the array is contained within the provided array + """ + contained: [JSON] + """ + Checks if the array has any elements in common with the provided array + """ + overlaps: [JSON] + """ + A negation of the given filter + """ + not: JSONArrayFilterInput + } + + """ + Specifies the ordering for PostFkComment results. + """ + input PostFkCommentOrderByInput @oneOf { + """ + Order postFkComments by id + """ + id: OrderDirection + """ + Order postFkComments by userId + """ + userId: OrderDirection + """ + Order PostFkComment results by UserFkComment fields + """ + userFkComment: UserFkCommentOrderByInput + } + + """ + Input type to select a unique PostFkComment + """ + input PostFkCommentLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for PostFkComment collections + """ + input PostFkCommentCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: PostFkCommentFilterInput + } + + """ + Filter input type for PostFkComment objects. + """ + input PostFkCommentFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the given userId + """ + userId: IntFilterInput + """ + Filter by the related UserFkComment object + """ + userFkComment: UserFkCommentFilterInput + """ + All of the filters must match + """ + ALL: [PostFkCommentFilterInput] + """ + None of the filters must match + """ + NONE: [PostFkCommentFilterInput] + """ + At least one of the filters must match + """ + ANY: [PostFkCommentFilterInput] + } + + """ + Input for creating a new PostFkComment + """ + input PostFkCommentCreateInput { + """ + Set field value for id + """ + id: Int + """ + Set field value for userId + """ + userId: Int! 
+ } + + """ + Input for updating an existing PostFkComment + """ + input PostFkCommentUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + """ + Update field value for userId + """ + userId: IntUpdateInput + } + + """ + Specifies the ordering for UserFkComment results. + """ + input UserFkCommentOrderByInput @oneOf { + """ + Order userFkComments by id + """ + id: OrderDirection + } + + """ + Input type to select a unique UserFkComment + """ + input UserFkCommentLookupInput @oneOf { + """ + Select by the 'id' field + """ + id: Int + } + + """ + Filter input type for UserFkComment collections + """ + input UserFkCommentCollectionFilterInput { + """ + The object is related to an object with the given fields + """ + contains: UserFkCommentFilterInput + } + + """ + Filter input type for UserFkComment objects. + """ + input UserFkCommentFilterInput @oneOf { + """ + Filter by the given id + """ + id: IntFilterInput + """ + Filter by the related PostFkComment objects + """ + postFkComments: PostFkCommentCollectionFilterInput + """ + All of the filters must match + """ + ALL: [UserFkCommentFilterInput] + """ + None of the filters must match + """ + NONE: [UserFkCommentFilterInput] + """ + At least one of the filters must match + """ + ANY: [UserFkCommentFilterInput] + } + + """ + Input for creating a new UserFkComment + """ + input UserFkCommentCreateInput { + """ + Set field value for id + """ + id: Int + } + + """ + Input for updating an existing UserFkComment + """ + input UserFkCommentUpdateInput { + """ + Update field value for id + """ + id: IntUpdateInput + } + + """ + Information about pagination in a collection of objects + """ + type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + The cursor of the first item in the page + """ + startCursor: String! 
+ """ + The cursor of the last item in the page + """ + endCursor: String! + } + + """ + Return type containing fields of the mutated or created PostFkComment object + """ + type PostFkCommentReturning + @pgReturning(type: "PostFkComment") + { + """ + The value of the id field + """ + id: Int! + """ + The value of the userId field + """ + userId: Int! + } + + """ + Return type when creating one PostFkComment + """ + type PostFkCommentCreatePayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: PostFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many postFkComments + """ + type PostFkCommentCreateManyPayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [PostFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one PostFkComment + """ + type PostFkCommentUpdatePayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: PostFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating many postFkComments + """ + type PostFkCommentUpdateManyPayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [PostFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one PostFkComment + """ + type PostFkCommentDeletePayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: PostFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when deleting many postFkComments + """ + type PostFkCommentDeleteManyPayload + @pgMutation(type: "PostFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [PostFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type PostFkCommentEdge { + """ + The item at the end of the edge + """ + node: PostFkComment! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for PostFkComment + """ + type PostFkCommentConnection + @pgConnection(type: "PostFkComment") + { + """ + A list of edges + """ + edges: [PostFkCommentEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + """ + Return type containing fields of the mutated or created UserFkComment object + """ + type UserFkCommentReturning + @pgReturning(type: "UserFkComment") + { + """ + The value of the id field + """ + id: Int! + } + + """ + Return type when creating one UserFkComment + """ + type UserFkCommentCreatePayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: UserFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when creating many userFkComments + """ + type UserFkCommentCreateManyPayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [UserFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when updating one UserFkComment + """ + type UserFkCommentUpdatePayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: UserFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! 
+ } + + """ + Return type when updating many userFkComments + """ + type UserFkCommentUpdateManyPayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [UserFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting one UserFkComment + """ + type UserFkCommentDeletePayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: UserFkCommentReturning + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + Return type when deleting many userFkComments + """ + type UserFkCommentDeleteManyPayload + @pgMutation(type: "UserFkComment") + { + """ + Returned item(s) from the mutation + """ + returning: [UserFkCommentReturning]! + """ + The number of rows mutated + """ + rowCount: Int! + } + + """ + An edge in a connection. Contains the node and its cursor + """ + type UserFkCommentEdge { + """ + The item at the end of the edge + """ + node: UserFkComment! + """ + A cursor for use in pagination + """ + cursor: String! + } + + """ + The connection type for UserFkComment + """ + type UserFkCommentConnection + @pgConnection(type: "UserFkComment") + { + """ + A list of edges + """ + edges: [UserFkCommentEdge!]! + """ + Information to aid in pagination + """ + pageInfo: PageInfo! + } + + type PostFkComment + @pgTable(name: "Post_fk_comment") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + userId: Int! @pgColumn(name: "user_id", type: INT) + """ + Links post to its author. + """ + userFkComment: UserFkComment! @pgRelation(name: "Post_User_FK_Comment", fields: ["userId"], references: ["id"]) + } + + type UserFkComment + @pgTable(name: "User_fk_comment") + @pgKey(fields: ["id"], type: PRIMARY) + { + id: Int! @pgColumn(name: "id", type: INT) + """ + Links post to its author. 
+ """ + postFkComments( + """ + Filter the related PostFkComment instances + """ + filter: PostFkCommentFilterInput, + """ + Select the first PostFkComment instances + """ + first: Int, + """ + Select the last PostFkComment instances + """ + last: Int, + """ + Select the PostFkComment instances before the given cursor + """ + before: String, + """ + Select the PostFkComment instances after the given cursor + """ + after: String, + """ + Order the PostFkComment instances by the given fields + """ + orderBy: [PostFkCommentOrderByInput!], + ): PostFkCommentConnection! @pgRelation(name: "Post_User_FK_Comment") + } + + type Query { + """ + Query a unique PostFkComment + """ + postFkComment( + """ + Input for unique PostFkComment lookup + """ + lookup: PostFkCommentLookupInput!, + ): PostFkComment @pgSelectOne + """ + Query and paginate multiple postFkComments + """ + postFkComments( + """ + Filter for PostFkComment + """ + filter: PostFkCommentFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [PostFkCommentOrderByInput!], + ): PostFkCommentConnection! 
@pgSelectMany + """ + Query a unique UserFkComment + """ + userFkComment( + """ + Input for unique UserFkComment lookup + """ + lookup: UserFkCommentLookupInput!, + ): UserFkComment @pgSelectOne + """ + Query and paginate multiple userFkComments + """ + userFkComments( + """ + Filter for UserFkComment + """ + filter: UserFkCommentFilterInput, + """ + Limit the number of results, from the beginning + """ + first: Int, + """ + Limit the number of results, from the end + """ + last: Int, + """ + Cursor for pagination, select items before the cursor. Use together with `last`. + """ + before: String, + """ + Cursor for pagination, select items after the cursor. Use together with `first`. + """ + after: String, + """ + Order the results by selected fields + """ + orderBy: [UserFkCommentOrderByInput!], + ): UserFkCommentConnection! @pgSelectMany + } + + type Mutation { + """ + Create a single PostFkComment + """ + postFkCommentCreate( + """ + Input for creating a single PostFkComment + """ + input: PostFkCommentCreateInput!, + ): PostFkCommentCreatePayload! @pgInsertOne + """ + Create multiple postFkComments + """ + postFkCommentCreateMany( + """ + Input for creating multiple PostFkComment instances + """ + input: [PostFkCommentCreateInput!]!, + ): PostFkCommentCreateManyPayload! @pgInsertMany + """ + Update a unique PostFkComment + """ + postFkCommentUpdate( + """ + Lookup input for unique PostFkComment update + """ + lookup: PostFkCommentLookupInput!, + """ + Input for updating a PostFkComment + """ + input: PostFkCommentUpdateInput!, + ): PostFkCommentUpdatePayload! @pgUpdateOne + """ + Update multiple postFkComments + """ + postFkCommentUpdateMany( + """ + Filter for updating multiple PostFkComment instances + """ + filter: PostFkCommentFilterInput, + """ + Input for updating multiple PostFkComment instances + """ + input: PostFkCommentUpdateInput!, + ): PostFkCommentUpdateManyPayload! 
@pgUpdateMany + """ + Delete a unique PostFkComment + """ + postFkCommentDelete( + """ + Lookup input for unique PostFkComment deletion + """ + lookup: PostFkCommentLookupInput!, + ): PostFkCommentDeletePayload! @pgDeleteOne + """ + Delete multiple postFkComments + """ + postFkCommentDeleteMany( + """ + Filter for PostFkComment deletion + """ + filter: PostFkCommentFilterInput, + ): PostFkCommentDeleteManyPayload! @pgDeleteMany + """ + Create a single UserFkComment + """ + userFkCommentCreate( + """ + Input for creating a single UserFkComment + """ + input: UserFkCommentCreateInput!, + ): UserFkCommentCreatePayload! @pgInsertOne + """ + Create multiple userFkComments + """ + userFkCommentCreateMany( + """ + Input for creating multiple UserFkComment instances + """ + input: [UserFkCommentCreateInput!]!, + ): UserFkCommentCreateManyPayload! @pgInsertMany + """ + Update a unique UserFkComment + """ + userFkCommentUpdate( + """ + Lookup input for unique UserFkComment update + """ + lookup: UserFkCommentLookupInput!, + """ + Input for updating a UserFkComment + """ + input: UserFkCommentUpdateInput!, + ): UserFkCommentUpdatePayload! @pgUpdateOne + """ + Update multiple userFkComments + """ + userFkCommentUpdateMany( + """ + Filter for updating multiple UserFkComment instances + """ + filter: UserFkCommentFilterInput, + """ + Input for updating multiple UserFkComment instances + """ + input: UserFkCommentUpdateInput!, + ): UserFkCommentUpdateManyPayload! @pgUpdateMany + """ + Delete a unique UserFkComment + """ + userFkCommentDelete( + """ + Lookup input for unique UserFkComment deletion + """ + lookup: UserFkCommentLookupInput!, + ): UserFkCommentDeletePayload! @pgDeleteOne + """ + Delete multiple userFkComments + """ + userFkCommentDeleteMany( + """ + Filter for UserFkComment deletion + """ + filter: UserFkCommentFilterInput, + ): UserFkCommentDeleteManyPayload! 
@pgDeleteMany + } + "#); +} diff --git a/extensions/postgres/tests/update_many/mod.rs b/extensions/postgres/tests/update_many/mod.rs new file mode 100644 index 00000000..b43c8fb0 --- /dev/null +++ b/extensions/postgres/tests/update_many/mod.rs @@ -0,0 +1,282 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn string_set_with_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { name: { set: "Naukio" } }) { + returning { + id + name + } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1, + "name": "Naukio" + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + name + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Naukio" + } + } + } + "#); +} + +#[tokio::test] +async fn with_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name, age) VALUES + (1, 'Musti', 11), + (2, 'Naukio', 11), + (3, 'Pertti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdateMany(filter: { age: { eq: 11 } }, input: { age: { set: 10 } }) { + returning { + id + name + age + } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdateMany": { + "returning": [ + { + "id": 1, + "name": "Musti", + "age": 10 + }, + { + "id": 2, + "name": "Naukio", + "age": 10 + } + ], + "rowCount": 2 + } + } + } + "#); + + let query = indoc! {r" + query { + users(orderBy: [{ id: ASC }]) { + edges { node { id name age } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 10 + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "age": 10 + } + }, + { + "node": { + "id": 3, + "name": "Pertti", + "age": 12 + } + } + ] + } + } + } + "#); +} + +#[tokio::test] +async fn no_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + age INT NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name, age) VALUES + (1, 'Musti', 11), + (2, 'Naukio', 11), + (3, 'Pertti', 12) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userUpdateMany(filter: { age: { eq: 11 } }, input: { age: { set: 10 } }) { + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdateMany": { + "rowCount": 2 + } + } + } + "#); + + let query = indoc! {r" + query { + users(orderBy: [{ id: ASC }]) { + edges { node { id name age } } + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "users": { + "edges": [ + { + "node": { + "id": 1, + "name": "Musti", + "age": 10 + } + }, + { + "node": { + "id": 2, + "name": "Naukio", + "age": 10 + } + }, + { + "node": { + "id": 3, + "name": "Pertti", + "age": 12 + } + } + ] + } + } + } + "#); +} diff --git a/extensions/postgres/tests/update_one/mod.rs b/extensions/postgres/tests/update_one/mod.rs new file mode 100644 index 00000000..817bd092 --- /dev/null +++ b/extensions/postgres/tests/update_one/mod.rs @@ -0,0 +1,1660 @@ +use crate::PgTestApi; +use indoc::indoc; + +#[tokio::test] +async fn string_set_with_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { name: { set: "Naukio" } }) { + returning { + id + name + } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1, + "name": "Naukio" + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + name + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Naukio" + } + } + } + "#); +} + +#[tokio::test] +async fn enum_set_with_returning() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name street_light NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, 'red') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { name: { set: GREEN } }) { + returning { + id + name + } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1, + "name": "GREEN" + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + id + name + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "GREEN" + } + } + } + "#); +} + +#[tokio::test] +async fn enum_array_set_with_returning() { + let api = PgTestApi::new("", |api| async move { + let r#type = indoc! {r" + CREATE TYPE street_light AS ENUM ('red', 'yellow', 'green'); + "}; + + api.execute_sql(r#type).await; + + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name street_light[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, name) VALUES (1, ARRAY['red', 'yellow', 'green']::street_light[]) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { name: { set: [GREEN, YELLOW] } }) { + returning { + id + name + } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1, + "name": [ + "GREEN", + "YELLOW" + ] + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + name + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": [ + "GREEN", + "YELLOW" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn string_set_no_returning() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! 
{r#" + INSERT INTO "User" (id, name) VALUES (1, 'Musti') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { name: { set: "Naukio" } }) { + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + name + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "name": "Naukio" + } + } + } + "#); +} + +#[tokio::test] +async fn int2_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT2 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: 68 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69 + } + } + } + "#); +} + +#[tokio::test] +async fn int2_decrement() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT2 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 70) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { decrement: 1 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69 + } + } + } + "#); +} + +#[tokio::test] +async fn int2_multiply() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT2 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 6) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { multiply: 8 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 48 + } + } + } + "#); +} + +#[tokio::test] +async fn int2_divide() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT2 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 138) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { divide: 2 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69 + } + } + } + "#); +} + +#[tokio::test] +async fn int4_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT4 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: 68 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69 + } + } + } + "#); +} + +#[tokio::test] +async fn int8_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: "68" } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": "69" + } + } + } + "#); +} + +#[tokio::test] +async fn float_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val FLOAT4 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1.0) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: 68.0 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69.0 + } + } + } + "#); +} + +#[tokio::test] +async fn double_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val FLOAT8 NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1.0) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: 68.0 } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": 69.0 + } + } + } + "#); +} + +#[tokio::test] +async fn numeric_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val NUMERIC NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1.0) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: "68.0" } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": "69.0" + } + } + } + "#); +} + +#[tokio::test] +async fn money_increment() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val MONEY NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, 1.0) + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { increment: "68.0" } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": "$69.00" + } + } + } + "#); +} + +#[tokio::test] +async fn array_set() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT2[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '{1, 2}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { set: [3, 4] } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + 3, + 4 + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_append() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '{1}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { append: [2, 3] } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + 1, + 2, + 3 + ] + } + } + } + "#); +} + +#[tokio::test] +async fn array_prepend() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val INT[] NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '{1}') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { prepend: [2, 3] } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + 2, + 3, + 1 + ] + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_append() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '[1]') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { append: [2, 3] } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + 1, + 2, + 3 + ] + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_prepend() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! 
{r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '[1]') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { prepend: [2, 3] } }) { + returning { id } + rowCount + } + } + "}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + 2, + 3, + 1 + ] + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_delete_key_from_object() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '{ "foo": 1, "bar": 2 }') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { deleteKey: "foo" } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! 
{r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": { + "bar": 2 + } + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_delete_key_from_array() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '["foo", "bar"]') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! {r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { deleteKey: "foo" } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + "bar" + ] + } + } + } + "#); +} + +#[tokio::test] +async fn jsonb_delete_at_path() { + let api = PgTestApi::new("", |api| async move { + let schema = indoc! {r#" + CREATE TABLE "User" ( + id INT PRIMARY KEY, + val JSONB NOT NULL + ) + "#}; + + api.execute_sql(schema).await; + + let insert = indoc! {r#" + INSERT INTO "User" (id, val) VALUES (1, '["a", { "b": 1 }]') + "#}; + + api.execute_sql(insert).await; + }) + .await; + + let runner = api.runner_spawn().await; + + let mutation = indoc! 
{r#" + mutation { + userUpdate(lookup: { id: 1 }, input: { val: { deleteAtPath: ["1", "b"] } }) { + returning { id } + rowCount + } + } + "#}; + + let mutation_response = runner + .graphql_query::(mutation) + .send() + .await + .unwrap(); + + insta::assert_json_snapshot!(mutation_response, @r#" + { + "data": { + "userUpdate": { + "returning": { + "id": 1 + }, + "rowCount": 1 + } + } + } + "#); + + let query = indoc! {r" + query { + user(lookup: { id: 1 }) { + id + val + } + } + "}; + + let query_response = runner.graphql_query::(query).send().await.unwrap(); + + insta::assert_json_snapshot!(query_response, @r#" + { + "data": { + "user": { + "id": 1, + "val": [ + "a", + {} + ] + } + } + } + "#); +} diff --git a/extensions/snowflake/src/statements.rs b/extensions/snowflake/src/statements.rs index 6074ae45..c2988ca2 100644 --- a/extensions/snowflake/src/statements.rs +++ b/extensions/snowflake/src/statements.rs @@ -75,8 +75,6 @@ impl crate::Snowflake { let response = http::execute(&request).map_err(|err| Error::new(err.to_string()))?; - // eprintln!("{}", std::str::from_utf8(response.body()).unwrap()); - let body = serde_json::from_slice(response.body()).map_err(|err| Error::new(err.to_string()))?; Ok(body) diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 7d762de9..2932d373 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] profile = "default" -channel = "1.85.0" +channel = "1.86.0" targets = ["wasm32-wasip2"] diff --git a/test-matrix/src/main.rs b/test-matrix/src/main.rs index 88b10913..784a06e1 100644 --- a/test-matrix/src/main.rs +++ b/test-matrix/src/main.rs @@ -25,6 +25,7 @@ fn main() -> anyhow::Result<()> { test_arguments.push("-p".to_string()); test_arguments.push(cargo_toml.name().to_string()); + found_extensions = true; }