From f215d398066fcc8e959cfd2385a3926aca285927 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Wed, 20 Jan 2021 21:59:00 -0600 Subject: [PATCH 01/26] create Session object wrapping fastapi_utils --- Pipfile.lock | 184 ++++++++++++++------------- setup.py | 1 + stac_api/clients/postgres/session.py | 13 ++ 3 files changed, 110 insertions(+), 88 deletions(-) create mode 100644 stac_api/clients/postgres/session.py diff --git a/Pipfile.lock b/Pipfile.lock index 087a50acd..aa453677c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -59,38 +59,38 @@ }, "brotli": { "hashes": [ - "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c", - "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4", - "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126", - "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a", - "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1", + "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8", "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b", - "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7", - "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6", - "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa", - "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb", - "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3", - "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1", - "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429", - "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b", - "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5", - "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d", + "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c", + "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70", "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f", - "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389", + "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429", + "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126", + "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4", + "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438", "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f", - "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430", - "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761", - "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70", - "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea", - "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452", - "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8", - "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14", + "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389", + "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6", "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26", "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7", + "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14", + 
"sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430", "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296", - "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4", "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12", - "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438" + "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452", + "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761", + "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea", + "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a", + "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5", + "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d", + "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa", + "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb", + "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b", + "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4", + "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3", + "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7", + "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1", + "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1" ], "version": "==1.0.9" }, @@ -177,6 +177,14 @@ "markers": "python_version >= '3.6'", "version": "==0.63.0" }, + "fastapi-utils": { + "hashes": [ + "sha256:0e6c7fc1870b80e681494957abf65d4f4f42f4c7f70005918e9181b22f1bd759", + "sha256:dd0be7dc7f03fa681b25487a206651d99f2330d5a567fb8ab6cb5f8a06a29360" + ], + "markers": "python_version >= '3.6' and python_version < '4.0'", + "version": "==0.2.1" + }, "geoalchemy2": { "hashes": [ "sha256:379b0fc4ca5f9b5ef625719f47e22c9b8abd347aa78344e85f99d32594cfccd4", @@ -536,29 +544,29 @@ }, "pyyaml": { "hashes": [ - "sha256:02c78d77281d8f8d07a255e57abdbf43b02257f59f50cc6b636937d68efa5dd0", - "sha256:0dc9f2eb2e3c97640928dec63fd8dc1dd91e6b6ed236bd5ac00332b99b5c2ff9", - "sha256:124fd7c7bc1e95b1eafc60825f2daf67c73ce7b33f1194731240d24b0d1bf628", - "sha256:26fcb33776857f4072601502d93e1a619f166c9c00befb52826e7b774efaa9db", - "sha256:31ba07c54ef4a897758563e3a0fcc60077698df10180abe4b8165d9895c00ebf", - "sha256:3c49e39ac034fd64fd576d63bb4db53cda89b362768a67f07749d55f128ac18a", - "sha256:52bf0930903818e600ae6c2901f748bc4869c0c406056f679ab9614e5d21a166", - "sha256:5a3f345acff76cad4aa9cb171ee76c590f37394186325d53d1aa25318b0d4a09", - "sha256:5e7ac4e0e79a53451dc2814f6876c2fa6f71452de1498bbe29c0b54b69a986f4", - "sha256:7242790ab6c20316b8e7bb545be48d7ed36e26bbe279fd56f2c4a12510e60b4b", - "sha256:737bd70e454a284d456aa1fa71a0b429dd527bcbf52c5c33f7c8eee81ac16b89", - "sha256:8635d53223b1f561b081ff4adecb828fd484b8efffe542edcfdff471997f7c39", - "sha256:8b818b6c5a920cbe4203b5a6b14256f0e5244338244560da89b7b0f1313ea4b6", - "sha256:8bf38641b4713d77da19e91f8b5296b832e4db87338d6aeffe422d42f1ca896d", - "sha256:a36a48a51e5471513a5aea920cdad84cbd56d70a5057cca3499a637496ea379c", - "sha256:b2243dd033fd02c01212ad5c601dafb44fbb293065f430b0d3dbf03f3254d615", - "sha256:cc547d3ead3754712223abb7b403f0a184e4c3eae18c9bb7fd15adef1597cc4b", - "sha256:cc552b6434b90d9dbed6a4f13339625dc466fd82597119897e9489c953acbc22", - "sha256:f3790156c606299ff499ec44db422f66f05a7363b39eb9d5b064f17bd7d7c47b", - "sha256:f7a21e3d99aa3095ef0553e7ceba36fb693998fbb1226f1392ce33681047465f", - 
"sha256:fdc6b2cb4b19e431994f25a9160695cc59a4e861710cc6fc97161c5e845fc579" - ], - "version": "==5.4" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "version": "==5.4.1" }, "rasterio": { "hashes": [ @@ -645,28 +653,28 @@ }, "shapely": { "hashes": [ + "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688", + "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129", + "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b", + "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a", + "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1", + "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891", "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93", - "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f", - "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba", - "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea", - "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d", + "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798", + "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1", "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719", - "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b", + "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1", + "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b", "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb", - "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1", - "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1", - "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a", + "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19", 
"sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840", + "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d", + "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba", + "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea", + "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f", "sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8", - "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129", - "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b", - "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19", - "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798", - "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1", - "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891", - "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688", - "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b", - "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1" + "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1", + "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b" ], "version": "==1.7.1" }, @@ -1037,29 +1045,29 @@ }, "pyyaml": { "hashes": [ - "sha256:02c78d77281d8f8d07a255e57abdbf43b02257f59f50cc6b636937d68efa5dd0", - "sha256:0dc9f2eb2e3c97640928dec63fd8dc1dd91e6b6ed236bd5ac00332b99b5c2ff9", - "sha256:124fd7c7bc1e95b1eafc60825f2daf67c73ce7b33f1194731240d24b0d1bf628", - "sha256:26fcb33776857f4072601502d93e1a619f166c9c00befb52826e7b774efaa9db", - "sha256:31ba07c54ef4a897758563e3a0fcc60077698df10180abe4b8165d9895c00ebf", - "sha256:3c49e39ac034fd64fd576d63bb4db53cda89b362768a67f07749d55f128ac18a", - "sha256:52bf0930903818e600ae6c2901f748bc4869c0c406056f679ab9614e5d21a166", - "sha256:5a3f345acff76cad4aa9cb171ee76c590f37394186325d53d1aa25318b0d4a09", - "sha256:5e7ac4e0e79a53451dc2814f6876c2fa6f71452de1498bbe29c0b54b69a986f4", - "sha256:7242790ab6c20316b8e7bb545be48d7ed36e26bbe279fd56f2c4a12510e60b4b", - "sha256:737bd70e454a284d456aa1fa71a0b429dd527bcbf52c5c33f7c8eee81ac16b89", - "sha256:8635d53223b1f561b081ff4adecb828fd484b8efffe542edcfdff471997f7c39", - "sha256:8b818b6c5a920cbe4203b5a6b14256f0e5244338244560da89b7b0f1313ea4b6", - "sha256:8bf38641b4713d77da19e91f8b5296b832e4db87338d6aeffe422d42f1ca896d", - "sha256:a36a48a51e5471513a5aea920cdad84cbd56d70a5057cca3499a637496ea379c", - "sha256:b2243dd033fd02c01212ad5c601dafb44fbb293065f430b0d3dbf03f3254d615", - "sha256:cc547d3ead3754712223abb7b403f0a184e4c3eae18c9bb7fd15adef1597cc4b", - "sha256:cc552b6434b90d9dbed6a4f13339625dc466fd82597119897e9489c953acbc22", - "sha256:f3790156c606299ff499ec44db422f66f05a7363b39eb9d5b064f17bd7d7c47b", - "sha256:f7a21e3d99aa3095ef0553e7ceba36fb693998fbb1226f1392ce33681047465f", - "sha256:fdc6b2cb4b19e431994f25a9160695cc59a4e861710cc6fc97161c5e845fc579" - ], - "version": "==5.4" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + 
"sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "version": "==5.4.1" }, "requests": { "hashes": [ diff --git a/setup.py b/setup.py index 4d2a1e9a0..7d56deb2b 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ "stac-pydantic>=1.3.5", "pydantic[dotenv]", "titiler==0.1.0a12", + "fastapi-utils" ] extra_reqs = { diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py new file mode 100644 index 000000000..77a147df7 --- /dev/null +++ b/stac_api/clients/postgres/session.py @@ -0,0 +1,13 @@ +import attr + +from fastapi_utils.session import FastAPISessionMaker + + +@attr.s +class Session: + reader_conn_string: str = attr.ib() + writer_conn_string: str = attr.ib() + + def __attrs_post_init__(self): + self.reader: FastAPISessionMaker = FastAPISessionMaker(self.reader_conn_string) + self.writer: FastAPISessionMaker = FastAPISessionMaker(self.writer_conn_string) \ No newline at end of file From 352c05751cf2993e216a3a0c304271e66e58f094 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Wed, 20 Jan 2021 22:02:54 -0600 Subject: [PATCH 02/26] add Session to core, try it out with /collections endpoint --- stac_api/clients/postgres/core.py | 27 +++++++++++++++------------ stac_api/clients/postgres/session.py | 11 ++++++++++- 2 files changed, 25 insertions(+), 13 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 18013e581..4f98675f5 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -19,6 +19,7 @@ from stac_api.api.extensions import ContextExtension, FieldsExtension from stac_api.clients.base import BaseCoreClient from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.errors import DatabaseError from stac_api.models import database, schemas @@ -33,6 +34,7 @@ class CoreCrudClient(PostgresClient, BaseCoreClient): """Client for core endpoints defined by stac""" + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) pagination_client: PaginationTokenClient = attr.ib(default=None) table: Type[database.Item] = attr.ib(default=database.Item) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) @@ -89,19 +91,20 @@ def conformance(self, **kwargs) -> ConformanceClasses: def all_collections(self, **kwargs) -> 
List[schemas.Collection]: """Read all collections from the database""" - try: - collections = self.reader_session.query(self.collection_table).all() - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting item collection" - ) - response = [] - for collection in collections: - collection.base_url = str(kwargs["request"].base_url) - response.append(schemas.Collection.from_orm(collection)) - return response + with self.session.reader.context_session() as session: + try: + collections = session.query(self.collection_table).all() + except Exception as e: + logger.error(e, exc_info=True) + raise errors.DatabaseError( + "Unhandled database error when getting item collection" + ) + response = [] + for collection in collections: + collection.base_url = str(kwargs["request"].base_url) + response.append(schemas.Collection.from_orm(collection)) + return response def get_collection(self, id: str, **kwargs) -> schemas.Collection: """Get collection by id""" diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py index 77a147df7..6dffdb719 100644 --- a/stac_api/clients/postgres/session.py +++ b/stac_api/clients/postgres/session.py @@ -1,3 +1,5 @@ +import os + import attr from fastapi_utils.session import FastAPISessionMaker @@ -8,6 +10,13 @@ class Session: reader_conn_string: str = attr.ib() writer_conn_string: str = attr.ib() + @classmethod + def create_from_env(cls): + return cls( + reader_conn_string=os.environ["READER_CONN_STRING"], + writer_conn_string=os.environ["WRITER_CONN_STRING"], + ) + def __attrs_post_init__(self): self.reader: FastAPISessionMaker = FastAPISessionMaker(self.reader_conn_string) - self.writer: FastAPISessionMaker = FastAPISessionMaker(self.writer_conn_string) \ No newline at end of file + self.writer: FastAPISessionMaker = FastAPISessionMaker(self.writer_conn_string) From 2d40c9ad867bdf27b904b79ecd6239e08dfe233d Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Wed, 20 Jan 2021 22:03:40 -0600 Subject: [PATCH 03/26] init session on app creation --- stac_api/app.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/stac_api/app.py b/stac_api/app.py index 905fcab6b..4bd42f0ab 100644 --- a/stac_api/app.py +++ b/stac_api/app.py @@ -7,13 +7,14 @@ TilesExtension, TransactionExtension, ) -from stac_api.clients.postgres.core import CoreCrudClient +from stac_api.clients.postgres.core import CoreCrudClient, Session from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.clients.postgres.transactions import TransactionsClient from stac_api.config import ApiSettings +settings = ApiSettings() api = StacApi( - settings=ApiSettings(), + settings=settings, extensions=[ TransactionExtension(client=TransactionsClient()), FieldsExtension(), @@ -21,6 +22,11 @@ SortExtension(), TilesExtension(), ], - client=CoreCrudClient(pagination_client=PaginationTokenClient()), + client=CoreCrudClient( + session=Session( + settings.reader_connection_string, settings.writer_connection_string + ), + pagination_client=PaginationTokenClient(), + ), ) app = api.app From 9c17c5d8e1ab987fd233191ef32d93bb4d6fa192 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Wed, 20 Jan 2021 22:28:58 -0600 Subject: [PATCH 04/26] push Session lower --- stac_api/app.py | 15 ++++++++------- stac_api/clients/postgres/base.py | 23 +++++++++++++---------- stac_api/clients/postgres/core.py | 2 -- 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/stac_api/app.py 
b/stac_api/app.py
index 4bd42f0ab..058ff8456 100644
--- a/stac_api/app.py
+++ b/stac_api/app.py
@@ -7,26 +7,27 @@
     TilesExtension,
     TransactionExtension,
 )
-from stac_api.clients.postgres.core import CoreCrudClient, Session
+from stac_api.clients.postgres.core import CoreCrudClient
+from stac_api.clients.postgres.session import Session
 from stac_api.clients.postgres.tokens import PaginationTokenClient
 from stac_api.clients.postgres.transactions import TransactionsClient
+from stac_api.clients.tiles.ogc import TilesClient
 from stac_api.config import ApiSettings
 
 settings = ApiSettings()
+session = Session(settings.reader_connection_string, settings.writer_connection_string)
 api = StacApi(
     settings=settings,
     extensions=[
-        TransactionExtension(client=TransactionsClient()),
+        TransactionExtension(client=TransactionsClient(session=session)),
         FieldsExtension(),
         QueryExtension(),
         SortExtension(),
-        TilesExtension(),
+        TilesExtension(TilesClient(session=session)),
     ],
     client=CoreCrudClient(
-        session=Session(
-            settings.reader_connection_string, settings.writer_connection_string
-        ),
-        pagination_client=PaginationTokenClient(),
+        session=session,
+        pagination_client=PaginationTokenClient(session=session),
     ),
 )
 app = api.app
diff --git a/stac_api/clients/postgres/base.py b/stac_api/clients/postgres/base.py
index bf882724b..0eb08bedf 100644
--- a/stac_api/clients/postgres/base.py
+++ b/stac_api/clients/postgres/base.py
@@ -9,6 +9,7 @@ import psycopg2
 
 from stac_api import errors
+from stac_api.clients.postgres.session import Session
 from stac_api.models import database
 from stac_api.utils.dependencies import READER, WRITER
 
@@ -19,6 +20,7 @@ class PostgresClient(abc.ABC):
     """Database CRUD operations on the defined table"""
 
+    session: Session = attr.ib(default=attr.Factory(Session.create_from_env))
     table: Type[database.BaseModel] = attr.ib(default=database.BaseModel)
 
     @property
@@ -58,13 +60,14 @@ def lookup_id(
     ) -> Query:
         """Create a query to access a single record from the table"""
         table = table or self.table
-        try:
-            query = self.reader_session.query(table).filter(table.id == item_id)
-        except Exception as e:
-            logger.error(e, exc_info=True)
-            raise errors.DatabaseError("Unhandled database during ID lookup")
-        if not self.row_exists(query):
-            error_message = f"Row {item_id} does not exist"
-            logger.warning(error_message)
-            raise errors.NotFoundError(error_message)
-        return query
+        with self.session.reader.context_session() as session:
+            try:
+                query = session.query(table).filter(table.id == item_id)
+            except Exception as e:
+                logger.error(e, exc_info=True)
+                raise errors.DatabaseError("Unhandled database error during ID lookup")
+            if not self.row_exists(query):
+                error_message = f"Row {item_id} does not exist"
+                logger.warning(error_message)
+                raise errors.NotFoundError(error_message)
+            return query
diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py
index 4f98675f5..7f9e895db 100644
--- a/stac_api/clients/postgres/core.py
+++ b/stac_api/clients/postgres/core.py
@@ -19,7 +19,6 @@
 from stac_api.api.extensions import ContextExtension, FieldsExtension
 from stac_api.clients.base import BaseCoreClient
 from stac_api.clients.postgres.base import PostgresClient
-from stac_api.clients.postgres.session import Session
 from stac_api.clients.postgres.tokens import PaginationTokenClient
 from stac_api.errors import DatabaseError
 from stac_api.models import database, schemas
@@ -34,7 +33,6 @@
 class CoreCrudClient(PostgresClient, BaseCoreClient):
     """Client for core endpoints defined by stac"""
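
# (Sketch, not part of the diff.) The context_session() calls adopted in this
# commit come from fastapi_utils.session.FastAPISessionMaker. Assuming
# fastapi-utils 0.2.x semantics, each `with ... as session:` block receives a
# fresh SQLAlchemy session that is committed on success, rolled back on
# exception, and closed either way. The connection strings below are
# placeholders for illustration only; the app reads the real ones from
# ApiSettings / the environment.
from sqlalchemy import text

from stac_api.clients.postgres.session import Session

session = Session(
    reader_conn_string="postgresql://user:pass@reader-host/postgis",  # placeholder
    writer_conn_string="postgresql://user:pass@writer-host/postgis",  # placeholder
)

# Reads go through the reader engine, writes through the writer engine.
with session.reader.context_session() as db:
    db.execute(text("SELECT 1"))  # commit/rollback/close handled by the wrapper
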
- session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) pagination_client: PaginationTokenClient = attr.ib(default=None) table: Type[database.Item] = attr.ib(default=database.Item) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) From 506879fe1f8c2128754a11b5ad5307ba5eeb5b1e Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Wed, 20 Jan 2021 22:30:43 -0600 Subject: [PATCH 05/26] remove db middlewares and event hooks --- stac_api/api/app.py | 45 --------------------------------------------- 1 file changed, 45 deletions(-) diff --git a/stac_api/api/app.py b/stac_api/api/app.py index 6f2225e06..62fc3c7e6 100644 --- a/stac_api/api/app.py +++ b/stac_api/api/app.py @@ -4,11 +4,8 @@ import attr from fastapi import APIRouter, FastAPI from fastapi.openapi.utils import get_openapi -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker from stac_pydantic import ItemCollection from stac_pydantic.api import ConformanceClasses, LandingPage -from starlette.requests import Request from stac_api.api.extensions import FieldsExtension from stac_api.api.extensions.extension import ApiExtension @@ -25,7 +22,6 @@ from stac_api.config import ApiSettings, inject_settings from stac_api.errors import DEFAULT_STATUS_CODES, add_exception_handlers from stac_api.models import schemas -from stac_api.utils.dependencies import READER, WRITER @attr.s @@ -164,45 +160,6 @@ async def ping(): self.app.include_router(mgmt_router, tags=["Liveliness/Readiness"]) - def setup_db_connection(self): - """setup database connection""" - - @self.app.on_event("startup") - async def on_startup(): - """Create database engines and sessions on startup""" - self.app.state.ENGINE_READER = create_engine( - self.settings.reader_connection_string, echo=self.settings.debug - ) - self.app.state.ENGINE_WRITER = create_engine( - self.settings.writer_connection_string, echo=self.settings.debug - ) - self.app.state.DB_READER = sessionmaker( - autocommit=False, autoflush=False, bind=self.app.state.ENGINE_READER - ) - self.app.state.DB_WRITER = sessionmaker( - autocommit=False, autoflush=False, bind=self.app.state.ENGINE_WRITER - ) - - @self.app.on_event("shutdown") - async def on_shutdown(): - """Dispose of database engines and sessions on app shutdown""" - self.app.state.ENGINE_READER.dispose() - self.app.state.ENGINE_WRITER.dispose() - - @self.app.middleware("http") - async def create_db_connection(request: Request, call_next): - """Create a new database connection for each request""" - if "titiler" in str(request.url): - return await call_next(request) - reader = request.app.state.DB_READER() - writer = request.app.state.DB_WRITER() - READER.set(reader) - WRITER.set(writer) - resp = await call_next(request) - reader.close() - writer.close() - return resp - def __attrs_post_init__(self): """post-init hook""" # inject settings @@ -226,7 +183,5 @@ def __attrs_post_init__(self): # register exception handlers add_exception_handlers(self.app, status_codes=self.exceptions) - self.setup_db_connection() - # customize openapi self.app.openapi = self.customize_openapi From e10dfb3ebebb89e77af6a61a4e8b85709a28fc0e Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 21:31:39 -0600 Subject: [PATCH 06/26] relock --- Dockerfile | 4 ++-- Pipfile.lock | 46 ++++++++++++++++++++-------------------------- setup.py | 2 +- 3 files changed, 23 insertions(+), 29 deletions(-) diff --git a/Dockerfile b/Dockerfile index 40f3468bf..f85e8fa58 100644 --- a/Dockerfile +++ b/Dockerfile @@ 
-2,7 +2,7 @@ FROM python:3.8-slim # Any python libraries that require system libraries to be installed will likely # need the following packages in order to build -RUN apt-get update && apt-get install -y build-essential git +RUN apt-get update && apt-get install -y build-essential git libgeos-dev RUN pip install pipenv ENV PIPENV_NOSPIN=true @@ -20,7 +20,7 @@ COPY . ./ ENV APP_HOST=0.0.0.0 ENV APP_PORT=80 -ENV RELOAD="" +ENV RELOAD="true" ENTRYPOINT ["pipenv", "run"] CMD if [ "$RELOAD" ]; then uvicorn stac_api.app:app --host=${APP_HOST} --port=${APP_PORT} --reload ; \ diff --git a/Pipfile.lock b/Pipfile.lock index aa453677c..b05b88180 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -45,17 +45,17 @@ }, "boto3": { "hashes": [ - "sha256:2fd3c2f42006988dc8ddae43c988aea481d11e2af7ab1deb83b293640357986c", - "sha256:4a499cc2f53dd557a88c6db6a552748a2abd83ffeda70ceb71dc8db39a027314" + "sha256:550a513315194292651bb6cc96e94185bfc4dc6b299c3cf1594882bdd16b3905", + "sha256:f8a2f0bf929af92c4d254d1e495f6642dd335818cc7172e1bdc3dfe28655fb94" ], - "version": "==1.16.57" + "version": "==1.16.59" }, "botocore": { "hashes": [ - "sha256:c756d65ffa989c5c0e92178175e41abf7b18ad19b2fe2e82e192f085e264e03a", - "sha256:cf7d108a4d67a0fe670379111927b5d9e0ff1160146c81c326bb9e54c2b8cb19" + "sha256:33959aa19cb6d336c47495c871b00d8670de0023b53bbbbd25790ba0bc5cefe9", + "sha256:67d273b5dcc5033edb2def244ecab51ca24351becf5c1644de279e5653e4e932" ], - "version": "==1.19.57" + "version": "==1.19.59" }, "brotli": { "hashes": [ @@ -408,6 +408,14 @@ "markers": "python_version >= '3.6'", "version": "==1.19.5" }, + "packaging": { + "hashes": [ + "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", + "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.8" + }, "psycopg2-binary": { "hashes": [ "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c", @@ -482,25 +490,10 @@ }, "pygeos": { "hashes": [ - "sha256:0df97a1cc15fef561c36596f065cac8e05e1b0de08292ad7a07140a5fa6b4015", - "sha256:1e95c79e34abad166824f3e0bf2412c9ea9873fcf51e0cacfb483f8955505aec", - "sha256:35b36a819a663a09f8585c6a67fc7c016004769ae08496b6de10b176e7ba61e1", - "sha256:36e2b60a987b2ce0ce988a334f2afa8925864456dec2c03891af8fd900a89504", - "sha256:45b7e1aaa5fc9ff53565ef089fb75c53c419ace8cee18385ec1d7c1515c17cbc", - "sha256:5294e7c8e732325a1f7d370ba0fea1faabc6d14926fbbb2b67a4ae618ed47ed2", - "sha256:650e56de79eceb75b216c3c08c6d703680d845395f5ea9fd67201bfa8240f6a0", - "sha256:6585a6adaad8ad3ebcf67e7854211fe9a2bc9beb59dc945b60db6ad40a3a9bd6", - "sha256:696f2106b7c4a4f33b813cac141cb8483d420d4209f8e1dbde94e5e572cdac16", - "sha256:914fe40a9da4a4bd280448f173b48e9232aa652d5ff38e23c7039633fe57e92f", - "sha256:92bae740bc2ccbb9d03f86995e936ff5a6e41b19a9da0499e8985e6e940a7a93", - "sha256:97a16e1ea243ac65176890d642597276c2c413ba8442a8b4f52cbe1b24ba5f68", - "sha256:c2bcf0aa4ecad93617d35072230ec9215ca6f9537bf75f959cd4084117269369", - "sha256:e1ecb3d9edf56eb55208f8d9ff0d314926af3b0527e024172f2fe142b046f4ea", - "sha256:efb21aa3b01957c21237aaffbecfabc76f6411a9b7c75d359f1b9eb2d7239181", - "sha256:ff14eff6248b65b46481f06d4141b7b0dd9e3a0a6b9bf10b1a2c0eddccc94ad6" + "sha256:c0584b20e95f80ee57277a6eb1e5d7f86600f8b1ef3c627d238e243afdcc0cc7" ], "markers": "python_version >= '3'", - "version": "==0.8" + "version": "==0.9" }, "pyparsing": { "hashes": [ @@ -695,10 +688,11 @@ }, "sqlakeyset": { "hashes": [ - 
"sha256:933b3fe8e97d4648ce85e31df9d97c461f20d72105317208b3d2af39ffedb57b", - "sha256:ecc9abcf4e4e681c13ceea4fa45883bb5ebe8f43925f63bbd29ad37c6b34ba26" + "sha256:0a42aa35fb6ef4e92d1db24c02e169e8bf01e96566f4c0adc605896969f9ec97", + "sha256:6b9f5e1ff0a556e7dc0b5916d7ce762c6a3a6f9537f481d91d6a2c5818fac0fa" ], - "version": "==1.0.1602495765" + "markers": "python_version >= '3.4'", + "version": "==1.0.1611286921" }, "sqlalchemy": { "hashes": [ diff --git a/setup.py b/setup.py index 7d56deb2b..d5fccabc3 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ "stac-pydantic>=1.3.5", "pydantic[dotenv]", "titiler==0.1.0a12", - "fastapi-utils" + "fastapi-utils", ] extra_reqs = { From 580c2daffbc705ed1ff7948114ce10a64eda71e2 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 21:32:39 -0600 Subject: [PATCH 07/26] finish updating core --- stac_api/clients/postgres/core.py | 442 ++++++++++++++++-------------- 1 file changed, 236 insertions(+), 206 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 7f9e895db..2e36f19c0 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -89,7 +89,6 @@ def conformance(self, **kwargs) -> ConformanceClasses: def all_collections(self, **kwargs) -> List[schemas.Collection]: """Read all collections from the database""" - with self.session.reader.context_session() as session: try: collections = session.query(self.collection_table).all() @@ -106,89 +105,115 @@ def all_collections(self, **kwargs) -> List[schemas.Collection]: def get_collection(self, id: str, **kwargs) -> schemas.Collection: """Get collection by id""" - collection = self.lookup_id(id, table=self.collection_table).first() - collection.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(collection) + with self.session.reader.context_session() as session: + try: + collection = ( + session.query(self.collection_table) + .filter(self.collection_table.id == id) + .first() + ) + except Exception as e: + logger.error(e, exc_info=True) + raise errors.DatabaseError( + "Unhandled database error when getting collection" + ) + + if not collection: + raise errors.NotFoundError(f"Collection {id} not found") + + # TODO: Don't do this + collection.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(collection) def item_collection( self, id: str, limit: int = 10, token: str = None, **kwargs ) -> ItemCollection: """Read an item collection from the database""" - try: - collection_children = ( - self.reader_session.query(self.table) - .join(self.collection_table) - .filter(self.collection_table.id == id) - .order_by(self.table.datetime.desc(), self.table.id) - ) - count = None - if self.extension_is_enabled(ContextExtension): - count_query = collection_children.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = collection_children.session.execute(count_query).scalar() - token = self.pagination_client.get(token) if token else token - page = get_page(collection_children, per_page=limit, page=(token or False)) - # Create dynamic attributes for each page - page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except errors.NotFoundError: - raise - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database 
error when getting collection children" - ) + with self.session.reader.context_session() as session: + try: + collection_children = ( + session.query(self.table) + .join(self.collection_table) + .filter(self.collection_table.id == id) + .order_by(self.table.datetime.desc(), self.table.id) + ) + count = None + if self.extension_is_enabled(ContextExtension): + count_query = collection_children.statement.with_only_columns( + [func.count()] + ).order_by(None) + count = collection_children.session.execute(count_query).scalar() + token = self.pagination_client.get(token) if token else token + page = get_page( + collection_children, per_page=limit, page=(token or False) + ) + # Create dynamic attributes for each page + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.pagination_client.insert(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + except errors.NotFoundError: + raise + except Exception as e: + logger.error(e, exc_info=True) + raise errors.DatabaseError( + "Unhandled database error when getting collection children" + ) - links = [] - if page.next: - links.append( - PaginationLink( - rel=Relations.next, - type="application/geo+json", - href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.next}&limit={limit}", - method="GET", + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.next}&limit={limit}", + method="GET", + ) ) - ) - if page.previous: - links.append( - PaginationLink( - rel=Relations.previous, - type="application/geo+json", - href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.previous}&limit={limit}", - method="GET", + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{kwargs['request'].base_url}collections/{id}/items?token={page.previous}&limit={limit}", + method="GET", + ) ) - ) - response_features = [] - for item in page: - item.base_url = str(kwargs["request"].base_url) - response_features.append(schemas.Item.from_orm(item)) + response_features = [] + for item in page: + item.base_url = str(kwargs["request"].base_url) + response_features.append(schemas.Item.from_orm(item)) - context_obj = None - if self.extension_is_enabled(ContextExtension): - context_obj = {"returned": len(page), "limit": limit, "matched": count} + context_obj = None + if self.extension_is_enabled(ContextExtension): + context_obj = {"returned": len(page), "limit": limit, "matched": count} - return ItemCollection( - type="FeatureCollection", - context=context_obj, - features=response_features, - links=links, - ) + return ItemCollection( + type="FeatureCollection", + context=context_obj, + features=response_features, + links=links, + ) def get_item(self, id: str, **kwargs) -> schemas.Item: """Get item by id""" - obj = self.lookup_id(id).first() - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) + with self.session.reader.context_session() as session: + try: + item = session.query(self.table).filter(self.table.id == id).first() + except Exception as e: + logger.error(e, exc_info=True) + raise errors.DatabaseError("Unhandled database error") + if not item: + raise errors.NotFoundError(f"Item {id} not found") + item.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(item) def get_search( 
self, @@ -264,154 +289,159 @@ def post_search( self, search_request: schemas.STACSearch, **kwargs ) -> Dict[str, Any]: """POST search catalog""" - token = ( - self.pagination_client.get(search_request.token) - if search_request.token - else False - ) - query = self.reader_session.query(self.table) - - # Filter by collection - count = None - if search_request.collections: - query = query.join(self.collection_table).filter( - sa.or_( - *[ - self.collection_table.id == col_id - for col_id in search_request.collections - ] - ) + with self.session.reader.context_session() as session: + token = ( + self.pagination_client.get(search_request.token) + if search_request.token + else False ) + query = session.query(self.table) - # Sort - if search_request.sortby: - sort_fields = [ - getattr(self.table.get_field(sort.field), sort.direction.value)() - for sort in search_request.sortby - ] - sort_fields.append(self.table.id) - query = query.order_by(*sort_fields) - else: - # Default sort is date - query = query.order_by(self.table.datetime.desc(), self.table.id) - - # Ignore other parameters if ID is present - if search_request.ids: - id_filter = sa.or_(*[self.table.id == i for i in search_request.ids]) - try: - items = query.filter(id_filter).order_by(self.table.id) - page = get_page(items, per_page=search_request.limit, page=token) - if self.extension_is_enabled(ContextExtension): - count = len(search_request.ids) - page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error when searching for items by id" - ) - else: - # Spatial query - poly = search_request.polygon() - if poly: - filter_geom = ga.shape.from_shape(poly, srid=4326) - query = query.filter( - ga.func.ST_Intersects(self.table.geometry, filter_geom) + # Filter by collection + count = None + if search_request.collections: + query = query.join(self.collection_table).filter( + sa.or_( + *[ + self.collection_table.id == col_id + for col_id in search_request.collections + ] + ) ) - # Temporal query - if search_request.datetime: - # Two tailed query (between) - if ".." 
not in search_request.datetime: - query = query.filter( - self.table.datetime.between(*search_request.datetime) + # Sort + if search_request.sortby: + sort_fields = [ + getattr(self.table.get_field(sort.field), sort.direction.value)() + for sort in search_request.sortby + ] + sort_fields.append(self.table.id) + query = query.order_by(*sort_fields) + else: + # Default sort is date + query = query.order_by(self.table.datetime.desc(), self.table.id) + + # Ignore other parameters if ID is present + if search_request.ids: + id_filter = sa.or_(*[self.table.id == i for i in search_request.ids]) + try: + items = query.filter(id_filter).order_by(self.table.id) + page = get_page(items, per_page=search_request.limit, page=token) + if self.extension_is_enabled(ContextExtension): + count = len(search_request.ids) + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None ) - # All items after the start date - if search_request.datetime[0] != "..": - query = query.filter( - self.table.datetime >= search_request.datetime[0] + page.previous = ( + self.pagination_client.insert( + keyset=page.paging.bookmark_previous + ) + if page.paging.has_previous + else None ) - # All items before the end date - if search_request.datetime[1] != "..": + except Exception as e: + logger.error(e, exc_info=True) + raise DatabaseError( + "Unhandled database error when searching for items by id" + ) + else: + # Spatial query + poly = search_request.polygon() + if poly: + filter_geom = ga.shape.from_shape(poly, srid=4326) query = query.filter( - self.table.datetime <= search_request.datetime[1] + ga.func.ST_Intersects(self.table.geometry, filter_geom) ) - # Query fields - if search_request.query: - for (field_name, expr) in search_request.query.items(): - field = self.table.get_field(field_name) - for (op, value) in expr.items(): - query = query.filter(op.operator(field, value)) - - try: - if self.extension_is_enabled(ContextExtension): - count_query = query.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = query.session.execute(count_query).scalar() - page = get_page(query, per_page=search_request.limit, page=token) - # Create dynamic attributes for each page - page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error during spatial/temporal query" - ) - links = [] - if page.next: - links.append( - PaginationLink( - rel=Relations.next, - type="application/geo+json", - href=f"{kwargs['request'].base_url}search", - method="POST", - body={"token": page.next}, - merge=True, + # Temporal query + if search_request.datetime: + # Two tailed query (between) + if ".." 
not in search_request.datetime: + query = query.filter( + self.table.datetime.between(*search_request.datetime) + ) + # All items after the start date + if search_request.datetime[0] != "..": + query = query.filter( + self.table.datetime >= search_request.datetime[0] + ) + # All items before the end date + if search_request.datetime[1] != "..": + query = query.filter( + self.table.datetime <= search_request.datetime[1] + ) + + # Query fields + if search_request.query: + for (field_name, expr) in search_request.query.items(): + field = self.table.get_field(field_name) + for (op, value) in expr.items(): + query = query.filter(op.operator(field, value)) + + try: + if self.extension_is_enabled(ContextExtension): + count_query = query.statement.with_only_columns( + [func.count()] + ).order_by(None) + count = query.session.execute(count_query).scalar() + page = get_page(query, per_page=search_request.limit, page=token) + # Create dynamic attributes for each page + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.pagination_client.insert( + keyset=page.paging.bookmark_previous + ) + if page.paging.has_previous + else None + ) + except Exception as e: + logger.error(e, exc_info=True) + raise DatabaseError( + "Unhandled database error during spatial/temporal query" + ) + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{kwargs['request'].base_url}search", + method="POST", + body={"token": page.next}, + merge=True, + ) ) - ) - if page.previous: - links.append( - PaginationLink( - rel=Relations.previous, - type="application/geo+json", - href=f"{kwargs['request'].base_url}search", - method="POST", - body={"token": page.previous}, - merge=True, + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{kwargs['request'].base_url}search", + method="POST", + body={"token": page.previous}, + merge=True, + ) ) - ) - - response_features = [] - filter_kwargs = {} - if self.extension_is_enabled(FieldsExtension): - filter_kwargs = search_request.field.filter_fields - xvals = [] - yvals = [] - for item in page: - item.base_url = str(kwargs["request"].base_url) - item_model = schemas.Item.from_orm(item) - xvals += [item_model.bbox[0], item_model.bbox[2]] - yvals += [item_model.bbox[1], item_model.bbox[3]] - response_features.append(item_model.to_dict(**filter_kwargs)) + response_features = [] + filter_kwargs = {} + if self.extension_is_enabled(FieldsExtension): + filter_kwargs = search_request.field.filter_fields + + xvals = [] + yvals = [] + for item in page: + item.base_url = str(kwargs["request"].base_url) + item_model = schemas.Item.from_orm(item) + xvals += [item_model.bbox[0], item_model.bbox[2]] + yvals += [item_model.bbox[1], item_model.bbox[3]] + response_features.append(item_model.to_dict(**filter_kwargs)) try: bbox = (min(xvals), min(yvals), max(xvals), max(yvals)) From 6300108e3ed827cc936d2f908d27613ed02c41cd Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 21:36:28 -0600 Subject: [PATCH 08/26] update pagination client --- stac_api/clients/postgres/tokens.py | 42 +++++++++++++++++++---------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/stac_api/clients/postgres/tokens.py b/stac_api/clients/postgres/tokens.py index d6f330d3c..049d3e882 100644 --- a/stac_api/clients/postgres/tokens.py +++ b/stac_api/clients/postgres/tokens.py @@ -1,5 
+1,6 @@ """Pagination token client.""" +import logging import os from base64 import urlsafe_b64encode from typing import Type @@ -7,9 +8,11 @@ import attr from stac_api.clients.postgres.base import PostgresClient -from stac_api.errors import DatabaseError +from stac_api.errors import DatabaseError, NotFoundError from stac_api.models import database +logger = logging.getLogger(__name__) + @attr.s class PaginationTokenClient(PostgresClient): @@ -21,22 +24,33 @@ def insert(self, keyset: str, tries: int = 0) -> str: # type:ignore """Insert a keyset into the database""" # uid has collision chance of 1e-7 percent uid = urlsafe_b64encode(os.urandom(6)).decode() - try: - token = database.PaginationToken(id=uid, keyset=keyset) - self.writer_session.add(token) - self.commit() - return uid - except DatabaseError: - # Try again if uid already exists in the database - # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) - self.insert(keyset, tries=tries + 1) - if tries > 5: - raise + with self.session.writer.context_session() as session: + try: + token = database.PaginationToken(id=uid, keyset=keyset) + session.add(token) + return uid + except DatabaseError: + # Try again if uid already exists in the database + # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) + if tries > 5: + raise + self.insert(keyset, tries=tries + 1) def get(self, token_id: str) -> str: """Retrieve a keyset from the database""" - row = self.lookup_id(token_id).first() - return row.keyset + with self.session.reader.context_session() as session: + try: + token = ( + session.query(self.table).filter(self.table.id == token_id).first() + ) + except Exception as e: + logger.error(e, exc_info=True) + raise DatabaseError("Error fetching token from database") + + if not token: + raise NotFoundError(f"Could not find token {token_id}") + + return token.keyset def pagination_token_client_factory() -> PaginationTokenClient: From 304663d725f9b89c633937dc73a5508bb2d5f0b1 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 21:46:21 -0600 Subject: [PATCH 09/26] remove PostgresClient from core --- stac_api/clients/postgres/core.py | 47 ++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 2e36f19c0..188dd39ec 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -18,7 +18,9 @@ from stac_api import errors from stac_api.api.extensions import ContextExtension, FieldsExtension from stac_api.clients.base import BaseCoreClient -from stac_api.clients.postgres.base import PostgresClient + +# from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.errors import DatabaseError from stac_api.models import database, schemas @@ -30,11 +32,12 @@ @attr.s -class CoreCrudClient(PostgresClient, BaseCoreClient): +class CoreCrudClient(BaseCoreClient): """Client for core endpoints defined by stac""" + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) pagination_client: PaginationTokenClient = attr.ib(default=None) - table: Type[database.Item] = attr.ib(default=database.Item) + item_table: Type[database.Item] = attr.ib(default=database.Item) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) def landing_page(self, 
**kwargs) -> LandingPage: @@ -132,10 +135,10 @@ def item_collection( with self.session.reader.context_session() as session: try: collection_children = ( - session.query(self.table) + session.query(self.item_table) .join(self.collection_table) .filter(self.collection_table.id == id) - .order_by(self.table.datetime.desc(), self.table.id) + .order_by(self.item_table.datetime.desc(), self.item_table.id) ) count = None if self.extension_is_enabled(ContextExtension): @@ -206,7 +209,11 @@ def get_item(self, id: str, **kwargs) -> schemas.Item: """Get item by id""" with self.session.reader.context_session() as session: try: - item = session.query(self.table).filter(self.table.id == id).first() + item = ( + session.query(self.item_table) + .filter(self.item_table.id == id) + .first() + ) except Exception as e: logger.error(e, exc_info=True) raise errors.DatabaseError("Unhandled database error") @@ -295,7 +302,7 @@ def post_search( if search_request.token else False ) - query = session.query(self.table) + query = session.query(self.item_table) # Filter by collection count = None @@ -312,20 +319,26 @@ def post_search( # Sort if search_request.sortby: sort_fields = [ - getattr(self.table.get_field(sort.field), sort.direction.value)() + getattr( + self.item_table.get_field(sort.field), sort.direction.value + )() for sort in search_request.sortby ] - sort_fields.append(self.table.id) + sort_fields.append(self.item_table.id) query = query.order_by(*sort_fields) else: # Default sort is date - query = query.order_by(self.table.datetime.desc(), self.table.id) + query = query.order_by( + self.item_table.datetime.desc(), self.item_table.id + ) # Ignore other parameters if ID is present if search_request.ids: - id_filter = sa.or_(*[self.table.id == i for i in search_request.ids]) + id_filter = sa.or_( + *[self.item_table.id == i for i in search_request.ids] + ) try: - items = query.filter(id_filter).order_by(self.table.id) + items = query.filter(id_filter).order_by(self.item_table.id) page = get_page(items, per_page=search_request.limit, page=token) if self.extension_is_enabled(ContextExtension): count = len(search_request.ids) @@ -352,7 +365,7 @@ def post_search( if poly: filter_geom = ga.shape.from_shape(poly, srid=4326) query = query.filter( - ga.func.ST_Intersects(self.table.geometry, filter_geom) + ga.func.ST_Intersects(self.item_table.geometry, filter_geom) ) # Temporal query @@ -360,23 +373,23 @@ def post_search( # Two tailed query (between) if ".." 
not in search_request.datetime: query = query.filter( - self.table.datetime.between(*search_request.datetime) + self.item_table.datetime.between(*search_request.datetime) ) # All items after the start date if search_request.datetime[0] != "..": query = query.filter( - self.table.datetime >= search_request.datetime[0] + self.item_table.datetime >= search_request.datetime[0] ) # All items before the end date if search_request.datetime[1] != "..": query = query.filter( - self.table.datetime <= search_request.datetime[1] + self.item_table.datetime <= search_request.datetime[1] ) # Query fields if search_request.query: for (field_name, expr) in search_request.query.items(): - field = self.table.get_field(field_name) + field = self.item_table.get_field(field_name) for (op, value) in expr.items(): query = query.filter(op.operator(field, value)) From ba5800cfc1d0caf2eb871314a932444b7d9c9b40 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 21:52:52 -0600 Subject: [PATCH 10/26] remove PostgresClient from pagination, switch to mixin --- stac_api/app.py | 6 +----- stac_api/clients/postgres/core.py | 24 +++++++++--------------- stac_api/clients/postgres/tokens.py | 24 ++++++++++++------------ 3 files changed, 22 insertions(+), 32 deletions(-) diff --git a/stac_api/app.py b/stac_api/app.py index 058ff8456..53d5078d0 100644 --- a/stac_api/app.py +++ b/stac_api/app.py @@ -9,7 +9,6 @@ ) from stac_api.clients.postgres.core import CoreCrudClient from stac_api.clients.postgres.session import Session -from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.clients.postgres.transactions import TransactionsClient from stac_api.clients.tiles.ogc import TilesClient from stac_api.config import ApiSettings @@ -25,9 +24,6 @@ SortExtension(), TilesExtension(TilesClient(session=session)), ], - client=CoreCrudClient( - session=session, - pagination_client=PaginationTokenClient(session=session), - ), + client=CoreCrudClient(session=session), ) app = api.app diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 188dd39ec..c6db1e6a0 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -32,7 +32,7 @@ @attr.s -class CoreCrudClient(BaseCoreClient): +class CoreCrudClient(PaginationTokenClient, BaseCoreClient): """Client for core endpoints defined by stac""" session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) @@ -146,18 +146,18 @@ def item_collection( [func.count()] ).order_by(None) count = collection_children.session.execute(count_query).scalar() - token = self.pagination_client.get(token) if token else token + token = self.get_token(token) if token else token page = get_page( collection_children, per_page=limit, page=(token or False) ) # Create dynamic attributes for each page page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert(keyset=page.paging.bookmark_previous) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) @@ -298,9 +298,7 @@ def post_search( """POST search catalog""" with self.session.reader.context_session() as session: token = ( - self.pagination_client.get(search_request.token) - if search_request.token - else False + self.get_token(search_request.token) if search_request.token else False ) query = session.query(self.item_table) @@ -343,14 +341,12 @@ def post_search( 
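
# (Sketch, not part of the diff.) The insert_token()/get_token() mixin methods
# used above persist sqlakeyset bookmarks so that clients can page with a short
# opaque `token` query parameter. `client` and `query` are assumed stand-ins
# for a CoreCrudClient instance and a SQLAlchemy ORM query.
from sqlakeyset import get_page

first = get_page(query, per_page=10, page=False)  # first page, no bookmark
if first.paging.has_next:
    # Store the keyset bookmark and hand its id back to the caller as ?token=...
    token = client.insert_token(keyset=first.paging.bookmark_next)
    # On the next request, resolve the token back to a keyset and resume paging.
    second = get_page(query, per_page=10, page=client.get_token(token))
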
if self.extension_is_enabled(ContextExtension): count = len(search_request.ids) page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert( - keyset=page.paging.bookmark_previous - ) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) @@ -402,14 +398,12 @@ def post_search( page = get_page(query, per_page=search_request.limit, page=token) # Create dynamic attributes for each page page.next = ( - self.pagination_client.insert(keyset=page.paging.bookmark_next) + self.insert_token(keyset=page.paging.bookmark_next) if page.paging.has_next else None ) page.previous = ( - self.pagination_client.insert( - keyset=page.paging.bookmark_previous - ) + self.insert_token(keyset=page.paging.bookmark_previous) if page.paging.has_previous else None ) diff --git a/stac_api/clients/postgres/tokens.py b/stac_api/clients/postgres/tokens.py index 049d3e882..b168ff5d8 100644 --- a/stac_api/clients/postgres/tokens.py +++ b/stac_api/clients/postgres/tokens.py @@ -7,7 +7,7 @@ import attr -from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session from stac_api.errors import DatabaseError, NotFoundError from stac_api.models import database @@ -15,12 +15,15 @@ @attr.s -class PaginationTokenClient(PostgresClient): +class PaginationTokenClient: """Pagination token specific CRUD operations""" - table: Type[database.PaginationToken] = attr.ib(default=database.PaginationToken) + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) + token_table: Type[database.PaginationToken] = attr.ib( + default=database.PaginationToken + ) - def insert(self, keyset: str, tries: int = 0) -> str: # type:ignore + def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore """Insert a keyset into the database""" # uid has collision chance of 1e-7 percent uid = urlsafe_b64encode(os.urandom(6)).decode() @@ -34,14 +37,16 @@ def insert(self, keyset: str, tries: int = 0) -> str: # type:ignore # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) if tries > 5: raise - self.insert(keyset, tries=tries + 1) + self.insert_token(keyset, tries=tries + 1) - def get(self, token_id: str) -> str: + def get_token(self, token_id: str) -> str: """Retrieve a keyset from the database""" with self.session.reader.context_session() as session: try: token = ( - session.query(self.table).filter(self.table.id == token_id).first() + session.query(self.token_table) + .filter(self.token_table.id == token_id) + .first() ) except Exception as e: logger.error(e, exc_info=True) @@ -51,8 +56,3 @@ def get(self, token_id: str) -> str: raise NotFoundError(f"Could not find token {token_id}") return token.keyset - - -def pagination_token_client_factory() -> PaginationTokenClient: - """FastAPI dependency""" - return PaginationTokenClient() From 1c18ab9f1b07bcb6c0323411b399abe7d9bd94b9 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 22:09:03 -0600 Subject: [PATCH 11/26] add lookup_id staticmethod --- stac_api/clients/postgres/core.py | 47 +++++++++++------------------ stac_api/clients/postgres/tokens.py | 29 ++++++++---------- 2 files changed, 31 insertions(+), 45 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index c6db1e6a0..eb0148b45 100644 --- 
a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -8,6 +8,7 @@ import attr import sqlalchemy as sa from sqlalchemy import func +from sqlalchemy.orm import Session as SqlSession from stac_pydantic import ItemCollection from stac_pydantic.api import ConformanceClasses, LandingPage from stac_pydantic.api.extensions.paging import PaginationLink @@ -18,11 +19,9 @@ from stac_api import errors from stac_api.api.extensions import ContextExtension, FieldsExtension from stac_api.clients.base import BaseCoreClient - -# from stac_api.clients.postgres.base import PostgresClient from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.tokens import PaginationTokenClient -from stac_api.errors import DatabaseError +from stac_api.errors import DatabaseError, NotFoundError from stac_api.models import database, schemas from stac_api.models.links import CollectionLinks @@ -40,6 +39,20 @@ class CoreCrudClient(PaginationTokenClient, BaseCoreClient): item_table: Type[database.Item] = attr.ib(default=database.Item) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) + @staticmethod + def lookup_id( + id: str, table: Type[database.BaseModel], session: SqlSession + ) -> Type[database.BaseModel]: + """lookup row by id""" + try: + row = session.query(table).filter(table.id == id).first() + except Exception as e: + logger.error(e, exc_info=True) + raise DatabaseError("Unhandled database error") + if not row: + raise NotFoundError(f"{table.__name__} {id} not found") + return row + def landing_page(self, **kwargs) -> LandingPage: """landing page""" landing_page = LandingPage( @@ -109,21 +122,7 @@ def all_collections(self, **kwargs) -> List[schemas.Collection]: def get_collection(self, id: str, **kwargs) -> schemas.Collection: """Get collection by id""" with self.session.reader.context_session() as session: - try: - collection = ( - session.query(self.collection_table) - .filter(self.collection_table.id == id) - .first() - ) - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting collection" - ) - - if not collection: - raise errors.NotFoundError(f"Collection {id} not found") - + collection = self.lookup_id(id, self.collection_table, session) # TODO: Don't do this collection.base_url = str(kwargs["request"].base_url) return schemas.Collection.from_orm(collection) @@ -208,17 +207,7 @@ def item_collection( def get_item(self, id: str, **kwargs) -> schemas.Item: """Get item by id""" with self.session.reader.context_session() as session: - try: - item = ( - session.query(self.item_table) - .filter(self.item_table.id == id) - .first() - ) - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError("Unhandled database error") - if not item: - raise errors.NotFoundError(f"Item {id} not found") + item = self.lookup_id(id, self.item_table, session) item.base_url = str(kwargs["request"].base_url) return schemas.Item.from_orm(item) diff --git a/stac_api/clients/postgres/tokens.py b/stac_api/clients/postgres/tokens.py index b168ff5d8..f0d7e90e7 100644 --- a/stac_api/clients/postgres/tokens.py +++ b/stac_api/clients/postgres/tokens.py @@ -1,21 +1,22 @@ """Pagination token client.""" - +import abc import logging import os from base64 import urlsafe_b64encode from typing import Type import attr +from sqlalchemy.orm import Session as SqlSession from stac_api.clients.postgres.session import Session -from stac_api.errors import DatabaseError, 
NotFoundError +from stac_api.errors import DatabaseError from stac_api.models import database logger = logging.getLogger(__name__) @attr.s -class PaginationTokenClient: +class PaginationTokenClient(abc.ABC): """Pagination token specific CRUD operations""" session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) @@ -23,6 +24,14 @@ class PaginationTokenClient: default=database.PaginationToken ) + @staticmethod + @abc.abstractmethod + def lookup_id( + id: str, table: Type[database.BaseModel], session: SqlSession + ) -> Type[database.BaseModel]: + """lookup row by id""" + ... + def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore """Insert a keyset into the database""" # uid has collision chance of 1e-7 percent @@ -42,17 +51,5 @@ def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore def get_token(self, token_id: str) -> str: """Retrieve a keyset from the database""" with self.session.reader.context_session() as session: - try: - token = ( - session.query(self.token_table) - .filter(self.token_table.id == token_id) - .first() - ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError("Error fetching token from database") - - if not token: - raise NotFoundError(f"Could not find token {token_id}") - + token = self.lookup_id(token_id, self.token_table, session) return token.keyset From d9e0ea49d476f4740587b739c9b6612aff514091 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 22:12:38 -0600 Subject: [PATCH 12/26] make private method --- stac_api/clients/postgres/core.py | 6 +++--- stac_api/clients/postgres/tokens.py | 4 ++-- stac_api/clients/postgres/transactions.py | 7 +------ 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index eb0148b45..21daac85d 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -40,7 +40,7 @@ class CoreCrudClient(PaginationTokenClient, BaseCoreClient): collection_table: Type[database.Collection] = attr.ib(default=database.Collection) @staticmethod - def lookup_id( + def _lookup_id( id: str, table: Type[database.BaseModel], session: SqlSession ) -> Type[database.BaseModel]: """lookup row by id""" @@ -122,7 +122,7 @@ def all_collections(self, **kwargs) -> List[schemas.Collection]: def get_collection(self, id: str, **kwargs) -> schemas.Collection: """Get collection by id""" with self.session.reader.context_session() as session: - collection = self.lookup_id(id, self.collection_table, session) + collection = self._lookup_id(id, self.collection_table, session) # TODO: Don't do this collection.base_url = str(kwargs["request"].base_url) return schemas.Collection.from_orm(collection) @@ -207,7 +207,7 @@ def item_collection( def get_item(self, id: str, **kwargs) -> schemas.Item: """Get item by id""" with self.session.reader.context_session() as session: - item = self.lookup_id(id, self.item_table, session) + item = self._lookup_id(id, self.item_table, session) item.base_url = str(kwargs["request"].base_url) return schemas.Item.from_orm(item) diff --git a/stac_api/clients/postgres/tokens.py b/stac_api/clients/postgres/tokens.py index f0d7e90e7..ae144da11 100644 --- a/stac_api/clients/postgres/tokens.py +++ b/stac_api/clients/postgres/tokens.py @@ -26,7 +26,7 @@ class PaginationTokenClient(abc.ABC): @staticmethod @abc.abstractmethod - def lookup_id( + def _lookup_id( id: str, table: Type[database.BaseModel], session: SqlSession ) -> Type[database.BaseModel]: 
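# NOTE: a self-contained sketch of the contract being introduced here -- the
# mixin declares _lookup_id as an abstract staticmethod and the concrete
# client supplies it, so both classes share one lookup path. A dict stands in
# for an ORM table; names are illustrative, not the real API.
import abc


class PaginationMixin(abc.ABC):
    @staticmethod
    @abc.abstractmethod
    def _lookup_id(id: str, table: dict) -> dict:
        """lookup row by id"""

    def get_token(self, token_id: str, table: dict) -> str:
        # defined on the mixin, but dispatches to the subclass implementation
        return self._lookup_id(token_id, table)["keyset"]


class Client(PaginationMixin):
    @staticmethod
    def _lookup_id(id: str, table: dict) -> dict:
        row = table.get(id)
        if row is None:
            raise KeyError(f"{id} not found")  # stands in for NotFoundError
        return row


# usage: the mixin method works once the subclass fills in _lookup_id
assert Client().get_token("t1", {"t1": {"keyset": "bookmark"}}) == "bookmark"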
"""lookup row by id""" @@ -51,5 +51,5 @@ def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore def get_token(self, token_id: str) -> str: """Retrieve a keyset from the database""" with self.session.reader.context_session() as session: - token = self.lookup_id(token_id, self.token_table, session) + token = self._lookup_id(token_id, self.token_table, session) return token.keyset diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index 52a572e8e..fab656fd6 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -19,14 +19,9 @@ class TransactionsClient(PostgresClient, BaseTransactionsClient): """Transactions extension specific CRUD operations""" - table: Type[database.Collection] = attr.ib(default=database.Collection) + collection_table: Type[database.Collection] = attr.ib(default=database.Collection) item_table: Type[database.Item] = attr.ib(default=database.Item) - @property - def collection_table(self): - """alias for `self.table` # TODO: Figure out a better way to do this""" - return self.table - def _create( self, model: Union[schemas.Collection, schemas.Item], From e1d8ab7b7b1e15210341e08c518e4ccc17f267a4 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sat, 23 Jan 2021 22:46:45 -0600 Subject: [PATCH 13/26] refactor transactions --- stac_api/clients/postgres/transactions.py | 123 ++++++++-------------- 1 file changed, 44 insertions(+), 79 deletions(-) diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index fab656fd6..51159b7df 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -6,109 +6,74 @@ import attr from sqlalchemy import create_engine +from sqlalchemy.orm import Session as SqlSession from stac_api import errors from stac_api.clients.base import BaseTransactionsClient, BulkTransactionsClient -from stac_api.clients.postgres.base import PostgresClient +from stac_api.clients.postgres.session import Session from stac_api.models import database, schemas +from stac_api.errors import DatabaseError, NotFoundError logger = logging.getLogger(__name__) @attr.s -class TransactionsClient(PostgresClient, BaseTransactionsClient): +class TransactionsClient(BaseTransactionsClient): """Transactions extension specific CRUD operations""" + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) item_table: Type[database.Item] = attr.ib(default=database.Item) - def _create( - self, - model: Union[schemas.Collection, schemas.Item], - table: Union[Type[database.Collection], Type[database.Item]], - ) -> Union[database.Collection, database.Item]: - """Create a single record""" - try: - self.lookup_id(model.id, table=table) - error_message = f"Row {model.id} already exists" - logger.error(error_message, exc_info=True) - raise errors.ConflictError(error_message) - except errors.NotFoundError: - row_data = table.from_schema(model) - self.writer_session.add(row_data) - self.commit() - return row_data - - def _update( - self, - model: Union[schemas.Collection, schemas.Item], - table: Union[Type[database.Collection], Type[database.Item]], - ) -> Union[database.Collection, database.Item]: - """Create a single record if it does not exist or update an existing record""" - try: - query = self.lookup_id(model.id, table=table) - update_data = table.get_database_model(model) - # SQLAlchemy orm updates 
don't seem to like geoalchemy types - update_data.pop("geometry", None) - query.update(update_data) - self.commit() - return table.from_schema(model) - except errors.NotFoundError: - row_data = table.from_schema(model) - self.writer_session.add(row_data) - self.commit() - return row_data - - def _delete( - self, item_id: str, table: Union[Type[database.Collection], Type[database.Item]] - ) -> Union[database.Collection, database.Item]: - """Delete a single record""" - query = self.lookup_id(item_id, table=table) - row_data = query.first() - query.delete() - self.commit() - return row_data - def create_item(self, model: schemas.Item, **kwargs) -> schemas.Item: - """Create an item""" - obj = self._create(model, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) - - def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: - """Update an item""" - obj = self._update(model, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) - - def delete_item(self, id: str, **kwargs) -> schemas.Item: - """Delete an item""" - obj = self._delete(id, table=self.item_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(obj) + data = self.item_table.from_schema(model) + with self.session.writer.context_session() as session: + session.add(data) + return model def create_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: - """Create a collection""" - obj = self._create(model, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) + data = self.collection_table.from_schema(model) + with self.session.writer.context_session() as session: + session.add(data) + return model + + def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: + with self.session.reader.context_session() as session: + query = session.query(self.item_table).filter(self.item_table.id == model.id) + if not query.scalar(): + raise NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy types + data = self.item_table.get_database_model(model) + data.pop("geometry", None) + return model def update_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: - """Update a collection""" - obj = self._update(model, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) - - def delete_collection(self, id: str, **kwargs) -> schemas.Collection: - """Delete a collection""" - obj = self._delete(id, table=self.collection_table) - obj.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(obj) + with self.session.reader.context_session() as session: + query = session.query(self.item_table).filter(self.item_table.id == model.id) + if not query.scalar(): + raise NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy types + data = self.collection_table.get_database_model(model) + data.pop("geometry", None) + return model + def delete_item(self, id: str, **kwargs) -> schemas.Item: + with self.session.writer.context_session() as session: + query = session.query(self.item_table).filter(self.item_table.id == id) + data = query.first() + query.delete() + return data + + def delete_collection(self, id: str, **kwargs) -> schemas.Item: + with self.session.writer.context_session() as session: + query = 
session.query(self.collection_table).filter(self.item_table.id == id) + data = query.first() + query.delete() + return data @attr.s class PostgresBulkTransactions(BulkTransactionsClient): From d41b4c7b9b3785033a595b925587fb138313e34a Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 19:24:22 -0600 Subject: [PATCH 14/26] client tests working --- stac_api/clients/postgres/core.py | 1 - stac_api/clients/postgres/transactions.py | 49 +++++++++++---- tests/conftest.py | 76 +++++++---------------- 3 files changed, 60 insertions(+), 66 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 21daac85d..48cf47e54 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -35,7 +35,6 @@ class CoreCrudClient(PaginationTokenClient, BaseCoreClient): """Client for core endpoints defined by stac""" session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - pagination_client: PaginationTokenClient = attr.ib(default=None) item_table: Type[database.Item] = attr.ib(default=database.Item) collection_table: Type[database.Collection] = attr.ib(default=database.Collection) diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index 51159b7df..cc2368a12 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -5,14 +5,14 @@ from typing import Dict, List, Optional, Type, Union import attr +import sqlalchemy as sa from sqlalchemy import create_engine -from sqlalchemy.orm import Session as SqlSession +import psycopg2 from stac_api import errors from stac_api.clients.base import BaseTransactionsClient, BulkTransactionsClient from stac_api.clients.postgres.session import Session from stac_api.models import database, schemas -from stac_api.errors import DatabaseError, NotFoundError logger = logging.getLogger(__name__) @@ -26,55 +26,82 @@ class TransactionsClient(BaseTransactionsClient): item_table: Type[database.Item] = attr.ib(default=database.Item) def create_item(self, model: schemas.Item, **kwargs) -> schemas.Item: + """create item""" data = self.item_table.from_schema(model) - with self.session.writer.context_session() as session: - session.add(data) + try: + with self.session.writer.context_session() as session: + session.add(data) + except sa.exc.IntegrityError as e: + if isinstance(e.orig, psycopg2.errors.UniqueViolation): + raise errors.ConflictError(f"Item {model.id} already exists") return model def create_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: + """create collection""" data = self.collection_table.from_schema(model) - with self.session.writer.context_session() as session: - session.add(data) + try: + with self.session.writer.context_session() as session: + session.add(data) + except sa.exc.IntegrityError as e: + if isinstance(e.orig, psycopg2.errors.UniqueViolation): + raise errors.ConflictError(f"Collection {model.id} already exists") return model def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: + """update item""" with self.session.reader.context_session() as session: - query = session.query(self.item_table).filter(self.item_table.id == model.id) + query = session.query(self.item_table).filter( + self.item_table.id == model.id + ) if not query.scalar(): - raise NotFoundError(f"Item {model.id} not found") + raise errors.NotFoundError(f"Item {model.id} not found") # SQLAlchemy orm updates don't seem to like geoalchemy types data = 
self.item_table.get_database_model(model) data.pop("geometry", None) + query.update(data) return model def update_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: + """update collection""" with self.session.reader.context_session() as session: - query = session.query(self.item_table).filter(self.item_table.id == model.id) + query = session.query(self.collection_table).filter( + self.collection_table.id == model.id + ) if not query.scalar(): - raise NotFoundError(f"Item {model.id} not found") + raise errors.NotFoundError(f"Item {model.id} not found") # SQLAlchemy orm updates don't seem to like geoalchemy types data = self.collection_table.get_database_model(model) data.pop("geometry", None) + query.update(data) return model def delete_item(self, id: str, **kwargs) -> schemas.Item: + """delete item""" with self.session.writer.context_session() as session: query = session.query(self.item_table).filter(self.item_table.id == id) data = query.first() + if not data: + raise errors.NotFoundError(f"Item {id} not found") query.delete() return data def delete_collection(self, id: str, **kwargs) -> schemas.Item: + """delete collection""" with self.session.writer.context_session() as session: - query = session.query(self.collection_table).filter(self.item_table.id == id) + query = session.query(self.collection_table).filter( + self.collection_table.id == id + ) data = query.first() + if not data: + raise errors.NotFoundError(f"Collection {id} not found") query.delete() return data + @attr.s class PostgresBulkTransactions(BulkTransactionsClient): """postgres bulk transactions""" diff --git a/tests/conftest.py b/tests/conftest.py index bb490115a..db1e0b83d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,9 @@ import json import os -from typing import Callable, Dict, Generator -from unittest.mock import PropertyMock, patch +from typing import Callable, Dict import pytest -from sqlalchemy import create_engine -from sqlalchemy.orm import Session, sessionmaker + from starlette.testclient import TestClient from stac_api.api.app import StacApi @@ -17,12 +15,14 @@ TransactionExtension, ) from stac_api.clients.postgres.core import CoreCrudClient +from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.clients.postgres.transactions import ( PostgresBulkTransactions, TransactionsClient, ) from stac_api.config import ApiSettings, inject_settings +from stac_api.models import database from stac_api.models.schemas import Collection DATA_DIR = os.path.join(os.path.dirname(__file__), "data") @@ -70,62 +70,30 @@ class MockStarletteRequest: @pytest.fixture -def sqlalchemy_engine(): - engine = create_engine(settings.reader_connection_string) - yield engine - engine.dispose() - - -@pytest.fixture -def reader_connection(sqlalchemy_engine) -> Generator[Session, None, None]: - """Create a reader connection""" - db_session = sessionmaker( - autocommit=False, autoflush=False, bind=sqlalchemy_engine - )() - yield db_session - db_session.close() - - -@pytest.fixture -def writer_connection(sqlalchemy_engine) -> Generator[Session, None, None]: - """Create a writer connection""" - db_session = sessionmaker( - autocommit=False, autoflush=False, bind=sqlalchemy_engine - )() - yield db_session - db_session.close() +def db_session() -> Session: + return Session( + reader_conn_string=settings.reader_connection_string, + writer_conn_string=settings.writer_connection_string, + ) @pytest.fixture -def 
postgres_core(reader_connection, writer_connection): - with patch( - "stac_api.clients.postgres.base.PostgresClient.writer_session", - new_callable=PropertyMock, - ) as mock_writer: - mock_writer.return_value = writer_connection - with patch( - "stac_api.clients.postgres.base.PostgresClient.reader_session", - new_callable=PropertyMock, - ) as mock_reader: - mock_reader.return_value = reader_connection - client = CoreCrudClient() - yield client +def postgres_core(db_session): + return CoreCrudClient( + session=db_session, + item_table=database.Item, + collection_table=database.Collection, + token_table=database.PaginationToken, + ) @pytest.fixture -def postgres_transactions(reader_connection, writer_connection): - with patch( - "stac_api.clients.postgres.base.PostgresClient.writer_session", - new_callable=PropertyMock, - ) as mock_writer: - mock_writer.return_value = writer_connection - with patch( - "stac_api.clients.postgres.base.PostgresClient.reader_session", - new_callable=PropertyMock, - ) as mock_reader: - mock_reader.return_value = reader_connection - client = TransactionsClient() - yield client +def postgres_transactions(db_session): + return TransactionsClient( + session=db_session, + item_table=database.Item, + collection_table=database.Collection, + ) @pytest.fixture From dacf3d44bd717f5d50cca90c5966858a7aa84c29 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 19:58:05 -0600 Subject: [PATCH 15/26] resource tests working --- stac_api/clients/postgres/transactions.py | 51 ++++++++++++++++------- tests/conftest.py | 8 ++-- tests/resources/test_collection.py | 7 +--- tests/resources/test_item.py | 11 ++++- 4 files changed, 49 insertions(+), 28 deletions(-) diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index cc2368a12..fcb609539 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -2,7 +2,7 @@ import json import logging -from typing import Dict, List, Optional, Type, Union +from typing import Dict, List, Optional, Type import attr import sqlalchemy as sa @@ -31,10 +31,16 @@ def create_item(self, model: schemas.Item, **kwargs) -> schemas.Item: try: with self.session.writer.context_session() as session: session.add(data) + data.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(data) except sa.exc.IntegrityError as e: if isinstance(e.orig, psycopg2.errors.UniqueViolation): raise errors.ConflictError(f"Item {model.id} already exists") - return model + elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): + raise errors.ForeignKeyError( + f"Collection {model.collection} does not exist" + ) + raise e def create_collection( self, model: schemas.Collection, **kwargs @@ -44,23 +50,36 @@ def create_collection( try: with self.session.writer.context_session() as session: session.add(data) + model.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(model) except sa.exc.IntegrityError as e: if isinstance(e.orig, psycopg2.errors.UniqueViolation): raise errors.ConflictError(f"Collection {model.id} already exists") - return model + raise e def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: """update item""" - with self.session.reader.context_session() as session: - query = session.query(self.item_table).filter( - self.item_table.id == model.id - ) - if not query.scalar(): - raise errors.NotFoundError(f"Item {model.id} not found") - # SQLAlchemy orm updates don't seem to like geoalchemy types - data = 
self.item_table.get_database_model(model) - data.pop("geometry", None) - query.update(data) + try: + with self.session.reader.context_session() as session: + query = session.query(self.item_table).filter( + self.item_table.id == model.id + ) + if not query.scalar(): + raise errors.NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy types + data = self.item_table.get_database_model(model) + data.pop("geometry", None) + query.update(data) + + response = self.item_table.from_schema(model) + response.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(response) + + except sa.exc.IntegrityError as e: + if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): + raise errors.ForeignKeyError( + f"Collection {model.collection} does not exist" + ) return model def update_collection( @@ -87,7 +106,8 @@ def delete_item(self, id: str, **kwargs) -> schemas.Item: if not data: raise errors.NotFoundError(f"Item {id} not found") query.delete() - return data + data.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(data) def delete_collection(self, id: str, **kwargs) -> schemas.Item: """delete collection""" @@ -99,7 +119,8 @@ def delete_collection(self, id: str, **kwargs) -> schemas.Item: if not data: raise errors.NotFoundError(f"Collection {id} not found") query.delete() - return data + data.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(data) @attr.s diff --git a/tests/conftest.py b/tests/conftest.py index db1e0b83d..41cc4d2e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,6 @@ from typing import Callable, Dict import pytest - from starlette.testclient import TestClient from stac_api.api.app import StacApi @@ -16,7 +15,6 @@ ) from stac_api.clients.postgres.core import CoreCrudClient from stac_api.clients.postgres.session import Session -from stac_api.clients.postgres.tokens import PaginationTokenClient from stac_api.clients.postgres.transactions import ( PostgresBulkTransactions, TransactionsClient, @@ -103,12 +101,12 @@ def postgres_bulk_transactions(): @pytest.fixture -def api_client(): +def api_client(db_session): return StacApi( settings=ApiSettings(), - client=CoreCrudClient(pagination_client=PaginationTokenClient()), + client=CoreCrudClient(session=db_session), extensions=[ - TransactionExtension(client=TransactionsClient()), + TransactionExtension(client=TransactionsClient(session=db_session)), ContextExtension(), SortExtension(), FieldsExtension(), diff --git a/tests/resources/test_collection.py b/tests/resources/test_collection.py index 68136c58a..1a1a3b3c1 100644 --- a/tests/resources/test_collection.py +++ b/tests/resources/test_collection.py @@ -43,12 +43,7 @@ def test_update_new_collection(app_client, load_test_data): test_collection["id"] = "new-test-collection" resp = app_client.put("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_collection["id"] + assert resp.status_code == 404 def test_collection_not_found(app_client): diff --git a/tests/resources/test_item.py b/tests/resources/test_item.py index 9933ddbf1..d75fd927d 100644 --- a/tests/resources/test_item.py +++ b/tests/resources/test_item.py @@ -7,7 +7,6 @@ from urllib.parse import parse_qs, urlparse, urlsplit from shapely.geometry import Polygon - from stac_pydantic.api.search import DATETIME_RFC339 @@ -79,12 
+78,20 @@ def test_update_new_item(app_client, load_test_data): resp = app_client.put( f"/collections/{test_item['collection']}/items", json=test_item ) - assert resp.status_code == 200 + assert resp.status_code == 404 def test_update_item_missing_collection(app_client, load_test_data): """Test updating an item without a parent collection (transactions extension)""" test_item = load_test_data("test_item.json") + + # Create the item + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + # Try to update collection of the item test_item["collection"] = "stac is cool" resp = app_client.put( f"/collections/{test_item['collection']}/items", json=test_item From 48e720888ee7419f1d32aabfb5654595e2a57fe6 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 20:11:51 -0600 Subject: [PATCH 16/26] lock pygeos to 0.8 --- Pipfile.lock | 25 ++++++++++++++++++++----- setup.py | 1 + 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index b05b88180..18f0638c1 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -103,11 +103,11 @@ }, "cachetools": { "hashes": [ - "sha256:3796e1de094f0eaca982441c92ce96c68c89cced4cd97721ab297ea4b16db90e", - "sha256:c6b07a6ded8c78bf36730b3dc452dfff7d95f2a12a2fed856b1a0cb13ca78c61" + "sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2", + "sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9" ], "markers": "python_version ~= '3.5'", - "version": "==4.2.0" + "version": "==4.2.1" }, "certifi": { "hashes": [ @@ -490,10 +490,25 @@ }, "pygeos": { "hashes": [ - "sha256:c0584b20e95f80ee57277a6eb1e5d7f86600f8b1ef3c627d238e243afdcc0cc7" + "sha256:0df97a1cc15fef561c36596f065cac8e05e1b0de08292ad7a07140a5fa6b4015", + "sha256:1e95c79e34abad166824f3e0bf2412c9ea9873fcf51e0cacfb483f8955505aec", + "sha256:35b36a819a663a09f8585c6a67fc7c016004769ae08496b6de10b176e7ba61e1", + "sha256:36e2b60a987b2ce0ce988a334f2afa8925864456dec2c03891af8fd900a89504", + "sha256:45b7e1aaa5fc9ff53565ef089fb75c53c419ace8cee18385ec1d7c1515c17cbc", + "sha256:5294e7c8e732325a1f7d370ba0fea1faabc6d14926fbbb2b67a4ae618ed47ed2", + "sha256:650e56de79eceb75b216c3c08c6d703680d845395f5ea9fd67201bfa8240f6a0", + "sha256:6585a6adaad8ad3ebcf67e7854211fe9a2bc9beb59dc945b60db6ad40a3a9bd6", + "sha256:696f2106b7c4a4f33b813cac141cb8483d420d4209f8e1dbde94e5e572cdac16", + "sha256:914fe40a9da4a4bd280448f173b48e9232aa652d5ff38e23c7039633fe57e92f", + "sha256:92bae740bc2ccbb9d03f86995e936ff5a6e41b19a9da0499e8985e6e940a7a93", + "sha256:97a16e1ea243ac65176890d642597276c2c413ba8442a8b4f52cbe1b24ba5f68", + "sha256:c2bcf0aa4ecad93617d35072230ec9215ca6f9537bf75f959cd4084117269369", + "sha256:e1ecb3d9edf56eb55208f8d9ff0d314926af3b0527e024172f2fe142b046f4ea", + "sha256:efb21aa3b01957c21237aaffbecfabc76f6411a9b7c75d359f1b9eb2d7239181", + "sha256:ff14eff6248b65b46481f06d4141b7b0dd9e3a0a6b9bf10b1a2c0eddccc94ad6" ], "markers": "python_version >= '3'", - "version": "==0.9" + "version": "==0.8" }, "pyparsing": { "hashes": [ diff --git a/setup.py b/setup.py index d5fccabc3..db49ecac8 100644 --- a/setup.py +++ b/setup.py @@ -21,6 +21,7 @@ "pydantic[dotenv]", "titiler==0.1.0a12", "fastapi-utils", + "pygeos==0.8.0", ] extra_reqs = { From 3a90507e06fd6eec5d08861778af1f152655dc6a Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 20:18:32 -0600 Subject: [PATCH 17/26] oops --- stac_api/clients/postgres/transactions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index fcb609539..7bcf72372 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -50,8 +50,8 @@ def create_collection( try: with self.session.writer.context_session() as session: session.add(data) - model.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(model) + data.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(data) except sa.exc.IntegrityError as e: if isinstance(e.orig, psycopg2.errors.UniqueViolation): raise errors.ConflictError(f"Collection {model.id} already exists") From a9692e0cda83bb6a5fe8546dd8c9119db1c3c271 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 20:37:42 -0600 Subject: [PATCH 18/26] do exception handling in one place --- stac_api/clients/postgres/core.py | 153 +++++++++------------- stac_api/clients/postgres/session.py | 33 ++++- stac_api/clients/postgres/transactions.py | 77 ++++------- 3 files changed, 120 insertions(+), 143 deletions(-) diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 48cf47e54..3504e2d02 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -16,12 +16,11 @@ import geoalchemy2 as ga from sqlakeyset import get_page -from stac_api import errors from stac_api.api.extensions import ContextExtension, FieldsExtension from stac_api.clients.base import BaseCoreClient from stac_api.clients.postgres.session import Session from stac_api.clients.postgres.tokens import PaginationTokenClient -from stac_api.errors import DatabaseError, NotFoundError +from stac_api.errors import NotFoundError from stac_api.models import database, schemas from stac_api.models.links import CollectionLinks @@ -43,11 +42,7 @@ def _lookup_id( id: str, table: Type[database.BaseModel], session: SqlSession ) -> Type[database.BaseModel]: """lookup row by id""" - try: - row = session.query(table).filter(table.id == id).first() - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError("Unhandled database error") + row = session.query(table).filter(table.id == id).first() if not row: raise NotFoundError(f"{table.__name__} {id} not found") return row @@ -105,13 +100,7 @@ def conformance(self, **kwargs) -> ConformanceClasses: def all_collections(self, **kwargs) -> List[schemas.Collection]: """Read all collections from the database""" with self.session.reader.context_session() as session: - try: - collections = session.query(self.collection_table).all() - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting item collection" - ) + collections = session.query(self.collection_table).all() response = [] for collection in collections: collection.base_url = str(kwargs["request"].base_url) @@ -131,41 +120,31 @@ def item_collection( ) -> ItemCollection: """Read an item collection from the database""" with self.session.reader.context_session() as session: - try: - collection_children = ( - session.query(self.item_table) - .join(self.collection_table) - .filter(self.collection_table.id == id) - .order_by(self.item_table.datetime.desc(), self.item_table.id) - ) - count = None - if self.extension_is_enabled(ContextExtension): - count_query = collection_children.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = collection_children.session.execute(count_query).scalar() - token = 
self.get_token(token) if token else token - page = get_page( - collection_children, per_page=limit, page=(token or False) - ) - # Create dynamic attributes for each page - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except errors.NotFoundError: - raise - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError( - "Unhandled database error when getting collection children" - ) + collection_children = ( + session.query(self.item_table) + .join(self.collection_table) + .filter(self.collection_table.id == id) + .order_by(self.item_table.datetime.desc(), self.item_table.id) + ) + count = None + if self.extension_is_enabled(ContextExtension): + count_query = collection_children.statement.with_only_columns( + [func.count()] + ).order_by(None) + count = collection_children.session.execute(count_query).scalar() + token = self.get_token(token) if token else token + page = get_page(collection_children, per_page=limit, page=(token or False)) + # Create dynamic attributes for each page + page.next = ( + self.insert_token(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.insert_token(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) links = [] if page.next: @@ -323,26 +302,21 @@ def post_search( id_filter = sa.or_( *[self.item_table.id == i for i in search_request.ids] ) - try: - items = query.filter(id_filter).order_by(self.item_table.id) - page = get_page(items, per_page=search_request.limit, page=token) - if self.extension_is_enabled(ContextExtension): - count = len(search_request.ids) - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error when searching for items by id" - ) + items = query.filter(id_filter).order_by(self.item_table.id) + page = get_page(items, per_page=search_request.limit, page=token) + if self.extension_is_enabled(ContextExtension): + count = len(search_request.ids) + page.next = ( + self.insert_token(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.insert_token(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + else: # Spatial query poly = search_request.polygon() @@ -377,29 +351,24 @@ def post_search( for (op, value) in expr.items(): query = query.filter(op.operator(field, value)) - try: - if self.extension_is_enabled(ContextExtension): - count_query = query.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = query.session.execute(count_query).scalar() - page = get_page(query, per_page=search_request.limit, page=token) - # Create dynamic attributes for each page - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - except Exception as e: - logger.error(e, exc_info=True) - raise DatabaseError( - "Unhandled database error during spatial/temporal query" - ) + if self.extension_is_enabled(ContextExtension): 
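# NOTE: a minimal runnable sketch of the shape this patch converges on -- the
# per-method try/except blocks are deleted and one contextmanager translates
# database failures into API errors. The exception classes and the trigger
# below are illustrative stand-ins for the sqlalchemy/psycopg2 checks.
from contextlib import contextmanager


class ConflictError(Exception):
    """stand-in for the API's conflict error"""


class DatabaseError(Exception):
    """stand-in for the API's generic database error"""


@contextmanager
def context_session():
    try:
        yield "session"  # the real override yields a SQLAlchemy Session
    except KeyError as e:  # stands in for the IntegrityError/UniqueViolation check
        raise ConflictError("resource already exists") from e
    except Exception as e:  # everything else collapses into one API error
        raise DatabaseError("unhandled database error") from e


# usage: query code inside the with-block stays free of error translation
try:
    with context_session() as session:
        raise KeyError("duplicate id")
except ConflictError:
    pass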
+ count_query = query.statement.with_only_columns( + [func.count()] + ).order_by(None) + count = query.session.execute(count_query).scalar() + page = get_page(query, per_page=search_request.limit, page=token) + # Create dynamic attributes for each page + page.next = ( + self.insert_token(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.insert_token(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + links = [] if page.next: links.append( diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py index 6dffdb719..e42286161 100644 --- a/stac_api/clients/postgres/session.py +++ b/stac_api/clients/postgres/session.py @@ -1,8 +1,39 @@ +"""database session management""" +import logging import os +from contextlib import contextmanager +from typing import Iterator import attr +import sqlalchemy as sa +from sqlalchemy.orm import Session as SqlSession -from fastapi_utils.session import FastAPISessionMaker +import psycopg2 +from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker +from stac_api import errors + +logger = logging.getLogger(__name__) + + +class FastAPISessionMaker(_FastAPISessionMaker): + """FastAPISessionMaker""" + + def __init__(self, database_uri: str): + """init""" + super().__init__(database_uri) + + @contextmanager + def context_session(self) -> Iterator[SqlSession]: + """override base method to include exception handling""" + try: + yield from self.get_db() + except sa.exc.IntegrityError as e: + if isinstance(e.orig, psycopg2.errors.UniqueViolation): + raise errors.ConflictError("resource already exists") from e + elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): + raise errors.ForeignKeyError("collection does not exist") from e + logger.error(e, exc_info=True) + raise errors.DatabaseError("unhandled database error") @attr.s diff --git a/stac_api/clients/postgres/transactions.py b/stac_api/clients/postgres/transactions.py index 7bcf72372..40b6ec368 100644 --- a/stac_api/clients/postgres/transactions.py +++ b/stac_api/clients/postgres/transactions.py @@ -5,13 +5,11 @@ from typing import Dict, List, Optional, Type import attr -import sqlalchemy as sa from sqlalchemy import create_engine -import psycopg2 -from stac_api import errors from stac_api.clients.base import BaseTransactionsClient, BulkTransactionsClient from stac_api.clients.postgres.session import Session +from stac_api.errors import NotFoundError from stac_api.models import database, schemas logger = logging.getLogger(__name__) @@ -28,58 +26,37 @@ class TransactionsClient(BaseTransactionsClient): def create_item(self, model: schemas.Item, **kwargs) -> schemas.Item: """create item""" data = self.item_table.from_schema(model) - try: - with self.session.writer.context_session() as session: - session.add(data) - data.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(data) - except sa.exc.IntegrityError as e: - if isinstance(e.orig, psycopg2.errors.UniqueViolation): - raise errors.ConflictError(f"Item {model.id} already exists") - elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): - raise errors.ForeignKeyError( - f"Collection {model.collection} does not exist" - ) - raise e + with self.session.writer.context_session() as session: + session.add(data) + data.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(data) def create_collection( self, model: schemas.Collection, **kwargs ) -> schemas.Collection: """create 
collection""" data = self.collection_table.from_schema(model) - try: - with self.session.writer.context_session() as session: - session.add(data) - data.base_url = str(kwargs["request"].base_url) - return schemas.Collection.from_orm(data) - except sa.exc.IntegrityError as e: - if isinstance(e.orig, psycopg2.errors.UniqueViolation): - raise errors.ConflictError(f"Collection {model.id} already exists") - raise e + with self.session.writer.context_session() as session: + session.add(data) + data.base_url = str(kwargs["request"].base_url) + return schemas.Collection.from_orm(data) def update_item(self, model: schemas.Item, **kwargs) -> schemas.Item: """update item""" - try: - with self.session.reader.context_session() as session: - query = session.query(self.item_table).filter( - self.item_table.id == model.id - ) - if not query.scalar(): - raise errors.NotFoundError(f"Item {model.id} not found") - # SQLAlchemy orm updates don't seem to like geoalchemy types - data = self.item_table.get_database_model(model) - data.pop("geometry", None) - query.update(data) - - response = self.item_table.from_schema(model) - response.base_url = str(kwargs["request"].base_url) - return schemas.Item.from_orm(response) - - except sa.exc.IntegrityError as e: - if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): - raise errors.ForeignKeyError( - f"Collection {model.collection} does not exist" - ) + with self.session.reader.context_session() as session: + query = session.query(self.item_table).filter( + self.item_table.id == model.id + ) + if not query.scalar(): + raise NotFoundError(f"Item {model.id} not found") + # SQLAlchemy orm updates don't seem to like geoalchemy types + data = self.item_table.get_database_model(model) + data.pop("geometry", None) + query.update(data) + + response = self.item_table.from_schema(model) + response.base_url = str(kwargs["request"].base_url) + return schemas.Item.from_orm(response) return model def update_collection( @@ -91,7 +68,7 @@ def update_collection( self.collection_table.id == model.id ) if not query.scalar(): - raise errors.NotFoundError(f"Item {model.id} not found") + raise NotFoundError(f"Item {model.id} not found") # SQLAlchemy orm updates don't seem to like geoalchemy types data = self.collection_table.get_database_model(model) data.pop("geometry", None) @@ -104,12 +81,12 @@ def delete_item(self, id: str, **kwargs) -> schemas.Item: query = session.query(self.item_table).filter(self.item_table.id == id) data = query.first() if not data: - raise errors.NotFoundError(f"Item {id} not found") + raise NotFoundError(f"Item {id} not found") query.delete() data.base_url = str(kwargs["request"].base_url) return schemas.Item.from_orm(data) - def delete_collection(self, id: str, **kwargs) -> schemas.Item: + def delete_collection(self, id: str, **kwargs) -> schemas.Collection: """delete collection""" with self.session.writer.context_session() as session: query = session.query(self.collection_table).filter( @@ -117,7 +94,7 @@ def delete_collection(self, id: str, **kwargs) -> schemas.Item: ) data = query.first() if not data: - raise errors.NotFoundError(f"Collection {id} not found") + raise NotFoundError(f"Collection {id} not found") query.delete() data.base_url = str(kwargs["request"].base_url) return schemas.Collection.from_orm(data) From 625c302c6b0932afdbd6f4725b2902957d7ff374 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Sun, 24 Jan 2021 20:40:30 -0600 Subject: [PATCH 19/26] remove old code --- stac_api/clients/postgres/base.py | 73 ------------------------------- 
stac_api/utils/__init__.py | 0 stac_api/utils/dependencies.py | 7 --- 3 files changed, 80 deletions(-) delete mode 100644 stac_api/clients/postgres/base.py delete mode 100644 stac_api/utils/__init__.py delete mode 100644 stac_api/utils/dependencies.py diff --git a/stac_api/clients/postgres/base.py b/stac_api/clients/postgres/base.py deleted file mode 100644 index 0eb08bedf..000000000 --- a/stac_api/clients/postgres/base.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Postgresql base client""" -import abc -import logging -from typing import Optional, Type - -import attr -import sqlalchemy as sa -from sqlalchemy.orm import Query - -import psycopg2 -from stac_api import errors -from stac_api.clients.postgres.session import Session -from stac_api.models import database -from stac_api.utils.dependencies import READER, WRITER - -logger = logging.getLogger(__name__) - - -@attr.s -class PostgresClient(abc.ABC): - """Database CRUD operations on the defined table""" - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - table: Type[database.BaseModel] = attr.ib(default=database.BaseModel) - - @property - def reader_session(self): - """Get reader session from context var""" - return READER.get() - - @property - def writer_session(self): - """Get writer session from context var""" - return WRITER.get() - - @staticmethod - def row_exists(query: Query) -> bool: - """Check if a record exists from the sqlalchemy query object""" - return True if query.scalar() else False - - def commit(self) -> None: - """Commit both reader and writer sessions to keep them in sync, rolling back on psycopg2 errors""" - try: - self.writer_session.commit() - self.reader_session.commit() - except sa.exc.IntegrityError as e: - self.writer_session.rollback() - self.reader_session.rollback() - logger.error(e.orig.pgerror, exc_info=True) - # Explicitly catch foreign key errors to be reraised by the API as validation errors - if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): - raise errors.ForeignKeyError(e.orig.pgerror) - raise errors.DatabaseError(e.orig.pgerror) from e - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError("Unhandled database exception during commit") - - def lookup_id( - self, item_id: str, table: Optional[Type[database.BaseModel]] = None - ) -> Query: - """Create a query to access a single record from the table""" - table = table or self.table - with self.session.reader.context_session() as session: - try: - query = session.query(table).filter(table.id == item_id) - except Exception as e: - logger.error(e, exc_info=True) - raise errors.DatabaseError("Unhandled database during ID lookup") - if not self.row_exists(query): - error_message = f"Row {item_id} does not exist" - logger.warning(error_message) - raise errors.NotFoundError(error_message) - return query diff --git a/stac_api/utils/__init__.py b/stac_api/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/stac_api/utils/dependencies.py b/stac_api/utils/dependencies.py deleted file mode 100644 index a1e87a08a..000000000 --- a/stac_api/utils/dependencies.py +++ /dev/null @@ -1,7 +0,0 @@ -"""FastAPI dependencies.""" - -from contextvars import ContextVar - -# TODO: Find a new home -READER: ContextVar = ContextVar("reader") -WRITER: ContextVar = ContextVar("writer") From e97686ea1c917ec95955c973f12f127ca9ff3017 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Mon, 25 Jan 2021 08:36:46 -0600 Subject: [PATCH 20/26] switch back to pygeos 0.9 --- Dockerfile | 2 +- Pipfile.lock 
| 35 ++++++++++++++++++----------------- setup.py | 1 - 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Dockerfile b/Dockerfile index f85e8fa58..2a1ec56c0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM python:3.8-slim # Any python libraries that require system libraries to be installed will likely # need the following packages in order to build -RUN apt-get update && apt-get install -y build-essential git libgeos-dev +RUN apt-get update && apt-get install -y build-essential git RUN pip install pipenv ENV PIPENV_NOSPIN=true diff --git a/Pipfile.lock b/Pipfile.lock index 18f0638c1..fa9fd222f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -490,25 +490,26 @@ }, "pygeos": { "hashes": [ - "sha256:0df97a1cc15fef561c36596f065cac8e05e1b0de08292ad7a07140a5fa6b4015", - "sha256:1e95c79e34abad166824f3e0bf2412c9ea9873fcf51e0cacfb483f8955505aec", - "sha256:35b36a819a663a09f8585c6a67fc7c016004769ae08496b6de10b176e7ba61e1", - "sha256:36e2b60a987b2ce0ce988a334f2afa8925864456dec2c03891af8fd900a89504", - "sha256:45b7e1aaa5fc9ff53565ef089fb75c53c419ace8cee18385ec1d7c1515c17cbc", - "sha256:5294e7c8e732325a1f7d370ba0fea1faabc6d14926fbbb2b67a4ae618ed47ed2", - "sha256:650e56de79eceb75b216c3c08c6d703680d845395f5ea9fd67201bfa8240f6a0", - "sha256:6585a6adaad8ad3ebcf67e7854211fe9a2bc9beb59dc945b60db6ad40a3a9bd6", - "sha256:696f2106b7c4a4f33b813cac141cb8483d420d4209f8e1dbde94e5e572cdac16", - "sha256:914fe40a9da4a4bd280448f173b48e9232aa652d5ff38e23c7039633fe57e92f", - "sha256:92bae740bc2ccbb9d03f86995e936ff5a6e41b19a9da0499e8985e6e940a7a93", - "sha256:97a16e1ea243ac65176890d642597276c2c413ba8442a8b4f52cbe1b24ba5f68", - "sha256:c2bcf0aa4ecad93617d35072230ec9215ca6f9537bf75f959cd4084117269369", - "sha256:e1ecb3d9edf56eb55208f8d9ff0d314926af3b0527e024172f2fe142b046f4ea", - "sha256:efb21aa3b01957c21237aaffbecfabc76f6411a9b7c75d359f1b9eb2d7239181", - "sha256:ff14eff6248b65b46481f06d4141b7b0dd9e3a0a6b9bf10b1a2c0eddccc94ad6" + "sha256:006aee5215d305afa96ce3e930a5fc00cfe9cc5e7c79f194922cd2eeb5547e2a", + "sha256:3eaee0e9f9fcc7b6370bcfe839b15fcaa93dab3c894681df3e3437692decc4f1", + "sha256:3f9126534e9bc88e872eae267032d0fab35f351d469caf6ffef4e1002bad4550", + "sha256:425221deda0e355166fcf7ce69613de4a52d6cff6ca667ccd1dc446fbad097c4", + "sha256:4341501c725c8945319970f1e572a514719a17ddec6571d319561bb4ae6edf17", + "sha256:531661dc547b0a9e03cf3cd8166a11d6a0e8cb3ecb71b8e6b240f6f28314023e", + "sha256:82ddf8a6f4ea93d952678e9523f08debea5361d908490b25ac5fd46ed712a652", + "sha256:845567e866dbd8821675dce32d63bc9e6aa3a0384e8d5a7f259baf3bafbfd0c9", + "sha256:8e5b246dd7f1c5c554245a545424cac226443babfd7d6ef71a02b5635da9e8da", + "sha256:aaa10b3a29a6fce9036f80ccfdf96ddb75575ecef8f0dbdbb0a8c456a3793084", + "sha256:b17fe2ed942e3b83c4afb4d0bb5870ac21a26f8353b137444f49a6b6beb71d2f", + "sha256:b55e9bb4e03a30b809bdda102574d7e09e79907fe4f09c5b53be8c47585e1185", + "sha256:bbf661e2d2aa1fcbe10b4f6d739384a4f58e1006d51ef8d0e577fc62618aec16", + "sha256:bc017fe4521577f8a4f62baf6c4f0882b8c8a0b0fbf65befe5060b1921dfa80e", + "sha256:c0584b20e95f80ee57277a6eb1e5d7f86600f8b1ef3c627d238e243afdcc0cc7", + "sha256:c3d127091b18d98cd460cc5b20e2acabcf74668e8170146f40a158c01bbad055", + "sha256:fae860732c532697f3708e8da669e057ce6692ed88740f493a522c5ed2f87aac" ], "markers": "python_version >= '3'", - "version": "==0.8" + "version": "==0.9" }, "pyparsing": { "hashes": [ diff --git a/setup.py b/setup.py index db49ecac8..d5fccabc3 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,6 @@ "pydantic[dotenv]", "titiler==0.1.0a12", "fastapi-utils", - 
"pygeos==0.8.0", ] extra_reqs = { From ad364e6b3d9c687e727fef1a8222b55be6a4ea75 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Mon, 25 Jan 2021 18:27:05 -0600 Subject: [PATCH 21/26] update except --- stac_api/clients/postgres/session.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py index e42286161..3c3d9efd5 100644 --- a/stac_api/clients/postgres/session.py +++ b/stac_api/clients/postgres/session.py @@ -6,6 +6,7 @@ import attr import sqlalchemy as sa +import sqlalchemy.exc from sqlalchemy.orm import Session as SqlSession import psycopg2 @@ -18,16 +19,12 @@ class FastAPISessionMaker(_FastAPISessionMaker): """FastAPISessionMaker""" - def __init__(self, database_uri: str): - """init""" - super().__init__(database_uri) - @contextmanager def context_session(self) -> Iterator[SqlSession]: """override base method to include exception handling""" try: yield from self.get_db() - except sa.exc.IntegrityError as e: + except sa.exc.StatementError as e: if isinstance(e.orig, psycopg2.errors.UniqueViolation): raise errors.ConflictError("resource already exists") from e elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): From 4a14c5ef1e62ec47b6ed10ca486e413ce1c6451a Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Mon, 25 Jan 2021 21:19:04 -0600 Subject: [PATCH 22/26] add tox, update pre-commit config, lint --- .pre-commit-config.yaml | 13 ++++++------- alembic/env.py | 4 +++- alembic/versions/131aab4d9e49_create_tables.py | 2 +- stac_api/api/routes.py | 11 +++++++---- stac_api/clients/postgres/core.py | 4 ++-- stac_api/clients/postgres/session.py | 4 ++-- stac_api/models/database.py | 4 ++-- stac_api/models/decompose.py | 3 ++- stac_api/models/schemas.py | 10 +++++----- tox.ini | 17 +++++++++++++++++ 10 files changed, 47 insertions(+), 25 deletions(-) create mode 100644 tox.ini diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 49ac69da0..229008ee9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,9 @@ repos: - - - repo: https://github.com/pre-commit/mirrors-isort - rev: v4.3.21 - hooks: - - id: isort - language_version: python3.7 + - repo: https://github.com/PyCQA/isort + rev: 5.4.2 + hooks: + - id: isort + language_version: python3.7 - repo: https://github.com/psf/black rev: stable @@ -22,7 +21,7 @@ repos: # E501 let black handle all line length decisions # W503 black conflicts with "line break before operator" rule # E203 black conflicts with "whitespace before ':'" rule - '--ignore=E501,W503,E203'] + '--ignore=E501,W503,E203,C901'] - repo: https://github.com/chewse/pre-commit-mirrors-pydocstyle # 2.1.1 diff --git a/alembic/env.py b/alembic/env.py index 4c69dec3f..4c28c884e 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -72,7 +72,9 @@ def run_migrations_online(): configuration = config.get_section(config.config_ini_section) configuration["sqlalchemy.url"] = get_connection_url() connectable = engine_from_config( - configuration, prefix="sqlalchemy.", poolclass=pool.NullPool, + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, ) with connectable.connect() as connection: diff --git a/alembic/versions/131aab4d9e49_create_tables.py b/alembic/versions/131aab4d9e49_create_tables.py index f6dd772cf..2d197285c 100644 --- a/alembic/versions/131aab4d9e49_create_tables.py +++ b/alembic/versions/131aab4d9e49_create_tables.py @@ -6,10 +6,10 @@ """ # noqa import sqlalchemy as sa +from geoalchemy2.types import Geometry from 
sqlalchemy.dialects.postgresql import JSONB from alembic import op -from geoalchemy2.types import Geometry # revision identifiers, used by Alembic. revision = "131aab4d9e49" diff --git a/stac_api/api/routes.py b/stac_api/api/routes.py index 2fee656cf..9e1388fdc 100644 --- a/stac_api/api/routes.py +++ b/stac_api/api/routes.py @@ -2,9 +2,9 @@ from typing import Callable, Type from fastapi import Depends +from pydantic import BaseModel from starlette.requests import Request -from pydantic import BaseModel from stac_api.api.models import APIRequest @@ -18,7 +18,8 @@ def create_endpoint_from_model( """ def _endpoint( - request: Request, request_data: request_model, # type:ignore + request: Request, + request_data: request_model, # type:ignore ): """endpoint""" resp = func(request_data, request=request) @@ -28,14 +29,16 @@ def _endpoint( def create_endpoint_with_depends( - func: Callable, request_model: Type[APIRequest], + func: Callable, + request_model: Type[APIRequest], ) -> Callable: """ Create a fastapi endpoint where request model is a dataclass. This works best for validating query/patm params. """ def _endpoint( - request: Request, request_data: request_model = Depends(), # type:ignore + request: Request, + request_data: request_model = Depends(), # type:ignore ): """endpoint""" resp = func( diff --git a/stac_api/clients/postgres/core.py b/stac_api/clients/postgres/core.py index 3504e2d02..206d8161c 100644 --- a/stac_api/clients/postgres/core.py +++ b/stac_api/clients/postgres/core.py @@ -6,7 +6,9 @@ from urllib.parse import urlencode, urljoin import attr +import geoalchemy2 as ga import sqlalchemy as sa +from sqlakeyset import get_page from sqlalchemy import func from sqlalchemy.orm import Session as SqlSession from stac_pydantic import ItemCollection @@ -14,8 +16,6 @@ from stac_pydantic.api.extensions.paging import PaginationLink from stac_pydantic.shared import Link, MimeTypes, Relations -import geoalchemy2 as ga -from sqlakeyset import get_page from stac_api.api.extensions import ContextExtension, FieldsExtension from stac_api.clients.base import BaseCoreClient from stac_api.clients.postgres.session import Session diff --git a/stac_api/clients/postgres/session.py b/stac_api/clients/postgres/session.py index 3c3d9efd5..0528156d1 100644 --- a/stac_api/clients/postgres/session.py +++ b/stac_api/clients/postgres/session.py @@ -5,12 +5,12 @@ from typing import Iterator import attr +import psycopg2 import sqlalchemy as sa import sqlalchemy.exc +from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker from sqlalchemy.orm import Session as SqlSession -import psycopg2 -from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker from stac_api import errors logger = logging.getLogger(__name__) diff --git a/stac_api/models/database.py b/stac_api/models/database.py index 67621116d..2255ac153 100644 --- a/stac_api/models/database.py +++ b/stac_api/models/database.py @@ -4,15 +4,15 @@ from datetime import datetime from typing import Optional +import geoalchemy2 as ga import sqlalchemy as sa from shapely.geometry import shape from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.declarative import declarative_base +from stac_pydantic.shared import DATETIME_RFC339 -import geoalchemy2 as ga from stac_api import config from stac_api.models import schemas -from stac_pydantic.shared import DATETIME_RFC339 BaseModel = declarative_base() diff --git a/stac_api/models/decompose.py b/stac_api/models/decompose.py index a94698132..5d7b9f2fd 100644 --- 
a/stac_api/models/decompose.py +++ b/stac_api/models/decompose.py @@ -6,10 +6,11 @@ import geoalchemy2 as ga from pydantic import BaseModel from pydantic.utils import GetterDict +from stac_pydantic.shared import DATETIME_RFC339 + from stac_api import config from stac_api.errors import DatabaseError from stac_api.models.links import CollectionLinks, ItemLinks, filter_links -from stac_pydantic.shared import DATETIME_RFC339 def resolve_links(links: list, base_url: str) -> List[Dict]: diff --git a/stac_api/models/schemas.py b/stac_api/models/schemas.py index 1f7693899..ac2a5840f 100644 --- a/stac_api/models/schemas.py +++ b/stac_api/models/schemas.py @@ -8,14 +8,11 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union import sqlalchemy as sa -from shapely.geometry import Polygon as ShapelyPolygon -from shapely.geometry import shape - from geojson_pydantic.geometries import Polygon from pydantic import Field, ValidationError, root_validator from pydantic.error_wrappers import ErrorWrapper -from stac_api import config -from stac_api.models.decompose import CollectionGetter, ItemGetter +from shapely.geometry import Polygon as ShapelyPolygon +from shapely.geometry import shape from stac_pydantic import Collection as CollectionBase from stac_pydantic import Item as ItemBase from stac_pydantic.api import Search @@ -24,6 +21,9 @@ from stac_pydantic.shared import Link from stac_pydantic.utils import AutoValueEnum +from stac_api import config +from stac_api.models.decompose import CollectionGetter, ItemGetter + # Be careful: https://github.com/samuelcolvin/pydantic/issues/1423#issuecomment-642797287 NumType = Union[float, int] diff --git a/tox.ini b/tox.ini new file mode 100644 index 000000000..9bc3156db --- /dev/null +++ b/tox.ini @@ -0,0 +1,17 @@ +# Linter configs +[flake8] +ignore = D203 +exclude = .git,__pycache__,docs/source/conf.py,old,build,dist +max-complexity = 12 +max-line-length = 90 + +[mypy] +no_strict_optional = true +ignore_missing_imports = True + +[tool:isort] +profile=black +known_arturo=arturo +known_first_party = stac_api +known_third_party = rasterio,stac-pydantic,sqlalchemy,geoalchemy2,fastapi +sections=FUTURE,STDLIB,THIRDPARTY,ARTURO,FIRSTPARTY,LOCALFOLDER \ No newline at end of file From 930df785d37929a5103beaea246366a3e343f33a Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Tue, 26 Jan 2021 17:38:01 -0600 Subject: [PATCH 23/26] add precommit check --- .github/workflows/cicd.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml index e07d65260..fd01531af 100644 --- a/.github/workflows/cicd.yaml +++ b/.github/workflows/cicd.yaml @@ -40,6 +40,11 @@ jobs: with: python-version: "3.8" + - name: Lint code + uses: pre-commit/action@v2.0.0 + with: + - python-version: "3.8" + - name: Install pipenv run: | python -m pip install --upgrade pipenv wheel From 7810b1f64654f2b1e6796444e7062a79646c9452 Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Tue, 26 Jan 2021 17:39:55 -0600 Subject: [PATCH 24/26] fix syntax --- .github/workflows/cicd.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml index fd01531af..128857548 100644 --- a/.github/workflows/cicd.yaml +++ b/.github/workflows/cicd.yaml @@ -43,7 +43,7 @@ jobs: - name: Lint code uses: pre-commit/action@v2.0.0 with: - - python-version: "3.8" + python-version: "3.8" - name: Install pipenv run: | From 278961a9e238fd730ba9a3526bd06e89c960e75e Mon Sep 17 00:00:00 2001 From: Jeff 
Albrecht Date: Tue, 26 Jan 2021 17:41:59 -0600 Subject: [PATCH 25/26] remove python-version --- .github/workflows/cicd.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml index 128857548..0f159d6c2 100644 --- a/.github/workflows/cicd.yaml +++ b/.github/workflows/cicd.yaml @@ -42,8 +42,6 @@ jobs: - name: Lint code uses: pre-commit/action@v2.0.0 - with: - python-version: "3.8" - name: Install pipenv run: | From ccf4d97f64e69e219a563f08743456650b9f3c0b Mon Sep 17 00:00:00 2001 From: Jeff Albrecht Date: Tue, 26 Jan 2021 18:00:25 -0600 Subject: [PATCH 26/26] switch pre-commit to 3.8 --- .pre-commit-config.yaml | 10 +++++----- stac_api/__init__.py | 1 + stac_api/api/__init__.py | 2 ++ stac_api/api/extensions/__init__.py | 21 ++++++++++++++------- stac_api/models/__init__.py | 1 + 5 files changed, 23 insertions(+), 12 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 229008ee9..b043026d3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,20 +3,20 @@ repos: rev: 5.4.2 hooks: - id: isort - language_version: python3.7 + language_version: python3.8 - repo: https://github.com/psf/black rev: stable hooks: - id: black args: ['--safe'] - language_version: python3.7 + language_version: python3.8 - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.4.0 hooks: - id: flake8 - language_version: python3.7 + language_version: python3.8 args: [ # E501 let black handle all line length decisions # W503 black conflicts with "line break before operator" rule @@ -28,7 +28,7 @@ repos: rev: 22d3ccf6cf91ffce3b16caa946c155778f0cb20f hooks: - id: pydocstyle - language_version: python3.7 + language_version: python3.8 args: [ # Check for docstring presence only '--select=D1', @@ -39,5 +39,5 @@ repos: rev: v0.770 hooks: - id: mypy - language_version: python3.7 + language_version: python3.8 args: [--no-strict-optional, --ignore-missing-imports] \ No newline at end of file diff --git a/stac_api/__init__.py b/stac_api/__init__.py index e69de29bb..53b94283c 100644 --- a/stac_api/__init__.py +++ b/stac_api/__init__.py @@ -0,0 +1 @@ +"""stac_api""" diff --git a/stac_api/api/__init__.py b/stac_api/api/__init__.py index 5a91d06be..de7fac8e3 100644 --- a/stac_api/api/__init__.py +++ b/stac_api/api/__init__.py @@ -1,2 +1,4 @@ """api module""" from .routes import create_endpoint_from_model, create_endpoint_with_depends + +__all__ = ("create_endpoint_from_model", "create_endpoint_with_depends") diff --git a/stac_api/api/extensions/__init__.py b/stac_api/api/extensions/__init__.py index 019c5b376..00c31470b 100644 --- a/stac_api/api/extensions/__init__.py +++ b/stac_api/api/extensions/__init__.py @@ -1,9 +1,16 @@ """stac_api.api.extensions""" -from .context import ContextExtension # noqa +from .context import ContextExtension +from .fields import FieldsExtension +from .query import QueryExtension +from .sort import SortExtension +from .tiles import TilesExtension +from .transaction import TransactionExtension -# from .extension import ApiExtension # noqa -from .fields import FieldsExtension # noqa -from .query import QueryExtension # noqa -from .sort import SortExtension # noqa -from .tiles import TilesExtension # noqa -from .transaction import TransactionExtension # noqa +__all__ = ( + "ContextExtension", + "FieldsExtension", + "QueryExtension", + "SortExtension", + "TilesExtension", + "TransactionExtension", +) diff --git a/stac_api/models/__init__.py b/stac_api/models/__init__.py index e69de29bb..fd9898f9f 
100644 --- a/stac_api/models/__init__.py +++ b/stac_api/models/__init__.py @@ -0,0 +1 @@ +"""stac_api.models"""
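
The exception handling introduced in PATCH 21 leans on two SQLAlchemy behaviors: driver-level (DBAPI) failures are re-raised wrapped in SQLAlchemy exception types, and the original psycopg2 exception stays reachable on the wrapper's `orig` attribute. `sa.exc.StatementError` is an ancestor of `sa.exc.IntegrityError`, so widening the except clause catches more of the wrapped failures while the `isinstance` checks on `e.orig` still single out the specific constraint violations. Below is a minimal sketch of the whole pattern, not the project's exact code: `errors.ConflictError` and `self.get_db()` come straight from the hunk, but the hunk cuts off after the `ForeignKeyViolation` check, so that branch's body, the trailing re-raise, and the use of `errors.DatabaseError` (imported elsewhere in this series, in decompose.py) are assumptions.

    from contextlib import contextmanager
    from typing import Iterator

    import psycopg2
    import sqlalchemy as sa
    import sqlalchemy.exc  # mirrors the patch; makes sa.exc usable below
    from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker
    from sqlalchemy.orm import Session as SqlSession

    from stac_api import errors


    class FastAPISessionMaker(_FastAPISessionMaker):
        """FastAPISessionMaker"""

        @contextmanager
        def context_session(self) -> Iterator[SqlSession]:
            """Override the base method to include exception handling."""
            try:
                # get_db() is fastapi_utils' session generator: it yields a
                # session, commits on clean exit, and rolls back on error.
                yield from self.get_db()
            except sa.exc.StatementError as e:
                # e.orig carries the original psycopg2 exception.
                if isinstance(e.orig, psycopg2.errors.UniqueViolation):
                    raise errors.ConflictError("resource already exists") from e
                if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation):
                    # Assumed handling: the hunk ends at this isinstance check.
                    raise errors.DatabaseError(
                        "related resource does not exist"
                    ) from e
                # Anything else is unexpected; let it propagate.
                raise

Callers would use `with session_maker.context_session() as db:` so that, for example, a duplicate insert surfaces to the API layer as a ConflictError (mappable to a 409) rather than leaking a raw 500.

Most of the import churn in PATCH 22 is mechanical fallout from the isort settings in the new tox.ini: profile=black with the custom section order FUTURE, STDLIB, THIRDPARTY, ARTURO, FIRSTPARTY, LOCALFOLDER, which is why geoalchemy2, sqlakeyset, psycopg2, and stac_pydantic all migrate into a single third-party block. Abridged to the imports the patch actually touches in stac_api/clients/postgres/core.py, the resulting grouping looks like this (the real module imports more):

    # THIRDPARTY: plain imports first, then from-imports, each alphabetized.
    import attr
    import geoalchemy2 as ga
    import sqlalchemy as sa
    from sqlakeyset import get_page
    from sqlalchemy import func
    from sqlalchemy.orm import Session as SqlSession
    from stac_pydantic import ItemCollection

    # FIRSTPARTY (known_first_party = stac_api), separated by a blank line.
    from stac_api.clients.base import BaseCoreClient
    from stac_api.clients.postgres.session import Session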