Add live (redis and websockets)

Add modernised ipynb_tools
Add scheduler
Fix crs in settings
Lots of small fixes
This commit is contained in:
phil 2023-12-21 10:51:31 +05:30
parent 461c31fb6f
commit 47df53f4d1
15 changed files with 1614 additions and 61 deletions

16
.vscode/launch.json vendored
View file

@@ -5,7 +5,7 @@
"version": "0.2.0",
"configurations": [
{
"name": "Python: FastAPI",
"name": "Gisaf FastAPI",
"type": "python",
"request": "launch",
"module": "uvicorn",
@@ -14,8 +14,20 @@
"--port=5003",
"--reload"
],
"jinja": true,
"justMyCode": false
},
{
"name": "Gisaf scheduler FastAPI",
"type": "python",
"request": "launch",
"module": "uvicorn",
"args": [
"src.gisaf.scheduler_application:app",
"--port=5004",
"--reload"
],
"justMyCode": false
}
]
}

474
pdm.lock generated
View file

@@ -5,7 +5,17 @@
groups = ["default", "dev"]
strategy = ["cross_platform"]
lock_version = "4.4"
content_hash = "sha256:4593cf6b7e4e89f1e407c7b7feeb12c56c84bf16d84b94d1bbe89d3d3ed4ea6d"
content_hash = "sha256:bbb3fe3a2f7ffeaa01f5bd1e2c92a34fba1e052fafb3e9e7bd6ff649ed157d3e"
[[package]]
name = "affine"
version = "2.4.0"
requires_python = ">=3.7"
summary = "Matrices describing affine transformation of the plane"
files = [
{file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"},
{file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"},
]
[[package]]
name = "annotated-types"
@@ -40,6 +50,21 @@ files = [
{file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
]
[[package]]
name = "apscheduler"
version = "3.10.4"
requires_python = ">=3.6"
summary = "In-process task scheduler with Cron-like capabilities"
dependencies = [
"pytz",
"six>=1.4.0",
"tzlocal!=3.*,>=2.0",
]
files = [
{file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"},
{file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"},
]
[[package]]
name = "asttokens"
version = "2.4.0"
@@ -181,6 +206,46 @@ files = [
{file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
requires_python = ">=3.7.0"
summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "click"
version = "8.1.7"
@@ -229,6 +294,61 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "contextily"
version = "1.4.0"
requires_python = ">=3.8"
summary = "Context geo-tiles in Python"
dependencies = [
"geopy",
"joblib",
"matplotlib",
"mercantile",
"pillow",
"rasterio",
"requests",
"xyzservices",
]
files = [
{file = "contextily-1.4.0-py3-none-any.whl", hash = "sha256:bb3bf6d595c1850d9c31587b548d734b1f6eb9ffe4f3a4d778504f50a0aa7cd3"},
{file = "contextily-1.4.0.tar.gz", hash = "sha256:179623fdc11d82d458091d9aaf9e2be8d7b07453aa885c58491296bcf85d058d"},
]
[[package]]
name = "contourpy"
version = "1.2.0"
requires_python = ">=3.9"
summary = "Python library for calculating contours of 2D quadrilateral grids"
dependencies = [
"numpy<2.0,>=1.20",
]
files = [
{file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"},
{file = "contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"},
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"},
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"},
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"},
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"},
{file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"},
{file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"},
{file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"},
{file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"},
{file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"},
{file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"},
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"},
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"},
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"},
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"},
{file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"},
{file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"},
{file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"},
{file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"},
{file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"},
{file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"},
{file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"},
{file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"},
]
[[package]]
name = "cryptography"
version = "41.0.5"
@@ -263,6 +383,16 @@ files = [
{file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"},
]
[[package]]
name = "cycler"
version = "0.12.1"
requires_python = ">=3.8"
summary = "Composable style cycles"
files = [
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
]
[[package]]
name = "decorator"
version = "5.1.1"
@@ -337,6 +467,32 @@ files = [
{file = "fiona-1.9.5.tar.gz", hash = "sha256:99e2604332caa7692855c2ae6ed91e1fffdf9b59449aa8032dd18e070e59a2f7"},
]
[[package]]
name = "fonttools"
version = "4.46.0"
requires_python = ">=3.8"
summary = "Tools to manipulate font files"
files = [
{file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:982f69855ac258260f51048d9e0c53c5f19881138cc7ca06deb38dc4b97404b6"},
{file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c23c59d321d62588620f2255cf951270bf637d88070f38ed8b5e5558775b86c"},
{file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e94244ec24a940ecfbe5b31c975c8a575d5ed2d80f9a280ce3b21fa5dc9c34"},
{file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9f9cdd7ef63d1b8ac90db335762451452426b3207abd79f60da510cea62da5"},
{file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ca9eceebe70035b057ce549e2054cad73e95cac3fe91a9d827253d1c14618204"},
{file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8be6adfa4e15977075278dd0a0bae74dec59be7b969b5ceed93fb86af52aa5be"},
{file = "fonttools-4.46.0-cp311-cp311-win32.whl", hash = "sha256:7b5636f5706d49f13b6d610fe54ee662336cdf56b5a6f6683c0b803e23d826d2"},
{file = "fonttools-4.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:49ea0983e55fd7586a809787cd4644a7ae471e53ab8ddc016f9093b400e32646"},
{file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7b460720ce81773da1a3e7cc964c48e1e11942b280619582a897fa0117b56a62"},
{file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8bee9f4fc8c99824a424ae45c789ee8c67cb84f8e747afa7f83b7d3cef439c3b"},
{file = "fonttools-4.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3d7b96aba96e05e8c911ce2dfc5acc6a178b8f44f6aa69371ab91aa587563da"},
{file = "fonttools-4.46.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6aeb5c340416d11a3209d75c48d13e72deea9e1517837dd1522c1fd1f17c11"},
{file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c779f8701deedf41908f287aeb775b8a6f59875ad1002b98ac6034ae4ddc1b7b"},
{file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce199227ce7921eaafdd4f96536f16b232d6b580ce74ce337de544bf06cb2752"},
{file = "fonttools-4.46.0-cp312-cp312-win32.whl", hash = "sha256:1c9937c4dd1061afd22643389445fabda858af5e805860ec3082a4bc07c7a720"},
{file = "fonttools-4.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:a9fa52ef8fd14d7eb3d813e1451e7ace3e1eebfa9b7237d3f81fee8f3de6a114"},
{file = "fonttools-4.46.0-py3-none-any.whl", hash = "sha256:5b627ed142398ea9202bd752c04311592558964d1a765fb2f78dc441a05633f4"},
{file = "fonttools-4.46.0.tar.gz", hash = "sha256:2ae45716c27a41807d58a9f3f59983bdc8c0a46cb259e4450ab7e196253a9853"},
]
[[package]]
name = "geoalchemy2"
version = "0.14.2"
@@ -351,6 +507,16 @@ files = [
{file = "GeoAlchemy2-0.14.2.tar.gz", hash = "sha256:8ca023dcb9a36c6d312f3b4aee631d66385264e2fc9feb0ab0f446eb5609407d"},
]
[[package]]
name = "geographiclib"
version = "2.0"
requires_python = ">=3.7"
summary = "The geodesic routines from GeographicLib"
files = [
{file = "geographiclib-2.0-py3-none-any.whl", hash = "sha256:6b7225248e45ff7edcee32becc4e0a1504c606ac5ee163a5656d482e0cd38734"},
{file = "geographiclib-2.0.tar.gz", hash = "sha256:f7f41c85dc3e1c2d3d935ec86660dc3b2c848c83e17f9a9e51ba9d5146a15859"},
]
[[package]]
name = "geopandas"
version = "0.14.1"
@@ -368,6 +534,19 @@ files = [
{file = "geopandas-0.14.1.tar.gz", hash = "sha256:4853ff89ecb6d1cfc43e7b3671092c8160e8a46a3dd7368f25906283314e42bb"},
]
[[package]]
name = "geopy"
version = "2.4.1"
requires_python = ">=3.7"
summary = "Python Geocoding Toolbox"
dependencies = [
"geographiclib<3,>=1.52",
]
files = [
{file = "geopy-2.4.1-py3-none-any.whl", hash = "sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7"},
{file = "geopy-2.4.1.tar.gz", hash = "sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1"},
]
[[package]]
name = "greenlet"
version = "3.0.0"
@@ -477,6 +656,105 @@ files = [
{file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
]
[[package]]
name = "joblib"
version = "1.3.2"
requires_python = ">=3.7"
summary = "Lightweight pipelining with Python functions"
files = [
{file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
{file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"},
]
[[package]]
name = "kiwisolver"
version = "1.4.5"
requires_python = ">=3.7"
summary = "A fast implementation of the Cassowary constraint solver"
files = [
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"},
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"},
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"},
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"},
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"},
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"},
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"},
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"},
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"},
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"},
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"},
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"},
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"},
{file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"},
{file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"},
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"},
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"},
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"},
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"},
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"},
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"},
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"},
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"},
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"},
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"},
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"},
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"},
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"},
{file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"},
{file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"},
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"},
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"},
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"},
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"},
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"},
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"},
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"},
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"},
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"},
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"},
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"},
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"},
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"},
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"},
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"},
{file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
]
[[package]]
name = "matplotlib"
version = "3.8.2"
requires_python = ">=3.9"
summary = "Python plotting package"
dependencies = [
"contourpy>=1.0.1",
"cycler>=0.10",
"fonttools>=4.22.0",
"kiwisolver>=1.3.1",
"numpy<2,>=1.21",
"packaging>=20.0",
"pillow>=8",
"pyparsing>=2.3.1",
"python-dateutil>=2.7",
]
files = [
{file = "matplotlib-3.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d86593ccf546223eb75a39b44c32788e6f6440d13cfc4750c1c15d0fcb850b63"},
{file = "matplotlib-3.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a5430836811b7652991939012f43d2808a2db9b64ee240387e8c43e2e5578c8"},
{file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9576723858a78751d5aacd2497b8aef29ffea6d1c95981505877f7ac28215c6"},
{file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba9cbd8ac6cf422f3102622b20f8552d601bf8837e49a3afed188d560152788"},
{file = "matplotlib-3.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:03f9d160a29e0b65c0790bb07f4f45d6a181b1ac33eb1bb0dd225986450148f0"},
{file = "matplotlib-3.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:3773002da767f0a9323ba1a9b9b5d00d6257dbd2a93107233167cfb581f64717"},
{file = "matplotlib-3.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c318c1e95e2f5926fba326f68177dee364aa791d6df022ceb91b8221bd0a627"},
{file = "matplotlib-3.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:091275d18d942cf1ee9609c830a1bc36610607d8223b1b981c37d5c9fc3e46a4"},
{file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b0f3b8ea0e99e233a4bcc44590f01604840d833c280ebb8fe5554fd3e6cfe8d"},
{file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b1704a530395aaf73912be741c04d181f82ca78084fbd80bc737be04848331"},
{file = "matplotlib-3.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533b0e3b0c6768eef8cbe4b583731ce25a91ab54a22f830db2b031e83cca9213"},
{file = "matplotlib-3.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:0f4fc5d72b75e2c18e55eb32292659cf731d9d5b312a6eb036506304f4675630"},
{file = "matplotlib-3.8.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa11b3c6928a1e496c1a79917d51d4cd5d04f8a2e75f21df4949eeefdf697f4b"},
{file = "matplotlib-3.8.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1095fecf99eeb7384dabad4bf44b965f929a5f6079654b681193edf7169ec20"},
{file = "matplotlib-3.8.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bddfb1db89bfaa855912261c805bd0e10218923cc262b9159a49c29a7a1c1afa"},
{file = "matplotlib-3.8.2.tar.gz", hash = "sha256:01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1"},
]
[[package]]
name = "matplotlib-inline"
version = "0.1.6"
@@ -490,6 +768,18 @@ files = [
{file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"},
]
[[package]]
name = "mercantile"
version = "1.2.1"
summary = "Web mercator XYZ tile utilities"
dependencies = [
"click>=3.0",
]
files = [
{file = "mercantile-1.2.1-py3-none-any.whl", hash = "sha256:30f457a73ee88261aab787b7069d85961a5703bb09dc57a170190bc042cd023f"},
{file = "mercantile-1.2.1.tar.gz", hash = "sha256:fa3c6db15daffd58454ac198b31887519a19caccee3f9d63d17ae7ff61b3b56b"},
]
[[package]]
name = "numpy"
version = "1.26.0"
@@ -636,6 +926,41 @@ files = [
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
]
[[package]]
name = "pillow"
version = "10.1.0"
requires_python = ">=3.8"
summary = "Python Imaging Library (Fork)"
files = [
{file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"},
{file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"},
{file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"},
{file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"},
{file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"},
{file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"},
{file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"},
{file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"},
{file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"},
{file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"},
{file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"},
{file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"},
{file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"},
{file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"},
{file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"},
{file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"},
{file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"},
{file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"},
{file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"},
{file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"},
{file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"},
{file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"},
{file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"},
{file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"},
{file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"},
{file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"},
{file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"},
]
[[package]]
name = "pretty-errors"
version = "1.2.25"
@ -840,6 +1165,16 @@ files = [
{file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
]
[[package]]
name = "pyparsing"
version = "3.1.1"
requires_python = ">=3.6.8"
summary = "pyparsing module - Classes and methods to define and execute parsing grammars"
files = [
{file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
{file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
]
[[package]]
name = "pyproj"
version = "3.6.1"
@ -969,6 +1304,34 @@ files = [
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "rasterio"
version = "1.3.9"
requires_python = ">=3.8"
summary = "Fast and direct raster I/O for use with Numpy and SciPy"
dependencies = [
"affine",
"attrs",
"certifi",
"click-plugins",
"click>=4.0",
"cligj>=0.5",
"numpy",
"setuptools",
"snuggs>=1.4.1",
]
files = [
{file = "rasterio-1.3.9-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0172dbd80bd9adc105ec2c9bd207dbd5519ea06b438a4d965c6290ae8ed6ff9f"},
{file = "rasterio-1.3.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ea5b42597d85868ee88c750cc33f2ae729e1b5e3fe28f99071f39e1417bf1c0"},
{file = "rasterio-1.3.9-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:be9b343bd08245df22115775dc9513c912afb4134d832662fa165d70cb805c34"},
{file = "rasterio-1.3.9-cp311-cp311-win_amd64.whl", hash = "sha256:06d53e2e0885f039f960beb7c861400b92ea3e0e5abc2c67483fb56b1e5cbc13"},
{file = "rasterio-1.3.9-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a34bb9eef67b7896e2dfb39e10ba6372f9894226fb790bd7a46f5748f205b7d8"},
{file = "rasterio-1.3.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67b144b9678f9ad4cf5f2c3f455cbc6a7166c0523179249cee8f2e2c57d76c5b"},
{file = "rasterio-1.3.9-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:99b72fccb702a921f43e56a4507b4cafe2a9196b478b993b98e82ec6851916d7"},
{file = "rasterio-1.3.9-cp312-cp312-win_amd64.whl", hash = "sha256:6777fad3c31eb3e5da0ccaa28a032ad07c20d003bcd14f8bc13e16ca2f62348c"},
{file = "rasterio-1.3.9.tar.gz", hash = "sha256:fc6d0d290492fa1a5068711cfebb21cc936968891b7ed9da0690c8a7388885c5"},
]
[[package]]
name = "redis"
version = "5.0.1"
@ -982,6 +1345,22 @@ files = [
{file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"},
]
[[package]]
name = "requests"
version = "2.31.0"
requires_python = ">=3.7"
summary = "Python HTTP for Humans."
dependencies = [
"certifi>=2017.4.17",
"charset-normalizer<4,>=2",
"idna<4,>=2.5",
"urllib3<3,>=1.21.1",
]
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[[package]]
name = "rsa"
version = "4.9"
@ -1051,6 +1430,19 @@ files = [
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
[[package]]
name = "snuggs"
version = "1.4.7"
summary = "Snuggs are s-expressions for Numpy"
dependencies = [
"numpy",
"pyparsing>=2.1.6",
]
files = [
{file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"},
{file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"},
]
[[package]]
name = "sqlalchemy"
version = "2.0.23"
@ -1183,6 +1575,29 @@ files = [
{file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"},
]
[[package]]
name = "tzlocal"
version = "5.2"
requires_python = ">=3.8"
summary = "tzinfo object for the local timezone"
dependencies = [
"tzdata; platform_system == \"Windows\"",
]
files = [
{file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
{file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
]
[[package]]
name = "urllib3"
version = "2.1.0"
requires_python = ">=3.8"
summary = "HTTP library with thread-safe connection pooling, file post, and more."
files = [
{file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
{file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
]
[[package]]
name = "uvicorn"
version = "0.24.0.post1"
@ -1205,3 +1620,60 @@ files = [
{file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"},
{file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"},
]
[[package]]
name = "websockets"
version = "12.0"
requires_python = ">=3.8"
summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
files = [
{file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
{file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
{file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
{file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
{file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
{file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
{file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
{file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
{file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
{file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
{file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
{file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
{file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
{file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
{file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
{file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
{file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
{file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
]
[[package]]
name = "xyzservices"
version = "2023.10.1"
requires_python = ">=3.8"
summary = "Source of XYZ tiles providers"
files = [
{file = "xyzservices-2023.10.1-py3-none-any.whl", hash = "sha256:6a4c38d3a9f89d3e77153eff9414b36a8ee0850c9e8b85796fd1b2a85b8dfd68"},
{file = "xyzservices-2023.10.1.tar.gz", hash = "sha256:091229269043bc8258042edbedad4fcb44684b0473ede027b5672ad40dc9fa02"},
]

View file

@ -24,10 +24,16 @@ dependencies = [
"orjson>=3.9.10",
"sqlmodel>=0.0.14",
"redis>=5.0.1",
"websockets>=12.0",
"apscheduler>=3.10.4",
]
requires-python = ">=3.11"
readme = "README.md"
license = {text = "MIT"}
license = {text = "GPLv3"}
[project.optional-dependencies]
contextily = ["contextily>=1.4.0"]
all = ["gisaf[contextily]"]
[build-system]
requires = ["pdm-backend"]

View file

@ -1 +1 @@
__version__ = '2023.4.dev4+g049b8c9.d20231213'
__version__ = '2023.4.dev7+g461c31f.d20231218'

View file

@ -12,9 +12,9 @@ from .geoapi import api as geoapi
from .config import conf
from .registry import registry, ModelRegistry
from .redis_tools import setup_redis, shutdown_redis, setup_redis_cache
from .live import setup_live
logging.basicConfig(level=conf.gisaf.debugLevel)
logger = logging.getLogger(__name__)
## Subclass FastAPI to add attributes to be used globally, ie. registry
@ -29,9 +29,11 @@ class GisafFastAPI(FastAPI):
@asynccontextmanager
async def lifespan(app: FastAPI):
await registry.make_registry(app)
await setup_redis(app)
await registry.make_registry()
await setup_redis()
await setup_live()
yield
await shutdown_redis()
app = FastAPI(
debug=False,

View file

@ -221,6 +221,16 @@ class Job(BaseSettings):
minutes: int | None = 0
seconds: int | None = 0
class Crs(BaseSettings):
    '''
    Named coordinate reference systems (CRS) used throughout Gisaf.

    Each field holds a CRS identifier string (e.g. 'epsg:4326') in the
    form accepted by pyproj / geopandas ``to_crs``. Instances are built
    from the geo settings (see the ``Config.crs`` property).
    '''
    db: str            # CRS in which geometries are stored in the database
    geojson: str       # CRS used when serving GeoJSON to clients
    for_proj: str      # projected CRS used for measurements / projections
    survey: str        # CRS of the raw survey data
    web_mercator: str  # Web Mercator ('epsg:3857'), used by web map tiles
class Config(BaseSettings):
model_config = SettingsConfigDict(
#env_prefix='gisaf_',
@ -238,9 +248,20 @@ class Config(BaseSettings):
) -> Tuple[PydanticBaseSettingsSource, ...]:
return env_settings, init_settings, file_secret_settings, config_file_settings
# def __init__(self, **kwargs):
# super().__init__(**kwargs)
# self.crs = {
# 'db': f'epsg:{conf.srid}',
# 'geojson': f'epsg:{conf.geojson_srid}',
# 'for_proj': f'epsg:{conf.srid_for_proj}',
# 'survey': f'epsg:{conf.raw_survey_srid}',
# 'web_mercator': 'epsg:3857',
# }
admin: Admin
attachments: Attachments
basket: BasketOldDef
# crs: Crs
crypto: Crypto
dashboard: Dashboard
db: DB
@ -261,6 +282,15 @@ class Config(BaseSettings):
#engine: AsyncEngine
#session_maker: sessionmaker
    @property
    def crs(self) -> Crs:
        """
        Coordinate reference systems derived from the geo settings.

        NOTE(review): ``geojson`` is built from the same ``self.geo.srid``
        as ``db``; the commented-out legacy code derived it from a separate
        ``geojson_srid`` setting — confirm that serving GeoJSON in the
        database CRS is intended.
        """
        return Crs(
            db=f'epsg:{self.geo.srid}',
            geojson=f'epsg:{self.geo.srid}',
            for_proj=f'epsg:{self.geo.srid_for_proj}',
            survey=f'epsg:{self.geo.raw_survey.srid}',
            web_mercator='epsg:3857',
        )
def config_file_settings() -> dict[str, Any]:
config: dict[str, Any] = {}

View file

@ -2,14 +2,16 @@
Geographical json stores, served under /gj
Used for displaying features on maps
"""
from json import JSONDecodeError
import logging
from typing import Annotated
from asyncio import CancelledError
from fastapi import FastAPI, HTTPException, Response, status, responses, Header
from fastapi import (FastAPI, HTTPException, Response, Header, WebSocket, WebSocketDisconnect,
status, responses)
from .redis_tools import store as redis_store
# from gisaf.live import live_server
from .live import live_server
from .registry import registry
@ -19,28 +21,54 @@ api = FastAPI(
default_response_class=responses.ORJSONResponse,
)
@api.get('/live/{store}')
async def live_layer(store: str):
class ConnectionManager:
    """
    Track open websocket connections and send messages to them.
    """
    active_connections: list[WebSocket]

    def __init__(self) -> None:
        self.active_connections = []

    async def connect(self, websocket: WebSocket) -> None:
        """Accept the websocket handshake and register the connection."""
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        """Unregister the connection, tolerating double-disconnects."""
        # Guard against a connection that was already removed (e.g. a close
        # handler running twice): bare list.remove would raise ValueError.
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)

    async def send_personal_message(self, message: str, websocket: WebSocket) -> None:
        """Send a text message to a single client."""
        await websocket.send_text(message)

    async def broadcast(self, message: str) -> None:
        """Send a text message to every connected client."""
        # Iterate over a snapshot: a send may trigger a disconnect that
        # mutates active_connections while we loop.
        for connection in list(self.active_connections):
            await connection.send_text(message)
@api.websocket('/live/{store}')
async def live_layer(store: str, websocket: WebSocket):
"""
Websocket for live layer updates
"""
ws = web.WebSocketResponse()
await ws.prepare(request)
async for msg in ws:
if msg.type == WSMsgType.TEXT:
if msg.data == 'close':
await ws.close()
else:
msg_data = msg.json()
await websocket.accept()
try:
while True:
try:
msg_data = await websocket.receive_json()
except JSONDecodeError:
msg_text = await websocket.receive_text()
if msg_text == 'close':
await websocket.close()
continue
# else:
if 'message' in msg_data:
if msg_data['message'] == 'subscribeLiveLayer':
live_server.add_subscription(ws, store)
live_server.add_subscription(websocket, store)
elif msg_data['message'] == 'unsubscribeLiveLayer':
live_server.remove_subscription(ws, store)
elif msg.type == WSMsgType.ERROR:
logger.exception(ws.exception())
logger.debug('websocket connection closed')
return ws
live_server.remove_subscription(websocket, store)
else:
logger.warning(f'Got websocket message with no message field: {msg_data}')
except WebSocketDisconnect:
logger.debug('Websocket disconnected')
# logger.debug('websocket connection closed')
@api.get('/{store_name}')
async def get_geojson(store_name,

357
src/gisaf/ipynb_tools.py Normal file
View file

@ -0,0 +1,357 @@
"""
Utility functions for Jupyter/iPython notebooks
Usage from a notebook:
from gisaf.ipynb_tools import registry
"""
import logging
from urllib.error import URLError
from datetime import datetime
from io import BytesIO
from pickle import dump, HIGHEST_PROTOCOL

# from aiohttp import ClientSession, MultipartWriter
import pandas as pd
import geopandas as gpd
from geoalchemy2 import WKTElement
# from geoalchemy2.shape import from_shape
from sqlalchemy import create_engine, text
# from shapely import wkb

from .config import conf
from .redis_tools import store as redis_store
from .live import live_server
from .registry import registry
## For base maps: contextily
try:
import contextily as ctx
except ImportError:
ctx = None
logger = logging.getLogger('Gisaf tools')
class Notebook:
    """
    Thin wrapper around a notebook file path.

    Proof of concept: Gisaf could eventually control notebook execution.
    """

    def __init__(self, path: str) -> None:
        # Filesystem path (or identifier) of the notebook
        self.path = path
class Gisaf:
    """
    Gisaf tool for ipython/Jupyter notebooks.

    Aggregates the Gisaf configuration, redis store, live server and model
    registry behind a single object, so notebooks can do::

        from gisaf.ipynb_tools import gisaf
        await gisaf.setup()
        await gisaf.make_models()
    """
    def __init__(self):
        # self.db = db
        self.conf = conf
        self.store = redis_store
        self.live_server = live_server
        if ctx:
            ## Contextily newer version deprecated ctx.sources
            self.basemaps = ctx.providers
        else:
            ## contextily is not installed: no base maps available
            self.basemaps = None

    async def setup(self, with_mqtt=False):
        """
        Create the redis connections and set up the live server.

        :param with_mqtt: deprecated and ignored (kept for backward compatibility)
        """
        await self.store.create_connections()
        if with_mqtt:
            logger.warning('Gisaf live_server does not support with_mqtt anymore: ignoring')
        try:
            await self.live_server.setup()
        except Exception as err:
            ## Best effort: a notebook can still be useful without live updates
            logger.warning(f'Cannot setup live_server: {err}')
            logger.exception(err)

    async def make_models(self, **kwargs):
        """
        Populate the model registry.
        By default, all models will be added, including those defined in categories
        (full registry).
        :return: None
        """
        await registry.make_registry()
        if 'with_categories' in kwargs:
            logger.warning(f'{self.__class__}.make_models() does not support argument with_categories anymore')
        self.registry = registry
        ## TODO: Compatibility: mark "models" deprecated, replaced by "registry"
        # self.models = registry

    def get_layer_list(self):
        """
        Get the names of all layers (ie. models with a geometry).
        See get_all_geo for fetching data for a layer.
        :return: the layer names (dict keys view)
        """
        return self.registry.geom.keys()

    async def get_query(self, query):
        """
        Return a dataframe for the query.

        NOTE(review): relies on the legacy gino/asyncpg API
        (``query.bind.raw_pool``); likely broken under SQLModel/SQLAlchemy 2 —
        needs porting.
        """
        async with query.bind.raw_pool.acquire() as conn:
            compiled = query.compile()
            columns = [a.name for a in compiled.statement.columns]
            stmt = await conn.prepare(compiled.string)
            data = await stmt.fetch(*[compiled.params.get(param) for param in compiled.positiontup])
            return pd.DataFrame(data, columns=columns)

    async def get_all(self, model, **kwargs):
        """
        Return a dataframe with all records for the model.
        """
        return await self.get_query(model.query)

    async def set_dashboard(self, name, group,
                            notebook=None,
                            description=None,
                            html=None,
                            plot=None,
                            df=None,
                            attached=None,
                            expanded_panes=None,
                            sections=None):
        """
        Add or update a dashboard page in Gisaf.

        :param name: name of the dashboard page
        :param group: name of the group (level directory)
        :param notebook: name of the notebook, to be registered for future use
        :param description: free text; defaults to "Dashboard <group>/<name>"
        :param html: pre-rendered HTML content of the page
        :param plot: a plot object, pickled and stored with the page
        :param df: a dataframe, pickled and stored with the page
        :param attached: a matplotlib/pyplot plot, etc
        :param expanded_panes: names of panes to expand by default
        :param sections: a list of DashboardPageSection
        :return: None
        """
        from gisaf.models.dashboard import DashboardPage, DashboardPageSection
        expanded_panes = expanded_panes or []
        sections = sections or []
        now = datetime.now()
        if not description:
            description = 'Dashboard {}/{}'.format(group, name)
        if df is not None:
            with BytesIO() as buf:
                ## Don't use df.to_pickle as it closes the buffer (as per pandas==0.25.1)
                dump(df, buf, protocol=HIGHEST_PROTOCOL)
                buf.seek(0)
                df_blob = buf.read()
        else:
            df_blob = None
        if plot is not None:
            with BytesIO() as buf:
                dump(plot, buf)
                buf.seek(0)
                plot_blob = buf.read()
        else:
            plot_blob = None
        ## NOTE(review): the gino-era query API below (.query / .gino.first() /
        ## .update().apply() / .create()) needs porting to SQLModel sessions.
        page = await DashboardPage.query.where((DashboardPage.name==name) & (DashboardPage.group==group)).gino.first()
        if not page:
            page = DashboardPage(
                name=name,
                group=group,
                description=description,
                notebook=notebook,
                time=now,
                df=df_blob,
                plot=plot_blob,
                html=html,
                expanded_panes=','.join(expanded_panes)
            )
            if attached:
                page.attachment = page.save_attachment(attached, name=name)
            await page.create()
        else:
            if attached:
                page.attachment = page.save_attachment(attached)
            await page.update(
                description=description,
                notebook=notebook,
                html=html,
                attachment=page.attachment,
                time=now,
                df=df_blob,
                plot=plot_blob,
                expanded_panes=','.join(expanded_panes)
            ).apply()
        for section in sections:
            #print(section)
            section.page = page
            ## Replace section.plot (matplotlib plot or figure)
            ## by the name of the rendered pic in the filesystem
            section.plot = section.save_plot(section.plot)
            section_record = await DashboardPageSection.query.where(
                (DashboardPageSection.dashboard_page_id==page.id) & (DashboardPageSection.name==section.name)
            ).gino.first()
            if not section_record:
                section.dashboard_page_id = page.id
                await section.create()
            else:
                logger.warning('TODO: set_dashboard section update')
                logger.warning('TODO: set_dashboard section remove')

    async def set_widget(self, name, title, subtitle, content, notebook=None):
        """
        Create or update a web widget, that is served by /embed/<name>.
        """
        from gisaf.models.dashboard import Widget
        now = datetime.now()
        ## NOTE(review): gino-era API, needs porting to SQLModel sessions
        widget = await Widget.query.where(Widget.name==name).gino.first()
        kwargs = dict(
            title=title,
            subtitle=subtitle,
            content=content,
            notebook=notebook,
            time=now,
        )
        if widget:
            await widget.update(**kwargs).apply()
        else:
            await Widget(name=name, **kwargs).create()

    async def to_live_layer(self, gdf, channel, mapbox_paint=None, mapbox_layout=None, properties=None):
        """
        Send a geodataframe to a gisaf server with an HTTP POST request for live map display.

        NOTE(review): ClientSession/MultipartWriter come from aiohttp, whose
        import is commented out at the top of this module — this method raises
        NameError as-is. Re-enable the aiohttp import or port to another client.
        """
        with BytesIO() as buf:
            ## The geodataframe is pickled and sent as one part of a multipart body
            dump(gdf, buf, protocol=HIGHEST_PROTOCOL)
            buf.seek(0)
            async with ClientSession() as session:
                with MultipartWriter('mixed') as mpwriter:
                    mpwriter.append(buf)
                    if mapbox_paint is not None:
                        mpwriter.append_json(mapbox_paint, {'name': 'mapbox_paint'})
                    if mapbox_layout is not None:
                        mpwriter.append_json(mapbox_layout, {'name': 'mapbox_layout'})
                    if properties is not None:
                        mpwriter.append_json(properties, {'name': 'properties'})
                    ## NOTE(review): if conf.gisaf_live is a pydantic model,
                    ## subscripting fails — confirm attribute access instead
                    async with session.post('{}://{}:{}/api/live/{}'.format(
                            self.conf.gisaf_live['scheme'],
                            self.conf.gisaf_live['hostname'],
                            self.conf.gisaf_live['port'],
                            channel,
                            ), data=mpwriter) as resp:
                        return await resp.text()

    async def remove_live_layer(self, channel):
        """
        Remove the channel from Gisaf Live.

        NOTE(review): ClientSession comes from aiohttp, whose import is
        commented out at the top of this module — raises NameError as-is.
        """
        async with ClientSession() as session:
            async with session.get('{}://{}:{}/api/remove-live/{}'.format(
                    self.conf.gisaf_live['scheme'],
                    self.conf.gisaf_live['hostname'],
                    self.conf.gisaf_live['port'],
                    channel
                    )) as resp:
                return await resp.text()

    def to_layer(self, gdf: gpd.GeoDataFrame, model, project_id=None,
                 skip_columns=None, replace_all=True,
                 chunksize=100):
        """
        Save the geodataframe gdf to the Gisaf model, using pandas' to_sql dataframes' method.
        Note that it's NOT an async call. Explanations:
          * to_sql doesn't seems to work with gino/asyncpg
          * using Gisaf models is few magnitude orders slower
            (the async code using this technique is left commented out, for reference)

        :param gdf: the geodataframe to save
        :param model: the target Gisaf model (gives table name and schema)
        :param project_id: unused in the sync implementation (see commented code)
        :param skip_columns: column names to drop before saving
        :param replace_all: must be True; deletes all existing rows first
        :raises NotImplementedError: when replace_all is False (updates unsupported)
        """
        if skip_columns is None:
            skip_columns = []
        ## Filter empty geometries, and reproject
        ## (fixed: conf.crs is now a pydantic Crs model, not a dict)
        _gdf: gpd.GeoDataFrame = gdf[~gdf.geometry.is_empty].to_crs(self.conf.crs.geojson)
        ## Remove the empty geometries
        _gdf.dropna(inplace=True, subset=['geometry'])
        #_gdf['geom'] = _gdf.geom1.apply(lambda geom: from_shape(geom, srid=self.conf.geo.srid))
        for col in skip_columns:
            if col in _gdf.columns:
                _gdf.drop(columns=[col], inplace=True)
        ## Fixed: the srid moved under conf.geo with the settings refactor
        _gdf['geom'] = _gdf['geometry'].apply(lambda geom: WKTElement(geom.wkt, srid=self.conf.geo.srid))
        _gdf.drop(columns=['geometry'], inplace=True)
        ## NOTE(review): if conf.db is a pydantic model, conf.db['uri'] fails —
        ## confirm attribute access (conf.db.uri?)
        engine = create_engine(self.conf.db['uri'], echo=False)
        ## Drop existing
        if replace_all:
            ## Fixed for SQLAlchemy 2: Engine.execute() was removed;
            ## use an explicit transaction-scoped connection
            with engine.begin() as connection:
                connection.execute(text('DELETE FROM "{}"."{}"'.format(
                    model.__table_args__['schema'], model.__tablename__)))
        else:
            raise NotImplementedError('ipynb_tools.Gisaf.to_layer does not support updates yet')
        ## See https://stackoverflow.com/questions/38361336/write-geodataframe-into-sql-database
        # Use 'dtype' to specify column's type
        _gdf.to_sql(
            name=model.__tablename__,
            con=engine,
            schema=model.__table_args__['schema'],
            if_exists='append',
            index=False,
            dtype={
                'geom': model.geom.type,
            },
            method='multi',
            chunksize=chunksize,
        )
        #async with self.db.transaction() as tx:
        #    if replace_all:
        #        await model.delete.gino.status()
        #    else:
        #        raise NotImplementedError('ipynb_tools.Gisaf.to_layer does not support updates yet')
        #    if not skip_columns:
        #        skip_columns = ['x', 'y', 'z', 'coords']
        #    ## Reproject
        #    ggdf = gdf.to_crs(self.conf.crs['geojson'])
        #    ## Remove the empty geometries
        #    ggdf.dropna(inplace=True)
        #    #ggdf['geom'] = ggdf.geom1.apply(lambda geom: from_shape(geom, srid=self.conf.srid))
        #    for col in skip_columns:
        #        if col in ggdf.columns:
        #            ggdf.drop(columns=[col], inplace=True)
        #    #ggdf.set_geometry('geom', inplace=True)
        #    if project_id:
        #        ggdf['project_id'] = project_id
        #    ## XXX: index?
        #    gdf_dict = ggdf.to_dict(orient='records')
        #    gdf_dict_2 = []
        #    for row in gdf_dict:
        #        geometry = row.pop('geometry')
        #        if not geometry.is_empty:
        #            row['geom'] = str(from_shape(geometry, srid=self.conf.srid))
        #            gdf_dict_2.append(row)
        #    result = await model.insert().gino.all(*gdf_dict_2)
        #    return
        #    for row in gdf_dict:
        #        if 'id' in row:
        #            ## TODO: Existing id: can use merge
        #            ex_item = await model.get(item['id'])
        #            await ex_item.update(**row)
        #        else:
        #            geometry = row.pop('geometry')
        #            if not geometry.is_empty:
        #                feature = model(**row)
        #                feature.geom = from_shape(geometry, srid=self.conf.srid)
        #                await feature.create()
        #        #db.session.commit()


## Module-level singleton used by notebooks: `from gisaf.ipynb_tools import gisaf`
gisaf = Gisaf()

81
src/gisaf/live.py Normal file
View file

@ -0,0 +1,81 @@
import asyncio
import logging
from collections import defaultdict
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
# from .config import conf
from .redis_tools import store
logger = logging.getLogger(__name__)
class LiveServer:
    """
    Relay live layer data from redis pub/sub to websocket clients.

    Keeps, per redis channel, the set of websockets subscribed to it, and
    forwards each message published on "live:*:json" to those sockets.
    """
    def __init__(self):
        # Maps redis channel name -> set of subscribed websockets
        self.ws_clients = defaultdict(set)

    async def setup(self, listen_to_redis=False, with_mqtt=False):
        """
        Setup for the live server.

        :param listen_to_redis: when True, psubscribe to "live:*:json" and
            start the background relay task
        :param with_mqtt: deprecated and ignored
        """
        if with_mqtt:
            logger.warning('Gisaf LiveServer does not support with_mqtt: ignoring')
        if listen_to_redis:
            self.pub = store.redis.pubsub()
            await self.pub.psubscribe('live:*:json')
            asyncio.create_task(self._listen_to_redis())

    async def _listen_to_redis(self):
        """
        Listen on the redis pattern subscription ("live:*:json") and relay
        each published message to the subscribed websockets.
        """
        async for msg in self.pub.listen():
            if msg['type'] == 'pmessage':
                await self._send_to_ws_clients(msg['channel'].decode(),
                                               msg['data'].decode())

    async def _send_to_ws_clients(self, store_name, json_data):
        """
        Send the json_data to the websockets which have subscribed
        to that channel (store_name).
        """
        clients = self.ws_clients[store_name]
        if not clients:
            #logger.debug(f'WS channel {store_name} has no clients')
            return
        logger.debug(f'WS channel {store_name} got {len(json_data)} bytes to send to:'
                     f' {", ".join([str(id(ws)) for ws in clients])}')
        dead = []
        for ws in clients:
            if ws.client_state.name != 'CONNECTED':
                logger.debug(f'Cannot send {store_name} for WS {id(ws)}, state: {ws.client_state.name}')
                continue
            try:
                await ws.send_text(json_data)
                logger.debug(f'Sent live update for WS {id(ws)}: {len(json_data)}')
            except RuntimeError as err:
                ## The ws is probably closed: drop only this client.
                ## (The original code deleted the whole channel's client set
                ## here, disconnecting every other subscriber.)
                logger.debug(f'Cannot send live update for {store_name}: {err}')
                dead.append(ws)
        ## Discard dead sockets after the loop: never mutate while iterating
        for ws in dead:
            clients.discard(ws)

    def add_subscription(self, ws, store_name):
        """
        Add the websocket subscription to the layer.
        """
        channel = store.get_json_channel(store_name)
        logger.debug(f'WS {id(ws)} subscribed to {channel}')
        self.ws_clients[channel].add(ws)

    def remove_subscription(self, ws, store_name):
        """
        Remove the websocket subscription to the layer.
        """
        channel = store.get_json_channel(store_name)
        ## discard: no-op if the ws was never (or already un-) subscribed
        self.ws_clients[channel].discard(ws)
async def setup_live():
    """
    Start the global live server, relaying redis live channels to websockets.

    Called from the FastAPI application lifespan, after the redis store
    has been set up.
    """
    # No `global` statement needed: live_server is only read here, never
    # rebound (the original declared `global live_server` needlessly).
    await live_server.setup(listen_to_redis=True)


## Module-level singleton, imported by the geoapi and ipynb_tools
live_server = LiveServer()

31
src/gisaf/models/live.py Normal file
View file

@ -0,0 +1,31 @@
# from pydantic import BaseModel, Field
# from .geo_models_base import GeoModel
# class LiveModel(GeoModel):
# attribution: str | None = None
# # auto_import:
# category: str | None = None
# count: int
# custom: bool = False
# description: str
# gisType: str
# group: str
# icon: str | None = None
# is_db: bool = True
# is_live: bool
# name: str
# rawSurveyStore: str | None = None
# store: str
# style: str | None = None
# symbol: str
# tagPlugins: list[str] = []
# type: str
# viewableRole: str | None = None
# z_index: int = Field(..., alias='zIndex')
# class GeomGroup(BaseModel):
# name: str
# title: str
# description: str
# models: list[GeoModel]

View file

@ -90,14 +90,12 @@ class Store:
- redis: RedisConnection
- pub (/sub) connections
"""
async def setup(self, app):
async def setup(self):
"""
Setup the live service for the main Gisaf application:
- Create connection for the publishers
- Create connection for redis listeners (websocket service)
"""
self.app = app
app.extra['store'] = self
await self.create_connections()
await self.get_live_layer_defs()
@ -187,7 +185,7 @@ class Store:
if 'popup' not in gdf.columns:
gdf['popup'] = 'Live: ' + live_name + ' #' + gdf.index.astype('U')
if len(gdf) > 0:
gdf = gdf.to_crs(conf.crs['geojson'])
gdf = gdf.to_crs(conf.crs.geojson)
gis_type = gdf.geom_type.iloc[0]
else:
gis_type = 'Point' ## FIXME: cannot be inferred from the gdf?
@ -240,7 +238,6 @@ class Store:
await self.redis.set(self.get_layer_def_channel(store_name), layer_def_data)
## Update the layers/stores registry
if hasattr(self, 'app'):
await self.get_live_layer_defs()
return geojson
@ -259,7 +256,6 @@ class Store:
await self.redis.delete(self.get_mapbox_paint_channel(store_name))
## Update the layers/stores registry
if hasattr(self, 'app'):
await self.get_live_layer_defs()
async def has_channel(self, store_name):
@ -274,7 +270,7 @@ class Store:
async def get_layer_def(self, store_name):
return loads(await self.redis.get(self.get_layer_def_channel(store_name)))
async def get_live_layer_defs(self) -> list[LiveGeoModel]:
async def get_live_layer_defs(self): # -> list[LiveGeoModel]:
registry.geom_live_defs = {}
for channel in sorted(await self.get_live_layer_def_channels()):
model_info = loads(await self.redis.get(channel))
@ -370,8 +366,6 @@ class Store:
- listen to the DB event emitter: setup a callback function
"""
## Setup the function and triggers on tables
db = self.app['db']
## Keep the connection alive: don't use a "with" block
## It needs to be closed correctly: see _close_permanant_db_connection
self._permanent_conn = await db.acquire()
@ -419,17 +413,17 @@ class Store:
await self._permanent_conn.release()
async def setup_redis(app):
async def setup_redis():
global store
await store.setup(app)
await store.setup()
async def setup_redis_cache(app):
async def setup_redis_cache():
global store
await store._setup_db_cache_system()
async def shutdown_redis(app):
async def shutdown_redis():
global store
await store._close_permanant_db_connection()

View file

@ -11,9 +11,7 @@ from typing import Any, ClassVar
from pydantic import create_model
from sqlalchemy import inspect, text
from sqlalchemy.orm import selectinload
from sqlmodel import select
import numpy as np
from sqlmodel import SQLModel, select
import pandas as pd
from .config import conf
@ -23,6 +21,7 @@ from .models.geo_models_base import (
LiveGeoModel,
PlottableModel,
GeoModel,
SurveyModel,
RawSurveyBaseModel,
LineWorkSurveyModel,
GeoPointSurveyModel,
@ -32,6 +31,7 @@ from .models.geo_models_base import (
from .utils import ToMigrate
from .models.category import Category, CategoryGroup
from .database import db_session
from . import models
from .models.metadata import survey, raw_survey
logger = logging.getLogger(__name__)
@ -71,23 +71,32 @@ class ModelRegistry:
Provides tools to get the models from their names, table names, etc.
"""
stores: pd.DataFrame
values: dict[str, PlottableModel]
geom_live: dict[str, LiveGeoModel]
geom_live_defs: dict[str, dict[str, Any]]
geom_custom: dict[str, GeoModel]
geom_custom_store: dict[str, Any]
other: dict[str, SQLModel]
misc: dict[str, SQLModel]
raw_survey_models: dict[str, RawSurveyBaseModel]
survey_models: dict[str, SurveyModel]
def __init__(self):
def __init__(self) -> None:
"""
Get geo models
:return: None
"""
self.geom_custom = {}
self.geom_custom_store = {}
self.geom_live: dict[str, LiveGeoModel] = {}
self.geom_live_defs: dict[str, dict[str, Any]] = {}
self.geom_live = {}
self.geom_live_defs = {}
self.values = {}
self.other = {}
self.misc = {}
self.raw_survey_models = {}
self.survey_models = {}
async def make_registry(self, app=None):
async def make_registry(self):
"""
Make (or refresh) the registry of models.
:return:
@ -98,10 +107,7 @@ class ModelRegistry:
await self.build()
## If ogcapi is in app (i.e. not with scheduler):
## Now that the models are refreshed, tells the ogcapi to (re)build
if app:
#app.extra['registry'] = self
if 'ogcapi' in app.extra:
await app.extra['ogcapi'].build()
#await app.extra['ogcapi'].build()
async def make_category_models(self):
"""
@ -190,14 +196,12 @@ class ModelRegistry:
which are defined by categories), and store them for reference.
"""
logger.debug('scan')
from . import models # nocheck
## Scan the models defined in modules
for module_name, module in import_submodules(models).items():
if module_name in (
'src.gisaf.models.geo_models_base',
'src.gisaf.models.models_base',
if module_name.rsplit('.', 1)[-1] in (
'geo_models_base',
'models_base',
):
continue
for name in dir(module):
@ -630,13 +634,13 @@ class ModelRegistry:
'live': 'is_live',
'zIndex': 'z_index',
'gisType': 'model_type',
'type': 'mapbox_type',
# 'type': 'mapbox_type',
'viewableRole': 'viewable_role',
}, inplace=True
)
## Add columns
df_live['auto_import'] = False
df_live['base_gis_type'] = df_live['model_type']
df_live['base_gis_type'] = df_live['gis_type']
df_live['custom'] = False
df_live['group'] = ''
df_live['in_menu'] = True

310
src/gisaf/scheduler.py Executable file
View file

@ -0,0 +1,310 @@
#!/usr/bin/env python
"""
Gisaf task scheduler, orchestrating the background tasks
like remote device data collection, etc.
"""
import os
import logging
import sys
import asyncio
from json import dumps
from datetime import datetime
from importlib.metadata import entry_points
from typing import Any, Mapping, List
from fastapi import FastAPI
from pydantic_settings import BaseSettings, SettingsConfigDict
# from apscheduler import SchedulerStarted
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.triggers.date import DateTrigger
from .ipynb_tools import Gisaf
## Give every pre-existing root log handler (e.g. the one installed by
## uvicorn) a uniform timestamped format.
formatter = logging.Formatter(
    "%(asctime)s:%(levelname)s:%(name)s:%(message)s",
    "%Y-%m-%d %H:%M:%S"
)
for handler in logging.root.handlers:
    handler.setFormatter(formatter)

logger = logging.getLogger('gisaf.scheduler')
class Settings(BaseSettings):
    """Scheduler runtime settings, read from GISAF_SCHEDULER_* environment variables."""
    model_config = SettingsConfigDict(env_prefix='gisaf_scheduler_')
    app_name: str = 'Gisaf scheduler'
    ## Explicit allow-list of jobs to schedule (empty list: all discovered jobs)
    job_names: List[str] = []
    ## Jobs to skip even when discovered via entry points
    exclude_job_names: List[str] = []
    ## When true, only print the available job names and exit (see startup())
    list: bool = False
class JobBaseClass:
    """
    Base class for all the jobs.

    Subclasses set ``interval`` or ``cron`` (dicts of trigger parameters)
    to run periodically; when neither is set the job is treated as
    long-running and started immediately by the scheduler.
    """
    ## Identity and state, filled in by the scheduler at setup time
    name = '<unnammed task>'
    task_id = None
    enabled = True
    features = None
    ## Trigger definition, overridden by subclasses
    interval = None
    cron = None
    type = ''  ## interval, cron or longrun
    sched_params = ''

    def __init__(self):
        ## Most recent and currently running job events
        self.last_run = None
        self.current_run = None

    async def get_feature_ids(self):
        """
        Return the ids of the map features this job works on, for the
        front-ends. The scheduler calls this once on startup; subclasses
        may override it.
        """
        return []

    async def run(self):
        """
        The job body: subclasses should override this coroutine.
        """
        logger.info(f'Noop defined for {self.name}')
class JobScheduler:
    """
    Discover, schedule and supervise Gisaf background jobs.

    Jobs are classes registered under the 'gisaf_jobs' entry point group.
    Depending on their attributes they are scheduled with an interval or
    cron trigger, or started immediately as long-running tasks.
    Scheduler events can be forwarded to Redis and connected websockets.
    """
    gs: Gisaf
    jobs: dict[str, Any]
    tasks: dict[str, Any]
    ## Websockets by id(ws), for broadcasting job events
    wss: dict[str, Any]
    ## Queues of graphql subscribers
    subscribers: set[Any]
    scheduler: AsyncIOScheduler

    def __init__(self):
        #self.redis_store = gs.app['store']
        self.jobs = {}
        self.tasks = {}
        self.wss = {}
        self.subscribers = set()
        self.scheduler = AsyncIOScheduler()

    def start(self):
        """Start the APScheduler loop."""
        self.scheduler.start()

    def scheduler_event_listener(self, event):
        """Synchronous event hook: delegate to the async handler."""
        asyncio.create_task(self.scheduler_event_alistener(event))

    async def scheduler_event_alistener(self, event):
        """Async handler for generic scheduler events."""
        ## The SchedulerStarted class (APScheduler 4 API) is not imported
        ## (its import is commented out at the top of this module), so match
        ## on the class name to avoid a NameError when the event fires.
        if type(event).__name__ == 'SchedulerStarted':
            pid = os.getpid()
            logger.debug(f'Scheduler started, pid={pid}')
            #await self.gs.app['store'].pub.set('_scheduler/pid', pid)

    async def job_event_added(self, event):
        """Resolve the task and its single expected schedule for an added job."""
        task = await self.scheduler.data_store.get_task(event.task_id)
        schedules = [ss for ss in await self.scheduler.get_schedules()
                     if ss.task_id == event.task_id]
        ## Guard against 0 matches (the original code raised IndexError)
        ## as well as the ambiguous >1 case
        if len(schedules) != 1:
            logger.warning(
                f'Expected 1 schedule matching task {event.task_id}, '
                f'found {len(schedules)}'
            )
            return
        schedule = schedules[0]  # noqa: F841 -- kept for future event handling

    async def job_acquired(self, event):
        pass

    async def job_cancelled(self, event):
        pass

    async def job_released(self, event):
        ## TODO: reinstate the event -> task bookkeeping and fan-out
        ## (update task.last_run/current_run, publish to Redis, notify
        ## graphql subscribers and websockets) that existed before the
        ## APScheduler migration.
        pass

    async def send_to_redis_store(self, job, event, msg):
        """
        Publish a job event message to the Redis store (admin channel).
        Failures are logged, never raised.
        """
        try:
            self.gs.app['store'].pub.publish(
                'admin:scheduler:json',
                dumps({'msg': msg})
            )
        except Exception as err:
            logger.warning(f'Cannot publish updates for "{job.name}" to Redis: {err}')
            logger.exception(err)

    async def send_to_websockets(self, job, event, msg):
        """
        Send a job event message to all connected websockets.
        """
        for ws in self.wss.values():
            asyncio.create_task(
                ws.send_json({
                    'msg': msg
                })
            )

    def add_subscription(self, ws):
        """Register a websocket for job event broadcasts."""
        self.wss[id(ws)] = ws

    def delete_subscription(self, ws):
        """Unregister a websocket."""
        del self.wss[id(ws)]

    def get_available_jobs(self):
        """Return the names of all jobs declared in the 'gisaf_jobs' group."""
        return [
            entry_point.name
            for entry_point in entry_points().select(group='gisaf_jobs')
        ]

    async def setup(self, job_names=None, exclude_job_names=None):
        """
        Load the job classes from entry points and schedule them.

        :param job_names: restrict scheduling to these names (all jobs when empty)
        :param exclude_job_names: names to skip
        """
        if job_names is None:
            job_names = []
        if exclude_job_names is None:
            exclude_job_names = []
        ## Go through entry points and define the tasks
        for entry_point in entry_points().select(group='gisaf_jobs'):
            ## Eventually skip task according to arguments of the command line
            if (entry_point.name in exclude_job_names) \
                    or ((len(job_names) > 0) and entry_point.name not in job_names):
                logger.info(f'Skip task {entry_point.name}')
                continue
            try:
                task_class = entry_point.load()
            except Exception as err:
                logger.error(f'Task {entry_point.name} skipped cannot be loaded: {err}')
                continue
            ## Create the task instance
            try:
                task = task_class(self.gs)
            except Exception as err:
                logger.error(f'Task {entry_point.name} cannot be instanciated: {err}')
                continue
            task.name = entry_point.name
            if not task.enabled:
                logger.debug(f'Job "{entry_point.name}" disabled')
                continue
            logger.debug(f'Add task "{entry_point.name}"')
            if not hasattr(task, 'run'):
                logger.error(f'Task {entry_point.name} skipped: no run method')
                continue
            task.features = await task.get_feature_ids()
            ## Fixed annotation typo: dict[str: Any] -> dict[str, Any]
            kwargs: dict[str, Any] = {
                # 'tags': [entry_point.name],
            }
            if isinstance(task.interval, dict):
                kwargs['trigger'] = IntervalTrigger(**task.interval)
                task.type = 'interval'
                ## TODO: format user friendly text for interval
                task.sched_params = get_pretty_format_interval(task.interval)
            elif isinstance(task.cron, dict):
                kwargs['trigger'] = CronTrigger(**task.cron)
                ## The cron parameters are consumed by CronTrigger above; do
                ## NOT also merge them into kwargs (add_schedule would reject
                ## them as unexpected keyword arguments)
                task.type = 'cron'
                ## TODO: format user friendly text for cron
                task.sched_params = get_pretty_format_cron(task.cron)
            else:
                task.type = 'longrun'
                task.sched_params = 'always running'
                kwargs['trigger'] = DateTrigger(datetime.now())
            ## Create the APScheduler task
            ## NOTE(review): add_schedule/data_store belong to the APScheduler 4
            ## API while the imports use the 3.x module layout -- confirm the
            ## installed apscheduler version
            try:
                task.task_id = await self.scheduler.add_schedule(task.run, **kwargs)
            except Exception as err:
                logger.warning(f'Cannot add task {entry_point.name}: {err}')
                logger.exception(err)
            else:
                logger.info(f'Job "{entry_point.name}" added ({task.task_id})')
                self.tasks[task.task_id] = task
        ## Subscribe to all events (pending APScheduler 4 event API)
        # self.scheduler.subscribe(self.job_acquired, JobAcquired)
        # self.scheduler.subscribe(self.job_cancelled, JobCancelled)
        # self.scheduler.subscribe(self.job_released, JobReleased)
        # self.scheduler.subscribe(self.job_event_added, JobAdded)
        # self.scheduler.subscribe(self.scheduler_event_listener, SchedulerEvent)
class GSFastAPI(FastAPI):
    """FastAPI application carrying a reference to the job scheduler."""
    ## Attached at application startup; gives request handlers access to
    ## the scheduler via request.app.js
    js: JobScheduler
## Time-unit keywords accepted by IntervalTrigger
allowed_interval_params = {'seconds', 'minutes', 'hours', 'days', 'weeks'}


def get_pretty_format_interval(params):
    """
    Return a short printable description of an interval trigger
    definition, keeping only the recognised time-unit keys.
    """
    recognised = {}
    for key, value in params.items():
        if key in allowed_interval_params:
            recognised[key] = value
    return str(recognised)
def get_pretty_format_cron(params):
    """
    Return a short printable description of a cron trigger definition
    (currently just the string form of the parameter mapping).
    """
    return '{}'.format(params)
async def startup(settings):
    """
    Configure the global job scheduler according to the given settings.
    With settings.list, only print the available job names and exit;
    exit with status 1 when the scheduler cannot be set up.
    """
    if settings.list:
        ## Just print avalable jobs and exit
        print(' '.join(js.get_available_jobs()))
        sys.exit(0)
    try:
        await js.setup(
            job_names=settings.job_names,
            exclude_job_names=settings.exclude_job_names,
        )
    except Exception as err:
        logger.error('Cannot setup scheduler')
        logger.exception(err)
        sys.exit(1)
js = JobScheduler()

View file

@ -0,0 +1,57 @@
#!/usr/bin/env python
"""
Gisaf job scheduler, orchestrating the background tasks
like remote device data collection, etc.
"""
import logging
from contextlib import asynccontextmanager
from starlette.routing import Mount
from fastapi.middleware.cors import CORSMiddleware
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from .config import conf
from .ipynb_tools import gisaf
from .scheduler import GSFastAPI, js, startup, Settings
from .scheduler_web import app as sched_app
## Give every pre-existing root log handler (e.g. uvicorn's) a uniform
## timestamped format, then configure the level from the Gisaf settings.
formatter = logging.Formatter(
    "%(asctime)s:%(levelname)s:%(name)s:%(message)s",
    "%Y-%m-%d %H:%M:%S"
)
for handler in logging.root.handlers:
    handler.setFormatter(formatter)
logging.basicConfig(level=conf.gisaf.debugLevel)
logger = logging.getLogger('gisaf.scheduler_application')
@asynccontextmanager
async def lifespan(app: GSFastAPI):
    """
    Application lifespan: set up Gisaf and the job scheduler on startup.
    Nothing to tear down on shutdown.
    """
    await gisaf.setup()
    await startup(settings)
    js.start()
    yield
## Settings are read from GISAF_SCHEDULER_* environment variables
settings = Settings()

app = GSFastAPI(
    title=settings.app_name,
    lifespan=lifespan,
)

## Allow cross-origin requests from anywhere (the front-end is served separately)
app.add_middleware(
    CORSMiddleware,
    allow_origins=['*'],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

## NOTE(review): the scheduler web app is mounted under two prefixes --
## confirm whether both '/_sched' and '/sched' are really needed
app.mount('/_sched', sched_app)
app.mount('/sched', sched_app)

169
src/gisaf/scheduler_web.py Normal file
View file

@ -0,0 +1,169 @@
"""
The web API for Gisaf scheduler
"""
import logging
import asyncio
from datetime import datetime
from uuid import UUID
from typing import List
from fastapi import Request, WebSocket
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from redis import asyncio as aioredis
from pandas import DataFrame
from gisaf.live import live_server
from gisaf.scheduler import GSFastAPI
logger = logging.getLogger(__name__)
app = GSFastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
class Subscriber:
    """
    Context manager registering a fresh asyncio.Queue on a hub's
    ``subscribers`` set for the duration of the ``with`` block.
    See https://gist.github.com/appeltel/fd3ddeeed6c330c7208502462639d2c9
    """
    def __init__(self, hub):
        self.hub = hub
        self.queue = asyncio.Queue()

    def __enter__(self):
        ## Register the queue and hand it to the with-block
        self.hub.subscribers.add(self.queue)
        return self.queue

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.hub.subscribers.remove(self.queue)
class JobEvent(BaseModel):
    """Status snapshot of one job run, sent to the front-ends."""
    jobId: str | UUID
    ## pydantic v2 treats `X | None` without a default as *required*:
    ## give the optional fields an explicit None default
    time: datetime | None = None
    status: str
    msg: str
    nextRunTime: datetime | None = None
class Feature(BaseModel):
    """Reference to a map feature that a job works on."""
    store: str
    ## XXX: Using "id" gives very strange issue with apollo client
    id_: str
class Job_(BaseModel):
    """Description of a scheduled job for the front-ends."""
    id: str
    name: str
    type: str
    schedParams: str
    ## pydantic v2 treats `X | None` without a default as *required*:
    ## give the optional fields an explicit None default
    nextRunTime: datetime | None = None
    lastRun: JobEvent | None = None
    features: List[Feature]
class Task_(BaseModel):
    """Description of an APScheduler task/schedule pair for the front-ends."""
    id: str
    name: str
    type: str
    schedParams: str
    ## pydantic v2 treats `X | None` without a default as *required*:
    ## give the optional fields an explicit None default
    nextRunTime: datetime | None = None
    lastRunTime: datetime | None = None
    features: list[Feature]
def df_as_ObjectTypes(df):
    """
    Build the list of Feature objects for a job's dataframe.
    The dataframe must contain a 'store' column; the feature ids are
    taken from its index. Anything that is not a suitable DataFrame
    (eg. None) yields an empty list.
    """
    if not isinstance(df, DataFrame):
        return []
    if 'store' not in df.columns:
        # logger.warning(f'no store in get_feature_ids() for job "{job.name}"')
        return []
    features = []
    for row in df.itertuples(index=True):
        ## row[0] is the index value (the feature id)
        features.append(Feature(id_=str(row[0]), store=row.store))
    return features
@app.websocket('/events')
async def scheduler_ws(
        ws: WebSocket,
    ):
    """
    Websocket for scheduler updates.
    The client sends {'message': 'subscribe'|'unsubscribe'} to start or
    stop receiving job events; on disconnect any remaining subscription
    is cleaned up instead of leaking (receive_json raises
    WebSocketDisconnect when the client goes away).
    """
    ## Local import to keep the module-level imports untouched
    from fastapi import WebSocketDisconnect
    #session = await get_session(request)
    await ws.accept()
    subscribed = False
    try:
        while True:
            msg_data = await ws.receive_json()
            if 'message' in msg_data:
                if msg_data['message'] == 'subscribe':
                    live_server.add_subscription(ws, 'admin:scheduler')
                    ws.app.js.add_subscription(ws)
                    subscribed = True
                elif msg_data['message'] == 'unsubscribe':
                    live_server.remove_subscription(ws, 'admin:scheduler')
                    ws.app.js.delete_subscription(ws)
                    subscribed = False
    except WebSocketDisconnect:
        ## Client went away: drop any subscription left behind
        if subscribed:
            live_server.remove_subscription(ws, 'admin:scheduler')
            ws.app.js.delete_subscription(ws)
@app.websocket('/subscriptions')
async def subscriptions(ws: WebSocket):
    """
    Simple echo websocket kept from the aiohttp version: replies
    '<text>/answer' to any text message and closes on 'close'.
    Fixed: the previous body referenced undefined aiohttp names
    (msg, WSMsgType) and raised NameError on the first message.
    """
    ## Local import to keep the module-level imports untouched
    from fastapi import WebSocketDisconnect
    await ws.accept()
    try:
        while True:
            msg_text = await ws.receive_text()
            if msg_text == 'close':
                await ws.close()
                break
            await ws.send_text(msg_text + '/answer')
    except WebSocketDisconnect:
        pass
@app.get('/time')
async def get_time():
    """Return the scheduler server's current local time (simple health check)."""
    return datetime.now()
@app.get('/jobs')
async def get_jobs(request: Request) -> list[Task_]:
    """
    List the scheduled tasks with their trigger type and last/next run
    times, joining APScheduler's stored tasks/schedules with the
    scheduler's own task registry for the feature lists.
    """
    app: GSFastAPI = request.app
    data_store = app.js.scheduler.data_store
    tasks_by_id = {task.id: task for task in await data_store.get_tasks()}
    result = []
    for schedule in await data_store.get_schedules():
        stored_task = tasks_by_id[schedule.task_id]
        registered = app.js.tasks[schedule.id]
        result.append(
            Task_(
                id=stored_task.id,
                name=stored_task.id,
                type=schedule.trigger.__class__.__name__,
                schedParams='',
                lastRunTime=schedule.last_fire_time,
                nextRunTime=schedule.next_fire_time,
                features=df_as_ObjectTypes(registered.features),
            )
        )
    return result
# async def setup_app_session(app):
# """
# Setup a redis pool for session management
# Not related to the redis connection used by Gisaf
# """
# redis = aioredis.from_url('redis://localhost')
# redis_storage = RedisStorage(redis)
# session_identity_policy = SessionIdentityPolicy()
# setup_session(app, redis_storage)