Misc: basic registry, with survey stores.
Move to the standard src/ dir; versions: official sqlmodel, pydantic v2, etc.
This commit is contained in:
parent 5494f6085f
commit 049b8c9927

31 changed files with 670 additions and 526 deletions
Changed files:

.vscode/launch.json
pdm.lock
pyproject.toml
src/
  _version.py
  application.py
  gisaf/
    __init__.py
    _version.py
    api.py
    application.py
    config.py
    database.py
    models/
      __init__.py
      authentication.py
      bootstrap.py
      category.py
      geo_models_base.py
      map_bases.py
      metadata.py
      misc.py
      models_base.py
      project.py
      raw_survey.py
      reconcile.py
      store.py
      survey.py
      tags.py
    registry.py
    security.py
    utils.py
  models/
.vscode/launch.json (vendored, 2 changes)
@@ -10,7 +10,7 @@
             "request": "launch",
             "module": "uvicorn",
             "args": [
-                "src.application:app",
+                "src.gisaf.application:app",
                 "--port=5003",
                 "--reload"
             ],
pdm.lock (generated, 127 changes)
@@ -5,7 +5,7 @@
 groups = ["default", "dev"]
 strategy = ["cross_platform"]
 lock_version = "4.4"
-content_hash = "sha256:03b37375a71c7e841ead16f1b6813034b4bfde011ebeb2985958dcba75376c47"
+content_hash = "sha256:0d6cc736afc51fceae2eaff49ffbd91678e0ecb5c6f29e683f12c974c6f9bdac"
 
 [[package]]
 name = "annotated-types"
@@ -273,16 +273,6 @@ files = [
     {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
 ]
 
-[[package]]
-name = "dnspython"
-version = "2.4.2"
-requires_python = ">=3.8,<4.0"
-summary = "DNS toolkit"
-files = [
-    {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"},
-    {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"},
-]
-
 [[package]]
 name = "ecdsa"
 version = "0.18.0"
@@ -296,20 +286,6 @@ files = [
     {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
 ]
 
-[[package]]
-name = "email-validator"
-version = "2.1.0.post1"
-requires_python = ">=3.8"
-summary = "A robust email address syntax and deliverability validation library."
-dependencies = [
-    "dnspython>=2.0.0",
-    "idna>=2.0.0",
-]
-files = [
-    {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"},
-    {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"},
-]
-
 [[package]]
 name = "executing"
 version = "2.0.0"
@@ -660,6 +636,18 @@ files = [
     {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
 ]
 
+[[package]]
+name = "pretty-errors"
+version = "1.2.25"
+summary = "Prettifies Python exception output to make it legible."
+dependencies = [
+    "colorama",
+]
+files = [
+    {file = "pretty_errors-1.2.25-py3-none-any.whl", hash = "sha256:8ce68ccd99e0f2a099265c8c1f1c23b7c60a15d69bb08816cb336e237d5dc983"},
+    {file = "pretty_errors-1.2.25.tar.gz", hash = "sha256:a16ba5c752c87c263bf92f8b4b58624e3b1e29271a9391f564f12b86e93c6755"},
+]
+
 [[package]]
 name = "prompt-toolkit"
 version = "3.0.39"
@@ -842,21 +830,6 @@ files = [
     {file = "pydantic_settings-2.0.3.tar.gz", hash = "sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"},
 ]
 
-[[package]]
-name = "pydantic"
-version = "2.4.0"
-extras = ["email"]
-requires_python = ">=3.7"
-summary = "Data validation using Python type hints"
-dependencies = [
-    "email-validator>=2.0.0",
-    "pydantic==2.4.0",
-]
-files = [
-    {file = "pydantic-2.4.0-py3-none-any.whl", hash = "sha256:909b2b7d7be775a890631218e8c4b6b5418c9b6c57074ae153e5c09b73bf06a3"},
-    {file = "pydantic-2.4.0.tar.gz", hash = "sha256:54216ccb537a606579f53d7f6ed912e98fffce35aff93b25cd80b1c2ca806fc3"},
-]
-
 [[package]]
 name = "pygments"
 version = "2.16.1"
@@ -1067,7 +1040,7 @@ files = [
 
 [[package]]
 name = "sqlalchemy"
-version = "2.0.11"
+version = "2.0.23"
 requires_python = ">=3.7"
 summary = "Database Abstraction Library"
 dependencies = [
@@ -1075,51 +1048,69 @@ dependencies = [
     "typing-extensions>=4.2.0",
 ]
 files = [
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa81761ff674d2e2d591fc88d31835d3ecf65bddb021a522f4eaaae831c584cf"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:21f447403a1bfeb832a7384c4ac742b7baab04460632c0335e020e8e2c741d4b"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4d8d96c0a7265de8496250a2c2d02593da5e5e85ea24b5c54c2db028d74cf8c"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c4c5834789f718315cb25d1b95d18fde91b72a1a158cdc515d7f6380c1f02a3"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f57965a9d5882efdea0a2c87ae2f6c7dbc14591dcd0639209b50eec2b3ec947e"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0dd98b0be54503afc4c74e947720c3196f96fb2546bfa54d911d5de313c5463c"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-win32.whl", hash = "sha256:eec40c522781a58839df6a2a7a2d9fbaa473419a3ab94633d61e00a8c0c768b7"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:62835d8cd6713458c032466c38a43e56503e19ea6e54b0e73295c6ab281fc0b1"},
-    {file = "SQLAlchemy-2.0.11-py3-none-any.whl", hash = "sha256:1d28e8278d943d9111d44720f92cc338282e956ed68849bfcee053c06bde4f39"},
-    {file = "SQLAlchemy-2.0.11.tar.gz", hash = "sha256:c3cbff7cced3c42dbe71448ce6bf4202b4a2d305e78dd77e3f280ba6cd245138"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"},
+    {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"},
+    {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"},
 ]
 
 [[package]]
 name = "sqlalchemy"
-version = "2.0.11"
+version = "2.0.23"
 extras = ["asyncio"]
 requires_python = ">=3.7"
 summary = "Database Abstraction Library"
 dependencies = [
     "greenlet!=0.4.17",
-    "sqlalchemy==2.0.11",
+    "sqlalchemy==2.0.23",
 ]
 files = [
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa81761ff674d2e2d591fc88d31835d3ecf65bddb021a522f4eaaae831c584cf"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:21f447403a1bfeb832a7384c4ac742b7baab04460632c0335e020e8e2c741d4b"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4d8d96c0a7265de8496250a2c2d02593da5e5e85ea24b5c54c2db028d74cf8c"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c4c5834789f718315cb25d1b95d18fde91b72a1a158cdc515d7f6380c1f02a3"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f57965a9d5882efdea0a2c87ae2f6c7dbc14591dcd0639209b50eec2b3ec947e"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0dd98b0be54503afc4c74e947720c3196f96fb2546bfa54d911d5de313c5463c"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-win32.whl", hash = "sha256:eec40c522781a58839df6a2a7a2d9fbaa473419a3ab94633d61e00a8c0c768b7"},
-    {file = "SQLAlchemy-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:62835d8cd6713458c032466c38a43e56503e19ea6e54b0e73295c6ab281fc0b1"},
-    {file = "SQLAlchemy-2.0.11-py3-none-any.whl", hash = "sha256:1d28e8278d943d9111d44720f92cc338282e956ed68849bfcee053c06bde4f39"},
-    {file = "SQLAlchemy-2.0.11.tar.gz", hash = "sha256:c3cbff7cced3c42dbe71448ce6bf4202b4a2d305e78dd77e3f280ba6cd245138"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"},
+    {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"},
+    {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"},
+    {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"},
+    {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"},
 ]
 
 [[package]]
 name = "sqlmodel"
-version = "0"
+version = "0.0.14"
 requires_python = ">=3.7,<4.0"
-git = "https://github.com/mbsantiago/sqlmodel.git"
-revision = "3005495a3ec6c8216b31cbd623f91c7bc8ba174f"
 summary = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
 dependencies = [
-    "SQLAlchemy<=2.0.11,>=2.0.0",
-    "pydantic[email]<=2.4,>=2.1.1",
+    "SQLAlchemy<2.1.0,>=2.0.0",
+    "pydantic<3.0.0,>=1.10.13",
 ]
+files = [
+    {file = "sqlmodel-0.0.14-py3-none-any.whl", hash = "sha256:accea3ff5d878e41ac439b11e78613ed61ce300cfcb860e87a2d73d4884cbee4"},
+    {file = "sqlmodel-0.0.14.tar.gz", hash = "sha256:0bff8fc94af86b44925aa813f56cf6aabdd7f156b73259f2f60692c6a64ac90e"},
+]
 
 [[package]]
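Editor's note: the lock now tracks the official sqlmodel 0.0.14 release instead of the patched git checkout, drops the pydantic[email] pin along with its dnspython/email-validator chain, and bumps SQLAlchemy to 2.0.23. A minimal sketch (not part of the commit; 'Equipment' is a hypothetical table, not a gisaf model) of the kind of model this pydantic v2 based stack supports:

# Hedged sketch: a table model under sqlmodel 0.0.14 / pydantic v2 /
# SQLAlchemy 2.0.23, the combination pinned by this lock file.
from sqlmodel import Field, SQLModel

class Equipment(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    name: str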
pyproject.toml

@@ -1,5 +1,5 @@
 [project]
-name = "Gisaf"
+name = "gisaf"
 dynamic = ["version"]
 description = ""
 authors = [
@@ -13,7 +13,6 @@ dependencies = [
     "psycopg2-binary>=2.9.9",
     "sqlalchemy[asyncio]",
     "asyncpg>=0.28.0",
-    #"sqlmodel>=0.0.11",
     "python-jose[cryptography]>=3.3.0",
     "geoalchemy2>=0.14.2",
     "pyyaml>=6.0.1",
@@ -23,6 +22,7 @@ dependencies = [
     "passlib[bcrypt]>=1.7.4",
     "pyshp>=2.3.1",
    "orjson>=3.9.10",
+    "sqlmodel>=0.0.14",
 ]
 requires-python = ">=3.11"
 readme = "README.md"
@@ -35,10 +35,10 @@ build-backend = "pdm.backend"
 [project.optional-dependencies]
 dev = [
     "ipdb>=0.13.13",
-    "sqlmodel @ git+https://github.com/mbsantiago/sqlmodel.git#egg=sqlmodel",
+    "pretty-errors>=1.2.25",
 ]
 
 [tool.pdm.version]
 source = "scm"
-write_to = "_version.py"
+write_to = "gisaf/_version.py"
 write_template = "__version__ = '{}'"
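Editor's note: with [tool.pdm.version] now writing to gisaf/_version.py, the generated version file follows the package into src/gisaf (the old top-level src/_version.py is deleted below). A sketch of the usual consumption pattern, assuming the generated file only defines __version__:

# Hedged sketch (assumption, not shown in this commit): the package can
# re-export the scm-derived version that pdm writes at build time.
from gisaf._version import __version__

print(__version__)  # e.g. '2023.4.dev3+g5494f60.d20231212'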
src/_version.py (deleted)

@@ -1 +0,0 @@
-__version__ = '2023.4.dev1+g90091e8.d20231118'
src/application.py (deleted)

@@ -1,14 +0,0 @@
-from fastapi import FastAPI
-import logging
-
-from .api import api
-from .config import conf
-
-logging.basicConfig(level=conf.gisaf.debugLevel)
-
-app = FastAPI(
-    debug=True,
-    title=conf.gisaf.title,
-    version=conf.version,
-)
-app.mount('/v2', api)
src/gisaf/__init__.py (new file, empty)
src/gisaf/_version.py (new file)
@@ -0,0 +1 @@
+__version__ = '2023.4.dev3+g5494f60.d20231212'
src/gisaf/api.py

@@ -1,48 +1,34 @@
 import logging
 from datetime import timedelta
 from time import time
 from uuid import uuid1
 from typing import Annotated
-from contextlib import asynccontextmanager
 
-from fastapi import Depends, FastAPI, HTTPException, status, Request
-from sqlalchemy.orm import selectinload
+from fastapi import Depends, FastAPI, HTTPException, status, responses
 from fastapi.security import OAuth2PasswordRequestForm
-from fastapi.responses import ORJSONResponse
-from starlette.middleware.sessions import SessionMiddleware
 
 from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession
 
+from sqlalchemy.orm import selectinload, joinedload
+
 from .models.authentication import (
     User, UserRead,
     Role, RoleRead,
     UserRoleLink
 )
-from .models.category import (
-    CategoryGroup, CategoryModelType,
-    Category, CategoryRead
-)
+from .models.category import Category, CategoryRead
+from .config import conf
+from .models.bootstrap import BootstrapData
+from .models.store import Store
 from .database import get_db_session, pandas_query
 from .security import (
     User as UserAuth,
     Token,
     authenticate_user, get_current_user, create_access_token,
 )
-from .config import conf
-from .registry import make_registry
+from .registry import registry
 
 logger = logging.getLogger(__name__)
 
 
-@asynccontextmanager
-async def lifespan(app: FastAPI):
-    make_registry(app)
-    yield
-
-api = FastAPI(lifespan=lifespan)
+api = FastAPI(
+    default_response_class=responses.ORJSONResponse,
+)
 #api.add_middleware(SessionMiddleware, secret_key=conf.crypto.secret)
 
 db_session = Annotated[AsyncSession, Depends(get_db_session)]
@@ -71,6 +57,16 @@ async def login_for_access_token(
         expires_delta=timedelta(seconds=conf.crypto.expire))
     return {"access_token": access_token, "token_type": "bearer"}
 
+@api.get("/list")
+async def list_data_providers():
+    """
+    Return a list of data providers, for use with the api (graphs, etc)
+    :return:
+    """
+    return [{'name': m.__name__, 'store': m.get_store_name()}
+            for m in registry.values_for_model]
+
+
 @api.get("/users")
 async def get_users(
     db_session: db_session,
@@ -95,7 +91,7 @@ async def get_categories(
     data = await db_session.exec(query)
     return data.all()
 
-@api.get("/categories_p")
+@api.get("/categories_pandas")
 async def get_categories_p(
     db_session: db_session,
 ) -> list[CategoryRead]:
@@ -103,6 +99,12 @@ async def get_categories_p(
     df = await db_session.run_sync(pandas_query, query)
     return df.to_dict(orient="records")
 
+@api.get("/stores")
+async def get_stores() -> list[Store]:
+    df = registry.stores.reset_index().drop(columns=['model', 'raw_model'])
+    return df.to_dict(orient="records")
+
+
 # @api.get("/user-role")
 # async def get_user_role_relation(
 #     *, db_session: AsyncSession = Depends(get_db_session)
src/gisaf/application.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+from contextlib import asynccontextmanager
+import logging
+from typing import Any
+
+#import colorama
+#colorama.init()
+
+from fastapi import FastAPI, responses
+
+from .api import api
+from .config import conf
+from .registry import registry, ModelRegistry
+
+logging.basicConfig(level=conf.gisaf.debugLevel)
+
+logger = logging.getLogger(__name__)
+
+## Subclass FastAPI to add attributes to be used globally, ie. registry
+class GisafExtra:
+    registry: ModelRegistry
+    #raw_survey_models: dict[str, Any] = {}
+    #survey_models: dict[str, Any] = {}
+
+
+class GisafFastAPI(FastAPI):
+    gisaf_extra: GisafExtra
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    await registry.make_registry(app)
+    yield
+
+app = FastAPI(
+    debug=False,
+    title=conf.gisaf.title,
+    version=conf.version,
+    lifespan=lifespan,
+    default_response_class=responses.ORJSONResponse,
+)
+app.mount('/v2', api)
src/gisaf/config.py

@@ -3,7 +3,10 @@ import logging
 from pathlib import Path
 from typing import Any, Type, Tuple
 
-from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
+from pydantic_settings import (BaseSettings,
+                               PydanticBaseSettingsSource,
+                               SettingsConfigDict)
+from pydantic import ConfigDict
 from pydantic.v1.utils import deep_update
 from yaml import safe_load
 
@@ -30,6 +33,7 @@ class GisafConfig(BaseSettings):
     debugLevel: str
     dashboard_home: DashboardHome
     redirect: str = ''
+    use_pretty_errors: bool = False
 
 class SpatialSysRef(BaseSettings):
     author: str
@@ -59,7 +63,8 @@ class Flask(BaseSettings):
     debug: int
 
 class MQTT(BaseSettings):
-    broker: str
+    broker: str = 'localhost'
+    port: int = 1883
 
 class GisafLive(BaseSettings):
     hostname: str
@@ -73,8 +78,9 @@ class DefaultSurvey(BaseSettings):
     equipment_id: int
 
 class Survey(BaseSettings):
-    schema_raw: str
-    schema: str
+    model_config = ConfigDict(extra='ignore')
+    db_schema_raw: str
+    db_schema: str
     default: DefaultSurvey
 
 class Crypto(BaseSettings):
@@ -153,11 +159,11 @@ class OGCAPI(BaseSettings):
     server: OGCAPIServer
 
 class TileServer(BaseSettings):
-    BaseDir: str
-    UseRequestUrl: bool = False
-    SpriteBaseDir: str
-    SpriteUrl: str
-    SpriteBaseUrl: str
+    baseDir: str
+    useRequestUrl: bool = False
+    spriteBaseDir: str
+    spriteUrl: str
+    spriteBaseUrl: str
     openMapTilesKey: str | None = None
 
 class Map(BaseSettings):
@@ -216,6 +222,11 @@ class Job(BaseSettings):
     seconds: int | None = 0
 
 class Config(BaseSettings):
+    model_config = SettingsConfigDict(
+        #env_prefix='gisaf_',
+        env_nested_delimiter='__',
+    )
+
     @classmethod
     def settings_customise_sources(
         cls,
src/gisaf/models/__init__.py (new file, empty)
src/gisaf/models/bootstrap.py

@@ -1,12 +1,13 @@
-from sqlmodel import Field, SQLModel, MetaData, JSON, TEXT, Relationship, Column
+from pydantic import BaseModel
 from ..config import conf, Map, Measures, Geo
 from .authentication import UserRead
 
-class Proj(SQLModel):
+class Proj(BaseModel):
     srid: str
     srid_for_proj: str
 
-class BootstrapData(SQLModel):
+
+class BootstrapData(BaseModel):
     version: str = conf.version
     title: str = conf.gisaf.title
     windowTitle: str = conf.gisaf.windowTitle
src/gisaf/models/category.py

@@ -1,8 +1,10 @@
-from typing import Any
-from sqlmodel import Field, SQLModel, JSON, TEXT, Column
-from pydantic import computed_field
+from typing import Any, ClassVar
+
+from pydantic import computed_field, ConfigDict
+from sqlmodel import Field, Relationship, SQLModel, JSON, TEXT, Column, select
 
 from .metadata import gisaf_survey
+from ..database import db_session, pandas_query
 
 mapbox_type_mapping = {
     'Point': 'symbol',
@@ -10,20 +12,31 @@ mapbox_type_mapping = {
     'Polygon': 'fill',
 }
 
-class CategoryGroup(SQLModel, table=True):
+class BaseModel(SQLModel):
+    @classmethod
+    async def get_df(cls):
+        async with db_session() as session:
+            query = select(cls)
+            return await session.run_sync(pandas_query, query)
+
+
+class CategoryGroup(BaseModel, table=True):
     metadata = gisaf_survey
-    name: str = Field(min_length=4, max_length=4,
+    __tablename__ = 'category_group'
+    name: str | None = Field(min_length=4, max_length=4,
                       default=None, primary_key=True)
     major: str
     long_name: str
+    categories: list['Category'] = Relationship(back_populates='category_group')
 
     class Admin:
         menu = 'Other'
        flask_admin_model_view = 'CategoryGroupModelView'
 
 
-class CategoryModelType(SQLModel, table=True):
+class CategoryModelType(BaseModel, table=True):
     metadata = gisaf_survey
+    __tablename__ = 'category_model_type'
     name: str = Field(default=None, primary_key=True)
 
     class Admin:
@@ -31,42 +44,33 @@ class CategoryModelType(SQLModel, table=True):
         flask_admin_model_view = 'MyModelViewWithPrimaryKey'
 
 
-class CategoryBase(SQLModel):
+class CategoryBase(BaseModel):
+    model_config = ConfigDict(protected_namespaces=())
     class Admin:
         menu = 'Other'
         flask_admin_model_view = 'CategoryModelView'
 
     name: str | None = Field(default=None, primary_key=True)
+    domain: ClassVar[str] = 'V'
     description: str | None
     group: str = Field(min_length=4, max_length=4,
-                       foreign_key="CategoryGroup.name", index=True)
-    #group_: CategoryGroup = Relationship()
+                       foreign_key="category_group.name", index=True)
     minor_group_1: str = Field(min_length=4, max_length=4, default='----')
     minor_group_2: str = Field(min_length=4, max_length=4, default='----')
     status: str = Field(min_length=1, max_length=1)
     custom: bool | None
     auto_import: bool = True
     model_type: str = Field(max_length=50,
-                            foreign_key='CategoryModelType.name',
+                            foreign_key='category_model_type.name',
                             default='Point')
     long_name: str | None = Field(max_length=50)
-    style: str | None = Field(sa_column=Column(TEXT))
+    style: str | None = Field(sa_type=TEXT)
     symbol: str | None = Field(max_length=1)
     mapbox_type_custom: str | None = Field(max_length=32)
-    mapbox_paint: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
-    mapbox_layout: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
+    mapbox_paint: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
+    mapbox_layout: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
     viewable_role: str | None
-    extra: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
-
-
-class Category(CategoryBase, table=True):
-    metadata = gisaf_survey
-    name: str = Field(default=None, primary_key=True)
-
-
-class CategoryRead(CategoryBase):
-    name: str
-    domain: str = 'V' # Survey
+    extra: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
 
     @computed_field
     @property
@@ -105,3 +109,13 @@ class CategoryRead(CategoryBase):
     @property
     def mapbox_type(self) -> str:
         return self.mapbox_type_custom or mapbox_type_mapping[self.model_type]
+
+
+class Category(CategoryBase, table=True):
+    metadata = gisaf_survey
+    name: str = Field(default=None, primary_key=True)
+    category_group: CategoryGroup = Relationship(back_populates="categories")
+
+
+class CategoryRead(CategoryBase):
+    name: str
src/gisaf/models/geo_models_base.py

@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Annotated
 from datetime import date, datetime
 from collections import OrderedDict
 from io import BytesIO
@@ -16,15 +16,15 @@ import shapely
 import pyproj
 
 from sqlmodel import SQLModel, Field
+from pydantic import BaseModel
 
 from geoalchemy2.shape import from_shape
 from sqlalchemy.dialects.postgresql import BIGINT
 from sqlalchemy import BigInteger, Column, String, func, and_
 from sqlalchemy.sql import sqltypes
 from psycopg2.extensions import adapt
 
-from geoalchemy2.types import Geometry
-from geoalchemy2.elements import WKBElement
+from geoalchemy2.types import Geometry, WKBElement
 
 from shapely import wkb
 from shapely.geometry import mapping
@@ -74,13 +74,13 @@ exportable_cols = {
 }
 
 
-class BaseSurveyModel(SQLModel):
+class BaseSurveyModel(BaseModel):
     """
     Base mixin class for all layers defined from a category:
    - raw survey (RAW_V_*')
     - projected ('V_*')
     """
-    id: int = Field(sa_column=Column(BigInteger()), primary_key=True)
+    id: int = Field(sa_type=BigInteger, primary_key=True, default=None)
     equip_id: int = Field(foreign_key='equipment.id')
     srvyr_id: int = Field('surveyor.id')
     accur_id: int = Field('accuracy.id')
@@ -138,11 +138,11 @@ class SurveyModel(BaseSurveyModel):
     """
     Base mixin class for defining final (reprojected) survey data, with a status
     """
-    status: str = Field(sa_column=Column(String(1)))
+    status: str = Field(sa_type=String(1))
 
-    get_gdf_with_related: bool = False
+    get_gdf_with_related: ClassVar[bool] = False
 
-    filtered_columns_on_map: list[str] = [
+    filtered_columns_on_map: ClassVar[list[str]] = [
         'equip_id',
         'srvyr_id',
         'accur_id',
@@ -279,17 +279,17 @@ class GeoModel(Model):
     Base class for all geo models
     """
     #__abstract__ = True
-    description: str = ''
-    attribution: str | None = None
+    description: ClassVar[str] = ''
+    attribution: ClassVar[str | None] = None
 
-    can_get_features_as_df: bool = True
+    can_get_features_as_df: ClassVar[bool] = True
     """
     can_get_features_as_df indicates that the model is ready to get GeoJson using GeoDataframe
     If False, switch back to gino and dict based conversion using get_features_in_bulk_gino
     and record.get_feature_as_dict (DEPRECATED)
     """
 
-    cache_enabled: bool = True
+    cache_enabled: ClassVar[bool] = True
     """
     cache_enabled indicated that the model is OK with the caching mechanism of geojson stores.
     The cache is time-stamped with DB triggers on modification, so it's safe unless the model
@@ -297,7 +297,7 @@ class GeoModel(Model):
     See gisaf.redis_tools and geoapi.gj_feature for the implementation details of the cache.
     """
 
-    get_gdf_with_related: bool = False
+    get_gdf_with_related: ClassVar[bool] = False
     """
     get_gdf_with_related indicates that get_df (thus, get_geo_df and the geoJson API for
     the map online) gets related models (1-n relations, as defined with _join_with and dyn_join_with)
@@ -305,39 +305,39 @@ class GeoModel(Model):
     It can be overridden with the with_related parameter when calling get_df.
     """
 
-    z_index: int = 450
+    z_index: ClassVar[int] = 450
     """
     z-index for the leaflet layer.
     Should be between 400 and 500.
     """
 
-    icon: str | None = None
+    icon: ClassVar[str | None] = None
     """
     Icon for the model, used for normal web interface (ie. except the map)
     """
 
-    symbol: str | None = None
+    symbol: ClassVar[str | None] = None
     """
     Icon for the model, used in the map (mapbox)
     """
 
-    style: str = ''
+    style: ClassVar[str] = ''
     """
     Style for the model, used in the map, etc
     """
 
-    status: str = 'E'
+    status: ClassVar[str] = 'E'
     """
     Status (ISO layers definition) of the layer. E -> Existing.
     """
 
-    _join_with: dict[str, Any] = {
+    _join_with: ClassVar[dict[str, Any]] = {
     }
     """
     Fields to join when getching items using get_features.
     """
 
-    hidden: bool = False
+    hidden: ClassVar[bool] = False
     """
     This model should be hidden from the menu
     """
@@ -748,11 +748,12 @@ class Geom(str):
 class GeoPointModel(GeoModel):
     #__abstract__ = True
     shapefile_model: ClassVar[int] = POINT
-    geom: Any = Field(sa_column=Column(Geometry('POINT', srid=conf.geo.srid)))
-    icon: str | None = None
-    mapbox_type: str = 'symbol'
-    base_gis_type: str = 'Point'
-    symbol: str = '\ue32b'
+    ## geometry typing, see https://stackoverflow.com/questions/77333100/geoalchemy2-geometry-schema-for-pydantic-fastapi
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POINT', srid=conf.geo.srid))
+    icon: ClassVar[str | None] = None
+    mapbox_type: ClassVar[str] = 'symbol'
+    base_gis_type: ClassVar[str] = 'Point'
+    symbol: ClassVar[str] = '\ue32b'
 
     @property
     def latitude(self):
@@ -810,8 +811,8 @@ class GeoPointModel(GeoModel):
 
 class GeoPointZModel(GeoPointModel):
     #__abstract__ = True
-    geom: Any = Field(sa_column=Column(Geometry('POINTZ', dimension=3, srid=conf.geo.srid)))
-    shapefile_model: int = POINTZ
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POINTZ', dimension=3, srid=conf.geo.srid))
+    shapefile_model: ClassVar[int] = POINTZ
 
     def get_coords(self):
         return (self.shapely_geom.x, self.shapely_geom.y, self.shapely_geom.z)
@@ -824,16 +825,16 @@ class GeoPointZModel(GeoPointModel):
 
 class GeoPointMModel(GeoPointZModel):
     #__abstract__ = True
-    shapefile_model: int = POINTZ
-    geom: Any = Field(sa_column=Column(Geometry('POINTZ', dimension=3, srid=conf.geo.srid)))
+    shapefile_model: ClassVar[int] = POINTZ
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POINTZ', dimension=3, srid=conf.geo.srid))
 
 
 class GeoLineModel(GeoModel):
     #__abstract__ = True
-    shapefile_model: int = POLYLINE
-    geom: Any = Field(sa_column=Column(Geometry('LINESTRING', srid=conf.geo.srid)))
-    mapbox_type: str = 'line'
-    base_gis_type: str = 'Line'
+    shapefile_model: ClassVar[int] = POLYLINE
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('LINESTRING', srid=conf.geo.srid))
+    mapbox_type: ClassVar[str] = 'line'
+    base_gis_type: ClassVar[str] = 'Line'
 
     @property
     def length(self):
@@ -894,8 +895,8 @@ class GeoLineModel(GeoModel):
 
 class GeoLineModelZ(GeoLineModel):
     #__abstract__ = True
-    shapefile_model: int = POLYLINEZ
-    geom: Any = Field(sa_column=Column(Geometry('LINESTRINGZ', dimension=3, srid=conf.geo.srid)))
+    shapefile_model: ClassVar[int] = POLYLINEZ
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('LINESTRINGZ', dimension=3, srid=conf.geo.srid))
 
     async def get_geo_info(self):
         info = await super(GeoLineModelZ, self).get_geo_info()
@@ -910,11 +911,11 @@ class GeoLineModelZ(GeoLineModel):
 
 
 class GeoPolygonModel(GeoModel):
-    __abstract__ = True
-    shapefile_model: int = POLYGON
-    geom: Any = Field(sa_column=Column(Geometry('POLYGON', srid=conf.geo.srid)))
-    mapbox_type: str = 'fill'
-    base_gis_type: str = 'Polygon'
+    #__abstract__ = True
+    shapefile_model: ClassVar[int] = POLYGON
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POLYGON', srid=conf.geo.srid))
+    mapbox_type: ClassVar[str] = 'fill'
+    base_gis_type: ClassVar[str] = 'Polygon'
 
     @property
     def area(self):
@@ -982,9 +983,9 @@ class GeoPolygonModel(GeoModel):
 
 
 class GeoPolygonModelZ(GeoPolygonModel):
-    __abstract__ = True
-    shapefile_model: int = POLYGONZ
-    geom: Any = Field(sa_column=Column(Geometry('POLYGONZ', dimension=3, srid=conf.geo.srid)))
+    #__abstract__ = True
+    shapefile_model: ClassVar[int] = POLYGONZ
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POLYGONZ', dimension=3, srid=conf.geo.srid))
 
     async def get_geo_info(self):
         info = await super(GeoPolygonModelZ, self).get_geo_info()
@@ -1006,14 +1007,14 @@ class GeoPointSurveyModel(SurveyModel, GeoPointMModel):
     #__abstract__ = True
 
     ## raw_model is set in category_models_maker.make_category_models
-    raw_model: Any = None
+    raw_model: ClassVar['RawSurveyBaseModel'] = None
 
 
 class LineWorkSurveyModel(SurveyModel):
-    __abstract__ = True
+    #__abstract__ = True
 
     ## raw_model is set in category_models_maker.make_category_models
-    raw_model: Any = None
+    raw_model: ClassVar['RawSurveyBaseModel'] = None
 
     def match_raw_points(self):
         reprojected_geom = transform(reproject_func, self.shapely_geom)
@@ -1026,27 +1027,31 @@ class LineWorkSurveyModel(SurveyModel):
 
 
 class GeoLineSurveyModel(LineWorkSurveyModel, GeoLineModelZ):
-    __abstract__ = True
+    #__abstract__ = True
+    pass
 
 
 class GeoPolygonSurveyModel(LineWorkSurveyModel, GeoPolygonModelZ):
-    __abstract__ = True
+    #__abstract__ = True
+    pass
 
 
 class RawSurveyBaseModel(BaseSurveyModel, GeoPointMModel):
     """
     Abstract base class for category based raw survey point models
     """
-    __abstract__ = True
-    geom: Any = Field(sa_column=Column(Geometry('POINTZ', dimension=3, srid=conf.geo.raw_survey.srid)))
-    status: str = Field(sa_column=Column(String(1)))
+    #__abstract__ = True
+    geom: Annotated[str, WKBElement] = Field(sa_type=Geometry('POINTZ', dimension=3,
+                                                              srid=conf.geo.raw_survey.srid))
+    status: str = Field(sa_type=String(1))
 
     ## store_name is set in category_models_maker.make_category_models
-    store_name: str | None = None
+    store_name: ClassVar[str | None] = None
 
     @classmethod
     async def get_geo_df(cls, *args, **kwargs):
-        return await super().get_geo_df(crs=conf.raw_survey['spatial_sys_ref'], *args, **kwargs)
+        return await super().get_geo_df(crs=conf.raw_survey['spatial_sys_ref'],
+                                        *args, **kwargs)
 
 
 class PlottableModel(Model):
@@ -1061,9 +1066,9 @@ class PlottableModel(Model):
     to be used (the first one being the default)
     * OR an ordereed dict of value => resampling method
     """
-    __abstract__ = True
+    #__abstract__ = True
 
-    float_format: str = '%.1f'
+    float_format: ClassVar[str] = '%.1f'
     values: dict[Any, Any] = {}
 
     @classmethod
@@ -1092,7 +1097,7 @@ class PlottableModel(Model):
 
 
 class TimePlottableModel(PlottableModel):
-    __abstract__ = True
+    #__abstract__ = True
 
     time: datetime
 
@@ -1107,7 +1112,8 @@ class TimePlottableModel(PlottableModel):
         with_only_columns.insert(0, 'time')
 
         df = await super().get_as_dataframe(model_id=model_id,
-                                            with_only_columns=with_only_columns, **kwargs)
+                                            with_only_columns=with_only_columns,
+                                            **kwargs)
 
         ## Set time as index
         df.set_index('time', drop=True, inplace=True)
src/gisaf/models/map_bases.py

@@ -16,8 +16,8 @@ class BaseStyle(Model):
 
     id: int = Field(primary_key=True)
     name: str
-    style: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
-    mbtiles: str = Field(sa_column=Column(String(50)))
+    style: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
+    mbtiles: str = Field(sa_type=String(50))
     static_tiles_url: str
     enabled: bool = True
 
@@ -51,7 +51,7 @@ class BaseMapLayer(Model):
 
     id: int = Field(primary_key=True)
     base_map_id: int = Field(foreign_key='base_map.id', index=True)
-    store: str = Field(sa_column=Column(String(100)))
+    store: str = Field(sa_type=String(100))
 
     @classmethod
     def dyn_join_with(cls):
src/gisaf/models/metadata.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+from sqlmodel import MetaData
+
+from ..config import conf
+
+gisaf = MetaData(schema='gisaf')
+gisaf_survey = MetaData(schema='gisaf_survey')
+gisaf_admin = MetaData(schema='gisaf_admin')
+gisaf_map = MetaData(schema='gisaf_map')
+raw_survey = MetaData(schema=conf.survey.db_schema_raw)
+survey = MetaData(schema=conf.survey.db_schema)
src/gisaf/models/misc.py

@@ -1,5 +1,6 @@
 import logging
 from typing import Any
+from pydantic import ConfigDict
 
 from sqlmodel import Field, JSON, Column
 
@@ -17,6 +18,7 @@ class Qml(Model):
     """
     Model for storing qml (QGis style)
     """
+    model_config = ConfigDict(protected_namespaces=())
     metadata = gisaf_map
 
     class Admin:
@@ -27,8 +29,8 @@ class Qml(Model):
     qml: str
     attr: str
     style: str
-    mapbox_paint: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
-    mapbox_layout: dict[str, Any] | None = Field(sa_column=Column(JSON(none_as_null=True)))
+    mapbox_paint: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
+    mapbox_layout: dict[str, Any] | None = Field(sa_type=JSON(none_as_null=True))
 
     def __repr__(self):
         return '<models.Qml {self.model_name:s}>'.format(self=self)
src/gisaf/models/models_base.py

@@ -27,11 +27,11 @@ class Model(SQLModel):
 
     @classmethod
     def get_store_name(cls):
-        return "{}.{}".format(cls.__table_args__['schema'], cls.__tablename__)
+        return "{}.{}".format(cls.metadata.schema, cls.__tablename__)
 
     @classmethod
     def get_table_name_prefix(cls):
-        return "{}_{}".format(cls.__table_args__['schema'], cls.__tablename__)
+        return "{}_{}".format(cls.metadata.schema, cls.__tablename__)
 
     @classmethod
     async def get_df(cls, where=None,
src/gisaf/models/raw_survey.py

@@ -1,7 +1,8 @@
-from sqlmodel import Field, SQLModel, MetaData, JSON, TEXT, Relationship, Column
+from typing import ClassVar
+from sqlmodel import Field, BigInteger
 
 from .models_base import Model
-from .models_base import GeoPointMModel, BaseSurveyModel
+from .geo_models_base import GeoPointMModel, BaseSurveyModel
 from .project import Project
 from .category import Category
 from .metadata import gisaf_survey
@@ -9,11 +10,12 @@ from .metadata import gisaf_survey
 class RawSurveyModel(BaseSurveyModel, GeoPointMModel):
     metadata = gisaf_survey
     __tablename__ = 'raw_survey'
+    hidden: ClassVar[bool] = True
 
     id: int = Field(default=None, primary_key=True)
-    project_id = db.Column(db.Integer, db.ForeignKey(Project.id))
-    category = db.Column(db.String, db.ForeignKey(Category.name))
-    in_menu = False
+    project_id: int | None = Field(foreign_key='project.id')
+    category: str = Field(foreign_key='category.name')
+    in_menu: bool = False
 
     @classmethod
     def dyn_join_with(cls):
@@ -78,16 +80,18 @@ class RawSurveyModel(BaseSurveyModel, GeoPointMModel):
 
 class OriginRawPoint(Model):
     """
-    Store information of the raw survey point used in the line work for each line and polygon shape
+    Store information of the raw survey point used in the line work
+    for each line and polygon shape
     Filled when importing shapefiles
     """
+    metadata = gisaf_survey
     __tablename__ = 'origin_raw_point'
-    __table_args__ = {'schema' : 'gisaf_survey'}
 
-    id = db.Column(db.Integer, primary_key=True)
-    shape_table = db.Column(db.String, index=True)
-    shape_id = db.Column(db.Integer, index=True)
-    raw_point_id = db.Column(db.BigInteger)
+    id: int = Field(default=None, primary_key=True)
+    shape_table: str = Field(index=True)
+    shape_id: int = Field(index=True)
+    raw_point_id: int = Field(sa_type=BigInteger())
 
     def __repr__(self):
-        return '<models.OriginRawPoint {self.id:d} {self.shape_table:s} {self.shape_id:d} {self.raw_point_id:d}>'.format(self=self)
+        return f'<models.OriginRawPoint {self.id:d} {self.shape_table:s} ' \
+               f'{self.shape_id:d} {self.raw_point_id:d}>'
src/gisaf/models/reconcile.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+from datetime import datetime
+from sqlalchemy import BigInteger
+from sqlmodel import Field, SQLModel, MetaData, JSON, TEXT, Relationship, Column, String
+
+from .models_base import Model
+from .metadata import gisaf_admin
+
+
+class Reconciliation(Model):
+    metadata = gisaf_admin
+
+    class Admin:
+        menu = 'Other'
+        flask_admin_model_view = 'ReconciliationModelView'
+
+    id: int = Field(primary_key=True, sa_type=BigInteger,
+                    sa_column_kwargs={'autoincrement': False})
+    target: str = Field(sa_type=String(50))
+    source: str = Field(sa_type=String(50))
+
+
+class StatusChange(Model):
+    metadata = gisaf_admin
+    __tablename__ = 'status_change'
+
+    id: int = Field(primary_key=True, sa_type=BigInteger,
+                    sa_column_kwargs={'autoincrement': False})
+    store: str = Field(sa_type=String(50))
+    ref_id: int = Field(sa_type=BigInteger())
+    original: str = Field(sa_type=String(1))
+    new: str = Field(sa_type=String(1))
+    time: datetime
+
+
+class FeatureDeletion(Model):
+    metadata = gisaf_admin
+    __tablename__ = 'feature_deletion'
+
+    id: int = Field(BigInteger, primary_key=True,
+                    sa_column_kwargs={'autoincrement': False})
+    store: str = Field(sa_type=String(50))
+    ref_id: int = Field(sa_type=BigInteger())
+    time: datetime
src/gisaf/models/store.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+from typing import Any
+from pydantic import BaseModel
+from .geo_models_base import GeoModel, RawSurveyBaseModel, GeoPointSurveyModel
+
+
+class MapLibreStyle(BaseModel):
+    ...
+
+
+class Store(BaseModel):
+    auto_import: bool
+    base_gis_type: str
+    count: int
+    custom: bool
+    description: str
+    #extra: dict[str, Any] | None
+    group: str
+    #icon: str
+    in_menu: bool
+    is_db: bool
+    is_line_work: bool
+    is_live: bool
+    long_name: str | None
+    #mapbox_layout: dict[str, Any] | None
+    #mapbox_paint: dict[str, Any] | None
+    #mapbox_type: str
+    mapbox_type_custom: str | None
+    #mapbox_type_default: str
+    minor_group_1: str
+    minor_group_2: str
+    #model: GeoModel
+    model_type: str
+    name: str
+    #name_letter: str
+    #name_number: int
+    #raw_model: GeoPointSurveyModel
+    #raw_model_store_name: str
+    status: str
+    store: str
+    style: str | None
+    symbol: str | None
+    title: str
+    viewable_role: str | None
+    z_index: int
src/gisaf/models/tags.py

@@ -1,4 +1,4 @@
-from typing import Any
+from typing import Any, ClassVar
 from sqlalchemy import BigInteger
 from sqlalchemy.ext.mutable import MutableDict
 from sqlalchemy.dialects.postgresql import HSTORE
@@ -10,7 +10,7 @@ from .geo_models_base import GeoPointModel
 
 class Tags(GeoPointModel, table=True):
     metadata = gisaf
-    hidden: bool = True
+    hidden: ClassVar[bool] = True
 
     class Admin:
         menu = 'Other'
@@ -18,8 +18,8 @@ class Tags(GeoPointModel, table=True):
 
     id: int | None = Field(primary_key=True)
     store: str = Field(index=True)
-    ref_id: int = Field(index=True, sa_column=Column(BigInteger))
-    tags: dict = Field(sa_column=Column(MutableDict.as_mutable(HSTORE)))
+    ref_id: int = Field(index=True, sa_type=BigInteger)
+    tags: dict = Field(sa_type=MutableDict.as_mutable(HSTORE))
 
     def __str__(self):
         return '{self.store:s} {self.ref_id}: {self.tags}'.format(self=self)
|
@ -4,18 +4,20 @@ Define the models for the ORM
|
|||
import logging
|
||||
import importlib
|
||||
import pkgutil
|
||||
from collections import OrderedDict, defaultdict
|
||||
from collections import defaultdict
|
||||
from importlib.metadata import entry_points
|
||||
from typing import List
|
||||
|
||||
from sqlalchemy import inspect
|
||||
from pydantic import create_model
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.orm import selectinload
|
||||
from sqlmodel import select
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from .config import conf
|
||||
from .models import misc, category, project, reconcile, map_bases, tags
|
||||
#from .models.graphql import GeomGroup, GeomModel
|
||||
from .models import (misc, category as category_module,
|
||||
project, reconcile, map_bases, tags)
|
||||
from .models.geo_models_base import (
|
||||
PlottableModel,
|
||||
GeoModel,
|
||||
|
@ -26,11 +28,20 @@ from .models.geo_models_base import (
|
|||
GeoPolygonSurveyModel,
|
||||
)
|
||||
from .utils import ToMigrate
|
||||
from .models.category import Category, CategoryGroup
|
||||
from .database import db_session
|
||||
from .models.metadata import survey, raw_survey
|
||||
|
||||
registry = None
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
logger = logging.getLogger('Gisaf registry')
|
||||
category_model_mapper = {
|
||||
'Point': GeoPointSurveyModel,
|
||||
'Line': GeoLineSurveyModel,
|
||||
'Polygon': GeoPolygonSurveyModel,
|
||||
}
|
||||
|
||||
class NotInRegistry(Exception):
|
||||
pass
|
||||
|
||||
def import_submodules(package, recursive=True):
|
||||
""" Import all submodules of a module, recursively, including subpackages
|
||||
|
@ -57,7 +68,7 @@ class ModelRegistry:
|
|||
Maintains registries for all kind of model types, eg. geom, data, values...
|
||||
Provides tools to get the models from their names, table names, etc.
|
||||
"""
|
||||
def __init__(self, raw_survey_models=None, survey_models=None):
|
||||
def __init__(self):
|
||||
"""
|
||||
Get geo models
|
||||
:return: None
|
||||
|
@ -67,8 +78,192 @@ class ModelRegistry:
|
|||
self.values = {}
|
||||
self.other = {}
|
||||
self.misc = {}
|
||||
self.raw_survey_models = raw_survey_models or {}
|
||||
self.geom_auto = survey_models or {}
|
||||
self.raw_survey_models = {}
|
||||
self.survey_models = {}
|
||||
|
||||
async def make_registry(self, app=None):
|
||||
"""
|
||||
Make (or refresh) the registry of models.
|
||||
:return:
|
||||
"""
|
||||
logger.debug('make_registry')
|
||||
await self.make_category_models()
|
||||
self.scan()
|
||||
await self.build()
|
||||
## If ogcapi is in app (i.e. not with scheduler):
|
||||
## Now that the models are refreshed, tells the ogcapi to (re)build
|
||||
if app:
|
||||
#app.extra['registry'] = self
|
||||
if 'ogcapi' in app.extra:
|
||||
await app.extra['ogcapi'].build()
|
||||
|
||||
async def make_category_models(self):
|
||||
"""
|
||||
Make geom models from the category model
|
||||
and update raw_survey_models and survey_models
|
||||
Important notes:
|
||||
- the db must be bound before running this function
|
||||
- the db must be rebound after running this function,
|
||||
so that the models created are actually bound to the db connection
|
||||
:return:
|
||||
"""
|
||||
logger.debug('make_category_models')
|
||||
async with db_session() as session:
|
||||
query = select(Category).order_by(Category.long_name).options(selectinload(Category.category_group))
|
||||
data = await session.exec(query)
|
||||
categories: list[Category] = data.all()
|
||||
for category in categories:
|
||||
## Several statuses can coexist for the same model, so
|
||||
## consider only the ones with the 'E' (existing) status
|
||||
## The other statuses are defined only for import (?)
|
||||
if getattr(category, 'status', 'E') != 'E':
|
||||
continue
|
||||
|
||||
## Use pydantic create_model, supported by SQLModel
|
||||
## See https://github.com/tiangolo/sqlmodel/issues/377
|
||||
store_name = f'{survey.schema}.{category.table_name}'
|
||||
raw_store_name = f'{raw_survey.schema}.RAW_{category.table_name}'
|
||||
raw_survey_field_definitions = {
|
||||
## FIXME: RawSurveyBaseModel.category should be a Category, not category.name
|
||||
'category_name': (str, category.name),
|
||||
## FIXME: Same for RawSurveyBaseModel.group
|
||||
'group_name': (str, category.category_group.name),
|
||||
'viewable_role': (str, category.viewable_role),
|
||||
'store_name': (str, raw_store_name),
|
||||
# 'icon': (str, ''),
|
||||
# 'icon': (str, ''),
|
||||
}
|
||||
## Raw survey points
|
||||
try:
|
||||
self.raw_survey_models[store_name] = create_model(
|
||||
__base__=RawSurveyBaseModel,
|
||||
__model_name=category.raw_survey_table_name,
|
||||
__cls_kwargs__={
|
||||
'table': True,
|
||||
'metadata': raw_survey,
|
||||
'__tablename__': category.raw_survey_table_name,
|
||||
## FIXME: RawSurveyBaseModel.category should be a Category, not category.name
|
||||
'category_name': category.name,
|
||||
## FIXME: Same for RawSurveyBaseModel.group
|
||||
'group_name': category.category_group.name,
|
||||
'viewable_role': category.viewable_role,
|
||||
'store_name': raw_store_name,
|
||||
},
|
||||
# **raw_survey_field_definitions
|
||||
)
|
||||
except Exception as err:
|
||||
logger.exception(err)
|
||||
logger.warning(err)
|
||||
else:
|
||||
logger.debug('Discovered {:s}'.format(category.raw_survey_table_name))
|
||||
|
||||
model_class = category_model_mapper.get(category.model_type)
|
||||
## Final geometries
|
||||
try:
|
||||
if model_class:
|
||||
survey_field_definitions = {
|
||||
'category_name': (str, category.name),
|
||||
'group_name': (str, category.category_group.name),
|
||||
'raw_store_name': (str, raw_store_name),
|
||||
'viewable_role': (str, category.viewable_role),
|
||||
'symbol': (str, category.symbol),
|
||||
#'raw_model': (str, self.raw_survey_models.get(raw_store_name)),
|
||||
# 'icon': (str, f'{survey.schema}-{category.table_name}'),
|
||||
}
|
||||
self.survey_models[store_name] = create_model(
|
||||
__base__= model_class,
|
||||
__model_name=category.table_name,
|
||||
__cls_kwargs__={
|
||||
'table': True,
|
||||
'metadata': survey,
|
||||
'__tablename__': category.table_name,
|
||||
'category_name': category.name,
|
||||
'group_name': category.category_group.name,
|
||||
'raw_store_name': raw_store_name,
|
||||
'viewable_role': category.viewable_role,
|
||||
'symbol': category.symbol,
|
||||
},
|
||||
# **survey_field_definitions,
|
||||
)
|
||||
except Exception as err:
|
||||
logger.warning(err)
|
||||
else:
|
||||
logger.debug('Discovered {:s}'.format(category.table_name))
|
||||
|
||||
logger.info('Discovered {:d} models'.format(len(categories)))
|
||||
|
||||
def scan(self):
|
||||
"""
|
||||
Scan all models defined explicitely (not the survey ones,
|
||||
which are defined by categories), and store them for reference.
|
||||
"""
|
||||
logger.debug('scan')
|
||||
from . import models # nocheck
|
||||
|
||||
## Scan the models defined in modules
|
||||
for module_name, module in import_submodules(models).items():
|
||||
if module_name in (
|
||||
'src.gisaf.models.geo_models_base',
|
||||
'src.gisaf.models.models_base',
|
||||
|
||||
):
|
||||
continue
|
||||
for name in dir(module):
|
||||
obj = getattr(module, name)
|
||||
if hasattr(obj, '__module__') and obj.__module__.startswith(module.__name__)\
|
||||
and hasattr(obj, '__tablename__') and hasattr(obj, 'get_store_name'):
|
||||
model_type = self.add_model(obj)
|
||||
logger.debug(f'Model {obj.get_store_name()} added in the registry from gisaf source tree as {model_type}')
|
||||
|
||||
        ## Scan the models defined in plugins (setuptools' entry points)
        for module_name, model in self.scan_entry_points(name='gisaf_extras.models').items():
            model_type = self.add_model(model)
            logger.debug(f'Model {model.get_store_name()} added to the registry from {module_name} entry point as {model_type}')

        for module_name, store in self.scan_entry_points(name='gisaf_extras.stores').items():
            self.add_store(store)
            logger.debug(f'Store {store} added to the registry from {module_name} gisaf_extras.stores entry point')

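scan_entry_points presumably wraps the standard entry-point lookup; a sketch with importlib.metadata (Python 3.10+ signature), assuming the real method only differs in details:

from importlib.metadata import entry_points

def scan_entry_points(self, name):
    ## Load every object that plugins registered under the given group
    return {ep.module: ep.load() for ep in entry_points(group=name)}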
        ## Add misc models
        for module in misc, category_module, project, reconcile, map_bases, tags:
            for name in dir(module):
                obj = getattr(module, name)
                if hasattr(obj, '__module__') and hasattr(obj, '__tablename__'):
                    self.misc[name] = obj

    async def build(self):
        """
        Build the registry: organize all models in a common reference point.
        This should be executed after the discovery of survey models (categories)
        and the scan of custom/module defined models.
        """
        logger.debug('build')
        ## Combine all geom models (auto and custom)
        self.geom = {**self.survey_models, **self.geom_custom}

        await self.make_stores()

        ## Some lists of tables, by usage
        ## XXX: Gino: doesn't set __tablename__ and __table__ , or engine not started???
        ## So, hack the table names of auto_geom
        #self.geom_tables = [model.__tablename__
        #self.geom_tables = [getattr(model, "__tablename__", None)
        #                    for model in sorted(list(self.geom.values()),
        #                                        key=lambda a: a.z_index)]

        values_tables = [model.__tablename__ for model in self.values.values()]
        other_tables = [model.__tablename__ for model in self.other.values()]

        self.data_tables = values_tables + other_tables

        ## Build a dict for quick access to the values from a model
        logger.warning(ToMigrate('get_geom_model_from_table_name, only used for values_for_model'))
        self.values_for_model = {}
        for model_value in self.values.values():
            for constraint in inspect(model_value).foreign_key_constraints:
                model = self.get_geom_model_from_table_name(constraint.referred_table.name)
                self.values_for_model[model] = model_value
        self.make_menu()

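The inner loop leans on SQLAlchemy's schema inspection: every ForeignKeyConstraint knows the table it refers to. A self-contained illustration with hypothetical tables (the registry reads the same attribute through inspect() on its value models):

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

metadata = MetaData()
tree = Table('tree', metadata, Column('id', Integer, primary_key=True))
girth = Table('girth', metadata,
              Column('id', Integer, primary_key=True),
              Column('tree_id', ForeignKey('tree.id')))

for constraint in girth.foreign_key_constraints:
    print(constraint.referred_table.name)  ## -> 'tree'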
    def scan_entry_points(self, name):
        """

@@ -88,6 +283,8 @@ class ModelRegistry:
        Add the model
        :return: Model type (one of {'GeoModel', 'PlottableModel', 'Other model'})
        """
        # if not hasattr(model, 'get_store_name'):
        #     raise NotInRegistry()
        table_name = model.get_store_name()
        if issubclass(model, GeoModel) and not issubclass(model, RawSurveyBaseModel) and not model.hidden:
            self.geom_custom[table_name] = model

@@ -102,71 +299,6 @@ class ModelRegistry:
    def add_store(self, store):
        self.geom_custom_store[store.name] = store

    def scan(self):
        """
        Scan all models defined explicitly (not the survey ones, which are defined by categories),
        and store them for reference.
        :return:
        """
        from gisaf import models

        ## Scan the models defined in modules
        for module_name, module in import_submodules(models).items():
            for name in dir(module):
                obj = getattr(module, name)
                if hasattr(obj, '__module__') and obj.__module__.startswith(module.__name__)\
                        and hasattr(obj, '__tablename__'):
                    model_type = self.add_model(obj)
                    logger.debug(f'Model {obj.get_store_name()} added to the registry from gisaf source tree as {model_type}')

        ## Scan the models defined in plugins (setuptools' entry points)
        for module_name, model in self.scan_entry_points(name='gisaf_extras.models').items():
            model_type = self.add_model(model)
            logger.debug(f'Model {model.get_store_name()} added to the registry from {module_name} entry point as {model_type}')

        for module_name, store in self.scan_entry_points(name='gisaf_extras.stores').items():
            self.add_store(store)
            logger.debug(f'Store {store} added to the registry from {module_name} gisaf_extras.stores entry point')

        ## Add misc models
        for module in misc, category, project, reconcile, map_bases, tags:
            for name in dir(module):
                obj = getattr(module, name)
                if hasattr(obj, '__module__') and hasattr(obj, '__tablename__'):
                    self.misc[name] = obj

    async def build(self):
        """
        Build the registry: organize all models in a common reference point.
        This should be executed after the discovery of survey models (categories)
        and the scan of custom/module defined models.
        """
        ## Combine all geom models (auto and custom)
        self.geom = {**self.geom_auto, **self.geom_custom}

        await self.make_stores()

        ## Some lists of tables, by usage
        ## XXX: Gino: doesn't set __tablename__ and __table__ , or engine not started???
        ## So, hack the table names of auto_geom
        #self.geom_tables = [model.__tablename__
        self.geom_tables = [getattr(model, "__tablename__", None)
                            for model in sorted(list(self.geom.values()),
                                                key=lambda a: a.z_index)]

        values_tables = [model.__tablename__ for model in self.values.values()]
        other_tables = [model.__tablename__ for model in self.other.values()]

        self.data_tables = values_tables + other_tables

        ## Build a dict for quick access to the values from a model
        self.values_for_model = {}
        for model_value in self.values.values():
            for constraint in inspect(model_value).foreign_key_constraints:
                model = self.get_geom_model_from_table_name(constraint.referred_table.name)
                self.values_for_model[model] = model_value
        self.make_menu()

    def make_menu(self):
        """
        Build the Admin menu

@@ -177,20 +309,18 @@ class ModelRegistry:
            if hasattr(model, 'Admin'):
                self.menu[model.Admin.menu].append(model)

    def get_raw_survey_model_mapping(self):
        """
        Get a mapping of category_name -> model for categories
        :return: dict of name -> model (class)
        """
        ## TODO: add option to pass a single item
        ## Local imports, avoiding cyclic dependencies
        ## FIXME: Gino
        from .models.category import Category
        from .database import db
        categories = db.session.query(Category)
        return {category.name: self.raw_survey_models[category.table_name]
                for category in categories
                if self.raw_survey_models.get(category.table_name)}
    # def get_raw_survey_model_mapping(self):
    #     """
    #     Get a mapping of category_name -> model for categories
    #     :return: dict of name -> model (class)
    #     """
    #     ## TODO: add option to pass a single item
    #     ## Local imports, avoiding cyclic dependencies
    #     ## FIXME: Gino
    #     categories = db.session.query(Category)
    #     return {category.name: self.raw_survey_models[category.table_name]
    #             for category in categories
    #             if self.raw_survey_models.get(category.table_name)}

    async def get_model_id_params(self, model, id):
        """

@@ -251,8 +381,10 @@ class ModelRegistry:
        ## Utility functions used with apply method (dataframes)
        def fill_columns_from_custom_models(row):
            return (
                ## FIXME: Like: 'AVESHTEquipment'
                row.model.__namespace__['__qualname__'],  ## Name of the class - hacky
                row.model.description,
                ## FIXME: Like: 'other_aves'
                row.model.__table__.schema
            )

@@ -268,11 +400,11 @@ class ModelRegistry:
            if category.minor_group_2 != '----':
                fragments.append(category.minor_group_2)
            return '.'.join([
                conf.survey['schema'],
                survey.schema,
                '_'.join(fragments)
            ])

        self.categories = await category.Category.get_df()
        self.categories = await Category.get_df()
        self.categories['title'] = self.categories.long_name.fillna(self.categories.description)

        self.categories['store'] = self.categories.apply(get_store_name, axis=1)

@@ -280,35 +412,37 @@ class ModelRegistry:
        self.categories['count'] = pd.Series(dtype=pd.Int64Dtype())
        self.categories.set_index('name', inplace=True)

        df_models = pd.DataFrame(self.geom.items(), columns=['store', 'model']).set_index('store')

        df_models = pd.DataFrame(self.geom.items(),
                                 columns=['store', 'model']
                                 ).set_index('store')
        df_raw_models = pd.DataFrame(self.raw_survey_models.items(),
                                     columns=('store', 'raw_model')
                                     ).set_index('store')
        self.categories = self.categories.merge(df_models, left_on='store', right_index=True)
        self.categories = self.categories.merge(df_raw_models, left_on='store', right_index=True)
        self.categories['custom'] = False
        self.categories['is_db'] = True
        self.categories['name_letter'] = self.categories.index.str.slice(0, 1)
        self.categories['name_number'] = self.categories.index.str.slice(1).astype('int64')
        self.categories.sort_values(['name_letter', 'name_number'], inplace=True)
        self.categories.sort_index(inplace=True)
        # self.categories['name_letter'] = self.categories.index.str.slice(0, 1)
        # self.categories['name_number'] = self.categories.index.str.slice(1).astype('int64')
        # self.categories.sort_values(['name_letter', 'name_number'], inplace=True)

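The name_letter/name_number pair (active in the old branch, commented out in the new one in favour of a plain sort_index) implements a natural sort of category names; a toy illustration:

import pandas as pd

df = pd.DataFrame(index=['A12', 'A3', 'B1'])
df['name_letter'] = df.index.str.slice(0, 1)
df['name_number'] = df.index.str.slice(1).astype('int64')
df.sort_values(['name_letter', 'name_number'], inplace=True)
print(df.index.to_list())  ## ['A3', 'A12', 'B1']; plain sort_index() gives ['A12', 'A3', 'B1']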
        ## Set in the stores dataframe some useful properties, from the model class
        ## Maybe at some point it makes sense to get away from class-based definitions
        if len(self.categories) > 0:
            self.categories['store_name'] = self.categories.apply(
                lambda row: row.model.get_store_name(),
                axis=1
            )
            self.categories['raw_model_store_name'] = self.categories.apply(
                lambda row: row.model.raw_model.store_name,
                axis=1
            )
            ## XXX: redundant self.categories['store_name'] with self.categories['store']
            #self.categories['store_name'] = self.categories.apply(
            #    lambda row: row.model.get_store_name(),
            #    axis=1
            #)
            #self.categories['raw_model_store_name'] = self.categories.apply(
            #    lambda row: row.raw_model.store_name,
            #    axis=1
            #)
            self.categories['is_line_work'] = self.categories.apply(
                lambda row: issubclass(row.model, LineWorkSurveyModel),
                axis=1
            )
            ## Add the raw survey models
            self.categories['raw_survey_model'] = self.categories.apply(
                lambda row: self.raw_survey_models[row.raw_model_store_name],
                axis=1
            )
        else:
            self.categories['store_name'] = None
            self.categories['raw_model_store_name'] = None

@@ -329,6 +463,8 @@ class ModelRegistry:
            axis=1
        )
        self.custom_models = self.custom_models.loc[self.custom_models.in_menu]
        self.custom_models['auto_import'] = False
        self.custom_models['is_line_work'] = False

        if len(self.custom_models) > 0:
            self.custom_models['long_name'],\

@@ -355,6 +491,8 @@ class ModelRegistry:
            axis=1
        )
        self.custom_stores = self.custom_stores.loc[self.custom_stores.in_menu]
        self.custom_stores['auto_import'] = False
        self.custom_stores['is_line_work'] = False

        if len(self.custom_stores) > 0:
            self.custom_stores['long_name'],\

@@ -366,30 +504,31 @@ class ModelRegistry:
        ## Combine Misc (custom) and survey (auto) stores
        ## Retain only one status per category (defaultStatus, 'E'/existing by default)
        self.stores = pd.concat([
            self.categories[self.categories.status==conf.map['defaultStatus'][0]].reset_index().set_index('store').sort_values('title'),
            self.categories[self.categories.status==conf.map.defaultStatus[0]].reset_index().set_index('store').sort_values('title'),
            self.custom_models,
            self.custom_stores
        ]).drop(columns=['store_name'])
        ])#.drop(columns=['store_name'])
        self.stores['in_menu'] = self.stores['in_menu'].astype(bool)

        ## Set in the stores dataframe some useful properties, from the model class
        ## Maybe at some point it makes sense to get away from class-based definitions
        def fill_columns_from_model(row):
            return (
                row.model.mapbox_type or None,
                row.model.icon,
                row.model.symbol,
                # row.model.icon,
                # row.model.symbol,
                row.model.mapbox_type,  # or None,
                row.model.base_gis_type,
                row.model.z_index,
            )

        # self.stores['icon'],\
        # self.stores['symbol'],\
        self.stores['mapbox_type_default'],\
        self.stores['icon'],\
        self.stores['symbol'],\
        self.stores['base_gis_type'],\
        self.stores['z_index']\
            = zip(*self.stores.apply(fill_columns_from_model, axis=1))

        self.stores['mapbox_type_custom'] = self.stores['mapbox_type_custom'].replace('', np.nan).fillna(np.nan)
        #self.stores['mapbox_type_custom'] = self.stores['mapbox_type_custom'].replace('', np.nan).fillna(np.nan)
        self.stores['mapbox_type'] = self.stores['mapbox_type_custom'].fillna(
            self.stores['mapbox_type_default']
        )

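The zip(*...) assignment above is the usual pandas idiom for fanning one row-wise apply out into several columns; a toy example:

import pandas as pd

df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})

def fill_columns(row):
    return row.a + row.b, row.a * row.b

df['sum'], df['product'] = zip(*df.apply(fill_columns, axis=1))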
@@ -400,31 +539,12 @@ class ModelRegistry:
        )
        self.stores['viewable_role'].replace('', None, inplace=True)

        def make_model_gql_object_type(row):
            raise ToMigrate('make_model_gql_object_type')
            # return GeomModel(
            #     name=row.long_name or row.description,
            #     category=row.name,
            #     description=row.description,
            #     store=row.name,
            #     rawSurveyStore=row.raw_model_store_name,
            #     #style=row.style,
            #     zIndex=row.z_index,
            #     custom=row.custom,
            #     count=None,
            #     group=row.group,
            #     type=row.mapbox_type,
            #     icon=row.icon,
            #     symbol=row.symbol,
            #     gisType=row.base_gis_type,
            #     viewableRole=row.viewable_role
            # )

        self.stores['gql_object_type'] = self.stores.apply(make_model_gql_object_type, axis=1)
        #self.stores['gql_object_type'] = self.stores.apply(make_model_gql_object_type, axis=1)
        self.stores['is_live'] = False
        self.stores['description'].fillna('', inplace=True)

        ## Layer groups: Misc, survey's primary groups, Live
        self.primary_groups = await category.CategoryGroup.get_df()
        self.primary_groups = await CategoryGroup.get_df()
        self.primary_groups.sort_values('name', inplace=True)
        self.primary_groups['title'] = self.primary_groups['long_name']

@@ -454,22 +574,29 @@ class ModelRegistry:

        self.primary_groups.sort_index(inplace=True)

        def make_group(group):
            return GeomGroup(
                name=group['name'],
                title=group['title'],
                description=group['long_name']
            )
        #def make_group(group):
        #    return GeomGroup(
        #        name=group['name'],
        #        title=group['title'],
        #        description=group['long_name']
        #    )
        #self.primary_groups['gql_object_type'] = self.primary_groups.apply(make_group, axis=1)
        await self.update_stores_counts()

        self.primary_groups['gql_object_type'] = self.primary_groups.apply(make_group, axis=1)

    async def get_stores(self, db):
    async def get_stores(self):
        """
        Get information about the available stores
        """
        raise DeprecationWarning('get_stores was for graphql')

    async def update_stores_counts(self):
        """
        Update the counts of the stores from the DB
        """
        query = "SELECT schemaname, relname, n_live_tup FROM pg_stat_user_tables"
        async with db.acquire(reuse=False) as connection:
            rows = await connection.all(query)
        # async with db.acquire(reuse=False) as connection:
        async with db_session() as session:
            rows = await session.exec(text(query))
        all_tables_count = pd.DataFrame(rows, columns=['schema', 'table', 'count'])
        all_tables_count['store'] = all_tables_count['schema'] + '.' + all_tables_count['table']
        all_tables_count.set_index(['store'], inplace=True)

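The query reads PostgreSQL's statistics collector instead of issuing one COUNT(*) per store: pg_stat_user_tables.n_live_tup is a cheap, slightly stale estimate of live rows for every user table at once. A sketch of the new session-based variant (db_session is assumed to be gisaf's async session factory):

import pandas as pd
from sqlalchemy import text

async def fetch_table_counts():
    query = "SELECT schemaname, relname, n_live_tup FROM pg_stat_user_tables"
    async with db_session() as session:
        rows = (await session.exec(text(query))).all()
    return pd.DataFrame(rows, columns=['schema', 'table', 'count'])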
@@ -478,14 +605,14 @@ class ModelRegistry:

        ## Update the count in registry's stores
        self.stores.loc[:, 'count'] = all_tables_count['count']
        ## FIXME: count for custom stores
        store_df = self.stores.loc[self.stores['count'] != 0]
        def set_count(row):
            row.gql_object_type.count = row['count']
        # ## FIXME: count for custom stores
        # store_df = self.stores.loc[(self.stores['count'] != 0) | (self.stores['is_live'])]
        # def set_count(row):
        #     row.gql_object_type.count = row['count']

        store_df[store_df.is_db].apply(set_count, axis=1)
        # store_df[store_df.is_db].apply(set_count, axis=1)

        return store_df.gql_object_type.to_list()
        # return store_df.gql_object_type.to_list()

        #def update_live_layers(self, live_models: List[GeomModel]):
        #raise ToMigrate('make_model_gql_object_type')

@@ -509,98 +636,8 @@ class ModelRegistry:
            'custom': True,
        }


category_model_mapper = {
    'Point': GeoPointSurveyModel,
    'Line': GeoLineSurveyModel,
    'Polygon': GeoPolygonSurveyModel,
}

async def make_category_models(raw_survey_models, geom_models):
    """
    Make geom models from the category model, and update raw_survey_models and geom_models
    Important notes:
    - the db must be bound before running this function
    - the db must be rebound after running this function,
      so that the models created are actually bound to the db connection
    :return:
    """
    from .models.category import Category, CategoryGroup
    ## XXX: Using Gino!
    categories = await Category.load(group_info=CategoryGroup).order_by(Category.long_name).gino.all()
    for category in categories:
        ## Several statuses can coexist for the same model, so
        ## consider only the ones with the 'E' (existing) status
        ## The other statuses are defined only for import (?)
        if getattr(category, 'status', 'E') != 'E':
            continue

        ## Python magic here! Create classes using type(name, bases, dict)
        try:
            store_name = '{}.RAW_{}'.format(conf.survey['schema_raw'], category.table_name)
            raw_survey_models[store_name] = type(
                category.raw_survey_table_name,
                (RawSurveyBaseModel, ), {
                    '__tablename__': category.raw_survey_table_name,
                    '__table_args__': {
                        'schema': conf.survey['schema_raw'],
                        'extend_existing': True
                    },
                    'category': category,
                    'group': category.group_info,
                    'viewable_role': category.viewable_role,
                    'store_name': store_name,
                    'icon': ''
                })
        except Exception as err:
            logger.warning(err)
        else:
            logger.debug('Discovered {:s}'.format(category.raw_survey_table_name))

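The 'Python magic' comment refers to the three-argument form of type(), which builds a class at runtime exactly as a class statement would; a minimal standalone illustration:

## Equivalent to: class V_example: __tablename__ = 'V_example'
Dynamic = type('V_example', (object,), {'__tablename__': 'V_example'})
assert Dynamic.__name__ == 'V_example'
assert Dynamic.__tablename__ == 'V_example'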
        model_class = category_model_mapper.get(category.model_type)
        try:
            if model_class:
                schema = conf.survey['schema']
                store_name = f'{schema}.{category.table_name}'
                raw_survey_store_name = f"{conf.survey['schema_raw']}.RAW_{category.table_name}"
                geom_models[store_name] = type(
                    category.table_name,
                    (model_class, ), {
                        '__tablename__': category.table_name,
                        '__table_args__': {
                            'schema': schema,
                            'extend_existing': True
                        },
                        'category': category,
                        'group': category.group_info,
                        'raw_model': raw_survey_models.get(raw_survey_store_name),
                        'viewable_role': category.viewable_role,
                        'symbol': category.symbol,
                        'icon': f'{schema}-{category.table_name}'
                    })
        except Exception as err:
            logger.warning(err)
        else:
            logger.debug('Discovered {:s}'.format(category.table_name))

    logger.info('Discovered {:d} models'.format(len(categories)))

async def make_registry(app):
    """
    Make (or refresh) the registry of models.
    :return:
    """
    global registry
    registry = ModelRegistry(app['raw_survey_models'], app['survey_models'])
    registry.scan()
    await registry.build()
    app['registry'] = registry
    ## If ogcapi is in app (i.e. not with scheduler):
    ## Now that the models are refreshed, tell the ogcapi to (re)build
    if 'ogcapi' in app:
        await app['ogcapi'].build()

# Accessible as global
registry: ModelRegistry = ModelRegistry()

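With the registry becoming a module-level singleton instead of an entry in the aiohttp app dict, the expected startup flow would look roughly like this (an assumption, mirroring make_registry without the app plumbing):

from gisaf.registry import registry

async def startup():
    registry.scan()
    await registry.build()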
## Below, some unused code, maybe to be used later for displaying layers in a tree structure

@@ -1,6 +0,0 @@
from sqlmodel import MetaData

gisaf = MetaData(schema='gisaf')
gisaf_survey = MetaData(schema='gisaf_survey')
gisaf_admin = MetaData(schema='gisaf_admin')
gisaf_map = MetaData(schema='gisaf_map')
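These per-schema MetaData objects (removed here) are attached to models as a class attribute, which is how the models in the deleted reconcile.py below choose their PostgreSQL schema; a sketch with a hypothetical model:

import typing
from sqlmodel import Field, MetaData, SQLModel

gisaf_admin = MetaData(schema='gisaf_admin')

class Example(SQLModel, table=True):
    metadata = gisaf_admin  ## table is created in the gisaf_admin schema
    id: typing.Optional[int] = Field(default=None, primary_key=True)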
@@ -1,40 +0,0 @@
from datetime import datetime
from sqlalchemy import BigInteger
from sqlmodel import Field, SQLModel, MetaData, JSON, TEXT, Relationship, Column, String

from .models_base import Model
from .metadata import gisaf_admin


class Reconciliation(Model):
    metadata = gisaf_admin

    class Admin:
        menu = 'Other'
        flask_admin_model_view = 'ReconciliationModelView'

    id: int = Field(primary_key=True, sa_column=Column(BigInteger, autoincrement=False))
    target: str = Field(sa_column=Column(String(50)))
    source: str = Field(sa_column=Column(String(50)))


class StatusChange(Model):
    metadata = gisaf_admin
    __tablename__ = 'status_change'

    id: int = Field(primary_key=True, sa_column=Column(BigInteger, autoincrement=False))
    store: str = Field(sa_column=Column(String(50)))
    ref_id: int = Field(sa_column=Column(BigInteger()))
    original: str = Field(sa_column=Column(String(1)))
    new: str = Field(sa_column=Column(String(1)))
    time: datetime


class FeatureDeletion(Model):
    metadata = gisaf_admin
    __tablename__ = 'feature_deletion'

    id: int = Field(primary_key=True, sa_column=Column(BigInteger, autoincrement=False))
    store: str = Field(sa_column=Column(String(50)))
    ref_id: int = Field(sa_column=Column(BigInteger()))
    time: datetime