Switch to uv package manager and static semver; fix for pydantic and pandas updates

phil 2024-12-14 15:57:11 +01:00
parent 594c267731
commit ccb8728bbb
9 changed files with 2178 additions and 2183 deletions
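
For reference, a minimal sketch of the uv workflow this commit moves to (assuming uv is installed; the last command is only an illustrative check, not part of the commit):

uv lock    # resolve pyproject.toml into uv.lock (replaces pdm.lock)
uv sync    # build the virtualenv from uv.lock, dev-dependencies included; honours .python-version (3.11)
uv run python -c "from importlib.metadata import version; print(version('gisaf-backend'))"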

1
.python-version Normal file
View file

@@ -0,0 +1 @@
3.11

2030
pdm.lock generated

File diff suppressed because it is too large

pyproject.toml
View file

@@ -1,64 +1,63 @@
[project]
name = "gisaf"
dynamic = ["version"]
description = ""
authors = [
{name = "phil", email = "phil.dev@philome.mooo.com"},
]
name = "gisaf-backend"
version = "0.6.0-alpha"
description = "Gisaf backend"
authors = [{ name = "phil", email = "phil.dev@philome.mooo.com" }]
dependencies = [
"apscheduler>=3.10.4",
"asyncpg>=0.28.0",
"fastapi>=0.111",
"geoalchemy2>=0.14.2",
"geopandas>=0.14.4",
"itsdangerous>=2.1.2",
"orjson>=3.9.10",
"pandas>=2.1.1",
"passlib[bcrypt]>=1.7.4",
"pydantic-settings>=2.0.3",
"pyshp>=2.3.1",
"python-jose[cryptography]>=3.3.0",
"python-multipart>=0.0.9",
"pyyaml>=6.0.1",
"redis>=5.0.1",
"sqlalchemy[asyncio]>=2.0.23",
"sqlmodel>=0.0.18",
"uvicorn>=0.23.2",
"websockets>=12.0",
"aiosqlite>=0.19.0",
"psycopg>=3.1.18",
"plotly>=5.20.0",
"matplotlib>=3.8.3",
"aiopath>=0.6.11",
"psycopg2-binary>=2.9.9",
"psutil>=5.9.8",
"aiopath>=0.6.11",
"aiosqlite>=0.19.0",
"apscheduler>=3.10.4",
"asyncpg>=0.28.0",
"fastapi>=0.111",
"geoalchemy2>=0.14.2",
"geopandas>=1.0.1",
"itsdangerous>=2.1.2",
"matplotlib>=3.8.3",
"orjson>=3.9.10",
"pandas>=2.1.1",
"passlib[bcrypt]>=1.7.4",
"plotly>=5.20.0",
"psutil>=5.9.8",
"psycopg2-binary>=2.9.9",
"psycopg>=3.1.18",
"pydantic-settings[yaml]>=2.7",
"pyshp>=2.3.1",
"python-jose[cryptography]>=3.3.0",
"python-multipart>=0.0.9",
"pyyaml>=6.0.1",
"redis>=5.0.1",
"sqlalchemy[asyncio]>=2.0.23",
"sqlmodel>=0.0.18",
"uvicorn>=0.23.2",
"websockets>=12.0",
"pyxdg>=0.28",
]
requires-python = ">=3.11,<4"
requires-python = ">=3.11"
readme = "README.md"
license = {text = "GPLv3"}
[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[project.scripts]
gisaf-backend = "gisaf_backend:main"
[project.optional-dependencies]
contextily = ["contextily>=1.4.0"]
mqtt = ["aiomqtt>=1.2.1"]
all = ["gisaf[contextily]", "gisaf[mqtt]"]
all = ["gisaf-backend[contextily]", "gisaf-backend[mqtt]"]
[tool.pdm.version]
source = "scm"
write_to = "gisaf/_version.py"
write_template = "__version__: str = '{}'"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.pdm.dev-dependencies]
dev = [
"ipdb>=0.13.13",
"pandas-stubs>=2.1.4.231218",
"pretty-errors>=1.2.25",
"types-psycopg2>=2.9.21.20",
"types-PyYAML>=6.0.12.12",
"asyncpg-stubs>=0.29.1",
"types-python-jose>=3.3.4.20240106",
"types-passlib>=1.7.7.20240311",
[tool.hatch.build.targets.wheel]
packages = ["src/gisaf"]
[tool.uv]
dev-dependencies = [
"ipdb>=0.13.13",
"pandas-stubs>=2.1.4.231218",
"pretty-errors>=1.2.25",
"types-psycopg2>=2.9.21.20",
"types-PyYAML>=6.0.12.12",
"asyncpg-stubs>=0.29.1",
"types-python-jose>=3.3.4.20240106",
"types-passlib>=1.7.7.20240311",
]

View file

@@ -1 +0,0 @@
__version__: str = '2023.4.dev95+g46b5246.d20240520'

View file

@@ -2,34 +2,36 @@ from os import environ
import logging
from pathlib import Path
from typing import Any, Type, Tuple
from yaml import safe_load
from xdg import BaseDirectory
from pydantic_settings import (
BaseSettings,
PydanticBaseSettingsSource,
SettingsConfigDict,
)
# from pydantic import ConfigDict
from pydantic.v1.utils import deep_update
from yaml import safe_load
from gisaf._version import __version__
# from sqlalchemy.ext.asyncio.engine import AsyncEngine
# from sqlalchemy.orm.session import sessionmaker
from importlib.metadata import version
logger = logging.getLogger(__name__)
ENV = environ.get("env", "prod")
config_files = [
Path(Path.cwd().root) / "etc" / "gisaf" / ENV,
Path.home() / ".local" / "gisaf" / ENV,
Path(BaseDirectory.xdg_config_home) / "gisaf" / ENV,
]
class DashboardHome(BaseSettings):
title: str = "Gisaf - home/dashboards"
content_file: str = "/etc/gisaf/dashboard_home_content.html"
footer_file: str = "/etc/gisaf/dashboard_home_footer.html"
content_file: Path = (
Path(Path.cwd().root) / "etc" / "gisaf" / "dashboard_home_content.html"
)
footer_file: Path = (
Path(Path.cwd().root) / "etc" / "gisaf" / "dashboard_home_footer.html"
)
class GisafConfig(BaseSettings):
@@ -92,7 +94,6 @@ class DefaultSurvey(BaseSettings):
class Survey(BaseSettings):
# model_config = ConfigDict(extra='ignore')
db_schema_raw: str = "raw_survey"
db_schema: str = "survey"
default: DefaultSurvey = DefaultSurvey()
@@ -173,7 +174,9 @@ class ServerBind(BaseSettings):
class OGCAPIServerMap(BaseSettings):
url: str = "https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png"
attribution: str = """<a href="https://wikimediafoundation.org/wiki/Maps_Terms_of_Use">Wikimedia maps</a> | Map data &copy; <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>"""
attribution: str = (
"""<a href="https://wikimediafoundation.org/wiki/Maps_Terms_of_Use">Wikimedia maps</a> | Map data &copy; <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>"""
)
class OGCAPIServer(BaseSettings):
@@ -196,13 +199,20 @@ class OGCAPI(BaseSettings):
class TileServer(BaseSettings):
baseDir: str = "/path/to/mbtiles_files_dir"
baseDir: Path = Path(BaseDirectory.xdg_data_home) / "gisaf" / "mbtiles_files_dir"
useRequestUrl: bool = False
spriteBaseDir: str = "/path/to/mbtiles_sprites_dir"
spriteBaseDir: Path = (
Path(BaseDirectory.xdg_data_home) / "gisaf" / "mbtiles_sprites_dir"
)
spriteUrl: str = "/tiles/sprite/sprite"
spriteBaseUrl: str = "https://gisaf.example.org"
openMapTilesKey: str | None = None
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.baseDir.mkdir(parents=True, exist_ok=True)
self.spriteBaseDir.mkdir(parents=True, exist_ok=True)
class Map(BaseSettings):
tileServer: TileServer = TileServer()
@@ -251,7 +261,9 @@ class Dashboard(BaseSettings):
class Widgets(BaseSettings):
footer: str = """Generated by <span class='link' onclick="window.open('https://redmine.auroville.org.in/projects/gisaf/')">Gisaf</span>"""
footer: str = (
"""Generated by <span class='link' onclick="window.open('https://redmine.auroville.org.in/projects/gisaf/')">Gisaf</span>"""
)
class Admin(BaseSettings):
@@ -327,7 +339,7 @@ class Config(BaseSettings):
plot: Plot = Plot()
plugins: dict[str, dict[str, Any]] = {}
survey: Survey = Survey()
version: str = __version__
version: str = version('gisaf-backend')
weather_station: dict[str, dict[str, Any]] = {}
widgets: Widgets = Widgets()
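
With the scm-written _version.py gone, the version above is read from the installed package metadata at startup. A standalone sketch of that lookup (the fallback value is an assumption for uninstalled checkouts):

from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version("gisaf-backend")  # static version declared in pyproject.toml
except PackageNotFoundError:
    # Running from a source tree that is not installed; fallback is illustrative only
    __version__ = "0.0.0"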

View file

@@ -8,13 +8,16 @@ import tempfile
from geopandas.io.file import infer_schema
import fiona
from gisaf.registry import registry
from gisaf.redis_tools import store as redis_store, RedisError
logger = logging.getLogger(__name__)
async def export_with_pyogrio(store_names, driver, mimetype, extension, filter_columns=None, reproject=False):
import pyogrio
pass
async def export_with_fiona(store_names, driver, mimetype, extension, filter_columns=None, reproject=False):
"""
Use fiona to export geo data.
@@ -25,6 +28,7 @@ async def export_with_fiona(store_names, driver, mimetype, extension, filter_col
filter_columns: list of column names to filter out
reproject: if true-ish, the geometries are reprojected to the srid specified in conf.srid_for_proj
"""
import fiona
layers_features = {}
for store_name in store_names.split(','):
try:
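
export_with_pyogrio is left as a stub above. Purely as an illustration (not the project's implementation), a pyogrio-based export of a single GeoDataFrame could look like the sketch below; the helper name, temp-file layout and GPKG default are assumptions:

import tempfile
from pathlib import Path

import pyogrio  # optional backend, imported lazily like fiona above


def write_store_with_pyogrio(gdf, store_name: str, driver: str = "GPKG", extension: str = "gpkg") -> Path:
    """Write one GeoDataFrame to a temporary file and return its path (illustration only)."""
    out_path = Path(tempfile.mkdtemp()) / f"{store_name}.{extension}"
    pyogrio.write_dataframe(gdf, str(out_path), driver=driver)
    return out_path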

View file

@ -13,6 +13,7 @@ from pydantic import create_model
from sqlalchemy import text
from sqlalchemy.orm import selectinload, joinedload
from sqlalchemy.exc import NoResultFound
from asyncpg.exceptions import PostgresError
from sqlmodel import SQLModel, select, inspect, Relationship
import pandas as pd
import numpy as np
@@ -145,7 +146,11 @@ class ModelRegistry:
.order_by(Category.long_name)
.options(selectinload(Category.category_group))
) # type: ignore
data = await session.exec(query)
try:
data = await session.exec(query)
except PostgresError as err:
logger.error(f"Cannot make category models. DB error: {err}")
return
categories: list[Category] = data.all() # type: ignore
for category in categories:
## Several statuses can coexist for the same model, so
@@ -169,9 +174,9 @@
}
## Raw survey points
try:
self.raw_survey_models[store_name] = create_model( # type: ignore
self.raw_survey_models[store_name] = create_model(
category.raw_survey_table_name,
__base__=RawSurveyBaseModel,
__model_name=category.raw_survey_table_name,
__cls_kwargs__={
"table": True,
},
@@ -201,9 +206,9 @@
#'raw_model': (str, self.raw_survey_models.get(raw_store_name)),
# 'icon': (str, f'{survey.schema}-{category.table_name}'),
}
self.survey_models[store_name] = create_model( # type: ignore
self.survey_models[store_name] = create_model(
category.table_name,
__base__=model_class,
__model_name=category.table_name,
__cls_kwargs__={
"table": True,
},
@@ -451,14 +456,11 @@ class ModelRegistry:
fragments.append(category.minor_group_2)
return ".".join([survey.name, "_".join(fragments)])
self.categories = await Category.get_df()
self.categories["title"] = self.categories.long_name.fillna(
self.categories.description
)
self.categories["store"] = self.categories.apply(get_store_name, axis=1)
self.categories["count"] = pd.Series(dtype=pd.Int64Dtype())
categories = await Category.get_df()
categories["title"] = categories.long_name.fillna(categories.description)
categories["store"] = categories.apply(get_store_name, axis=1)
categories["count"] = pd.Series(dtype=pd.Int64Dtype())
categories = categories.reset_index().set_index("store")
df_models = pd.DataFrame(
self.geom.items(), columns=["store", "model"]
@@ -466,18 +468,14 @@ class ModelRegistry:
df_raw_models = pd.DataFrame(
self.raw_survey_models.items(), columns=("store", "raw_model")
).set_index("store")
self.categories = self.categories.merge(
df_models, left_on="store", right_index=True
)
self.categories = self.categories.merge(
df_raw_models, left_on="store", right_index=True
)
self.categories["custom"] = False
self.categories["is_db"] = True
self.categories.reset_index(inplace=True)
self.categories.rename(columns={"name": "category"}, inplace=True)
self.categories.set_index("store", inplace=True)
self.categories.sort_values("category")
categories = categories.merge(df_models, left_index=True, right_index=True)
categories = categories.merge(df_raw_models, left_index=True, right_index=True)
categories["custom"] = False
categories["is_db"] = True
categories.reset_index(inplace=True)
categories.rename(columns={"name": "category"}, inplace=True)
categories.set_index("store", inplace=True)
categories.sort_values("category")
# self.categories.sort_index(inplace=True)
# self.categories['name_letter'] = self.categories.index.str.slice(0, 1)
# self.categories['name_number'] = self.categories.index.str.slice(1).astype('int64')
@@ -485,7 +483,7 @@ class ModelRegistry:
## Set in the stores dataframe some useful properties, from the model class
## Maybe at some point it makes sense to get away from class-based definitions
if len(self.categories) > 0:
if len(categories) > 0:
## XXX: redundant self.categories['store_name'] with self.categories['store']
# self.categories['store_name'] = self.categories.apply(
# lambda row: row.model.get_store_name(),
@@ -495,14 +493,15 @@ class ModelRegistry:
# lambda row: row.raw_model.store_name,
# axis=1
# )
self.categories["is_line_work"] = self.categories.apply(
categories["is_line_work"] = categories.apply(
lambda row: issubclass(row.model, LineWorkSurveyModel), axis=1
)
else:
self.categories["store_name"] = None
self.categories["raw_model_store_name"] = None
self.categories["is_line_work"] = None
self.categories["raw_survey_model"] = None
categories["store_name"] = None
categories["raw_model_store_name"] = None
categories["is_line_work"] = None
categories["raw_survey_model"] = None
self.categories = categories
## --------------------
## Custom models (Misc)
@@ -587,7 +586,7 @@ class ModelRegistry:
# self.stores.drop(columns='name', inplace=True)
self.stores.index.name = "name"
self.stores["in_menu"] = self.stores["in_menu"].astype(bool)
self.stores["status"].fillna("E", inplace=True)
self.stores.fillna({"status": "E"}, inplace=True)
self.categories.reset_index(inplace=True)
self.categories.set_index("category", inplace=True)
@@ -624,11 +623,11 @@ class ModelRegistry:
lambda row: getattr(row.model, "viewable_role", None),
axis=1,
)
self.stores["viewable_role"].replace("", None, inplace=True)
self.stores.replace({"viewable_role": ""}, None, inplace=True)
# self.stores['gql_object_type'] = self.stores.apply(make_model_gql_object_type, axis=1)
self.stores["is_live"] = False
self.stores["description"].fillna("", inplace=True)
self.stores.fillna({"description": ""}, inplace=True)
## Layer groups: Misc, survey's primary groups, Live
async with db_session() as session:
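
The fillna and replace rewrites above avoid calling inplace methods on a single-column selection, which newer pandas treats as chained assignment under copy-on-write (it warns, and stops updating the parent frame); the dict forms act on the frame itself. A throwaway sketch of the pattern:

import pandas as pd

df = pd.DataFrame({"status": [None, "A"], "viewable_role": ["", "admin"]})

# Old pattern, now discouraged: inplace on a column selection
# df["status"].fillna("E", inplace=True)

# Frame-level equivalents, as used in the registry above
df.fillna({"status": "E"}, inplace=True)               # fill NA in the "status" column only
df.replace({"viewable_role": ""}, None, inplace=True)  # in "viewable_role", replace "" with None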
@@ -761,12 +760,12 @@ class ModelRegistry:
for store, model_info in self.geom_live_defs.items():
## Add provided live layers in the stores df
# Create the pydantic model
# NOTE: Unused at this point, but might be usedful
# NOTE: Unused at this point, but might be useful
field_definitions = {
k: (ClassVar[v.__class__], v) for k, v in model_info.items()
}
self.geom_live[store] = create_model(
__model_name=store, __base__=LiveGeoModel, **field_definitions
store, __base__=LiveGeoModel, **field_definitions
)
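
The create_model changes in this file all follow one pattern: the model name is now the first positional argument, in line with newer pydantic v2 releases that deprecate passing it via the __model_name keyword. A minimal sketch with plain pydantic and made-up names (the project's SQLModel bases and table kwargs are unchanged and omitted here):

from pydantic import BaseModel, create_model


class GeoBase(BaseModel):
    store: str = ""


# Name passed positionally; __base__ and the (type, default) field definitions are as before.
ExampleCategoryModel = create_model(
    "ExampleCategoryModel",  # hypothetical name, e.g. a survey table name
    __base__=GeoBase,
    status=(str, "E"),
)

print(ExampleCategoryModel(store="v_example", status="A").model_dump())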

View file

@@ -50,34 +50,37 @@ import aiosqlite
from gisaf.config import conf
logger = logging.getLogger('gisaf tile server')
logger = logging.getLogger("gisaf tile server")
api = FastAPI(
default_response_class=responses.ORJSONResponse,
)
OSM_ATTRIBUTION = '<a href=\"http://www.openstreetmap.org/about/\" target=\"_blank\">&copy; OpenStreetMap contributors</a>'
OSM_ATTRIBUTION = '<a href="http://www.openstreetmap.org/about/" target="_blank">&copy; OpenStreetMap contributors</a>'
class MBTiles:
def __init__(self, file_path, style_name):
self.file_path = file_path
self.name = style_name
self.scheme = 'tms'
self.scheme = "tms"
self.etag = f'W/"{hex(int(file_path.stat().st_mtime))[2:]}"'
async def connect(self):
self.db = await aiosqlite.connect(self.file_path)
self.metadata = {}
try:
async with self.db.execute('select name, value from metadata') as cursor:
async with self.db.execute("select name, value from metadata") as cursor:
async for row in cursor:
self.metadata[row[0]] = row[1]
except aiosqlite.DatabaseError as err:
logger.warning(f'Cannot read {self.file_path}, will not be able to serve tiles (error: {err.args[0]})')
logger.warning(
f"Cannot read {self.file_path}, will not be able to serve tiles (error: {err.args[0]})"
)
self.metadata['bounds'] = [float(v) for v in self.metadata['bounds'].split(',')]
self.metadata['maxzoom'] = int(self.metadata['maxzoom'])
self.metadata['minzoom'] = int(self.metadata['minzoom'])
self.metadata["bounds"] = [float(v) for v in self.metadata["bounds"].split(",")]
self.metadata["maxzoom"] = int(self.metadata["maxzoom"])
self.metadata["minzoom"] = int(self.metadata["minzoom"])
async def get_style(self, style_record, request):
"""
@@ -90,51 +93,52 @@ class MBTiles:
base_tiles_url = f"{base_url}/tiles/{self.name}"
scheme = self.scheme
## TODO: avoid parse and serialize at every request
layers = loads(style_record['style'])['layers']
layers = loads(style_record["style"])["layers"]
for layer in layers:
if 'source' in layer:
layer['source'] = 'gisafTiles'
if "source" in layer:
layer["source"] = "gisafTiles"
resp = {
'basename': self.file_path.stem,
"basename": self.file_path.stem,
#'center': self.center,
'description': f'Extract of {self.file_path.stem} from OSM, powered by Gisaf',
'format': self.metadata['format'],
'id': f'gisaftiles_{self.name}',
'maskLevel': 5,
'name': self.name,
"description": f"Extract of {self.file_path.stem} from OSM, powered by Gisaf",
"format": self.metadata["format"],
"id": f"gisaftiles_{self.name}",
"maskLevel": 5,
"name": self.name,
#'pixel_scale': 256,
#'planettime': '1499040000000',
'tilejson': '2.0.0',
'version': 8,
'glyphs': f"/assets/fonts/glyphs/{{fontstack}}/{{range}}.pbf",
'sprite': f"{base_url}{conf.map.tileServer.spriteUrl}",
'sources': {
'gisafTiles': {
'type': 'vector',
'tiles': [
f'{base_tiles_url}/{{z}}/{{x}}/{{y}}.pbf',
"tilejson": "2.0.0",
"version": 8,
"glyphs": f"/assets/fonts/glyphs/{{fontstack}}/{{range}}.pbf",
"sprite": f"{base_url}{conf.map.tileServer.spriteUrl}",
"sources": {
"gisafTiles": {
"type": "vector",
"tiles": [
f"{base_tiles_url}/{{z}}/{{x}}/{{y}}.pbf",
],
'maxzoom': self.metadata['maxzoom'],
'minzoom': self.metadata['minzoom'],
'bounds': self.metadata['bounds'],
'scheme': scheme,
'attribution': OSM_ATTRIBUTION,
'version': self.metadata['version'],
"maxzoom": self.metadata["maxzoom"],
"minzoom": self.metadata["minzoom"],
"bounds": self.metadata["bounds"],
"scheme": scheme,
"attribution": OSM_ATTRIBUTION,
"version": self.metadata["version"],
}
},
'layers': layers,
"layers": layers,
}
return resp
class MBTilesRegistry:
mbtiles: dict[str, MBTiles]
async def setup(self):
"""
Read all mbtiles, construct styles
"""
self.mbtiles = {}
for file_path in Path(conf.map.tileServer.baseDir).glob('*.mbtiles'):
for file_path in Path(conf.map.tileServer.baseDir).glob("*.mbtiles"):
mbtiles = MBTiles(file_path, file_path.stem)
self.mbtiles[file_path.stem] = mbtiles
await mbtiles.connect()
@@ -147,9 +151,10 @@ class MBTilesRegistry:
await mbtiles.db.close()
@api.get('/{style_name}/{z}/{x}/{y}.pbf')
async def get_tile(request, style_name: str, z:int, x: int, y: int,
response: Response):
@api.get("/{style_name}/{z}/{x}/{y}.pbf")
async def get_tile(
request, style_name: str, z: int, x: int, y: int, response: Response
):
"""
Return the specific tile
"""
@@ -157,25 +162,27 @@ async def get_tile(request, style_name: str, z:int, x: int, y: int,
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
mbtiles = registry.mbtiles[style_name]
if request.headers.get('If-None-Match') == mbtiles.etag:
if request.headers.get("If-None-Match") == mbtiles.etag:
request.not_modified = True
return {}
response.headers['Content-Encoding'] = 'gzip'
response.headers['Content-Type'] = 'application/octet-stream'
response.headers["Content-Encoding"] = "gzip"
response.headers["Content-Type"] = "application/octet-stream"
request.response_etag = mbtiles.etag
async with mbtiles.db.execute('select tile_data from tiles where zoom_level=? and tile_column=? and tile_row=?',
(z, x, y)) as cursor:
async with mbtiles.db.execute(
"select tile_data from tiles where zoom_level=? and tile_column=? and tile_row=?",
(z, x, y),
) as cursor:
async for row in cursor:
return row[0]
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
#@routes.get('/sprite/{name:\S+}')
#async def get_sprite(request):
# @routes.get('/sprite/{name:\S+}')
# async def get_sprite(request):
@api.get('/{style_name}')
@api.get("/{style_name}")
async def get_style(request, style_name: str):
"""
Return the base style.
@@ -191,6 +198,6 @@ async def get_style(request, style_name: str):
registry = MBTilesRegistry()
api.mount("/sprite",
StaticFiles(directory=conf.map.tileServer.spriteBaseDir),
name="sprites")
api.mount(
"/sprite", StaticFiles(directory=conf.map.tileServer.spriteBaseDir), name="sprites"
)

2004
uv.lock generated Normal file

File diff suppressed because it is too large