Switch to uv package manager and static semver; fix for pydantic and pandas updates

phil 2024-12-14 15:57:11 +01:00
parent 594c267731
commit ccb8728bbb
9 changed files with 2178 additions and 2183 deletions

.python-version (new file, 1 line)

@@ -0,0 +1 @@
+3.11

pdm.lock (generated, 2030 lines)

File diff suppressed because it is too large.

pyproject.toml

@@ -1,64 +1,63 @@
 [project]
-name = "gisaf"
-dynamic = ["version"]
-description = ""
-authors = [
-    {name = "phil", email = "phil.dev@philome.mooo.com"},
-]
+name = "gisaf-backend"
+version = "0.6.0-alpha"
+description = "Gisaf backend"
+authors = [{ name = "phil", email = "phil.dev@philome.mooo.com" }]
 dependencies = [
-    "apscheduler>=3.10.4",
-    "asyncpg>=0.28.0",
-    "fastapi>=0.111",
-    "geoalchemy2>=0.14.2",
-    "geopandas>=0.14.4",
-    "itsdangerous>=2.1.2",
-    "orjson>=3.9.10",
-    "pandas>=2.1.1",
-    "passlib[bcrypt]>=1.7.4",
-    "pydantic-settings>=2.0.3",
-    "pyshp>=2.3.1",
-    "python-jose[cryptography]>=3.3.0",
-    "python-multipart>=0.0.9",
-    "pyyaml>=6.0.1",
-    "redis>=5.0.1",
-    "sqlalchemy[asyncio]>=2.0.23",
-    "sqlmodel>=0.0.18",
-    "uvicorn>=0.23.2",
-    "websockets>=12.0",
-    "aiosqlite>=0.19.0",
-    "psycopg>=3.1.18",
-    "plotly>=5.20.0",
-    "matplotlib>=3.8.3",
-    "aiopath>=0.6.11",
-    "psycopg2-binary>=2.9.9",
-    "psutil>=5.9.8",
+    "aiopath>=0.6.11",
+    "aiosqlite>=0.19.0",
+    "apscheduler>=3.10.4",
+    "asyncpg>=0.28.0",
+    "fastapi>=0.111",
+    "geoalchemy2>=0.14.2",
+    "geopandas>=1.0.1",
+    "itsdangerous>=2.1.2",
+    "matplotlib>=3.8.3",
+    "orjson>=3.9.10",
+    "pandas>=2.1.1",
+    "passlib[bcrypt]>=1.7.4",
+    "plotly>=5.20.0",
+    "psutil>=5.9.8",
+    "psycopg2-binary>=2.9.9",
+    "psycopg>=3.1.18",
+    "pydantic-settings[yaml]>=2.7",
+    "pyshp>=2.3.1",
+    "python-jose[cryptography]>=3.3.0",
+    "python-multipart>=0.0.9",
+    "pyyaml>=6.0.1",
+    "redis>=5.0.1",
+    "sqlalchemy[asyncio]>=2.0.23",
+    "sqlmodel>=0.0.18",
+    "uvicorn>=0.23.2",
+    "websockets>=12.0",
+    "pyxdg>=0.28",
 ]
-requires-python = ">=3.11,<4"
+requires-python = ">=3.11"
 readme = "README.md"
-license = {text = "GPLv3"}

-[build-system]
-requires = ["pdm-backend"]
-build-backend = "pdm.backend"
+[project.scripts]
+gisaf-backend = "gisaf_backend:main"

 [project.optional-dependencies]
 contextily = ["contextily>=1.4.0"]
 mqtt = ["aiomqtt>=1.2.1"]
-all = ["gisaf[contextily]", "gisaf[mqtt]"]
+all = ["gisaf-backend[contextily]", "gisaf-backend[mqtt]"]

-[tool.pdm.version]
-source = "scm"
-write_to = "gisaf/_version.py"
-write_template = "__version__: str = '{}'"
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"

-[tool.pdm.dev-dependencies]
-dev = [
+[tool.hatch.build.targets.wheel]
+packages = ["src/gisaf"]
+
+[tool.uv]
+dev-dependencies = [
     "ipdb>=0.13.13",
     "pandas-stubs>=2.1.4.231218",
     "pretty-errors>=1.2.25",
     "types-psycopg2>=2.9.21.20",
     "types-PyYAML>=6.0.12.12",
     "asyncpg-stubs>=0.29.1",
     "types-python-jose>=3.3.4.20240106",
     "types-passlib>=1.7.7.20240311",
 ]
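Note on the version handling above: with hatchling and a static version = "0.6.0-alpha", the SCM-generated _version.py is no longer the source of truth; the installed distribution metadata is. A minimal sketch of reading that version at runtime (assuming the distribution name "gisaf-backend" declared above):

    # Sketch only: resolve the version from installed metadata instead of a
    # generated _version.py (assumes the distribution name "gisaf-backend").
    from importlib.metadata import PackageNotFoundError, version

    try:
        __version__ = version("gisaf-backend")
    except PackageNotFoundError:
        # Source checkout that was never installed: no dist metadata available
        __version__ = "0.0.0+unknown"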

src/gisaf/_version.py (deleted)

@@ -1 +0,0 @@
-__version__: str = '2023.4.dev95+g46b5246.d20240520'

src/gisaf/config.py

@@ -2,34 +2,36 @@ from os import environ
 import logging
 from pathlib import Path
 from typing import Any, Type, Tuple
-from yaml import safe_load
+from xdg import BaseDirectory
 from pydantic_settings import (
     BaseSettings,
     PydanticBaseSettingsSource,
     SettingsConfigDict,
 )
-# from pydantic import ConfigDict
 from pydantic.v1.utils import deep_update
+from yaml import safe_load
 from gisaf._version import __version__
-# from sqlalchemy.ext.asyncio.engine import AsyncEngine
-# from sqlalchemy.orm.session import sessionmaker
+from importlib.metadata import version

 logger = logging.getLogger(__name__)

 ENV = environ.get("env", "prod")

 config_files = [
     Path(Path.cwd().root) / "etc" / "gisaf" / ENV,
-    Path.home() / ".local" / "gisaf" / ENV,
+    Path(BaseDirectory.xdg_config_home) / "gisaf" / ENV,
 ]


 class DashboardHome(BaseSettings):
     title: str = "Gisaf - home/dashboards"
-    content_file: str = "/etc/gisaf/dashboard_home_content.html"
-    footer_file: str = "/etc/gisaf/dashboard_home_footer.html"
+    content_file: Path = (
+        Path(Path.cwd().root) / "etc" / "gisaf" / "dashboard_home_content.html"
+    )
+    footer_file: Path = (
+        Path(Path.cwd().root) / "etc" / "gisaf" / "dashboard_home_footer.html"
+    )


 class GisafConfig(BaseSettings):
@@ -92,7 +94,6 @@ class DefaultSurvey(BaseSettings):


 class Survey(BaseSettings):
-    # model_config = ConfigDict(extra='ignore')
     db_schema_raw: str = "raw_survey"
     db_schema: str = "survey"
     default: DefaultSurvey = DefaultSurvey()
@@ -173,7 +174,9 @@ class ServerBind(BaseSettings):

 class OGCAPIServerMap(BaseSettings):
     url: str = "https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png"
-    attribution: str = """<a href="https://wikimediafoundation.org/wiki/Maps_Terms_of_Use">Wikimedia maps</a> | Map data &copy; <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>"""
+    attribution: str = (
+        """<a href="https://wikimediafoundation.org/wiki/Maps_Terms_of_Use">Wikimedia maps</a> | Map data &copy; <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>"""
+    )


 class OGCAPIServer(BaseSettings):
@@ -196,13 +199,20 @@ class OGCAPI(BaseSettings):


 class TileServer(BaseSettings):
-    baseDir: str = "/path/to/mbtiles_files_dir"
+    baseDir: Path = Path(BaseDirectory.xdg_data_home) / "gisaf" / "mbtiles_files_dir"
     useRequestUrl: bool = False
-    spriteBaseDir: str = "/path/to/mbtiles_sprites_dir"
+    spriteBaseDir: Path = (
+        Path(BaseDirectory.xdg_data_home) / "gisaf" / "mbtiles_sprites_dir"
+    )
     spriteUrl: str = "/tiles/sprite/sprite"
     spriteBaseUrl: str = "https://gisaf.example.org"
     openMapTilesKey: str | None = None

+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.baseDir.mkdir(parents=True, exist_ok=True)
+        self.spriteBaseDir.mkdir(parents=True, exist_ok=True)
+

 class Map(BaseSettings):
     tileServer: TileServer = TileServer()
@@ -251,7 +261,9 @@ class Dashboard(BaseSettings):


 class Widgets(BaseSettings):
-    footer: str = """Generated by <span class='link' onclick="window.open('https://redmine.auroville.org.in/projects/gisaf/')">Gisaf</span>"""
+    footer: str = (
+        """Generated by <span class='link' onclick="window.open('https://redmine.auroville.org.in/projects/gisaf/')">Gisaf</span>"""
+    )


 class Admin(BaseSettings):
@@ -327,7 +339,7 @@ class Config(BaseSettings):
     plot: Plot = Plot()
     plugins: dict[str, dict[str, Any]] = {}
     survey: Survey = Survey()
-    version: str = __version__
+    version: str = version('gisaf-backend')
     weather_station: dict[str, dict[str, Any]] = {}
     widgets: Widgets = Widgets()
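The settings above replace hard-coded paths with XDG locations resolved through pyxdg, and type path-like fields as Path. A standalone sketch of the same pattern (the class and field names here are illustrative, not Gisaf's actual settings):

    # Illustrative sketch of XDG-aware, Path-typed pydantic settings.
    from pathlib import Path

    from pydantic_settings import BaseSettings
    from xdg import BaseDirectory  # provided by the new pyxdg dependency

    class ExampleSettings(BaseSettings):
        config_dir: Path = Path(BaseDirectory.xdg_config_home) / "gisaf"
        data_dir: Path = Path(BaseDirectory.xdg_data_home) / "gisaf"

    settings = ExampleSettings()
    print(settings.config_dir)  # typically ~/.config/gisaf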

(file name not shown)

@@ -8,13 +8,16 @@ import tempfile

 from geopandas.io.file import infer_schema
-import fiona

 from gisaf.registry import registry
 from gisaf.redis_tools import store as redis_store, RedisError

 logger = logging.getLogger(__name__)


+async def export_with_pyogrio(store_names, driver, mimetype, extension, filter_columns=None, reproject=False):
+    import pyogrio
+    pass
+
 async def export_with_fiona(store_names, driver, mimetype, extension, filter_columns=None, reproject=False):
     """
     Use fiona to export geo data.
@@ -25,6 +28,7 @@ async def export_with_fiona(store_names, driver, mimetype, extension, filter_col
     filter_columns: list of column names to filter out
     reproject: if true-ish, the geometries are reprojected to the srid specified in conf.srid_for_proj
     """
+    import fiona
     layers_features = {}
     for store_name in store_names.split(','):
         try:
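Moving "import fiona" inside export_with_fiona (and keeping pyogrio local to the new stub) defers the heavy GDAL-backed imports to call time, so the module still imports when only one backend is installed. A generic sketch of that pattern, with hypothetical function and argument names:

    # Sketch of deferring optional geo backends to call time.
    def export_layers(gdf, path, backend: str = "fiona"):
        if backend == "pyogrio":
            import pyogrio  # only required when this branch runs
            pyogrio.write_dataframe(gdf, path)
        else:
            import fiona  # noqa: F401  # backend used by GeoDataFrame.to_file
            gdf.to_file(path, engine="fiona")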

src/gisaf/registry.py

@@ -13,6 +13,7 @@ from pydantic import create_model
 from sqlalchemy import text
 from sqlalchemy.orm import selectinload, joinedload
 from sqlalchemy.exc import NoResultFound
+from asyncpg.exceptions import PostgresError
 from sqlmodel import SQLModel, select, inspect, Relationship
 import pandas as pd
 import numpy as np
@@ -145,7 +146,11 @@ class ModelRegistry:
                 .order_by(Category.long_name)
                 .options(selectinload(Category.category_group))
             ) # type: ignore
-            data = await session.exec(query)
+            try:
+                data = await session.exec(query)
+            except PostgresError as err:
+                logger.error(f"Cannot make category models. DB error: {err}")
+                return
             categories: list[Category] = data.all() # type: ignore
             for category in categories:
                 ## Several statuses can coexist for the same model, so
@@ -169,9 +174,9 @@ class ModelRegistry:
                 }
                 ## Raw survey points
                 try:
-                    self.raw_survey_models[store_name] = create_model( # type: ignore
-                        category.raw_survey_table_name,
+                    self.raw_survey_models[store_name] = create_model(
                         __base__=RawSurveyBaseModel,
+                        __model_name=category.raw_survey_table_name,
                         __cls_kwargs__={
                             "table": True,
                         },
@@ -201,9 +206,9 @@ class ModelRegistry:
                     #'raw_model': (str, self.raw_survey_models.get(raw_store_name)),
                     # 'icon': (str, f'{survey.schema}-{category.table_name}'),
                 }
-                self.survey_models[store_name] = create_model( # type: ignore
-                    category.table_name,
+                self.survey_models[store_name] = create_model(
                     __base__=model_class,
+                    __model_name=category.table_name,
                     __cls_kwargs__={
                         "table": True,
                     },
@@ -451,14 +456,11 @@ class ModelRegistry:
             fragments.append(category.minor_group_2)
             return ".".join([survey.name, "_".join(fragments)])

-        self.categories = await Category.get_df()
-        self.categories["title"] = self.categories.long_name.fillna(
-            self.categories.description
-        )
-
-        self.categories["store"] = self.categories.apply(get_store_name, axis=1)
-        self.categories["count"] = pd.Series(dtype=pd.Int64Dtype())
+        categories = await Category.get_df()
+        categories["title"] = categories.long_name.fillna(categories.description)
+        categories["store"] = categories.apply(get_store_name, axis=1)
+        categories["count"] = pd.Series(dtype=pd.Int64Dtype())
+        categories = categories.reset_index().set_index("store")

         df_models = pd.DataFrame(
             self.geom.items(), columns=["store", "model"]
@@ -466,18 +468,14 @@ class ModelRegistry:
         df_raw_models = pd.DataFrame(
             self.raw_survey_models.items(), columns=("store", "raw_model")
         ).set_index("store")
-        self.categories = self.categories.merge(
-            df_models, left_on="store", right_index=True
-        )
-        self.categories = self.categories.merge(
-            df_raw_models, left_on="store", right_index=True
-        )
-        self.categories["custom"] = False
-        self.categories["is_db"] = True
-        self.categories.reset_index(inplace=True)
-        self.categories.rename(columns={"name": "category"}, inplace=True)
-        self.categories.set_index("store", inplace=True)
-        self.categories.sort_values("category")
+        categories = categories.merge(df_models, left_index=True, right_index=True)
+        categories = categories.merge(df_raw_models, left_index=True, right_index=True)
+        categories["custom"] = False
+        categories["is_db"] = True
+        categories.reset_index(inplace=True)
+        categories.rename(columns={"name": "category"}, inplace=True)
+        categories.set_index("store", inplace=True)
+        categories.sort_values("category")
         # self.categories.sort_index(inplace=True)
         # self.categories['name_letter'] = self.categories.index.str.slice(0, 1)
         # self.categories['name_number'] = self.categories.index.str.slice(1).astype('int64')
@@ -485,7 +483,7 @@ class ModelRegistry:

         ## Set in the stores dataframe some useful properties, from the model class
         ## Maybe at some point it makes sense to get away from class-based definitions
-        if len(self.categories) > 0:
+        if len(categories) > 0:
             ## XXX: redundant self.categories['store_name'] with self.categories['store']
             # self.categories['store_name'] = self.categories.apply(
             #     lambda row: row.model.get_store_name(),
@@ -495,14 +493,15 @@ class ModelRegistry:
             #     lambda row: row.raw_model.store_name,
             #     axis=1
             # )
-            self.categories["is_line_work"] = self.categories.apply(
+            categories["is_line_work"] = categories.apply(
                 lambda row: issubclass(row.model, LineWorkSurveyModel), axis=1
             )
         else:
-            self.categories["store_name"] = None
-            self.categories["raw_model_store_name"] = None
-            self.categories["is_line_work"] = None
-            self.categories["raw_survey_model"] = None
+            categories["store_name"] = None
+            categories["raw_model_store_name"] = None
+            categories["is_line_work"] = None
+            categories["raw_survey_model"] = None
+        self.categories = categories

         ## --------------------
         ## Custom models (Misc)
@@ -587,7 +586,7 @@ class ModelRegistry:
         # self.stores.drop(columns='name', inplace=True)
         self.stores.index.name = "name"
         self.stores["in_menu"] = self.stores["in_menu"].astype(bool)
-        self.stores["status"].fillna("E", inplace=True)
+        self.stores.fillna({"status": "E"}, inplace=True)

         self.categories.reset_index(inplace=True)
         self.categories.set_index("category", inplace=True)
@@ -624,11 +623,11 @@ class ModelRegistry:
             lambda row: getattr(row.model, "viewable_role", None),
             axis=1,
         )
-        self.stores["viewable_role"].replace("", None, inplace=True)
+        self.stores.replace({"viewable_role": ""}, None, inplace=True)
         # self.stores['gql_object_type'] = self.stores.apply(make_model_gql_object_type, axis=1)
         self.stores["is_live"] = False
-        self.stores["description"].fillna("", inplace=True)
+        self.stores.fillna({"description": ""}, inplace=True)

         ## Layer groups: Misc, survey's primary groups, Live
         async with db_session() as session:
@@ -761,12 +760,12 @@ class ModelRegistry:
         for store, model_info in self.geom_live_defs.items():
             ## Add provided live layers in the stores df
             # Create the pydantic model
-            # NOTE: Unused at this point, but might be usedful
+            # NOTE: Unused at this point, but might be useful
             field_definitions = {
                 k: (ClassVar[v.__class__], v) for k, v in model_info.items()
             }
             self.geom_live[store] = create_model(
-                __model_name=store, __base__=LiveGeoModel, **field_definitions
+                store, __base__=LiveGeoModel, **field_definitions
             )
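The fillna and replace changes in this file track pandas 2.x behaviour: inplace=True on a single-column selection such as df["status"].fillna(...) acts on a temporary and, under copy-on-write, no longer updates the parent frame (pandas 2.2 already warns about it), so the frame-level mapping form is used instead. A small sketch of the difference:

    import pandas as pd

    pd.set_option("mode.copy_on_write", True)  # planned default behaviour
    df = pd.DataFrame({"status": [None, "A"], "description": [None, "x"]})

    # Chained, column-wise inplace no longer propagates to df:
    # df["status"].fillna("E", inplace=True)

    # Frame-level, per-column mapping updates df itself:
    df.fillna({"status": "E", "description": ""}, inplace=True)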

(file name not shown)

@@ -50,34 +50,37 @@ import aiosqlite
 from gisaf.config import conf

-logger = logging.getLogger('gisaf tile server')
+logger = logging.getLogger("gisaf tile server")

 api = FastAPI(
     default_response_class=responses.ORJSONResponse,
 )

-OSM_ATTRIBUTION = '<a href=\"http://www.openstreetmap.org/about/\" target=\"_blank\">&copy; OpenStreetMap contributors</a>'
+OSM_ATTRIBUTION = '<a href="http://www.openstreetmap.org/about/" target="_blank">&copy; OpenStreetMap contributors</a>'


 class MBTiles:
     def __init__(self, file_path, style_name):
         self.file_path = file_path
         self.name = style_name
-        self.scheme = 'tms'
+        self.scheme = "tms"
         self.etag = f'W/"{hex(int(file_path.stat().st_mtime))[2:]}"'

     async def connect(self):
         self.db = await aiosqlite.connect(self.file_path)
         self.metadata = {}
         try:
-            async with self.db.execute('select name, value from metadata') as cursor:
+            async with self.db.execute("select name, value from metadata") as cursor:
                 async for row in cursor:
                     self.metadata[row[0]] = row[1]
         except aiosqlite.DatabaseError as err:
-            logger.warning(f'Cannot read {self.file_path}, will not be able to serve tiles (error: {err.args[0]})')
-        self.metadata['bounds'] = [float(v) for v in self.metadata['bounds'].split(',')]
-        self.metadata['maxzoom'] = int(self.metadata['maxzoom'])
-        self.metadata['minzoom'] = int(self.metadata['minzoom'])
+            logger.warning(
+                f"Cannot read {self.file_path}, will not be able to serve tiles (error: {err.args[0]})"
+            )
+        self.metadata["bounds"] = [float(v) for v in self.metadata["bounds"].split(",")]
+        self.metadata["maxzoom"] = int(self.metadata["maxzoom"])
+        self.metadata["minzoom"] = int(self.metadata["minzoom"])

     async def get_style(self, style_record, request):
         """
@@ -90,51 +93,52 @@ class MBTiles:
         base_tiles_url = f"{base_url}/tiles/{self.name}"
         scheme = self.scheme
         ## TODO: avoid parse and serialize at every request
-        layers = loads(style_record['style'])['layers']
+        layers = loads(style_record["style"])["layers"]
         for layer in layers:
-            if 'source' in layer:
-                layer['source'] = 'gisafTiles'
+            if "source" in layer:
+                layer["source"] = "gisafTiles"
         resp = {
-            'basename': self.file_path.stem,
+            "basename": self.file_path.stem,
             #'center': self.center,
-            'description': f'Extract of {self.file_path.stem} from OSM, powered by Gisaf',
-            'format': self.metadata['format'],
-            'id': f'gisaftiles_{self.name}',
-            'maskLevel': 5,
-            'name': self.name,
+            "description": f"Extract of {self.file_path.stem} from OSM, powered by Gisaf",
+            "format": self.metadata["format"],
+            "id": f"gisaftiles_{self.name}",
+            "maskLevel": 5,
+            "name": self.name,
             #'pixel_scale': 256,
             #'planettime': '1499040000000',
-            'tilejson': '2.0.0',
-            'version': 8,
-            'glyphs': f"/assets/fonts/glyphs/{{fontstack}}/{{range}}.pbf",
-            'sprite': f"{base_url}{conf.map.tileServer.spriteUrl}",
-            'sources': {
-                'gisafTiles': {
-                    'type': 'vector',
-                    'tiles': [
-                        f'{base_tiles_url}/{{z}}/{{x}}/{{y}}.pbf',
+            "tilejson": "2.0.0",
+            "version": 8,
+            "glyphs": f"/assets/fonts/glyphs/{{fontstack}}/{{range}}.pbf",
+            "sprite": f"{base_url}{conf.map.tileServer.spriteUrl}",
+            "sources": {
+                "gisafTiles": {
+                    "type": "vector",
+                    "tiles": [
+                        f"{base_tiles_url}/{{z}}/{{x}}/{{y}}.pbf",
                     ],
-                    'maxzoom': self.metadata['maxzoom'],
-                    'minzoom': self.metadata['minzoom'],
-                    'bounds': self.metadata['bounds'],
-                    'scheme': scheme,
-                    'attribution': OSM_ATTRIBUTION,
-                    'version': self.metadata['version'],
+                    "maxzoom": self.metadata["maxzoom"],
+                    "minzoom": self.metadata["minzoom"],
+                    "bounds": self.metadata["bounds"],
+                    "scheme": scheme,
+                    "attribution": OSM_ATTRIBUTION,
+                    "version": self.metadata["version"],
                 }
             },
-            'layers': layers,
+            "layers": layers,
         }
         return resp


 class MBTilesRegistry:
     mbtiles: dict[str, MBTiles]

     async def setup(self):
         """
         Read all mbtiles, construct styles
         """
         self.mbtiles = {}
-        for file_path in Path(conf.map.tileServer.baseDir).glob('*.mbtiles'):
+        for file_path in Path(conf.map.tileServer.baseDir).glob("*.mbtiles"):
             mbtiles = MBTiles(file_path, file_path.stem)
             self.mbtiles[file_path.stem] = mbtiles
             await mbtiles.connect()
@@ -147,9 +151,10 @@ class MBTilesRegistry:
         await mbtiles.db.close()


-@api.get('/{style_name}/{z}/{x}/{y}.pbf')
-async def get_tile(request, style_name: str, z:int, x: int, y: int,
-                   response: Response):
+@api.get("/{style_name}/{z}/{x}/{y}.pbf")
+async def get_tile(
+    request, style_name: str, z: int, x: int, y: int, response: Response
+):
     """
     Return the specific tile
     """
@@ -157,25 +162,27 @@ async def get_tile(request, style_name: str, z:int, x: int, y: int,
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
     mbtiles = registry.mbtiles[style_name]

-    if request.headers.get('If-None-Match') == mbtiles.etag:
+    if request.headers.get("If-None-Match") == mbtiles.etag:
         request.not_modified = True
         return {}

-    response.headers['Content-Encoding'] = 'gzip'
-    response.headers['Content-Type'] = 'application/octet-stream'
+    response.headers["Content-Encoding"] = "gzip"
+    response.headers["Content-Type"] = "application/octet-stream"
     request.response_etag = mbtiles.etag
-    async with mbtiles.db.execute('select tile_data from tiles where zoom_level=? and tile_column=? and tile_row=?',
-                                  (z, x, y)) as cursor:
+    async with mbtiles.db.execute(
+        "select tile_data from tiles where zoom_level=? and tile_column=? and tile_row=?",
+        (z, x, y),
+    ) as cursor:
         async for row in cursor:
             return row[0]
     raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)


-#@routes.get('/sprite/{name:\S+}')
-#async def get_sprite(request):
+# @routes.get('/sprite/{name:\S+}')
+# async def get_sprite(request):


-@api.get('/{style_name}')
+@api.get("/{style_name}")
 async def get_style(request, style_name: str):
     """
     Return the base style.
@@ -191,6 +198,6 @@ async def get_style(request, style_name: str):

 registry = MBTilesRegistry()

-api.mount("/sprite",
-          StaticFiles(directory=conf.map.tileServer.spriteBaseDir),
-          name="sprites")
+api.mount(
+    "/sprite", StaticFiles(directory=conf.map.tileServer.spriteBaseDir), name="sprites"
+)

uv.lock (new generated file, 2004 lines)

File diff suppressed because it is too large.