# gisaf-backend/src/gisaf/exporters.py
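"""
Exporters for Gisaf geo data: write stores (layers) to geo file formats
through fiona, or to zipped shapefiles through pyshp.
"""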
from os import remove
from pathlib import Path
from datetime import datetime
from io import BytesIO
from uuid import uuid4
from zipfile import ZipFile
import logging
import tempfile

from geopandas.io.file import infer_schema
import fiona

from gisaf.registry import registry
from gisaf.redis_tools import store as redis_store, RedisError

logger = logging.getLogger(__name__)


async def export_with_fiona(store_names, driver, mimetype, extension,
                            filter_columns=None, reproject=False):
    """
    Export geo data layers to a single file, using fiona.

    store_names: comma separated string of store (aka layer) names
    driver: fiona driver name (one of fiona.supported_drivers)
    mimetype: mime type returned to the caller along with the file data
    extension: extension of the generated file name
    filter_columns: list of column names to keep (all other columns are dropped)
    reproject: if truthy, the geometries are reprojected to the srid specified in conf.srid_for_proj

    The stores are resolved through the global gisaf.registry.ModelRegistry
    (registry), falling back to the live Redis store.
    """
    layers_features = {}
    for store_name in store_names.split(','):
        try:
            if store_name in registry.geom:
                layers_features[store_name] = await registry.geom[store_name].get_geo_df(reproject=reproject)
            else:
                ## Live layer, served from the Redis store
                ## TODO: make live check more explicit
                layers_features[store_name] = await redis_store.get_gdf(store_name, reproject=reproject)
        except RedisError as err:
            logger.warning(f'Cannot get store {store_name}: {err}')
        except Exception as err:
            logger.warning(f'Cannot get store {store_name}, see traceback below')
            logger.exception(err)
    ## FIXME: only 1 layer gets exported with BytesIO, so use a real file
    #filename = '{}_{:%Y-%m-%d_%H:%M:%S}.{}'.format(layers, datetime.now(), extension)
    ## Random suffix to avoid collisions between concurrent exports
    filename = 'Gisaf export {:%Y-%m-%d_%H:%M:%S}_{}.{}'.format(
        datetime.now(), uuid4().hex[:8], extension)
    ## XXX: fails in case of a lot of layers
    data_file_name = Path(tempfile.gettempdir()) / filename
    ## XXX: MemoryFile doesn't support multiple layers (I opened https://github.com/Toblerity/Fiona/issues/830)
    #with fiona.io.MemoryFile(filename='selected_layers.gpkg') as mem_file:
    #    for layer_name, gdf in layers_features.items():
    #        if filter_columns:
    #            gdf = gdf.filter(filter_columns)
    #        schema = infer_schema(gdf)
    #        with mem_file.open(layer=layer_name, driver=driver, crs=gdf.crs, schema=schema) as mem_sink:
    #            mem_sink.writerecords(gdf.iterfeatures())
    #return mem_file, filename, mimetype
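    ## Write each GeoDataFrame as a named layer of the same temporary file,
    ## then read the file back as bytes for the response.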
    with fiona.Env():
        for layer_name, gdf in layers_features.items():
            ## XXX: geopandas doesn't accept BytesIO: using fiona directly
            #gdf.to_file(data, driver=driver, mode='a')
            _gdf = gdf.reset_index()
            ## Expose the id as a "fid" column, so drivers like GPKG keep it as the feature id
            _gdf['fid'] = _gdf['id']
            if filter_columns:
                _gdf = _gdf.filter(filter_columns)
            schema = infer_schema(_gdf)
            with fiona.Env(OSR_WKT_FORMAT="WKT2_2018"), fiona.open(
                    data_file_name, 'w',
                    driver=driver,
                    crs=_gdf.crs.to_string(),
                    layer=layer_name,
                    schema=schema) as colxn:
                colxn.writerecords(_gdf.iterfeatures())
    #data.seek(0)
    with open(data_file_name, 'rb') as data_file:
        data = data_file.read()
    remove(data_file_name)
    return data, filename, mimetype


async def export_with_pyshp(store_names, reproject=False):
    """
    Zip and return shapefile data the "old style" way, i.e. with pyshp 1.2.
    """
    ## TODO: migrate to fiona, see export_with_fiona above
    zip_file = BytesIO()
    with ZipFile(zip_file, 'w') as zip_archive:
        for layer_name in store_names.split(','):
            model = registry.geom[layer_name]
            dbf, shp, shx, qml, proj_str = await model.get_shapefile_files()
            zip_archive.writestr('{}.dbf'.format(layer_name), dbf.getvalue())
            zip_archive.writestr('{}.shp'.format(layer_name), shp.getvalue())
            zip_archive.writestr('{}.shx'.format(layer_name), shx.getvalue())
            if qml:
                zip_archive.writestr('{}.qml'.format(layer_name), qml)
            if proj_str:
                zip_archive.writestr('{}.prj'.format(layer_name), proj_str)
    zip_file.seek(0)
    filename = '{}_{:%Y-%m-%d_%H:%M}.zip'.format(store_names, datetime.now())
    content_type = 'application/zip'
    return zip_file.read(), filename, content_type