Setup redis cache (ttags)

parent f47e018e65
commit 5c9d82f507

6 changed files with 82 additions and 62 deletions
@@ -9,9 +9,11 @@ import logging
import pandas as pd
import geopandas as gpd
from asyncpg import connect
from asyncpg.connection import Connection
from asyncpg.exceptions import UndefinedTableError, InterfaceError
from sqlalchemy import text
from redis import asyncio as aioredis
from pydantic import create_model

from .config import conf
# from gisaf.models.live import LiveModel
@@ -20,6 +22,7 @@ from .utils import (SHAPELY_TYPE_TO_MAPBOX_TYPE, DEFAULT_MAPBOX_LAYOUT,
from .registry import registry
#from .models.geom import GeomGroup, GeomModel
from .models.geo_models_base import LiveGeoModel
from .database import db_session

logger = logging.getLogger(__name__)
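The new import of db_session pairs with the session.exec(text(...)) calls introduced further down, which is the SQLModel async session API (the commented-out "# model: SQLModel = ..." line in the last hunk points the same way). As a rough sketch only, not part of this diff, such a helper could look like this, assuming SQLModel and an asyncpg engine URL:

    from contextlib import asynccontextmanager
    from sqlalchemy.ext.asyncio import create_async_engine
    from sqlmodel.ext.asyncio.session import AsyncSession

    # The URL is a placeholder; gisaf builds its own from conf.db
    engine = create_async_engine('postgresql+asyncpg://localhost/gisaf')

    @asynccontextmanager
    async def db_session():
        # Yields an AsyncSession whose .exec() accepts text() statements,
        # as used by _setup_db_cache_system below
        async with AsyncSession(engine) as session:
            yield session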
@@ -90,6 +93,8 @@ class Store:
    - redis: RedisConnection
    - pub (/sub) connections
    """
    asyncpg_conn: Connection

    async def setup(self):
        """
        Setup the live service for the main Gisaf application:
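The redis connection mentioned in the Store docstring is built with the newly imported redis.asyncio client (aliased aioredis above). A minimal sketch, not part of this diff, of how such a client is typically created; the Redis URL is assumed, since the corresponding conf attribute is not shown here:

    from redis import asyncio as aioredis

    async def create_redis(url: str = 'redis://localhost'):
        # from_url returns an asyncio Redis client; decode_responses makes
        # get()/keys() return str instead of bytes
        redis = aioredis.from_url(url, decode_responses=True)
        await redis.ping()  # fail fast if the server is unreachable
        return redis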
@@ -328,7 +333,10 @@ class Store:
        Postgres/asyncpg listener for the trigger on data change.
        A task is created because this function is not asynchronous.
        """
        create_task(self.set_ttag(store_name, time()))
        if store_name in registry.stores:
            create_task(self.set_ttag(store_name, time()))
        else:
            logger.warn(f'Notify received for an unexisting store: {store_name}')

    async def get_ttag(self, store_name):
        """
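asyncpg calls notification listeners synchronously with (connection, pid, channel, payload), which is why the hunk above wraps the Redis update in create_task. A minimal sketch of that callback shape, not part of this diff; in the commit it is a method of Store, and the payload carrying the store name is an assumption based on the store_name variable above:

    from asyncio import create_task
    from time import time

    def create_task_store_ttag(connection, pid, channel, store_name) -> None:
        # Listeners run synchronously inside the event loop, so the async
        # Redis write (set_ttag) has to be scheduled as a task
        create_task(store.set_ttag(store_name, time()))  # store: the module-level Store instance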
@@ -348,7 +356,7 @@ class Store:
        await self.set_ttag(store_name, weak_now_hex)
        return weak_now_hex

    async def delete_all_ttags(self):
    async def delete_all_ttags(self) -> None:
        """
        Delete all ttags in redis
        """
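get_ttag falls back to a freshly computed weak tag (weak_now_hex) when nothing is cached yet, and delete_all_ttags wipes every cached tag. A compact sketch of these accessors on top of the async Redis client, not part of this diff; the 'ttag:' key prefix and the hex-encoded timestamp are assumptions, since the actual key layout and the definition of weak_now_hex live elsewhere in this file:

    from time import time

    class TTagSketch:
        def __init__(self, redis):
            self.redis = redis  # async client from redis.asyncio

        async def set_ttag(self, store_name: str, value) -> None:
            # Remember the time tag (used like a weak ETag) for this store
            await self.redis.set(f'ttag:{store_name}', str(value))

        async def get_ttag(self, store_name: str) -> str:
            ttag = await self.redis.get(f'ttag:{store_name}')
            if ttag is None:
                # Nothing cached yet: derive a weak tag from the current time
                ttag = hex(int(time()))[2:]
                await self.set_ttag(store_name, ttag)
            return ttag

        async def delete_all_ttags(self) -> None:
            # Clear every cached tag, eg. at startup
            keys = await self.redis.keys('ttag:*')
            if keys:
                await self.redis.delete(*keys)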
@@ -357,7 +365,7 @@ class Store:
        if keys:
            await self.redis.delete(*keys)

    async def _setup_db_cache_system(self):
    async def _setup_db_cache_system(self) -> None:
        """
        Setup the caching system:
        - clear all Redis store at startup
@@ -365,52 +373,63 @@
          function are setup on the database server
        - listen to the DB event emitter: setup a callback function
        """
        ## Setup the function and triggers on tables
        ## Keep the connection alive: don't use a "with" block
        ## It needs to be closed correctly: see _close_permanant_db_connection
        self._permanent_conn = await db.acquire()
        self._permanent_raw_conn = await self._permanent_conn.get_raw_connection()

        ## Create the function in the database
        await self._permanent_raw_conn.execute(ttag_function)

        ## Delete all the ttags, for safety
        ## eg. the database was changed and Gisaf wasn't running, so the redis store wasn't updated
        await store.delete_all_ttags()

        ## Create DB triggers on the tables of the models
        all_triggers = await self._permanent_raw_conn.fetch(get_all_triggers)
        stores_with_trigger = {t['trigger_table'] for t in all_triggers if t['tigger_name'] == 'gisaf_ttag'}
        missing_triger_tables = set(registry.geom).difference(stores_with_trigger)
        if len(missing_triger_tables) > 0:
            logger.info(f'Create Postgres modification triggers for {len(missing_triger_tables)} tables')
            for store_name in missing_triger_tables:
                model = registry.geom[store_name]
                try:
                    await self._permanent_raw_conn.execute(ttag_create_trigger.format(
                        schema=model.__table__.schema, table=model.__table__.name))
                except UndefinedTableError:
                    logger.warning(f'table {store_name} does not exist in the database: skip modification trigger')
        ## Setup triggers on Category and Qml, for Mapbox layer styling
        for schema, table in (('gisaf_map', 'qml'), ('gisaf_survey', 'category')):
            triggers = [t for t in all_triggers
                        if t['tigger_name'] == 'gisaf_ttag' and t['trigger_table'] == f'{schema}.{table}']
            if len(triggers) == 0:
                await self._permanent_raw_conn.execute(ttag_create_trigger.format(schema=schema, table=table))
        async with db_session() as session:
            ## Create the function in the database
            await session.exec(text(ttag_function))

        ## Listen: define the callback function
        await self._permanent_raw_conn.add_listener('gisaf_ttag', store.create_task_store_ttag)
            ## Create DB triggers on the tables of the models
            all_triggers_resp = await session.exec(text(get_all_triggers))
            all_triggers = all_triggers_resp.mappings().all()
            stores_with_trigger = {t['trigger_table']
                                   for t in all_triggers
                                   if t['tigger_name'] == 'gisaf_ttag'}
            missing_triger_tables = set(registry.geom).difference(stores_with_trigger)

            # model: SQLModel = registry.stores.loc[store_name, 'model']
            if len(missing_triger_tables) > 0:
                logger.info('Create Postgres modification triggers for '
                            f'{len(missing_triger_tables)} tables')
                for store_name in missing_triger_tables:
                    ## XXX: TODO: See https://stackoverflow.com/questions/7888846/trigger-in-sqlachemy
                    model = registry.geom[store_name]

                    try:
                        await session.exec(text(
                            ttag_create_trigger.format(
                                schema=model.metadata.schema,
                                table=model.__tablename__)
                        ))
                    except UndefinedTableError:
                        logger.warning(f'table {store_name} does not exist in '
                                       'the database: skip modification trigger')
            ## Setup triggers on Category and Qml, for Mapbox layer styling
            for schema, table in (('gisaf_map', 'qml'), ('gisaf_survey', 'category')):
                triggers = [t for t in all_triggers
                            if t['tigger_name'] == 'gisaf_ttag'
                            and t['trigger_table'] == f'{schema}.{table}']
                if len(triggers) == 0:
                    await session.exec(text(
                        ttag_create_trigger.format(schema=schema, table=table)
                    ))

        ## Listen: define the callback function
        self.asyncpg_conn = await connect(conf.db.get_pg_url())
        await self.asyncpg_conn.add_listener('gisaf_ttag', store.create_task_store_ttag)

    async def _close_permanant_db_connection(self):
        """
        Called at aiohttp server shutdown: remove the listener and close the connections
        """
        try:
            await self._permanent_raw_conn.remove_listener('gisaf_ttag', store.create_task_store_ttag)
            await self.asyncpg_conn.remove_listener(
                'gisaf_ttag', store.create_task_store_ttag)
        except InterfaceError as err:
            logger.warning(f'Cannot remove asyncpg listener in _close_permanant_db_connection: {err}')
        await self._permanent_raw_conn.close()
        await self._permanent_conn.release()
        await self.asyncpg_conn.close()


async def setup_redis():
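ttag_function, ttag_create_trigger and get_all_triggers are SQL snippets defined elsewhere in this module, so only their use is visible here. For orientation, a hypothetical sketch of what a notify-on-change trigger of this kind generally looks like; the channel and trigger name gisaf_ttag do appear in the diff, but the SQL bodies below are assumptions, not the gisaf definitions:

    # Hypothetical sketches only, not the actual gisaf SQL
    ttag_function_sketch = '''
        CREATE OR REPLACE FUNCTION gisaf_ttag() RETURNS trigger AS $$
        BEGIN
            -- Send the schema-qualified table name on the gisaf_ttag channel;
            -- the asyncpg listener above turns it into a Redis ttag update
            PERFORM pg_notify('gisaf_ttag', TG_TABLE_SCHEMA || '.' || TG_TABLE_NAME);
            RETURN NULL;
        END;
        $$ LANGUAGE plpgsql;
    '''

    ttag_create_trigger_sketch = '''
        CREATE TRIGGER gisaf_ttag
        AFTER INSERT OR UPDATE OR DELETE ON "{schema}"."{table}"
        FOR EACH STATEMENT EXECUTE FUNCTION gisaf_ttag()
    '''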