重构数据库连接管理,添加元数据支持
This commit is contained in:
208
app/infra/db/dynamic_manager.py
Normal file
208
app/infra/db/dynamic_manager.py
Normal file
@@ -0,0 +1,208 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict
|
||||
from uuid import UUID
|
||||
|
||||
from psycopg_pool import AsyncConnectionPool
|
||||
from sqlalchemy.engine.url import make_url
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncEngine,
|
||||
AsyncSession,
|
||||
async_sessionmaker,
|
||||
create_async_engine,
|
||||
)
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PgEngineEntry:
    """Cache entry pairing a PostgreSQL async engine with its session factory."""

    # Engine owning the connection pool; disposed when the entry is evicted.
    engine: AsyncEngine
    # Factory producing AsyncSession objects bound to ``engine``.
    sessionmaker: async_sessionmaker[AsyncSession]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CacheKey:
    """Hashable key identifying one (project, role) slot in the connection caches."""

    project_id: UUID
    # Logical role of the connection (e.g. reader/writer) within the project.
    db_role: str
|
||||
|
||||
|
||||
class ProjectConnectionManager:
    """LRU-cached, per-project database connection manager.

    Maintains three independent caches, each keyed by ``CacheKey``
    (project_id + db_role):

    * ``_pg_cache``     -- SQLAlchemy async engines + sessionmakers (ORM access)
    * ``_ts_cache``     -- psycopg async pools for TimescaleDB
    * ``_pg_raw_cache`` -- psycopg async pools for raw PostgreSQL access

    Each cache is guarded by its own ``asyncio.Lock`` so building one kind of
    resource never blocks the others.  When a cache grows past the limit
    configured in ``settings``, the least-recently-used entry is
    disposed/closed before the new entry is returned.
    """

    def __init__(self) -> None:
        # OrderedDict (not plain dict) is required: the LRU bookkeeping below
        # uses move_to_end() and popitem(last=False).
        self._pg_cache: OrderedDict[CacheKey, PgEngineEntry] = OrderedDict()
        self._ts_cache: OrderedDict[CacheKey, AsyncConnectionPool] = OrderedDict()
        self._pg_raw_cache: OrderedDict[CacheKey, AsyncConnectionPool] = OrderedDict()
        self._pg_lock = asyncio.Lock()
        self._ts_lock = asyncio.Lock()
        self._pg_raw_lock = asyncio.Lock()

    def _normalize_pg_url(self, url: str) -> str:
        """Rewrite a bare ``postgresql://`` URL to use the async psycopg driver."""
        parsed = make_url(url)
        if parsed.drivername == "postgresql":
            parsed = parsed.set(drivername="postgresql+psycopg")
        return str(parsed)

    @staticmethod
    def _clamp_pool_sizes(pool_min_size: int, pool_max_size: int) -> tuple[int, int]:
        """Return ``(min, max)`` pool sizes with ``min >= 1`` and ``max >= min``."""
        min_size = max(1, pool_min_size)
        return min_size, max(min_size, pool_max_size)

    @staticmethod
    async def _open_psycopg_pool(
        connection_url: str, min_size: int, max_size: int
    ) -> AsyncConnectionPool:
        """Create a psycopg async pool and open it explicitly.

        ``open=False`` plus an explicit ``await pool.open()`` avoids
        psycopg_pool's deprecated open-in-constructor behaviour.
        """
        pool = AsyncConnectionPool(
            conninfo=connection_url,
            min_size=min_size,
            max_size=max_size,
            open=False,
        )
        await pool.open()
        return pool

    async def get_pg_sessionmaker(
        self,
        project_id: UUID,
        db_role: str,
        connection_url: str,
        pool_min_size: int,
        pool_max_size: int,
    ) -> async_sessionmaker[AsyncSession]:
        """Return a cached (or newly created) ORM sessionmaker for project/role."""
        async with self._pg_lock:
            key = CacheKey(project_id=project_id, db_role=db_role)
            entry = self._pg_cache.get(key)
            if entry:
                # Cache hit: mark as most recently used.
                self._pg_cache.move_to_end(key)
                return entry.sessionmaker

            normalized_url = self._normalize_pg_url(connection_url)
            min_size, max_size = self._clamp_pool_sizes(pool_min_size, pool_max_size)
            engine = create_async_engine(
                normalized_url,
                pool_size=min_size,
                # Overflow connections cover the gap between min and max size.
                max_overflow=max(0, max_size - min_size),
                pool_pre_ping=True,
            )
            sessionmaker = async_sessionmaker(engine, expire_on_commit=False)
            self._pg_cache[key] = PgEngineEntry(
                engine=engine,
                sessionmaker=sessionmaker,
            )
            await self._evict_pg_if_needed()
            logger.info(
                "Created PostgreSQL engine for project %s (%s)", project_id, db_role
            )
            return sessionmaker

    async def get_timescale_pool(
        self,
        project_id: UUID,
        db_role: str,
        connection_url: str,
        pool_min_size: int,
        pool_max_size: int,
    ) -> AsyncConnectionPool:
        """Return a cached (or newly opened) TimescaleDB pool for project/role."""
        async with self._ts_lock:
            key = CacheKey(project_id=project_id, db_role=db_role)
            pool = self._ts_cache.get(key)
            if pool:
                self._ts_cache.move_to_end(key)
                return pool

            min_size, max_size = self._clamp_pool_sizes(pool_min_size, pool_max_size)
            pool = await self._open_psycopg_pool(connection_url, min_size, max_size)
            self._ts_cache[key] = pool
            await self._evict_ts_if_needed()
            logger.info(
                "Created TimescaleDB pool for project %s (%s)", project_id, db_role
            )
            return pool

    async def get_pg_pool(
        self,
        project_id: UUID,
        db_role: str,
        connection_url: str,
        pool_min_size: int,
        pool_max_size: int,
    ) -> AsyncConnectionPool:
        """Return a cached (or newly opened) raw PostgreSQL pool for project/role."""
        async with self._pg_raw_lock:
            key = CacheKey(project_id=project_id, db_role=db_role)
            pool = self._pg_raw_cache.get(key)
            if pool:
                self._pg_raw_cache.move_to_end(key)
                return pool

            min_size, max_size = self._clamp_pool_sizes(pool_min_size, pool_max_size)
            pool = await self._open_psycopg_pool(connection_url, min_size, max_size)
            self._pg_raw_cache[key] = pool
            await self._evict_pg_raw_if_needed()
            logger.info(
                "Created PostgreSQL pool for project %s (%s)", project_id, db_role
            )
            return pool

    async def _evict_pg_if_needed(self) -> None:
        """Dispose least-recently-used engines beyond the configured cache size."""
        while len(self._pg_cache) > settings.PROJECT_PG_CACHE_SIZE:
            key, entry = self._pg_cache.popitem(last=False)
            await entry.engine.dispose()
            logger.info(
                "Evicted PostgreSQL engine for project %s (%s)",
                key.project_id,
                key.db_role,
            )

    async def _evict_ts_if_needed(self) -> None:
        """Close least-recently-used TimescaleDB pools beyond the cache size."""
        while len(self._ts_cache) > settings.PROJECT_TS_CACHE_SIZE:
            key, pool = self._ts_cache.popitem(last=False)
            await pool.close()
            logger.info(
                "Evicted TimescaleDB pool for project %s (%s)",
                key.project_id,
                key.db_role,
            )

    async def _evict_pg_raw_if_needed(self) -> None:
        """Close least-recently-used raw PostgreSQL pools beyond the cache size."""
        while len(self._pg_raw_cache) > settings.PROJECT_PG_CACHE_SIZE:
            key, pool = self._pg_raw_cache.popitem(last=False)
            await pool.close()
            logger.info(
                "Evicted PostgreSQL pool for project %s (%s)",
                key.project_id,
                key.db_role,
            )

    async def close_all(self) -> None:
        """Dispose every cached engine and close every pool (app shutdown)."""
        async with self._pg_lock:
            for key, entry in list(self._pg_cache.items()):
                await entry.engine.dispose()
                logger.info(
                    "Closed PostgreSQL engine for project %s (%s)",
                    key.project_id,
                    key.db_role,
                )
            self._pg_cache.clear()

        async with self._ts_lock:
            for key, pool in list(self._ts_cache.items()):
                await pool.close()
                logger.info(
                    "Closed TimescaleDB pool for project %s (%s)",
                    key.project_id,
                    key.db_role,
                )
            self._ts_cache.clear()

        async with self._pg_raw_lock:
            for key, pool in list(self._pg_raw_cache.items()):
                await pool.close()
                logger.info(
                    "Closed PostgreSQL pool for project %s (%s)",
                    key.project_id,
                    key.db_role,
                )
            self._pg_raw_cache.clear()


# Process-wide singleton used by the application's dependency providers.
project_connection_manager = ProjectConnectionManager()
|
||||
3
app/infra/db/metadata/__init__.py
Normal file
3
app/infra/db/metadata/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# Re-export the metadata-database helpers as this package's public API.
from .database import get_metadata_session, close_metadata_engine

__all__ = ["get_metadata_session", "close_metadata_engine"]
|
||||
27
app/infra/db/metadata/database.py
Normal file
27
app/infra/db/metadata/database.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import logging
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Module-level engine for the shared metadata database.  Pool sizing comes
# from application settings; pre-ping discards stale pooled connections.
engine = create_async_engine(
    settings.METADATA_DATABASE_URI,
    pool_size=settings.METADATA_DB_POOL_SIZE,
    max_overflow=settings.METADATA_DB_MAX_OVERFLOW,
    pool_pre_ping=True,
)

# Session factory; expire_on_commit=False keeps ORM objects usable after commit.
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)
|
||||
|
||||
|
||||
async def get_metadata_session() -> AsyncGenerator[AsyncSession, None]:
    """Dependency generator: yield a metadata DB session, closing it afterwards."""
    session = SessionLocal()
    async with session:
        yield session
|
||||
|
||||
|
||||
async def close_metadata_engine() -> None:
    """Dispose the metadata engine's connection pool (called at app shutdown)."""
    await engine.dispose()
    logger.info("Metadata database engine disposed.")
|
||||
115
app/infra/db/metadata/models.py
Normal file
115
app/infra/db/metadata/models.py
Normal file
@@ -0,0 +1,115 @@
|
||||
from datetime import datetime, timezone
from uuid import UUID

from sqlalchemy import Boolean, DateTime, Integer, String, Text
from sqlalchemy.dialects.postgresql import JSONB, UUID as PGUUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Declarative base shared by all metadata ORM models."""

    pass
|
||||
|
||||
|
||||
class User(Base):
    """Application user record mirrored from Keycloak."""

    __tablename__ = "users"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    # External identity: the Keycloak subject this row mirrors.
    keycloak_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), unique=True, index=True
    )
    username: Mapped[str] = mapped_column(String(50), unique=True)
    email: Mapped[str] = mapped_column(String(100), unique=True)
    role: Mapped[str] = mapped_column(String(20), default="user")
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    is_superuser: Mapped[bool] = mapped_column(Boolean, default=False)
    # Free-form extra attributes carried over from the identity provider.
    attributes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # datetime.utcnow is deprecated and returns a *naive* datetime; with
    # DateTime(timezone=True) columns the server would interpret it in the
    # session timezone.  Use an aware UTC timestamp instead.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
    last_login_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), nullable=True
    )
|
||||
|
||||
|
||||
class Project(Base):
    """A tenant project, each with its own GeoServer workspace."""

    __tablename__ = "projects"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    name: Mapped[str] = mapped_column(String(100))
    # Short unique identifier used in URLs/configuration.
    code: Mapped[str] = mapped_column(String(50), unique=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Dedicated GeoServer workspace name for this project.
    gs_workspace: Mapped[str] = mapped_column(String(100), unique=True)
    status: Mapped[str] = mapped_column(String(20), default="active")
    # Aware UTC timestamps; datetime.utcnow is deprecated and naive, which
    # would be misread by timezone-aware columns off-UTC servers.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
|
||||
|
||||
|
||||
class ProjectDatabase(Base):
    """Connection descriptor for one of a project's databases."""

    __tablename__ = "project_databases"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    project_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), index=True)
    # Logical role of this connection within the project (e.g. reader/writer).
    db_role: Mapped[str] = mapped_column(String(20))
    # Database engine kind (e.g. postgresql vs. timescaledb) -- TODO confirm values.
    db_type: Mapped[str] = mapped_column(String(20))
    # DSN stored encrypted; decrypted by the application before connecting.
    dsn_encrypted: Mapped[str] = mapped_column(Text)
    pool_min_size: Mapped[int] = mapped_column(Integer, default=2)
    pool_max_size: Mapped[int] = mapped_column(Integer, default=10)
|
||||
|
||||
|
||||
class ProjectGeoServerConfig(Base):
    """Per-project GeoServer connection and map-default settings (1:1 with project)."""

    __tablename__ = "project_geoserver_configs"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    # unique=True enforces the one-config-per-project relationship.
    project_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), unique=True, index=True
    )
    gs_base_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    gs_admin_user: Mapped[str | None] = mapped_column(String(50), nullable=True)
    # Admin password stored encrypted; decrypted by the application on use.
    gs_admin_password_encrypted: Mapped[str | None] = mapped_column(
        Text, nullable=True
    )
    gs_datastore_name: Mapped[str] = mapped_column(String(100), default="ds_postgis")
    # Default map extent as JSON -- presumably bbox coordinates; verify against callers.
    default_extent: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    srid: Mapped[int] = mapped_column(Integer, default=4326)
    # Aware UTC timestamp; replaces deprecated, naive datetime.utcnow.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
|
||||
|
||||
|
||||
class UserProjectMembership(Base):
    """Association row granting a user a role within a project."""

    __tablename__ = "user_project_membership"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    user_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), index=True)
    project_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), index=True)
    # Role inside the project; defaults to read-only access.
    project_role: Mapped[str] = mapped_column(String(20), default="viewer")
|
||||
|
||||
|
||||
class AuditLog(Base):
    """Immutable audit-trail entry for a request or action."""

    __tablename__ = "audit_logs"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True)
    # Nullable: some audited events (e.g. anonymous/system) have no user.
    user_id: Mapped[UUID | None] = mapped_column(
        PGUUID(as_uuid=True), nullable=True, index=True
    )
    project_id: Mapped[UUID | None] = mapped_column(
        PGUUID(as_uuid=True), nullable=True, index=True
    )
    action: Mapped[str] = mapped_column(String(50))
    resource_type: Mapped[str | None] = mapped_column(String(50), nullable=True)
    resource_id: Mapped[str | None] = mapped_column(String(100), nullable=True)
    # 45 chars accommodates full IPv6 textual form.
    ip_address: Mapped[str | None] = mapped_column(String(45), nullable=True)
    request_method: Mapped[str | None] = mapped_column(String(10), nullable=True)
    request_path: Mapped[str | None] = mapped_column(Text, nullable=True)
    request_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    response_status: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Aware UTC timestamp; replaces deprecated, naive datetime.utcnow which a
    # timezone-aware column would misinterpret on off-UTC servers.
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)
    )
|
||||
@@ -1,24 +1,18 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from psycopg import AsyncConnection
|
||||
|
||||
from .database import get_database_instance
|
||||
from .scada_info import ScadaRepository
|
||||
from .scheme import SchemeRepository
|
||||
from app.auth.project_dependencies import get_project_pg_connection
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# 创建支持数据库选择的连接依赖函数
|
||||
# 动态项目 PostgreSQL 连接依赖
|
||||
async def get_database_connection(
|
||||
db_name: Optional[str] = Query(
|
||||
None, description="指定要连接的数据库名称,为空时使用默认数据库"
|
||||
)
|
||||
conn: AsyncConnection = Depends(get_project_pg_connection),
|
||||
):
|
||||
"""获取数据库连接,支持通过查询参数指定数据库名称"""
|
||||
instance = await get_database_instance(db_name)
|
||||
async with instance.get_connection() as conn:
|
||||
yield conn
|
||||
yield conn
|
||||
|
||||
|
||||
@router.get("/scada-info")
|
||||
|
||||
@@ -1,42 +1,32 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import List, Optional
|
||||
from typing import List
|
||||
from datetime import datetime
|
||||
from psycopg import AsyncConnection
|
||||
|
||||
from .database import get_database_instance
|
||||
from .schemas.realtime import RealtimeRepository
|
||||
from .schemas.scheme import SchemeRepository
|
||||
from .schemas.scada import ScadaRepository
|
||||
from .composite_queries import CompositeQueries
|
||||
from app.infra.db.postgresql.database import (
|
||||
get_database_instance as get_postgres_database_instance,
|
||||
from app.auth.project_dependencies import (
|
||||
get_project_pg_connection,
|
||||
get_project_timescale_connection,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# 创建支持数据库选择的连接依赖函数
|
||||
# 动态项目 TimescaleDB 连接依赖
|
||||
async def get_database_connection(
|
||||
db_name: Optional[str] = Query(
|
||||
None, description="指定要连接的数据库名称,为空时使用默认数据库"
|
||||
)
|
||||
conn: AsyncConnection = Depends(get_project_timescale_connection),
|
||||
):
|
||||
"""获取数据库连接,支持通过查询参数指定数据库名称"""
|
||||
instance = await get_database_instance(db_name)
|
||||
async with instance.get_connection() as conn:
|
||||
yield conn
|
||||
yield conn
|
||||
|
||||
|
||||
# PostgreSQL 数据库连接依赖函数
|
||||
# 动态项目 PostgreSQL 连接依赖
|
||||
async def get_postgres_connection(
|
||||
db_name: Optional[str] = Query(
|
||||
None, description="指定要连接的 PostgreSQL 数据库名称,为空时使用默认数据库"
|
||||
)
|
||||
conn: AsyncConnection = Depends(get_project_pg_connection),
|
||||
):
|
||||
"""获取 PostgreSQL 数据库连接,支持通过查询参数指定数据库名称"""
|
||||
instance = await get_postgres_database_instance(db_name)
|
||||
async with instance.get_connection() as conn:
|
||||
yield conn
|
||||
yield conn
|
||||
|
||||
|
||||
# --- Realtime Endpoints ---
|
||||
|
||||
Reference in New Issue
Block a user