Coverage for mcpgateway / db.py: 100%

2258 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-03-09 03:05 +0000

1# -*- coding: utf-8 -*- 

2"""Location: ./mcpgateway/db.py 

3Copyright 2025 

4SPDX-License-Identifier: Apache-2.0 

5Authors: Mihai Criveti 

6 

7ContextForge Database Models. 

8This module defines SQLAlchemy models for storing MCP entities including: 

9- Tools with input schema validation 

10- Resources with subscription tracking 

11- Prompts with argument templates 

12- Federated gateways with capability tracking 

13- Updated to record server associations independently using many-to-many relationships, 

14- and to record tool execution metrics. 

15 

16Examples: 

17 >>> from mcpgateway.db import connect_args 

18 >>> isinstance(connect_args, dict) 

19 True 

20 >>> 'keepalives' in connect_args or 'check_same_thread' in connect_args or len(connect_args) == 0 

21 True 

22""" 

23 

24# Standard 

25from contextlib import contextmanager 

26from datetime import datetime, timedelta, timezone 

27import logging 

28import os 

29from typing import Any, cast, Dict, Generator, List, Optional, TYPE_CHECKING 

30import uuid 

31 

32# Third-Party 

33import jsonschema 

34from sqlalchemy import Boolean, Column, create_engine, DateTime, event, Float, ForeignKey, func, Index 

35from sqlalchemy import inspect as sa_inspect 

36from sqlalchemy import Integer, JSON, make_url, MetaData, select, String, Table, text, Text, UniqueConstraint, VARCHAR 

37from sqlalchemy.engine import Engine 

38from sqlalchemy.event import listen 

39from sqlalchemy.exc import OperationalError, ProgrammingError, SQLAlchemyError 

40from sqlalchemy.ext.hybrid import hybrid_property 

41from sqlalchemy.orm import DeclarativeBase, joinedload, Mapped, mapped_column, relationship, Session, sessionmaker 

42from sqlalchemy.orm.attributes import get_history 

43from sqlalchemy.pool import NullPool, QueuePool 

44from sqlalchemy.types import TypeDecorator 

45 

46# First-Party 

47from mcpgateway.common.validators import SecurityValidator 

48from mcpgateway.config import settings 

49from mcpgateway.utils.create_slug import slugify 

50from mcpgateway.utils.db_isready import wait_for_db_ready 

51 

52logger = logging.getLogger(__name__) 

53 

54if TYPE_CHECKING: 

55 # First-Party 

56 from mcpgateway.common.models import ResourceContent 

57 

58# ResourceContent will be imported locally where needed to avoid circular imports 

59# EmailUser models moved to this file to avoid circular imports 

60 

# ---------------------------------------------------------------------------
# 1. Parse the URL so we can inspect backend ("postgresql", "sqlite", ...)
#    and the specific driver ("psycopg", "asyncpg", empty string = default).
# ---------------------------------------------------------------------------
url = make_url(settings.database_url)
backend = url.get_backend_name()  # e.g. 'postgresql', 'sqlite'
driver = url.get_driver_name() or "default"

# Start with an empty dict and add options only when the driver can accept
# them; this prevents unexpected TypeError at connect time.
connect_args: dict[str, object] = {}

# ---------------------------------------------------------------------------
# 2. PostgreSQL (synchronous psycopg3)
#    The keep-alive parameters below are recognised by libpq and let the
#    kernel detect broken network links quickly.
#
#    Additionally, support PostgreSQL-specific options like search_path
#    via the 'options' query parameter in DATABASE_URL.
#    Example: postgresql+psycopg://user:pass@host/db?options=-c%20search_path=mcp_gateway
#
#    IMPORTANT: Use postgresql+psycopg:// (not postgresql://) for psycopg3.
# ---------------------------------------------------------------------------
if backend == "postgresql" and driver in ("psycopg", "default", ""):
    connect_args.update(
        keepalives=1,  # enable TCP keep-alive probes
        keepalives_idle=30,  # seconds of idleness before first probe
        keepalives_interval=5,  # seconds between probes
        keepalives_count=5,  # drop the link after N failed probes
        # psycopg3: prepare_threshold controls automatic server-side prepared statements.
        # After N executions of the same query, psycopg3 prepares it server-side;
        # this significantly improves performance for frequently-executed queries.
        prepare_threshold=settings.db_prepare_threshold,
    )

    # Extract and apply PostgreSQL options from URL query parameters.
    # This allows users to specify search_path for custom schema support (Issue #1535).
    url_options = url.query.get("options")
    if url_options:
        connect_args["options"] = url_options
        logger.info(f"PostgreSQL connection options applied: {url_options}")

    logger.info(f"psycopg3 prepare_threshold set to {settings.db_prepare_threshold}")

# ---------------------------------------------------------------------------
# 3. SQLite (optional) - only one extra flag and it is *SQLite-specific*.
# ---------------------------------------------------------------------------
elif backend == "sqlite":
    # Allow pooled connections to hop across threads.
    connect_args["check_same_thread"] = False

# 4. Other backends (MySQL, MSSQL, etc.) leave `connect_args` empty.

# ---------------------------------------------------------------------------
# 5. Build the Engine with a single, clean connect_args mapping.
# ---------------------------------------------------------------------------

# Check for SQLALCHEMY_ECHO environment variable for query debugging.
# This is useful for N+1 detection and performance analysis.
# Accepted truthy spellings (case-insensitive): "true", "1", "yes".
_sqlalchemy_echo = os.getenv("SQLALCHEMY_ECHO", "").lower() in ("true", "1", "yes")

121 

122 

def build_engine() -> Engine:
    """Build the SQLAlchemy engine with appropriate settings.

    This function constructs the SQLAlchemy engine using the database URL
    and connection arguments determined by the backend type. It also configures
    the connection pool size and timeout based on application settings.

    Backend-specific behavior:
        - sqlite: QueuePool with capped pool sizes (file-based database).
        - mysql/mariadb: QueuePool with READ_COMMITTED isolation.
        - everything else (the PostgreSQL/PgBouncer path): pool class and
          pre-ping are chosen from ``db_pool_class`` / ``db_pool_pre_ping``
          settings, with PgBouncer auto-detection in "auto" mode.

    Environment variables:
        SQLALCHEMY_ECHO: Set to 'true' to log all SQL queries (useful for N+1 detection)

    Returns:
        SQLAlchemy Engine instance configured for the specified database.
    """
    if _sqlalchemy_echo:
        logger.info("SQLALCHEMY_ECHO enabled - all SQL queries will be logged")

    if backend == "sqlite":
        # SQLite supports connection pooling with proper configuration.
        # For SQLite, we use a smaller pool size since it's file-based.
        sqlite_pool_size = min(settings.db_pool_size, 50)  # Cap at 50 for SQLite
        sqlite_max_overflow = min(settings.db_max_overflow, 20)  # Cap at 20 for SQLite

        logger.info("Configuring SQLite with pool_size=%s, max_overflow=%s", sqlite_pool_size, sqlite_max_overflow)

        return create_engine(
            settings.database_url,
            pool_pre_ping=True,  # quick liveness check per checkout
            pool_size=sqlite_pool_size,
            max_overflow=sqlite_max_overflow,
            pool_timeout=settings.db_pool_timeout,
            pool_recycle=settings.db_pool_recycle,
            # SQLite specific optimizations
            poolclass=QueuePool,  # Explicit pool class
            connect_args=connect_args,
            # Log pool events in debug mode
            echo_pool=settings.log_level == "DEBUG",
            # Log all SQL queries when SQLALCHEMY_ECHO=true (useful for N+1 detection)
            echo=_sqlalchemy_echo,
        )

    if backend in ("mysql", "mariadb"):
        # MariaDB/MySQL specific configuration
        logger.info("Configuring MariaDB/MySQL with pool_size=%s, max_overflow=%s", settings.db_pool_size, settings.db_max_overflow)

        return create_engine(
            settings.database_url,
            pool_pre_ping=True,
            pool_size=settings.db_pool_size,
            max_overflow=settings.db_max_overflow,
            pool_timeout=settings.db_pool_timeout,
            pool_recycle=settings.db_pool_recycle,
            connect_args=connect_args,
            isolation_level="READ_COMMITTED",  # Fix PyMySQL sync issues
            # Log all SQL queries when SQLALCHEMY_ECHO=true (useful for N+1 detection)
            echo=_sqlalchemy_echo,
        )

    # Determine if PgBouncer is in use (detected via URL or explicit config).
    # NOTE(review): detection is substring-based, so any URL containing
    # "pgbouncer" (host, user, db name) triggers the PgBouncer path.
    is_pgbouncer = "pgbouncer" in settings.database_url.lower()

    # Determine pool class based on configuration
    # - "auto": NullPool with PgBouncer (recommended), QueuePool otherwise
    # - "null": Always NullPool (delegate pooling to PgBouncer/external pooler)
    # - "queue": Always QueuePool (application-side pooling)
    use_null_pool = False
    if settings.db_pool_class == "null":
        use_null_pool = True
        logger.info("Using NullPool (explicit configuration)")
    elif settings.db_pool_class == "auto" and is_pgbouncer:
        use_null_pool = True
        logger.info("PgBouncer detected - using NullPool (recommended: let PgBouncer handle pooling)")
    elif settings.db_pool_class == "queue":
        logger.info("Using QueuePool (explicit configuration)")
    else:
        logger.info("Using QueuePool with pool_size=%s, max_overflow=%s", settings.db_pool_size, settings.db_max_overflow)

    # Determine pre_ping setting
    # - "auto": Enabled for non-PgBouncer with QueuePool, disabled otherwise
    # - "true": Always enable (validates connections, catches stale connections)
    # - "false": Always disable
    if settings.db_pool_pre_ping == "true":
        use_pre_ping = True
        logger.info("pool_pre_ping enabled (explicit configuration)")
    elif settings.db_pool_pre_ping == "false":
        use_pre_ping = False
        logger.info("pool_pre_ping disabled (explicit configuration)")
    else:  # "auto"
        # With NullPool, pre_ping is not needed (no pooled connections to validate).
        # With QueuePool + PgBouncer, pre_ping helps detect stale connections.
        use_pre_ping = not use_null_pool and not is_pgbouncer
        if is_pgbouncer and not use_null_pool:
            logger.info("PgBouncer with QueuePool - consider enabling DB_POOL_PRE_PING=true to detect stale connections")

    # Build engine with appropriate pool configuration
    if use_null_pool:
        return create_engine(
            settings.database_url,
            poolclass=NullPool,
            connect_args=connect_args,
            echo=_sqlalchemy_echo,
        )

    return create_engine(
        settings.database_url,
        pool_pre_ping=use_pre_ping,
        pool_size=settings.db_pool_size,
        max_overflow=settings.db_max_overflow,
        pool_timeout=settings.db_pool_timeout,
        pool_recycle=settings.db_pool_recycle,
        connect_args=connect_args,
        echo=_sqlalchemy_echo,
    )

235 

236 

engine = build_engine()

# Initialize SQLAlchemy instrumentation for observability.
# The import is deferred so the gateway still starts when the optional
# instrumentation module is unavailable (only ImportError is tolerated).
if settings.observability_enabled:
    try:
        # First-Party
        from mcpgateway.instrumentation import instrument_sqlalchemy

        instrument_sqlalchemy(engine)
        logger.info("SQLAlchemy instrumentation enabled for observability")
    except ImportError:
        logger.warning("Failed to import SQLAlchemy instrumentation")

249 

250 

251# --------------------------------------------------------------------------- 

252# 6. Function to return UTC timestamp 

253# --------------------------------------------------------------------------- 

def utc_now() -> datetime:
    """Return the current time as a timezone-aware UTC ``datetime``.

    Returns:
        datetime: The current moment whose ``tzinfo`` is
        ``datetime.timezone.utc``.

    Examples:
        >>> from mcpgateway.db import utc_now
        >>> stamp = utc_now()
        >>> stamp.tzinfo is not None
        True
        >>> str(stamp.tzinfo)
        'UTC'
        >>> isinstance(stamp, datetime)
        True
    """
    return datetime.now(tz=timezone.utc)

272 

273 

class TokenEncryptionWriteError(ValueError):
    """Signal that an OAuth token could not be encrypted while binding a DB write."""

276 

277 

class EncryptedText(TypeDecorator):  # pylint: disable=too-many-ancestors
    """Text column type with best-effort encryption at the ORM boundary.

    Stays compatible with service-layer encryption:
    - Values that are already encrypted pass through unchanged.
    - Plaintext is encrypted before persistence whenever a secret is configured.
    - Encrypted values are transparently decrypted on read for runtime usage.
    """

    impl = Text
    cache_ok = True

    @property
    def python_type(self):
        """Python type handled by this SQLAlchemy type.

        Returns:
            type: Always the Python ``str`` type.
        """
        return str

    @staticmethod
    def _get_encryption():
        """Look up the encryption service used for column-level token protection.

        Returns:
            Optional[EncryptionService]: The service when an encryption secret
            is configured and the service can be imported, otherwise ``None``.
        """
        secret = getattr(settings, "auth_encryption_secret", None)
        if not secret:
            return None
        try:
            # First-Party
            from mcpgateway.services.encryption_service import get_encryption_service  # pylint: disable=import-outside-toplevel

            return get_encryption_service(secret)
        except Exception as exc:
            logger.debug("Unable to initialize encryption service for EncryptedText: %s", exc)
            return None

    def process_literal_param(self, value, _dialect):  # pylint: disable=unused-argument
        """Render a literal SQL parameter through the encrypted bind path.

        Args:
            value (Any): Raw value from SQLAlchemy.
            _dialect: SQLAlchemy dialect (unused).

        Returns:
            Any: Value after encryption handling.
        """
        return self.process_bind_param(value, _dialect)

    def process_bind_param(self, value, _dialect):  # pylint: disable=unused-argument
        """Encrypt plaintext before persistence when encryption is configured.

        Non-string values, ``None``, and empty strings pass through untouched,
        as do values that are already encrypted.

        Args:
            value (Any): Raw value from SQLAlchemy.
            _dialect: SQLAlchemy dialect (unused).

        Returns:
            Any: Encrypted value for persistence, or the unchanged value when
            no encryption is applied.

        Raises:
            TokenEncryptionWriteError: If encryption is configured and token
                encryption fails.
        """
        if value in (None, "") or not isinstance(value, str):
            return value

        service = self._get_encryption()
        if not service:
            return value

        try:
            return value if service.is_encrypted(value) else service.encrypt_secret(value)
        except Exception as exc:
            # Reject the write rather than silently persisting plaintext.
            logger.warning("EncryptedText bind encryption failed; rejecting token write")
            logger.debug("EncryptedText bind encryption exception: %s", exc)
            raise TokenEncryptionWriteError("OAuth token encryption failed during write") from exc

    def process_result_value(self, value, _dialect):  # pylint: disable=unused-argument
        """Decrypt stored encrypted values when reading rows.

        Args:
            value (Any): Raw value loaded from the database.
            _dialect: SQLAlchemy dialect (unused).

        Returns:
            Any: Decrypted value when the stored value is encrypted, otherwise
            the value unchanged. On decryption failure the stored value is
            returned as-is (best effort).
        """
        if value in (None, "") or not isinstance(value, str):
            return value

        service = self._get_encryption()
        if not service:
            return value

        try:
            if not service.is_encrypted(value):
                return value
            plain = service.decrypt_secret_or_plaintext(value)
            return value if plain is None else plain
        except Exception as exc:
            logger.warning("EncryptedText result decryption failed, returning stored value")
            logger.debug("EncryptedText result decryption exception: %s", exc)
            return value

389 

390 

# Configure SQLite for better concurrency if using SQLite.
# The listener is registered at import time, once per process.
if backend == "sqlite":

    @event.listens_for(engine, "connect")
    def set_sqlite_pragma(dbapi_conn, _connection_record):
        """Set SQLite pragmas for better concurrency.

        This is critical for running with multiple gunicorn workers.
        WAL mode allows multiple readers and a single writer concurrently.
        Pragmas are applied on every new DBAPI connection.

        Args:
            dbapi_conn: The raw DBAPI connection.
            _connection_record: A SQLAlchemy-specific object that maintains
                information about the connection's context.
        """
        cursor = dbapi_conn.cursor()
        # Enable WAL mode for better concurrency
        cursor.execute("PRAGMA journal_mode=WAL")
        # Configure SQLite lock wait upper bound (ms) to prevent prolonged blocking under contention
        cursor.execute(f"PRAGMA busy_timeout={settings.db_sqlite_busy_timeout}")
        # Synchronous=NORMAL is safe with WAL mode and improves performance
        cursor.execute("PRAGMA synchronous=NORMAL")
        # Increase cache size for better performance (negative value = KB)
        cursor.execute("PRAGMA cache_size=-64000")  # 64MB cache
        # Enable foreign key constraints for ON DELETE CASCADE support
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()

418 

419 

420# --------------------------------------------------------------------------- 

421# Resilient Session class for graceful error recovery 

422# --------------------------------------------------------------------------- 

class ResilientSession(Session):
    """A Session subclass that auto-rollbacks on connection errors.

    When a database operation fails due to a connection error (e.g., PgBouncer
    query_wait_timeout), this session automatically rolls back to clear the
    invalid transaction state. This prevents cascading PendingRollbackError
    failures when multiple queries run within the same request.

    Without this, the first failed query leaves the session in a "needs rollback"
    state, and all subsequent queries fail with PendingRollbackError before
    even attempting to use the database.
    """

    # Error types that indicate connection issues requiring rollback
    _connection_error_patterns = (
        "query_wait_timeout",
        "server closed the connection unexpectedly",
        "connection reset by peer",
        "connection timed out",
        "could not receive data from server",
        "could not send data to server",
        "terminating connection",
        "no connection to the server",
    )

    def _is_connection_error(self, exception: Exception) -> bool:
        """Check if an exception indicates a broken database connection.

        Args:
            exception: The exception to check.

        Returns:
            True if the exception indicates a connection error, False otherwise.
        """
        exc_name = type(exception).__name__
        exc_msg = str(exception).lower()

        # Check for known connection error types
        if exc_name in ("ProtocolViolation", "OperationalError", "InterfaceError"):
            return True

        # Check for connection error patterns in message
        for pattern in self._connection_error_patterns:
            if pattern in exc_msg:
                return True

        return False

    def _safe_rollback(self) -> None:
        """Attempt to rollback, invalidating the session if rollback fails."""
        try:
            self.rollback()
        except Exception:
            try:
                self.invalidate()
            except Exception:
                pass  # nosec B110 - Best effort cleanup on connection failure

    def _recover_if_connection_error(self, exc: Exception, operation: str) -> None:
        """Roll back the session when *exc* looks like a broken connection.

        Shared failure path for ``execute``/``scalar``/``scalars`` so the
        detect-log-rollback sequence lives in exactly one place.

        Args:
            exc: The exception raised by the wrapped operation.
            operation: Name of the Session method that failed (for logging).
        """
        if self._is_connection_error(exc):
            logger.warning(
                "Connection error during %s, auto-rolling back session: %s",
                operation,
                type(exc).__name__,
            )
            self._safe_rollback()

    def execute(self, statement, params=None, **kw):
        """Execute a statement with automatic rollback on connection errors.

        Wraps the parent execute method to catch connection errors and
        automatically rollback the session to prevent PendingRollbackError cascade.

        Args:
            statement: The SQL statement to execute.
            params: Optional parameters for the statement.
            **kw: Additional keyword arguments passed to Session.execute().

        Returns:
            The result of the execute operation.

        Raises:
            Exception: Re-raises any exception after rolling back on connection errors.
        """
        try:
            return super().execute(statement, params, **kw)
        except Exception as e:
            self._recover_if_connection_error(e, "execute")
            raise

    def scalar(self, statement, params=None, **kw):
        """Execute and return a scalar with automatic rollback on connection errors.

        Wraps the parent scalar method to catch connection errors and
        automatically rollback the session to prevent PendingRollbackError cascade.

        Args:
            statement: The SQL statement to execute.
            params: Optional parameters for the statement.
            **kw: Additional keyword arguments passed to Session.scalar().

        Returns:
            The scalar result of the query.

        Raises:
            Exception: Re-raises any exception after rolling back on connection errors.
        """
        try:
            return super().scalar(statement, params, **kw)
        except Exception as e:
            self._recover_if_connection_error(e, "scalar")
            raise

    def scalars(self, statement, params=None, **kw):
        """Execute and return scalars with automatic rollback on connection errors.

        Wraps the parent scalars method to catch connection errors and
        automatically rollback the session to prevent PendingRollbackError cascade.

        Args:
            statement: The SQL statement to execute.
            params: Optional parameters for the statement.
            **kw: Additional keyword arguments passed to Session.scalars().

        Returns:
            The scalars result of the query.

        Raises:
            Exception: Re-raises any exception after rolling back on connection errors.
        """
        try:
            return super().scalars(statement, params, **kw)
        except Exception as e:
            self._recover_if_connection_error(e, "scalars")
            raise

564 

565 

# Session factory using ResilientSession.
# expire_on_commit=False prevents SQLAlchemy from expiring ORM objects after commit,
# allowing continued access to attributes without re-querying the database.
# This is essential when commits happen during read operations (e.g., to release transactions).
SessionLocal = sessionmaker(class_=ResilientSession, autocommit=False, autoflush=False, expire_on_commit=False, bind=engine)

571 

572 

@event.listens_for(ResilientSession, "after_transaction_end")
def end_transaction_cleanup(_session, _transaction):
    """Ensure connection is properly released after transaction ends.

    This event fires after COMMIT or ROLLBACK, ensuring the connection
    is returned to PgBouncer cleanly with no open transaction.

    Args:
        _session: The SQLAlchemy session that ended the transaction.
        _transaction: The transaction that was ended.
    """
    # The transaction has already ended - nothing to do here.
    # This hook is intentionally a no-op, kept as an attachment point
    # for monitoring/logging if needed.

586 

587 

@event.listens_for(ResilientSession, "before_commit")
def before_commit_handler(session):
    """Handler before commit to ensure transaction is in good state.

    This is called before COMMIT, ensuring any pending work is flushed.
    If the flush fails, the exception is propagated so the commit also fails
    and the caller's error handling (e.g. get_db rollback) can clean up properly.

    Args:
        session: The SQLAlchemy session about to commit.
    """
    session.flush()

600 

601 

602# --------------------------------------------------------------------------- 

603# Pool event listeners for connection resilience 

604# These handlers ensure broken connections are properly invalidated and 

605# discarded from the pool, preventing "poisoned" connections from causing 

606# cascading failures (e.g., PendingRollbackError after PgBouncer timeout). 

607# 

608# Key issue: PgBouncer returns ProtocolViolation (SQL error 08P01) for 

609# query_wait_timeout, but SQLAlchemy doesn't recognize this as a disconnect 

610# by default. We must explicitly mark these errors as disconnects so the 

611# connection pool properly invalidates these connections. 

612# 

613# References: 

614# - https://github.com/zodb/relstorage/issues/412 

615# - https://docs.sqlalchemy.org/en/20/core/pooling.html#custom-legacy-pessimistic-ping 

616# --------------------------------------------------------------------------- 

@event.listens_for(engine, "handle_error")
def handle_pool_error(exception_context):
    """Mark PgBouncer and connection errors as disconnects for proper pool invalidation.

    This event fires when an error occurs during query execution. Flagging
    certain errors as disconnects (is_disconnect=True) makes SQLAlchemy:
    1. Invalidate the current connection (discard from pool)
    2. Invalidate all other pooled connections older than current time

    Without this, PgBouncer errors like query_wait_timeout surface as
    ProtocolViolation, which is classified as DatabaseError rather than a
    disconnect; the connection would stay pooled and trigger
    PendingRollbackError on reuse.

    Args:
        exception_context: SQLAlchemy ExceptionContext with error details.
    """
    original = exception_context.original_exception
    if original is None:
        return

    # Exception class name and lowercased message drive the pattern matching.
    exc_class = type(original).__name__
    exc_msg = str(original).lower()

    # Message fragments that indicate the connection is broken and should be
    # treated as a disconnect for pool invalidation.
    disconnect_patterns = (
        # PgBouncer errors
        "query_wait_timeout",
        "server_login_retry",
        "client_login_timeout",
        "client_idle_timeout",
        "idle_transaction_timeout",
        "server closed the connection unexpectedly",
        "connection reset by peer",
        "connection timed out",
        "no connection to the server",
        "terminating connection",
        "connection has been closed unexpectedly",
        # PostgreSQL errors indicating dead connection
        "could not receive data from server",
        "could not send data to server",
        "ssl connection has been closed unexpectedly",
        "canceling statement due to conflict with recovery",
    )

    # Only connection-flavoured exception classes are eligible for
    # message-pattern matching.
    if exc_class in ("ProtocolViolation", "OperationalError", "InterfaceError", "DatabaseError"):
        matched = next((fragment for fragment in disconnect_patterns if fragment in exc_msg), None)
        if matched is not None:
            exception_context.is_disconnect = True
            logger.warning(
                "Connection error detected, marking as disconnect for pool invalidation: %s: %s",
                exc_class,
                matched,
            )
            return

    # Also treat ProtocolViolation from PgBouncer as disconnect even without
    # a message match - PgBouncer sends 08P01 PROTOCOL_VIOLATION for various
    # connection issues.
    if exc_class == "ProtocolViolation":
        exception_context.is_disconnect = True
        logger.warning(
            "ProtocolViolation detected (likely PgBouncer), marking as disconnect: %s",
            exc_msg[:200],
        )

685 

686 

@event.listens_for(engine, "checkin")
def reset_connection_on_checkin(dbapi_connection, _connection_record):
    """Reset connection state when returned to pool.

    Ensures any open transaction is closed before the connection is handed
    back to PgBouncer, preventing 'idle in transaction' buildup. With
    PgBouncer in transaction mode, connections stay reserved until the
    transaction ends - this rollback releases them immediately.

    The rollback is safe for both reads and writes: reads have nothing to
    undo, and writes should already be committed by the application.

    Args:
        dbapi_connection: The raw DBAPI connection being checked in.
        _connection_record: The connection record tracking this connection.
    """
    try:
        dbapi_connection.rollback()
    except Exception as rollback_exc:
        # Connection may be invalid - log, then force-close as a last resort
        # so PgBouncer releases it.
        logger.debug("Connection checkin rollback failed: %s", rollback_exc)
        try:
            dbapi_connection.close()
        except Exception:  # nosec B110
            pass  # Nothing more we can do

714 

715 

@event.listens_for(engine, "reset")
def reset_connection_on_reset(dbapi_connection, _connection_record, _reset_state):
    """Reset connection state when the pool resets a connection.

    Handles the case where a connection is being reset before reuse; a
    best-effort rollback clears any lingering transaction.

    Args:
        dbapi_connection: The raw DBAPI connection being reset.
        _connection_record: The connection record tracking this connection.
        _reset_state: Pool reset state descriptor supplied by SQLAlchemy (unused).
    """
    try:
        dbapi_connection.rollback()
    except Exception:  # nosec B110
        pass  # Connection may be invalid

730 

731 

def _refresh_gateway_slugs_batched(session: Session, batch_size: int) -> None:
    """Refresh gateway slugs in small batches to reduce memory usage.

    Uses 2.0-style ``select()`` keyset pagination on ``Gateway.id``, matching
    the style of ``_refresh_tool_names_batched`` and
    ``_refresh_prompt_names_batched`` (previously this was the only batched
    refresher still using the legacy ``session.query()`` API).

    Args:
        session: Active SQLAlchemy session.
        batch_size: Maximum number of rows to process per batch.
    """
    last_id: Optional[str] = None

    while True:
        stmt = select(Gateway).order_by(Gateway.id).limit(batch_size)
        if last_id is not None:
            stmt = stmt.where(Gateway.id > last_id)

        gateways = session.execute(stmt).scalars().all()
        if not gateways:
            break

        updated = False
        for gateway in gateways:
            new_slug = slugify(gateway.name)
            if gateway.slug != new_slug:
                gateway.slug = new_slug
                updated = True

        # Only touch the database when at least one slug actually changed
        if updated:
            session.commit()

        # Free ORM state from memory between batches
        session.expire_all()
        last_id = gateways[-1].id

764 

765 

def _refresh_tool_names_batched(session: Session, batch_size: int) -> None:
    """Refresh tool names in batches with eager-loaded gateways.

    ``joinedload(Tool.gateway)`` keeps gateway access from triggering N+1
    queries while the qualified tool names are regenerated.

    Args:
        session: Active SQLAlchemy session.
        batch_size: Maximum number of rows to process per batch.
    """
    cursor_id: Optional[str] = None
    sep = settings.gateway_tool_name_separator

    while True:
        stmt = select(Tool).options(joinedload(Tool.gateway)).order_by(Tool.id).limit(batch_size)
        if cursor_id is not None:
            stmt = stmt.where(Tool.id > cursor_id)

        batch = session.execute(stmt).scalars().all()
        if not batch:
            break

        dirty = False
        for tool in batch:
            # custom_name_slug wins when available; original_name is the fallback
            base = slugify(getattr(tool, "custom_name_slug", None) or tool.original_name)

            if tool.gateway:
                new_name = f"{slugify(tool.gateway.name)}{sep}{base}"
            else:
                new_name = base

            if tool.name != new_name:
                tool.name = new_name
                dirty = True

        if dirty:
            session.commit()

        # Drop ORM state between batches to keep memory bounded
        session.expire_all()
        cursor_id = batch[-1].id

811 

812 

def _refresh_prompt_names_batched(session: Session, batch_size: int) -> None:
    """Regenerate composite prompt names one keyset-paginated batch at a time.

    Prompts are walked in ascending id order (``id > cursor``) so at most
    ``batch_size`` rows are loaded at once; ``joinedload(Prompt.gateway)``
    fetches the owning gateway alongside each prompt to avoid N+1 queries.

    Args:
        session: Active SQLAlchemy session.
        batch_size: Maximum number of rows fetched per batch.
    """
    cursor: Optional[str] = None
    sep = settings.gateway_tool_name_separator

    while True:
        stmt = select(Prompt).options(joinedload(Prompt.gateway)).order_by(Prompt.id).limit(batch_size)
        if cursor is not None:
            stmt = stmt.where(Prompt.id > cursor)

        batch = session.execute(stmt).scalars().all()
        if not batch:
            break

        changed = False
        for prompt in batch:
            # Prefer custom_name_slug when present, else the original name.
            base_slug = slugify(getattr(prompt, "custom_name_slug", None) or prompt.original_name)
            target = f"{slugify(prompt.gateway.name)}{sep}{base_slug}" if prompt.gateway else base_slug

            if prompt.name != target:
                prompt.name = target
                changed = True

        if changed:
            session.commit()

        # Expire loaded ORM state so memory use stays bounded across batches.
        session.expire_all()
        cursor = batch[-1].id

855 

856 

def refresh_slugs_on_startup(batch_size: Optional[int] = None) -> None:
    """Refresh slugs for all gateways and tool/prompt names on startup.

    Streams through the gateway, tool, and prompt tables in bounded batches
    (with eager-loaded gateway relationships) instead of loading every row
    at once, so startup memory stays flat even on large databases. Missing
    tables and database errors are logged and tolerated: this routine must
    never prevent the application from starting.

    Args:
        batch_size: Optional maximum number of rows to process per batch.
            Falls back to ``settings.slug_refresh_batch_size`` (default
            ``1000``) when omitted.
    """
    effective = batch_size or getattr(settings, "slug_refresh_batch_size", 1000)

    try:
        with cast(Any, SessionLocal)() as session:
            try:
                _refresh_gateway_slugs_batched(session, effective)
            except (OperationalError, ProgrammingError) as exc:
                # Fresh database: gateway table not created yet, nothing to do.
                logger.info("Gateway table not found, skipping slug refresh: %s", exc)
                return

            # Tool and prompt refreshes are independent best-effort steps:
            # a missing table skips that step only, never aborts the rest.
            for refresh, skip_message in (
                (_refresh_tool_names_batched, "Tool table not found, skipping tool name refresh: %s"),
                (_refresh_prompt_names_batched, "Prompt table not found, skipping prompt name refresh: %s"),
            ):
                try:
                    refresh(session, effective)
                except (OperationalError, ProgrammingError) as exc:
                    logger.info(skip_message, exc)
    except SQLAlchemyError as exc:
        logger.warning("Failed to refresh slugs on startup (database error): %s", exc)
    except Exception as exc:  # noqa: BLE001 - startup must survive any failure here
        logger.warning("Failed to refresh slugs on startup (unexpected error): %s", exc)

898 

899 

class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in this module.

    Attaches a naming convention to the shared :class:`MetaData` so that
    every primary key, foreign key, index, unique and check constraint gets
    a deterministic generated name (kept MariaDB-compatible).
    """

    metadata = MetaData(
        naming_convention={
            "pk": "pk_%(table_name)s",
            "fk": "fk_%(table_name)s_%(column_0_name)s",
            "uq": "uq_%(table_name)s_%(column_0_name)s",
            "ix": "ix_%(table_name)s_%(column_0_name)s",
            "ck": "ck_%(table_name)s_%(constraint_name)s",
        }
    )

913 

914 

915# --------------------------------------------------------------------------- 

916# RBAC Models - SQLAlchemy Database Models 

917# --------------------------------------------------------------------------- 

918 

919 

class Role(Base):
    """Role model for RBAC system.

    Roles carry a list of permission strings, may inherit from a parent
    role via ``inherits_from``, and are scoped to 'global', 'team', or
    'personal' contexts.
    """

    __tablename__ = "roles"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))

    # Role metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    scope: Mapped[str] = mapped_column(String(20), nullable=False)  # 'global', 'team', 'personal'

    # Permissions and inheritance
    permissions: Mapped[List[str]] = mapped_column(JSON, nullable=False, default=list)
    inherits_from: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("roles.id"), nullable=True)

    # Metadata
    created_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
    is_system_role: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, onupdate=utc_now)

    # Relationships
    parent_role: Mapped[Optional["Role"]] = relationship("Role", remote_side=[id], backref="child_roles")
    user_assignments: Mapped[List["UserRole"]] = relationship("UserRole", back_populates="role", cascade="all, delete-orphan")

    def get_effective_permissions(self) -> List[str]:
        """Get all permissions including inherited ones.

        Walks the ``parent_role`` chain iteratively, accumulating each
        ancestor's permissions. Already-visited role objects are skipped so
        an accidental inheritance cycle in the database (A inherits B,
        B inherits A — nothing in the schema prevents this) cannot cause
        unbounded recursion.

        Returns:
            List of permission strings including inherited permissions,
            sorted alphabetically.
        """
        effective_permissions: set = set()
        visited: set = set()  # object ids of roles already processed (cycle guard)
        role: Optional["Role"] = self
        while role is not None and id(role) not in visited:
            visited.add(id(role))
            # ``or ()`` guards transient instances where the JSON column
            # default has not been applied yet (permissions is still None).
            effective_permissions.update(role.permissions or ())
            role = role.parent_role
        return sorted(effective_permissions)

960 

961 

class UserRole(Base):
    """User role assignment model.

    Links a user (by email) to a :class:`Role` within a scope, with grant
    provenance and an optional expiry.
    """

    __tablename__ = "user_roles"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))

    # Assignment details
    user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
    role_id: Mapped[str] = mapped_column(String(36), ForeignKey("roles.id"), nullable=False)
    scope: Mapped[str] = mapped_column(String(20), nullable=False)  # 'global', 'team', 'personal'
    scope_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)  # Team ID if team-scoped

    # Grant metadata
    granted_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
    granted_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
    expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    grant_source: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, default=None)

    # Relationships
    role: Mapped["Role"] = relationship("Role", back_populates="user_assignments")

    def is_expired(self) -> bool:
        """Check if the role assignment has expired.

        Naive ``expires_at`` values are normalized to UTC before the
        comparison: SQLite strips timezone info from timezone-aware
        columns, and comparing a naive datetime against the aware
        ``utc_now()`` would raise ``TypeError``. This mirrors the
        normalization done in ``EmailUser.is_account_locked``.

        Returns:
            True if assignment has expired, False otherwise
        """
        if not self.expires_at:
            return False
        expires_at = self.expires_at
        if expires_at.tzinfo is None:
            # Treat naive datetimes as UTC (SQLite strips timezone info)
            expires_at = expires_at.replace(tzinfo=timezone.utc)
        return utc_now() > expires_at

995 

996 

class PermissionAuditLog(Base):
    """Permission audit log model.

    Append-only record of permission checks: which user asked for which
    permission on which resource, whether it was granted, and the request
    context (IP address, user agent).
    """

    __tablename__ = "permission_audit_log"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Audit metadata
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
    # Nullable: presumably to allow logging checks for unauthenticated or
    # unknown principals — verify against callers.
    user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Permission details
    permission: Mapped[str] = mapped_column(String(100), nullable=False)
    resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    resource_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    team_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)

    # Result
    granted: Mapped[bool] = mapped_column(Boolean, nullable=False)
    # JSON snapshot; presumably the roles consulted for this decision —
    # verify shape against the writer.
    roles_checked: Mapped[Optional[Dict]] = mapped_column(JSON, nullable=True)

    # Request metadata
    ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # IPv6 max length
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

1022 

1023 

1024# Permission constants for the system 

# Permission constants for the system
class Permissions:
    """System permission constants.

    Each constant is a dotted (``resource.action``) or colon-separated
    (``resource:action``) permission string. The classmethods enumerate and
    group these constants for introspection.
    """

    # User permissions
    USERS_CREATE = "users.create"
    USERS_READ = "users.read"
    USERS_UPDATE = "users.update"
    USERS_DELETE = "users.delete"
    USERS_INVITE = "users.invite"

    # Team permissions
    TEAMS_CREATE = "teams.create"
    TEAMS_READ = "teams.read"
    TEAMS_UPDATE = "teams.update"
    TEAMS_DELETE = "teams.delete"
    TEAMS_JOIN = "teams.join"
    TEAMS_MANAGE_MEMBERS = "teams.manage_members"

    # Tool permissions
    TOOLS_CREATE = "tools.create"
    TOOLS_READ = "tools.read"
    TOOLS_UPDATE = "tools.update"
    TOOLS_DELETE = "tools.delete"
    TOOLS_EXECUTE = "tools.execute"

    # Resource permissions
    RESOURCES_CREATE = "resources.create"
    RESOURCES_READ = "resources.read"
    RESOURCES_UPDATE = "resources.update"
    RESOURCES_DELETE = "resources.delete"
    RESOURCES_SHARE = "resources.share"

    # Gateway permissions
    GATEWAYS_CREATE = "gateways.create"
    GATEWAYS_READ = "gateways.read"
    GATEWAYS_UPDATE = "gateways.update"
    GATEWAYS_DELETE = "gateways.delete"

    # Prompt permissions
    PROMPTS_CREATE = "prompts.create"
    PROMPTS_READ = "prompts.read"
    PROMPTS_UPDATE = "prompts.update"
    PROMPTS_DELETE = "prompts.delete"
    PROMPTS_EXECUTE = "prompts.execute"

    # MCP method permission prefixes — used by token_catalog_service (generation-time)
    # and token_scoping middleware (runtime) to auto-grant servers.use transport access.
    # Note: being a tuple, this is excluded from get_all_permissions().
    MCP_METHOD_PREFIXES = ("tools.", "resources.", "prompts.")

    # LLM proxy permissions
    LLM_READ = "llm.read"
    LLM_INVOKE = "llm.invoke"

    # Server permissions
    SERVERS_CREATE = "servers.create"
    SERVERS_READ = "servers.read"
    SERVERS_USE = "servers.use"
    SERVERS_UPDATE = "servers.update"
    SERVERS_DELETE = "servers.delete"
    SERVERS_MANAGE = "servers.manage"

    # Token permissions
    TOKENS_CREATE = "tokens.create"
    TOKENS_READ = "tokens.read"
    TOKENS_UPDATE = "tokens.update"
    TOKENS_REVOKE = "tokens.revoke"

    # Admin permissions
    ADMIN_SYSTEM_CONFIG = "admin.system_config"
    ADMIN_USER_MANAGEMENT = "admin.user_management"
    ADMIN_SECURITY_AUDIT = "admin.security_audit"
    ADMIN_OVERVIEW = "admin.overview"
    ADMIN_DASHBOARD = "admin.dashboard"
    ADMIN_EVENTS = "admin.events"
    ADMIN_GRPC = "admin.grpc"
    ADMIN_PLUGINS = "admin.plugins"
    ADMIN_METRICS = "admin.metrics"
    ADMIN_EXPORT = "admin.export"
    ADMIN_IMPORT = "admin.import"
    ADMIN_SSO_PROVIDERS_CREATE = "admin.sso_providers:create"
    ADMIN_SSO_PROVIDERS_READ = "admin.sso_providers:read"
    ADMIN_SSO_PROVIDERS_UPDATE = "admin.sso_providers:update"
    ADMIN_SSO_PROVIDERS_DELETE = "admin.sso_providers:delete"

    # Observability and audit read permissions
    LOGS_READ = "logs:read"
    METRICS_READ = "metrics:read"
    AUDIT_READ = "audit:read"
    SECURITY_READ = "security:read"

    # A2A Agent permissions
    A2A_CREATE = "a2a.create"
    A2A_READ = "a2a.read"
    A2A_UPDATE = "a2a.update"
    A2A_DELETE = "a2a.delete"
    A2A_INVOKE = "a2a.invoke"

    # Tag permissions
    TAGS_READ = "tags.read"
    TAGS_CREATE = "tags.create"
    TAGS_UPDATE = "tags.update"
    TAGS_DELETE = "tags.delete"

    # Special permissions
    ALL_PERMISSIONS = "*"  # Wildcard for all permissions

    @classmethod
    def get_all_permissions(cls) -> List[str]:
        """Get list of all defined permissions.

        Enumerates every UPPER_CASE string constant on the class except the
        ``ALL_PERMISSIONS`` wildcard; non-string attributes (such as the
        ``MCP_METHOD_PREFIXES`` tuple) are filtered out.

        Returns:
            List of all permission strings defined in the class
        """
        candidates = (getattr(cls, name) for name in dir(cls) if not name.startswith("_") and name.isupper() and name != "ALL_PERMISSIONS")
        return sorted(value for value in candidates if isinstance(value, str))

    @classmethod
    def get_permissions_by_resource(cls) -> Dict[str, List[str]]:
        """Get permissions organized by resource type.

        The resource type is the text before the first ``.`` (checked
        first) or ``:``; a permission with neither separator is its own
        resource type.

        Returns:
            Dictionary mapping resource types to their permissions
        """
        grouped: Dict[str, List[str]] = {}
        for permission in cls.get_all_permissions():
            if "." in permission:
                resource_type = permission.split(".", 1)[0]
            elif ":" in permission:
                resource_type = permission.split(":", 1)[0]
            else:
                resource_type = permission
            grouped.setdefault(resource_type, []).append(permission)
        return grouped

1165 

1166 

1167# --------------------------------------------------------------------------- 

1168# Email-based User Authentication Models 

1169# --------------------------------------------------------------------------- 

1170 

1171 

class EmailUser(Base):
    """Email-based user model for authentication.

    This model provides email-based authentication as the foundation
    for all multi-user features. Users are identified by email addresses
    instead of usernames.

    Attributes:
        email (str): Primary key, unique email identifier
        password_hash (str): Argon2id hashed password
        full_name (str): Optional display name for professional appearance
        is_admin (bool): Admin privileges flag
        is_active (bool): Account status flag
        auth_provider (str): Authentication provider ('local', 'github', etc.)
        password_hash_type (str): Type of password hash used
        failed_login_attempts (int): Count of failed login attempts
        locked_until (datetime): Account lockout expiration
        created_at (datetime): Account creation timestamp
        updated_at (datetime): Last account update timestamp
        last_login (datetime): Last successful login timestamp
        email_verified_at (datetime): Email verification timestamp

    Examples:
        >>> user = EmailUser(
        ...     email="alice@example.com",
        ...     password_hash="$argon2id$v=19$m=65536,t=3,p=1$...",
        ...     full_name="Alice Smith",
        ...     is_admin=False
        ... )
        >>> user.email
        'alice@example.com'
        >>> user.is_email_verified()
        False
        >>> user.is_account_locked()
        False
    """

    __tablename__ = "email_users"

    # Core identity fields
    email: Mapped[str] = mapped_column(String(255), primary_key=True, index=True)
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    full_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
    is_admin: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # Track how admin status was granted: "sso" (synced from IdP), "manual" (Admin UI), "api" (API grant), or None (legacy)
    admin_origin: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)

    # Status fields
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    email_verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)

    # Security fields
    auth_provider: Mapped[str] = mapped_column(String(50), default="local", nullable=False)
    password_hash_type: Mapped[str] = mapped_column(String(20), default="argon2id", nullable=False)
    failed_login_attempts: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    locked_until: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    password_change_required: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    password_changed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=True)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
    last_login: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)

    def __repr__(self) -> str:
        """String representation of the user.

        Returns:
            str: String representation of EmailUser instance
        """
        return f"<EmailUser(email='{self.email}', full_name='{self.full_name}', is_admin={self.is_admin})>"

    def is_email_verified(self) -> bool:
        """Check if the user's email is verified.

        Returns:
            bool: True if email is verified, False otherwise

        Examples:
            >>> user = EmailUser(email="test@example.com")
            >>> user.is_email_verified()
            False
            >>> user.email_verified_at = utc_now()
            >>> user.is_email_verified()
            True
        """
        return self.email_verified_at is not None

    def is_account_locked(self) -> bool:
        """Check if the account is currently locked.

        Note: this check has a side effect — when the lockout window has
        elapsed, it clears ``failed_login_attempts`` and ``locked_until``
        on the instance (callers are expected to commit the session).

        Returns:
            bool: True if account is locked, False otherwise

        Examples:
            >>> from datetime import timedelta
            >>> user = EmailUser(email="test@example.com")
            >>> user.is_account_locked()
            False
            >>> user.locked_until = utc_now() + timedelta(hours=1)
            >>> user.is_account_locked()
            True
        """
        if self.locked_until is None:
            return False
        locked_until = self.locked_until
        if locked_until.tzinfo is None:
            # Treat naive datetimes as UTC (SQLite strips timezone info)
            locked_until = locked_until.replace(tzinfo=timezone.utc)
        if utc_now() >= locked_until:
            # Lockout expired: reset counters so users get a fresh attempt window.
            self.failed_login_attempts = 0
            self.locked_until = None
            return False
        return True

    def get_display_name(self) -> str:
        """Get the user's display name.

        Returns the full_name if available, otherwise extracts
        the local part from the email address.

        Returns:
            str: Display name for the user

        Examples:
            >>> user = EmailUser(email="john@example.com", full_name="John Doe")
            >>> user.get_display_name()
            'John Doe'
            >>> user_no_name = EmailUser(email="jane@example.com")
            >>> user_no_name.get_display_name()
            'jane'
        """
        if self.full_name:
            return self.full_name
        return self.email.split("@")[0]

    def reset_failed_attempts(self) -> None:
        """Reset failed login attempts counter.

        Called after successful authentication to reset the
        failed attempts counter and clear any account lockout.
        Also stamps ``last_login`` with the current UTC time.

        Examples:
            >>> user = EmailUser(email="test@example.com", failed_login_attempts=3)
            >>> user.reset_failed_attempts()
            >>> user.failed_login_attempts
            0
            >>> user.locked_until is None
            True
        """
        self.failed_login_attempts = 0
        self.locked_until = None
        self.last_login = utc_now()

    def increment_failed_attempts(self, max_attempts: int = 5, lockout_duration_minutes: int = 30) -> bool:
        """Increment failed login attempts and potentially lock account.

        Args:
            max_attempts: Maximum allowed failed attempts before lockout
            lockout_duration_minutes: Duration of lockout in minutes

        Returns:
            bool: True if account is now locked, False otherwise

        Examples:
            >>> user = EmailUser(email="test@example.com", password_hash="test", failed_login_attempts=0)
            >>> user.increment_failed_attempts(max_attempts=3)
            False
            >>> user.failed_login_attempts
            1
            >>> for _ in range(2):
            ...     user.increment_failed_attempts(max_attempts=3)
            False
            True
            >>> user.is_account_locked()
            True
        """
        self.failed_login_attempts += 1

        if self.failed_login_attempts >= max_attempts:
            self.locked_until = utc_now() + timedelta(minutes=lockout_duration_minutes)
            return True

        return False

    # Team relationships (declared after the helper methods above; position
    # within the class body does not affect the mapping)
    team_memberships: Mapped[List["EmailTeamMember"]] = relationship("EmailTeamMember", foreign_keys="EmailTeamMember.user_email", back_populates="user")
    created_teams: Mapped[List["EmailTeam"]] = relationship("EmailTeam", foreign_keys="EmailTeam.created_by", back_populates="creator")
    sent_invitations: Mapped[List["EmailTeamInvitation"]] = relationship("EmailTeamInvitation", foreign_keys="EmailTeamInvitation.invited_by", back_populates="inviter")

    # API token relationships
    api_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="user", cascade="all, delete-orphan")

    def get_teams(self) -> List["EmailTeam"]:
        """Get all teams this user is a member of.

        Only memberships flagged ``is_active`` are included.

        Returns:
            List[EmailTeam]: List of teams the user belongs to

        Examples:
            >>> user = EmailUser(email="user@example.com")
            >>> teams = user.get_teams()
            >>> isinstance(teams, list)
            True
        """
        return [membership.team for membership in self.team_memberships if membership.is_active]

    def get_personal_team(self) -> Optional["EmailTeam"]:
        """Get the user's personal team.

        Scans the teams this user created for one flagged as an active
        personal team.

        Returns:
            EmailTeam: The user's personal team or None if not found

        Examples:
            >>> user = EmailUser(email="user@example.com")
            >>> personal_team = user.get_personal_team()
        """
        for team in self.created_teams:
            if team.is_personal and team.is_active:
                return team
        return None

    def is_team_member(self, team_id: str) -> bool:
        """Check if user is a member of the specified team.

        Args:
            team_id: ID of the team to check

        Returns:
            bool: True if user is a member, False otherwise

        Examples:
            >>> user = EmailUser(email="user@example.com")
            >>> user.is_team_member("team-123")
            False
        """
        return any(membership.team_id == team_id and membership.is_active for membership in self.team_memberships)

    def get_team_role(self, team_id: str) -> Optional[str]:
        """Get user's role in a specific team.

        Args:
            team_id: ID of the team to check

        Returns:
            str: User's role or None if not a member

        Examples:
            >>> user = EmailUser(email="user@example.com")
            >>> role = user.get_team_role("team-123")
        """
        for membership in self.team_memberships:
            if membership.team_id == team_id and membership.is_active:
                return membership.role
        return None

1428 

1429 

class EmailAuthEvent(Base):
    """Authentication event logging for email users.

    This model tracks all authentication attempts for auditing,
    security monitoring, and compliance purposes.

    Attributes:
        id (int): Primary key
        timestamp (datetime): Event timestamp
        user_email (str): Email of the user
        event_type (str): Type of authentication event
        success (bool): Whether the authentication was successful
        ip_address (str): Client IP address
        user_agent (str): Client user agent string
        failure_reason (str): Reason for authentication failure
        details (dict): Additional event details as JSON

    Examples:
        >>> event = EmailAuthEvent(
        ...     user_email="alice@example.com",
        ...     event_type="login",
        ...     success=True,
        ...     ip_address="192.168.1.100"
        ... )
        >>> event.event_type
        'login'
        >>> event.success
        True
    """

    __tablename__ = "email_auth_events"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Event details
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    # Nullable — presumably so failed attempts against unknown accounts can
    # still be recorded; verify against callers.
    user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
    # Known values written by the factory classmethods below:
    # 'login', 'registration', 'password_change'.
    event_type: Mapped[str] = mapped_column(String(50), nullable=False)
    success: Mapped[bool] = mapped_column(Boolean, nullable=False)

    # Client information
    ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # IPv6 compatible
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Failure information
    failure_reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    details: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # JSON string

    def __repr__(self) -> str:
        """String representation of the auth event.

        Returns:
            str: String representation of EmailAuthEvent instance
        """
        return f"<EmailAuthEvent(user_email='{self.user_email}', event_type='{self.event_type}', success={self.success})>"

    @classmethod
    def create_login_attempt(
        cls,
        user_email: str,
        success: bool,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        failure_reason: Optional[str] = None,
    ) -> "EmailAuthEvent":
        """Create a login attempt event.

        Args:
            user_email: Email address of the user
            success: Whether the login was successful
            ip_address: Client IP address
            user_agent: Client user agent
            failure_reason: Reason for failure (if applicable)

        Returns:
            EmailAuthEvent: New authentication event

        Examples:
            >>> event = EmailAuthEvent.create_login_attempt(
            ...     user_email="user@example.com",
            ...     success=True,
            ...     ip_address="192.168.1.1"
            ... )
            >>> event.event_type
            'login'
            >>> event.success
            True
        """
        return cls(user_email=user_email, event_type="login", success=success, ip_address=ip_address, user_agent=user_agent, failure_reason=failure_reason)

    @classmethod
    def create_registration_event(
        cls,
        user_email: str,
        success: bool,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        failure_reason: Optional[str] = None,
    ) -> "EmailAuthEvent":
        """Create a registration event.

        Args:
            user_email: Email address of the user
            success: Whether the registration was successful
            ip_address: Client IP address
            user_agent: Client user agent
            failure_reason: Reason for failure (if applicable)

        Returns:
            EmailAuthEvent: New authentication event
        """
        return cls(user_email=user_email, event_type="registration", success=success, ip_address=ip_address, user_agent=user_agent, failure_reason=failure_reason)

    @classmethod
    def create_password_change_event(
        cls,
        user_email: str,
        success: bool,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
    ) -> "EmailAuthEvent":
        """Create a password change event.

        Note: unlike the login/registration factories, this event carries
        no ``failure_reason``.

        Args:
            user_email: Email address of the user
            success: Whether the password change was successful
            ip_address: Client IP address
            user_agent: Client user agent

        Returns:
            EmailAuthEvent: New authentication event
        """
        return cls(user_email=user_email, event_type="password_change", success=success, ip_address=ip_address, user_agent=user_agent)

1564 

1565 

class PasswordResetToken(Base):
    """One-time password reset token record.

    Stores only a SHA-256 hash of the user-facing token. Tokens are one-time use
    and expire after a configured duration.
    """

    __tablename__ = "password_reset_tokens"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email", ondelete="CASCADE"), nullable=False, index=True)
    token_hash: Mapped[str] = mapped_column(String(64), nullable=False, unique=True, index=True)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    user: Mapped["EmailUser"] = relationship("EmailUser")

    __table_args__ = (Index("ix_password_reset_tokens_expires_at", "expires_at"),)

    def is_expired(self) -> bool:
        """Return whether the reset token has expired.

        Naive ``expires_at`` values are normalized to UTC before the
        comparison: SQLite strips timezone info from timezone-aware
        columns, and comparing a naive datetime against the aware
        ``utc_now()`` would raise ``TypeError``. This mirrors the
        normalization done in ``EmailUser.is_account_locked``.

        Returns:
            bool: True when `expires_at` is in the past.
        """
        expires_at = self.expires_at
        if expires_at.tzinfo is None:
            # Treat naive datetimes as UTC (SQLite strips timezone info)
            expires_at = expires_at.replace(tzinfo=timezone.utc)
        return expires_at <= utc_now()

    def is_used(self) -> bool:
        """Return whether the reset token was already consumed.

        Returns:
            bool: True when `used_at` is set.
        """
        return self.used_at is not None

1603 

1604 

class EmailTeam(Base):
    """Email-based team model for multi-team collaboration.

    This model represents teams that users can belong to, with automatic
    personal team creation and role-based access control.

    Attributes:
        id (str): Primary key UUID
        name (str): Team display name
        slug (str): URL-friendly team identifier
        description (str): Team description
        created_by (str): Email of the user who created the team
        is_personal (bool): Whether this is a personal team
        visibility (str): Team visibility (private, public)
        max_members (int): Maximum number of team members allowed
        created_at (datetime): Team creation timestamp
        updated_at (datetime): Last update timestamp
        is_active (bool): Whether the team is active

    Examples:
        >>> team = EmailTeam(
        ...     name="Engineering Team",
        ...     slug="engineering-team",
        ...     created_by="admin@example.com",
        ...     is_personal=False
        ... )
        >>> team.name
        'Engineering Team'
        >>> team.is_personal
        False
    """

    __tablename__ = "email_teams"

    # Primary key: 32-char hex UUID (uuid4().hex has no dashes)
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Basic team information
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    # Slug is globally unique, so it can serve as a URL-safe identifier.
    slug: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    created_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)

    # Team settings
    is_personal: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    visibility: Mapped[str] = mapped_column(String(20), default="public", nullable=False)
    # NOTE(review): NULL max_members presumably means "no member limit" — confirm against callers.
    max_members: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)

    # Timestamps (updated_at refreshes automatically on every UPDATE)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # Relationships; memberships, invitations and API tokens are removed
    # together with the team (delete-orphan cascade).
    members: Mapped[List["EmailTeamMember"]] = relationship("EmailTeamMember", back_populates="team", cascade="all, delete-orphan")
    invitations: Mapped[List["EmailTeamInvitation"]] = relationship("EmailTeamInvitation", back_populates="team", cascade="all, delete-orphan")
    api_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="team", cascade="all, delete-orphan")
    creator: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[created_by])

    # Index for search and pagination performance
    __table_args__ = (Index("ix_email_teams_name_id", "name", "id"),)

    def __repr__(self) -> str:
        """String representation of the team.

        Returns:
            str: String representation of EmailTeam instance
        """
        return f"<EmailTeam(id='{self.id}', name='{self.name}', is_personal={self.is_personal})>"

    def get_member_count(self) -> int:
        """Get the current number of team members.

        Uses direct SQL COUNT to avoid loading all members into memory.

        Returns:
            int: Number of active team members

        Examples:
            >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
            >>> team.get_member_count()
            0
        """
        # Third-Party
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Fallback for detached objects (e.g., in doctests)
            return len([m for m in self.members if m.is_active])

        # COUNT(*) runs in the database; only inactive memberships are excluded.
        count = session.query(func.count(EmailTeamMember.id)).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.is_active.is_(True)).scalar()  # pylint: disable=not-callable
        # scalar() can return None for an empty result set; coerce to 0.
        return count or 0

    def is_member(self, user_email: str) -> bool:
        """Check if a user is a member of this team.

        Uses direct SQL EXISTS to avoid loading all members into memory.

        Args:
            user_email: Email address to check

        Returns:
            bool: True if user is an active member, False otherwise

        Examples:
            >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
            >>> team.is_member("admin@example.com")
            False
        """
        # Third-Party
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Fallback for detached objects (e.g., in doctests)
            return any(m.user_email == user_email and m.is_active for m in self.members)

        # first() fetches at most one row — effectively an EXISTS probe.
        exists = session.query(EmailTeamMember.id).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.user_email == user_email, EmailTeamMember.is_active.is_(True)).first()
        return exists is not None

    def get_member_role(self, user_email: str) -> Optional[str]:
        """Get the role of a user in this team.

        Uses direct SQL query to avoid loading all members into memory.

        Args:
            user_email: Email address to check

        Returns:
            str: User's role or None if not a member

        Examples:
            >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
            >>> team.get_member_role("admin@example.com")
        """
        # Third-Party
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Fallback for detached objects (e.g., in doctests)
            for member in self.members:
                if member.user_email == user_email and member.is_active:
                    return member.role
            return None

        # Select only the role column; result is a one-element Row or None.
        member = session.query(EmailTeamMember.role).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.user_email == user_email, EmailTeamMember.is_active.is_(True)).first()
        return member[0] if member else None

1754 

1755 

class EmailTeamMember(Base):
    """Team membership model linking users to teams with roles.

    This model represents the many-to-many relationship between users and teams
    with additional role information and audit trails.

    Attributes:
        id (str): Primary key UUID
        team_id (str): Foreign key to email_teams
        user_email (str): Foreign key to email_users
        role (str): Member role (owner, member)
        joined_at (datetime): When the user joined the team
        invited_by (str): Email of the user who invited this member
        is_active (bool): Whether the membership is active

    Examples:
        >>> member = EmailTeamMember(
        ...     team_id="team-123",
        ...     user_email="user@example.com",
        ...     role="member",
        ...     invited_by="admin@example.com"
        ... )
        >>> member.role
        'member'
    """

    __tablename__ = "email_team_members"

    # Primary key (32-char hex UUID)
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Foreign keys; rows are removed when the parent team is deleted.
    team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)
    user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)

    # Membership details; is_active=False soft-deletes the membership
    # instead of dropping the row (see the unique constraint below).
    role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
    joined_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    invited_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # Relationships; user vs inviter both target EmailUser, hence the
    # explicit foreign_keys disambiguation.
    team: Mapped["EmailTeam"] = relationship("EmailTeam", back_populates="members")
    user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
    inviter: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[invited_by])

    # Unique constraint to prevent duplicate memberships (one row per
    # team/user pair, active or not)
    __table_args__ = (UniqueConstraint("team_id", "user_email", name="uq_team_member"),)

    def __repr__(self) -> str:
        """String representation of the team member.

        Returns:
            str: String representation of EmailTeamMember instance
        """
        return f"<EmailTeamMember(team_id='{self.team_id}', user_email='{self.user_email}', role='{self.role}')>"

1812 

1813 

1814# Team member history model 

class EmailTeamMemberHistory(Base):
    """
    History of team member actions (add, remove, reactivate, role change).

    This model records every membership-related event for audit and compliance.
    Each record tracks the team, user, role, action type, actor, and timestamp.

    Attributes:
        id (str): Primary key UUID
        team_member_id (str): Foreign key to email_team_members (the membership row this event describes)
        team_id (str): Foreign key to email_teams
        user_email (str): Foreign key to email_users
        role (str): Role at the time of action
        action (str): Action type ("added", "removed", "reactivated", "role_changed")
        action_by (str): Email of the user who performed the action
        action_timestamp (datetime): When the action occurred

    Examples:
        >>> from mcpgateway.db import EmailTeamMemberHistory, utc_now
        >>> history = EmailTeamMemberHistory(
        ...     team_id="team-123",
        ...     user_email="user@example.com",
        ...     role="member",
        ...     action="added",
        ...     action_by="admin@example.com",
        ...     action_timestamp=utc_now()
        ... )
        >>> history.action
        'added'
        >>> history.role
        'member'
        >>> isinstance(history.action_timestamp, type(utc_now()))
        True
    """

    __tablename__ = "email_team_member_history"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    # History rows follow their membership row: cascade delete on the FK.
    team_member_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_team_members.id", ondelete="CASCADE"), nullable=False)
    team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id"), nullable=False)
    user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
    # Role snapshot at the moment the action happened.
    role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
    action: Mapped[str] = mapped_column(String(50), nullable=False)  # e.g. "added", "removed", "reactivated", "role_changed"
    # Nullable: system-initiated actions may have no human actor.
    action_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
    action_timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # user vs actor both reference EmailUser, so foreign_keys disambiguates.
    team_member: Mapped["EmailTeamMember"] = relationship("EmailTeamMember")
    team: Mapped["EmailTeam"] = relationship("EmailTeam")
    user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
    actor: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[action_by])

    def __repr__(self) -> str:
        """
        Return a string representation of the EmailTeamMemberHistory instance.

        Returns:
            str: A string summarizing the team member history record.

        Examples:
            >>> from mcpgateway.db import EmailTeamMemberHistory, utc_now
            >>> history = EmailTeamMemberHistory(
            ...     team_member_id="tm-123",
            ...     team_id="team-123",
            ...     user_email="user@example.com",
            ...     role="member",
            ...     action="added",
            ...     action_by="admin@example.com",
            ...     action_timestamp=utc_now()
            ... )
            >>> isinstance(repr(history), str)
            True
        """
        return f"<EmailTeamMemberHistory(team_member_id='{self.team_member_id}', team_id='{self.team_id}', user_email='{self.user_email}', role='{self.role}', action='{self.action}', action_by='{self.action_by}', action_timestamp='{self.action_timestamp}')>"

1887 

1888 

class EmailTeamInvitation(Base):
    """Pending invitation for a user to join a team.

    Each row records who was invited to which team, the role they will be
    granted on acceptance, the inviter, and a unique token used to redeem
    the invitation. Invitations stop being redeemable once ``expires_at``
    passes or ``is_active`` is cleared.

    Attributes:
        id (str): Primary key UUID
        team_id (str): Foreign key to email_teams
        email (str): Email address of the invited user
        role (str): Role the user will have when they accept
        invited_by (str): Email of the user who sent the invitation
        invited_at (datetime): When the invitation was sent
        expires_at (datetime): When the invitation expires
        token (str): Unique invitation token
        is_active (bool): Whether the invitation is still active

    Examples:
        >>> invitation = EmailTeamInvitation(
        ...     team_id="team-123",
        ...     email="newuser@example.com",
        ...     role="member",
        ...     invited_by="admin@example.com"
        ... )
        >>> invitation.role
        'member'
    """

    __tablename__ = "email_team_invitations"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Foreign keys
    team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)

    # Invitation details
    email: Mapped[str] = mapped_column(String(255), nullable=False)
    role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
    invited_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)

    # Timing
    invited_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)

    # Security
    token: Mapped[str] = mapped_column(String(500), unique=True, nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # Relationships
    team: Mapped["EmailTeam"] = relationship("EmailTeam", back_populates="invitations")
    inviter: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[invited_by])

    def __repr__(self) -> str:
        """Summarize the invitation for debugging output.

        Returns:
            str: String representation of EmailTeamInvitation instance
        """
        return "<EmailTeamInvitation(team_id='{}', email='{}', role='{}')>".format(self.team_id, self.email, self.role)

    def is_expired(self) -> bool:
        """Report whether this invitation's deadline has already passed.

        Returns:
            bool: True if the invitation has expired, False otherwise

        Examples:
            >>> from datetime import timedelta
            >>> invitation = EmailTeamInvitation(
            ...     team_id="team-123",
            ...     email="user@example.com",
            ...     role="member",
            ...     invited_by="admin@example.com",
            ...     expires_at=utc_now() + timedelta(days=7)
            ... )
            >>> invitation.is_expired()
            False
        """
        current = utc_now()
        deadline = self.expires_at

        # Some backends return naive datetimes for aware columns; assume the
        # naive side is UTC so the comparison never raises TypeError.
        if current.tzinfo is not None and deadline.tzinfo is None:
            deadline = deadline.replace(tzinfo=timezone.utc)
        elif current.tzinfo is None and deadline.tzinfo is not None:
            current = current.replace(tzinfo=timezone.utc)

        return current > deadline

    def is_valid(self) -> bool:
        """Report whether this invitation can still be accepted.

        An invitation is redeemable only while it is active and unexpired.

        Returns:
            bool: True if the invitation is valid, False otherwise

        Examples:
            >>> from datetime import timedelta
            >>> invitation = EmailTeamInvitation(
            ...     team_id="team-123",
            ...     email="user@example.com",
            ...     role="member",
            ...     invited_by="admin@example.com",
            ...     expires_at=utc_now() + timedelta(days=7),
            ...     is_active=True
            ... )
            >>> invitation.is_valid()
            True
        """
        if not self.is_active:
            return False
        return not self.is_expired()

1999 

2000 

class EmailTeamJoinRequest(Base):
    """Team join request model for managing public team join requests.

    This model tracks user requests to join public teams, including
    approval workflow and expiration dates.

    Attributes:
        id (str): Primary key UUID
        team_id (str): Foreign key to email_teams
        user_email (str): Email of the user requesting to join
        message (str): Optional message from the user
        status (str): Request status (pending, approved, rejected, expired)
        requested_at (datetime): When the request was made
        expires_at (datetime): When the request expires
        reviewed_at (datetime): When the request was reviewed
        reviewed_by (str): Email of user who reviewed the request
        notes (str): Optional admin notes
    """

    __tablename__ = "email_team_join_requests"

    # Primary key (32-char hex UUID)
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Foreign keys; requests disappear with the team they target.
    team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)
    user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)

    # Request details
    message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    status: Mapped[str] = mapped_column(String(20), default="pending", nullable=False)

    # Timing; review fields stay NULL until an admin acts on the request.
    requested_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    reviewed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    reviewed_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationships; user vs reviewer both target EmailUser, so
    # foreign_keys disambiguates the join paths.
    team: Mapped["EmailTeam"] = relationship("EmailTeam")
    user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
    reviewer: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[reviewed_by])

    # Unique constraint to prevent duplicate requests per team/user pair
    __table_args__ = (UniqueConstraint("team_id", "user_email", name="uq_team_join_request"),)

    def __repr__(self) -> str:
        """String representation of the team join request.

        Returns:
            str: String representation of the team join request.
        """
        return f"<EmailTeamJoinRequest(team_id='{self.team_id}', user_email='{self.user_email}', status='{self.status}')>"

    def is_expired(self) -> bool:
        """Check if the join request has expired.

        Returns:
            bool: True if the request has expired, False otherwise.
        """
        now = utc_now()
        expires_at = self.expires_at

        # Handle timezone awareness mismatch (naive values from some
        # backends are assumed to be UTC; avoids TypeError on compare).
        if now.tzinfo is not None and expires_at.tzinfo is None:
            expires_at = expires_at.replace(tzinfo=timezone.utc)
        elif now.tzinfo is None and expires_at.tzinfo is not None:
            now = now.replace(tzinfo=timezone.utc)

        return now > expires_at

    def is_pending(self) -> bool:
        """Check if the join request is still pending.

        Note: an expired request reports False here even if its stored
        status column still reads "pending".

        Returns:
            bool: True if the request is pending and not expired, False otherwise.
        """
        return self.status == "pending" and not self.is_expired()

2080 

2081 

class PendingUserApproval(Base):
    """Model for pending SSO user registrations awaiting admin approval.

    This model stores information about users who have authenticated via SSO
    but require admin approval before their account is fully activated.

    Attributes:
        id (str): Primary key
        email (str): Email address of the pending user
        full_name (str): Full name from SSO provider
        auth_provider (str): SSO provider (github, google, etc.)
        sso_metadata (dict): Additional metadata from SSO provider
        requested_at (datetime): When the approval was requested
        expires_at (datetime): When the approval request expires
        approved_by (str): Email of admin who approved (if approved)
        approved_at (datetime): When the approval was granted
        status (str): Current status (pending, approved, rejected, expired)
        rejection_reason (str): Reason for rejection (if applicable)
        admin_notes (str): Notes from admin review

    Examples:
        >>> from datetime import timedelta
        >>> approval = PendingUserApproval(
        ...     email="newuser@example.com",
        ...     full_name="New User",
        ...     auth_provider="github",
        ...     expires_at=utc_now() + timedelta(days=30),
        ...     status="pending"
        ... )
        >>> approval.status
        'pending'
    """

    __tablename__ = "pending_user_approvals"

    # Primary key; note this model uses the dashed str(uuid4()) form,
    # unlike the hex form used by the team models above.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))

    # User details; one pending request per email address (unique).
    email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    full_name: Mapped[str] = mapped_column(String(255), nullable=False)
    auth_provider: Mapped[str] = mapped_column(String(50), nullable=False)
    sso_metadata: Mapped[Optional[Dict]] = mapped_column(JSON, nullable=True)

    # Request details
    requested_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)

    # Approval details; approved_by/approved_at double as the reviewer
    # fields for rejections too (see reject()).
    approved_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
    approved_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    status: Mapped[str] = mapped_column(String(20), default="pending", nullable=False)  # pending, approved, rejected, expired
    rejection_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    admin_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationships
    approver: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[approved_by])

    def __repr__(self) -> str:
        """String representation of the pending approval.

        Returns:
            str: String representation of PendingUserApproval instance
        """
        return f"<PendingUserApproval(email='{self.email}', status='{self.status}', provider='{self.auth_provider}')>"

    def is_expired(self) -> bool:
        """Check if the approval request has expired.

        Returns:
            bool: True if the approval request has expired
        """
        now = utc_now()
        expires_at = self.expires_at

        # Handle timezone awareness mismatch (naive values from some
        # backends are assumed to be UTC; avoids TypeError on compare).
        if now.tzinfo is not None and expires_at.tzinfo is None:
            expires_at = expires_at.replace(tzinfo=timezone.utc)
        elif now.tzinfo is None and expires_at.tzinfo is not None:
            now = now.replace(tzinfo=timezone.utc)

        return now > expires_at

    def approve(self, admin_email: str, notes: Optional[str] = None) -> None:
        """Approve the user registration.

        Mutates this instance only; the caller is responsible for
        committing the session.

        Args:
            admin_email: Email of the admin approving the request
            notes: Optional admin notes
        """
        self.status = "approved"
        self.approved_by = admin_email
        self.approved_at = utc_now()
        self.admin_notes = notes

    def reject(self, admin_email: str, reason: str, notes: Optional[str] = None) -> None:
        """Reject the user registration.

        Mutates this instance only; the caller is responsible for
        committing the session. The rejecting admin is recorded in
        approved_by/approved_at (reused as generic reviewer fields).

        Args:
            admin_email: Email of the admin rejecting the request
            reason: Reason for rejection
            notes: Optional admin notes
        """
        self.status = "rejected"
        self.approved_by = admin_email
        self.approved_at = utc_now()
        self.rejection_reason = reason
        self.admin_notes = notes

2190 

2191 

# Association tables for the many-to-many links between servers and the
# entities they expose. Each uses a composite primary key over both FK
# columns, which also guarantees a given pair can only be linked once.

# Association table for servers and tools
server_tool_association = Table(
    "server_tool_association",
    Base.metadata,
    Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
    Column("tool_id", String(36), ForeignKey("tools.id"), primary_key=True),
)

# Association table for servers and resources
server_resource_association = Table(
    "server_resource_association",
    Base.metadata,
    Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
    Column("resource_id", String(36), ForeignKey("resources.id"), primary_key=True),
)

# Association table for servers and prompts
server_prompt_association = Table(
    "server_prompt_association",
    Base.metadata,
    Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
    Column("prompt_id", String(36), ForeignKey("prompts.id"), primary_key=True),
)

# Association table for servers and A2A agents
server_a2a_association = Table(
    "server_a2a_association",
    Base.metadata,
    Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
    Column("a2a_agent_id", String(36), ForeignKey("a2a_agents.id"), primary_key=True),
)

2223 

2224 

class GlobalConfig(Base):
    """Global configuration settings.

    Attributes:
        id (int): Primary key
        passthrough_headers (List[str]): List of headers allowed to be passed through globally

    NOTE(review): presumably a single-row table keyed by a fixed id —
    confirm against the code that reads/writes it.
    """

    __tablename__ = "global_config"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)  # Store list of strings as JSON array

2237 

2238 

class ToolMetric(Base):
    """
    ORM model for recording individual metrics for tool executions.

    Each record in this table corresponds to a single tool invocation and records:
    - timestamp (datetime): When the invocation occurred.
    - response_time (float): The execution time in seconds.
    - is_success (bool): True if the execution succeeded, False otherwise.
    - error_message (Optional[str]): Error message if the execution failed.

    Aggregated metrics (such as total executions, successful/failed counts, failure rate,
    minimum, maximum, and average response times, and last execution time) should be computed
    on the fly using SQL aggregate functions over the rows in this table.
    """

    __tablename__ = "tool_metrics"

    # Auto-incrementing surrogate key (type inferred as Integer).
    id: Mapped[int] = mapped_column(primary_key=True)
    # tool_id and timestamp are both indexed: aggregate queries filter by
    # tool and by time window.
    tool_id: Mapped[str] = mapped_column(String(36), ForeignKey("tools.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
    response_time: Mapped[float] = mapped_column(Float, nullable=False)
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationship back to the Tool model.
    tool: Mapped["Tool"] = relationship("Tool", back_populates="metrics")

2265 

2266 

class ResourceMetric(Base):
    """
    ORM model for recording metrics for resource invocations.

    One row per invocation; aggregates are computed on demand with SQL
    aggregate functions (same pattern as ToolMetric).

    Attributes:
        id (int): Primary key.
        resource_id (str): Foreign key linking to the resource.
        timestamp (datetime): The time when the invocation occurred.
        response_time (float): The response time in seconds.
        is_success (bool): True if the invocation succeeded, False otherwise.
        error_message (Optional[str]): Error message if the invocation failed.
    """

    __tablename__ = "resource_metrics"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Indexed for per-resource and per-time-window aggregate queries.
    resource_id: Mapped[str] = mapped_column(String(36), ForeignKey("resources.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
    response_time: Mapped[float] = mapped_column(Float, nullable=False)
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationship back to the Resource model.
    resource: Mapped["Resource"] = relationship("Resource", back_populates="metrics")

2291 

2292 

class ServerMetric(Base):
    """
    ORM model for recording metrics for server invocations.

    One row per invocation; aggregates are computed on demand with SQL
    aggregate functions (same pattern as ToolMetric).

    Attributes:
        id (int): Primary key.
        server_id (str): Foreign key linking to the server.
        timestamp (datetime): The time when the invocation occurred.
        response_time (float): The response time in seconds.
        is_success (bool): True if the invocation succeeded, False otherwise.
        error_message (Optional[str]): Error message if the invocation failed.
    """

    __tablename__ = "server_metrics"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Indexed for per-server and per-time-window aggregate queries.
    server_id: Mapped[str] = mapped_column(String(36), ForeignKey("servers.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
    response_time: Mapped[float] = mapped_column(Float, nullable=False)
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationship back to the Server model.
    server: Mapped["Server"] = relationship("Server", back_populates="metrics")

2317 

2318 

class PromptMetric(Base):
    """
    ORM model for recording metrics for prompt invocations.

    One row per invocation; aggregates are computed on demand with SQL
    aggregate functions (same pattern as ToolMetric).

    Attributes:
        id (int): Primary key.
        prompt_id (str): Foreign key linking to the prompt.
        timestamp (datetime): The time when the invocation occurred.
        response_time (float): The response time in seconds.
        is_success (bool): True if the invocation succeeded, False otherwise.
        error_message (Optional[str]): Error message if the invocation failed.
    """

    __tablename__ = "prompt_metrics"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Indexed for per-prompt and per-time-window aggregate queries.
    prompt_id: Mapped[str] = mapped_column(String(36), ForeignKey("prompts.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
    response_time: Mapped[float] = mapped_column(Float, nullable=False)
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Relationship back to the Prompt model.
    prompt: Mapped["Prompt"] = relationship("Prompt", back_populates="metrics")

2343 

2344 

class A2AAgentMetric(Base):
    """
    ORM model for recording metrics for A2A agent interactions.

    One row per interaction; aggregates are computed on demand with SQL
    aggregate functions (same pattern as ToolMetric). Unlike the other
    metric tables, this one also records the interaction type.

    Attributes:
        id (int): Primary key.
        a2a_agent_id (str): Foreign key linking to the A2A agent.
        timestamp (datetime): The time when the interaction occurred.
        response_time (float): The response time in seconds.
        is_success (bool): True if the interaction succeeded, False otherwise.
        error_message (Optional[str]): Error message if the interaction failed.
        interaction_type (str): Type of interaction (invoke, query, etc.).
    """

    __tablename__ = "a2a_agent_metrics"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Indexed for per-agent and per-time-window aggregate queries.
    a2a_agent_id: Mapped[str] = mapped_column(String(36), ForeignKey("a2a_agents.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
    response_time: Mapped[float] = mapped_column(Float, nullable=False)
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    interaction_type: Mapped[str] = mapped_column(String(50), nullable=False, default="invoke")

    # Relationship back to the A2AAgent model.
    a2a_agent: Mapped["A2AAgent"] = relationship("A2AAgent", back_populates="metrics")

2371 

2372 

2373# =================================== 

2374# Metrics Hourly Rollup Tables 

2375# These tables store pre-aggregated hourly summaries for efficient historical queries. 

2376# Raw metrics can be cleaned up after rollup, reducing storage while preserving trends. 

2377# =================================== 

2378 

2379 

class ToolMetricsHourly(Base):
    """
    Hourly rollup of tool metrics for efficient historical trend analysis.

    This table stores pre-aggregated metrics per tool per hour, enabling fast
    queries for dashboards and reports without scanning millions of raw metrics.

    Attributes:
        id: Primary key.
        tool_id: Foreign key to the tool (nullable for deleted tools).
        tool_name: Tool name snapshot (preserved even if tool is deleted).
        hour_start: Start of the aggregation hour (UTC).
        total_count: Total invocations during this hour.
        success_count: Successful invocations.
        failure_count: Failed invocations.
        min_response_time: Minimum response time in seconds.
        max_response_time: Maximum response time in seconds.
        avg_response_time: Average response time in seconds.
        p50_response_time: 50th percentile (median) response time.
        p95_response_time: 95th percentile response time.
        p99_response_time: 99th percentile response time.
        created_at: When this rollup was created.
    """

    __tablename__ = "tool_metrics_hourly"
    # At most one row per (tool, hour); hour_start is indexed for range scans.
    __table_args__ = (
        UniqueConstraint("tool_id", "hour_start", name="uq_tool_metrics_hourly_tool_hour"),
        Index("ix_tool_metrics_hourly_hour_start", "hour_start"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # SET NULL on delete keeps rollup history after a tool is removed;
    # tool_name below preserves a readable identifier for such rows.
    tool_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("tools.id", ondelete="SET NULL"), nullable=True, index=True)
    tool_name: Mapped[str] = mapped_column(String(255), nullable=False)
    hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    # Response-time aggregates are nullable: an hour may have no samples.
    min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

2424 

2425 

class ResourceMetricsHourly(Base):
    """Hourly rollup of resource metrics for efficient historical trend analysis.

    Pre-aggregated metrics per resource per hour, mirroring the layout of
    ToolMetricsHourly.

    Attributes:
        id: Primary key.
        resource_id: Foreign key to the resource (NULL after resource deletion).
        resource_name: Resource name snapshot (preserved even if the resource is deleted).
        hour_start: Start of the aggregation hour (UTC).
        total_count / success_count / failure_count: Invocation counters for the hour.
        min/max/avg_response_time: Response-time aggregates in seconds (NULL if no samples).
        p50/p95/p99_response_time: Response-time percentiles in seconds.
        created_at: When this rollup row was created.
    """

    __tablename__ = "resource_metrics_hourly"
    # At most one row per (resource, hour); hour_start is indexed for range scans.
    __table_args__ = (
        UniqueConstraint("resource_id", "hour_start", name="uq_resource_metrics_hourly_resource_hour"),
        Index("ix_resource_metrics_hourly_hour_start", "hour_start"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # SET NULL on delete keeps rollup history; resource_name preserves identity.
    resource_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("resources.id", ondelete="SET NULL"), nullable=True, index=True)
    resource_name: Mapped[str] = mapped_column(String(255), nullable=False)
    hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

2449 

2450 

class PromptMetricsHourly(Base):
    """Hourly rollup of prompt metrics for efficient historical trend analysis.

    Pre-aggregated metrics per prompt per hour, mirroring the layout of
    ToolMetricsHourly.

    Attributes:
        id: Primary key.
        prompt_id: Foreign key to the prompt (NULL after prompt deletion).
        prompt_name: Prompt name snapshot (preserved even if the prompt is deleted).
        hour_start: Start of the aggregation hour (UTC).
        total_count / success_count / failure_count: Invocation counters for the hour.
        min/max/avg_response_time: Response-time aggregates in seconds (NULL if no samples).
        p50/p95/p99_response_time: Response-time percentiles in seconds.
        created_at: When this rollup row was created.
    """

    __tablename__ = "prompt_metrics_hourly"
    # At most one row per (prompt, hour); hour_start is indexed for range scans.
    __table_args__ = (
        UniqueConstraint("prompt_id", "hour_start", name="uq_prompt_metrics_hourly_prompt_hour"),
        Index("ix_prompt_metrics_hourly_hour_start", "hour_start"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # SET NULL on delete keeps rollup history; prompt_name preserves identity.
    prompt_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("prompts.id", ondelete="SET NULL"), nullable=True, index=True)
    prompt_name: Mapped[str] = mapped_column(String(255), nullable=False)
    hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

2474 

2475 

class ServerMetricsHourly(Base):
    """Hourly rollup of server metrics for efficient historical trend analysis.

    Pre-aggregated metrics per server per hour, mirroring the layout of
    ToolMetricsHourly.

    Attributes:
        id: Primary key.
        server_id: Foreign key to the server (NULL after server deletion).
        server_name: Server name snapshot (preserved even if the server is deleted).
        hour_start: Start of the aggregation hour (UTC).
        total_count / success_count / failure_count: Invocation counters for the hour.
        min/max/avg_response_time: Response-time aggregates in seconds (NULL if no samples).
        p50/p95/p99_response_time: Response-time percentiles in seconds.
        created_at: When this rollup row was created.
    """

    __tablename__ = "server_metrics_hourly"
    # At most one row per (server, hour); hour_start is indexed for range scans.
    __table_args__ = (
        UniqueConstraint("server_id", "hour_start", name="uq_server_metrics_hourly_server_hour"),
        Index("ix_server_metrics_hourly_hour_start", "hour_start"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # SET NULL on delete keeps rollup history; server_name preserves identity.
    server_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("servers.id", ondelete="SET NULL"), nullable=True, index=True)
    server_name: Mapped[str] = mapped_column(String(255), nullable=False)
    hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

2499 

2500 

class A2AAgentMetricsHourly(Base):
    """Hourly rollup of A2A agent metrics for efficient historical trend analysis.

    Pre-aggregated metrics per agent per hour. Unlike the other hourly rollup
    tables, rows are additionally keyed by interaction_type, so one agent may
    have several rows for the same hour (one per interaction type).

    Attributes:
        id: Primary key.
        a2a_agent_id: Foreign key to the agent (NULL after agent deletion).
        agent_name: Agent name snapshot (preserved even if the agent is deleted).
        hour_start: Start of the aggregation hour (UTC).
        interaction_type: Interaction type this row aggregates (default "invoke").
        total_count / success_count / failure_count: Interaction counters for the hour.
        min/max/avg_response_time: Response-time aggregates in seconds (NULL if no samples).
        p50/p95/p99_response_time: Response-time percentiles in seconds.
        created_at: When this rollup row was created.
    """

    __tablename__ = "a2a_agent_metrics_hourly"
    # Uniqueness includes interaction_type: one row per (agent, hour, type).
    __table_args__ = (
        UniqueConstraint("a2a_agent_id", "hour_start", "interaction_type", name="uq_a2a_agent_metrics_hourly_agent_hour_type"),
        Index("ix_a2a_agent_metrics_hourly_hour_start", "hour_start"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # SET NULL on delete keeps rollup history; agent_name preserves identity.
    a2a_agent_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("a2a_agents.id", ondelete="SET NULL"), nullable=True, index=True)
    agent_name: Mapped[str] = mapped_column(String(255), nullable=False)
    hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    interaction_type: Mapped[str] = mapped_column(String(50), nullable=False, default="invoke")
    total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

2525 

2526 

2527# =================================== 

2528# Observability Models (OpenTelemetry-style traces, spans, events) 

2529# =================================== 

2530 

2531 

class ObservabilityTrace(Base):
    """
    ORM model for observability traces (similar to OpenTelemetry traces).

    A trace represents a complete request flow through the system. It contains
    one or more spans representing individual operations.

    Attributes:
        trace_id (str): Unique trace identifier (UUID or OpenTelemetry trace ID format).
        name (str): Human-readable name for the trace (e.g., "POST /tools/invoke").
        start_time (datetime): When the trace started.
        end_time (datetime): When the trace ended (optional, set when completed).
        duration_ms (float): Total duration in milliseconds.
        status (str): Trace status ("unset", "ok", or "error"; defaults to "unset").
        status_message (str): Optional status message or error description.
        http_method (str): HTTP method for the request (GET, POST, etc.).
        http_url (str): Full URL of the request.
        http_status_code (int): HTTP response status code.
        user_email (str): User who initiated the request (if authenticated).
        user_agent (str): Client user agent string.
        ip_address (str): Client IP address.
        attributes (dict): Additional trace attributes (JSON).
        resource_attributes (dict): Resource attributes (service name, version, etc.).
        created_at (datetime): Trace creation timestamp.
    """

    __tablename__ = "observability_traces"

    # Primary key (application-generated UUID rather than DB autoincrement).
    trace_id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))

    # Trace metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
    # end_time/duration_ms stay NULL until the trace completes.
    end_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="unset")  # unset, ok, error
    status_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # HTTP request context
    http_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
    http_url: Mapped[Optional[str]] = mapped_column(String(767), nullable=True)
    http_status_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)

    # User context
    user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # 45 chars accommodates full-length IPv6 textual addresses.
    ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)

    # Attributes (flexible key-value storage)
    attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
    resource_attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Relationships: spans are deleted with their trace (delete-orphan cascade).
    spans: Mapped[List["ObservabilitySpan"]] = relationship("ObservabilitySpan", back_populates="trace", cascade="all, delete-orphan")

    # Indexes for performance
    __table_args__ = (
        Index("idx_observability_traces_start_time", "start_time"),
        Index("idx_observability_traces_user_email", "user_email"),
        Index("idx_observability_traces_status", "status"),
        Index("idx_observability_traces_http_status_code", "http_status_code"),
    )

2598 

2599 

class ObservabilitySpan(Base):
    """
    ORM model for observability spans (similar to OpenTelemetry spans).

    A span represents a single operation within a trace. Spans can be nested
    to represent hierarchical operations (via parent_span_id self-reference).

    Attributes:
        span_id (str): Unique span identifier.
        trace_id (str): Parent trace ID.
        parent_span_id (str): Parent span ID (for nested spans).
        name (str): Span name (e.g., "database_query", "tool_invocation").
        kind (str): Span kind (internal, server, client, producer, consumer).
        start_time (datetime): When the span started.
        end_time (datetime): When the span ended.
        duration_ms (float): Span duration in milliseconds.
        status (str): Span status (defaults to "unset").
        status_message (str): Optional status message.
        attributes (dict): Span attributes (JSON).
        resource_name (str): Name of the resource being operated on.
        resource_type (str): Type of resource (tool, resource, prompt, gateway, etc.).
        resource_id (str): ID of the specific resource.
        created_at (datetime): Span creation timestamp.
    """

    __tablename__ = "observability_spans"

    # Primary key (application-generated UUID).
    span_id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))

    # Trace relationship; CASCADE removes spans (and nested children) with the trace.
    trace_id: Mapped[str] = mapped_column(String(36), ForeignKey("observability_traces.trace_id", ondelete="CASCADE"), nullable=False, index=True)
    # Self-referential FK: NULL for root spans of a trace.
    parent_span_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("observability_spans.span_id", ondelete="CASCADE"), nullable=True, index=True)

    # Span metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    kind: Mapped[str] = mapped_column(String(20), nullable=False, default="internal")  # internal, server, client, producer, consumer
    start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
    # end_time/duration_ms stay NULL until the span completes.
    end_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="unset")
    status_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Attributes
    attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)

    # Resource context
    resource_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
    resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True)  # tool, resource, prompt, gateway, a2a_agent
    resource_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True, index=True)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Relationships
    trace: Mapped["ObservabilityTrace"] = relationship("ObservabilityTrace", back_populates="spans")
    # remote_side=[span_id] marks the parent end of the self-referential relation;
    # children are reachable via the generated `child_spans` backref.
    parent_span: Mapped[Optional["ObservabilitySpan"]] = relationship("ObservabilitySpan", remote_side=[span_id], backref="child_spans")
    events: Mapped[List["ObservabilityEvent"]] = relationship("ObservabilityEvent", back_populates="span", cascade="all, delete-orphan")

    # Indexes for performance
    __table_args__ = (
        Index("idx_observability_spans_trace_id", "trace_id"),
        Index("idx_observability_spans_parent_span_id", "parent_span_id"),
        Index("idx_observability_spans_start_time", "start_time"),
        Index("idx_observability_spans_resource_type", "resource_type"),
        Index("idx_observability_spans_resource_name", "resource_name"),
    )

2667 

2668 

class ObservabilityEvent(Base):
    """
    ORM model for observability events (logs within spans).

    Events represent discrete occurrences within a span, such as log messages,
    exceptions, or state changes.

    Attributes:
        id (int): Auto-incrementing primary key.
        span_id (str): Parent span ID.
        name (str): Event name (e.g., "exception", "log", "checkpoint").
        timestamp (datetime): When the event occurred.
        attributes (dict): Event attributes (JSON).
        severity (str): Log severity level (debug, info, warning, error, critical).
        message (str): Event message.
        exception_type (str): Exception class name (if event is an exception).
        exception_message (str): Exception message.
        exception_stacktrace (str): Exception stacktrace.
        created_at (datetime): Event creation timestamp.
    """

    __tablename__ = "observability_events"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Span relationship; CASCADE removes events together with their span.
    span_id: Mapped[str] = mapped_column(String(36), ForeignKey("observability_spans.span_id", ondelete="CASCADE"), nullable=False, index=True)

    # Event metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
    attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)

    # Log fields (only populated for log-style events)
    severity: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, index=True)  # debug, info, warning, error, critical
    message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Exception fields (only populated for exception events)
    exception_type: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    exception_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    exception_stacktrace: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Relationships
    span: Mapped["ObservabilitySpan"] = relationship("ObservabilitySpan", back_populates="events")

    # Indexes for performance
    __table_args__ = (
        Index("idx_observability_events_span_id", "span_id"),
        Index("idx_observability_events_timestamp", "timestamp"),
        Index("idx_observability_events_severity", "severity"),
    )

2724 

2725 

class ObservabilityMetric(Base):
    """
    ORM model for observability metrics (time-series numerical data).

    Metrics represent numerical measurements over time, such as request rates,
    error rates, latencies, and custom business metrics.

    Attributes:
        id (int): Auto-incrementing primary key.
        name (str): Metric name (e.g., "http.request.duration", "tool.invocation.count").
        metric_type (str): Metric type (counter, gauge, histogram).
        value (float): Metric value.
        timestamp (datetime): When the metric was recorded.
        unit (str): Metric unit (ms, count, bytes, etc.).
        attributes (dict): Metric attributes/labels (JSON).
        resource_type (str): Type of resource (tool, resource, prompt, etc.).
        resource_id (str): ID of the specific resource.
        trace_id (str): Associated trace ID (optional).
        created_at (datetime): Metric creation timestamp.
    """

    __tablename__ = "observability_metrics"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Metric metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    metric_type: Mapped[str] = mapped_column(String(20), nullable=False)  # counter, gauge, histogram
    value: Mapped[float] = mapped_column(Float, nullable=False)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
    unit: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)

    # Attributes/labels
    attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)

    # Resource context
    resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True)
    resource_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True, index=True)

    # Trace association (optional): SET NULL on trace deletion keeps the metric.
    trace_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("observability_traces.trace_id", ondelete="SET NULL"), nullable=True, index=True)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Indexes for performance (composite name+timestamp supports per-metric time series)
    __table_args__ = (
        Index("idx_observability_metrics_name_timestamp", "name", "timestamp"),
        Index("idx_observability_metrics_resource_type", "resource_type"),
        Index("idx_observability_metrics_trace_id", "trace_id"),
    )

2778 

2779 

class ObservabilitySavedQuery(Base):
    """
    ORM model for saved observability queries (filter presets).

    Allows users to save their filter configurations for quick access and
    historical query tracking. Queries can be personal or shared with the team.

    Attributes:
        id (int): Auto-incrementing primary key.
        name (str): User-given name for the saved query.
        description (str): Optional description of what this query finds.
        user_email (str): Email of the user who created this query.
        filter_config (dict): JSON containing all filter values (time_range, status_filter, etc.).
        is_shared (bool): Whether this query is visible to other users.
        created_at (datetime): When the query was created.
        updated_at (datetime): When the query was last modified.
        last_used_at (datetime): When the query was last executed.
        use_count (int): How many times this query has been used.
    """

    __tablename__ = "observability_saved_queries"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Query metadata
    name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Owner; indexed so "my queries" lookups are cheap.
    user_email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)

    # Filter configuration (stored as an opaque JSON blob).
    filter_config: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False)

    # Sharing settings: private by default.
    is_shared: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Timestamps and usage tracking; updated_at refreshes automatically on UPDATE.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
    last_used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    use_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)

    # Indexes for performance
    __table_args__ = (
        Index("idx_observability_saved_queries_user_email", "user_email"),
        Index("idx_observability_saved_queries_is_shared", "is_shared"),
        Index("idx_observability_saved_queries_created_at", "created_at"),
    )

2828 

2829 

2830# --------------------------------------------------------------------------- 

2831# Performance Monitoring Models 

2832# --------------------------------------------------------------------------- 

2833 

2834 

class PerformanceSnapshot(Base):
    """
    ORM model for point-in-time performance snapshots.

    Stores comprehensive system, request, and worker metrics at regular intervals
    for historical analysis and trend detection.

    Attributes:
        id (int): Auto-incrementing primary key.
        timestamp (datetime): When the snapshot was taken.
        host (str): Hostname of the machine.
        worker_id (str): Worker identifier (PID or UUID).
        metrics_json (dict): JSON blob containing all metrics data.
        created_at (datetime): Record creation timestamp.
    """

    __tablename__ = "performance_snapshots"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Snapshot metadata; host and worker_id are indexed for per-node queries.
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False, index=True)
    host: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    worker_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True)

    # Metrics data stored as a single schemaless JSON blob, so the set of
    # captured metrics can evolve without schema migrations.
    metrics_json: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Indexes for efficient querying
    __table_args__ = (
        Index("idx_performance_snapshots_timestamp", "timestamp"),
        Index("idx_performance_snapshots_host_timestamp", "host", "timestamp"),
        Index("idx_performance_snapshots_created_at", "created_at"),
    )

2873 

2874 

class PerformanceAggregate(Base):
    """
    ORM model for aggregated performance metrics.

    Stores hourly and daily aggregations of performance data for efficient
    historical reporting and trend analysis.

    Attributes:
        id (int): Auto-incrementing primary key.
        period_start (datetime): Start of the aggregation period.
        period_end (datetime): End of the aggregation period.
        period_type (str): Type of aggregation ("hourly" or "daily").
        host (str): Hostname (None for cluster-wide aggregates).
        requests_total/2xx/4xx/5xx (int): Request counts by status class.
        avg_response_time_ms / p95_response_time_ms (float): Latency aggregates.
        peak_requests_per_second (float): Peak request rate during the period.
        avg/peak_cpu_percent, avg/peak_memory_percent (float): Resource usage.
        created_at (datetime): Record creation timestamp.
    """

    __tablename__ = "performance_aggregates"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Period metadata
    period_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
    period_end: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    period_type: Mapped[str] = mapped_column(String(20), nullable=False, index=True)  # hourly, daily
    # NULL host means a cluster-wide aggregate (see docstring).
    host: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)

    # Request aggregates
    requests_total: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    requests_2xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    requests_4xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    requests_5xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    avg_response_time_ms: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    p95_response_time_ms: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    peak_requests_per_second: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)

    # Resource aggregates
    avg_cpu_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    avg_memory_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    peak_cpu_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    peak_memory_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Indexes and constraints: at most one row per (period_type, period_start, host).
    __table_args__ = (
        Index("idx_performance_aggregates_period", "period_type", "period_start"),
        Index("idx_performance_aggregates_host_period", "host", "period_type", "period_start"),
        UniqueConstraint("period_type", "period_start", "host", name="uq_performance_aggregate_period_host"),
    )

2927 

2928 

class Tool(Base):
    """
    ORM model for a registered Tool.

    Supports both local tools and federated tools from other gateways.
    The integration_type field indicates the tool format:
    - "MCP" for MCP-compliant tools (default)
    - "REST" for REST tools

    Additionally, this model provides computed properties for aggregated metrics based
    on the associated ToolMetric records. These include:
    - execution_count: Total number of invocations.
    - successful_executions: Count of successful invocations.
    - failed_executions: Count of failed invocations.
    - failure_rate: Ratio of failed invocations to total invocations.
    - min_response_time: Fastest recorded response time.
    - max_response_time: Slowest recorded response time.
    - avg_response_time: Mean response time.
    - last_execution_time: Timestamp of the most recent invocation.

    The property `metrics_summary` returns a dictionary with these aggregated values.

    Team association is handled via the `email_team` relationship (default lazy loading)
    which only includes active teams. For list operations, use explicit joinedload()
    to eager load team names. The `team` property provides convenient access to
    the team name:
    - team: Returns the team name if the tool belongs to an active team, otherwise None.

    The following fields support tool invocation configuration:
    - request_type: HTTP method/transport to use when invoking the tool.
    - auth_type: Type of authentication ("basic", "bearer", or None).
    - auth_value: Stored authentication payload, interpreted according to auth_type
      (this model has no separate username/password/token/header columns).
    """

    __tablename__ = "tools"

    # Primary key is a random 32-char hex UUID string.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    original_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # NOTE(review): annotated Mapped[str] but the column is nullable=True — the
    # annotation and the column nullability disagree; the explicit nullable wins.
    url: Mapped[str] = mapped_column(String(767), nullable=True)
    original_description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    integration_type: Mapped[str] = mapped_column(String(20), default="MCP")
    request_type: Mapped[str] = mapped_column(String(20), default="SSE")
    headers: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
    input_schema: Mapped[Dict[str, Any]] = mapped_column(JSON)
    output_schema: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    annotations: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, default=lambda: {})
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    enabled: Mapped[bool] = mapped_column(default=True)
    reachable: Mapped[bool] = mapped_column(default=True)
    jsonpath_filter: Mapped[str] = mapped_column(Text, default="")
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Request type and authentication fields
    auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None)  # "basic", "bearer", or None
    auth_value: Mapped[Optional[str]] = mapped_column(Text, default=None)

    # custom_name,custom_name_slug, display_name
    # NOTE(review): custom_name and custom_name_slug are annotated Optional[str]
    # but declared nullable=False — the annotation disagrees with the column.
    custom_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=False)
    custom_name_slug: Mapped[Optional[str]] = mapped_column(String(255), nullable=False)
    display_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Passthrough REST fields
    base_url: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    path_template: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    query_mapping: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    header_mapping: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    timeout_ms: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, default=None)
    expose_passthrough: Mapped[bool] = mapped_column(Boolean, default=True)
    allowlist: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
    plugin_chain_pre: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
    plugin_chain_post: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)

    # Federation relationship with a local gateway
    gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
    # gateway_slug: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.slug"))
    gateway: Mapped["Gateway"] = relationship("Gateway", primaryjoin="Tool.gateway_id == Gateway.id", foreign_keys=[gateway_id], back_populates="tools")
    # federated_with = relationship("Gateway", secondary=tool_gateway_table, back_populates="federated_tools")

    # Many-to-many relationship with Servers
    servers: Mapped[List["Server"]] = relationship("Server", secondary=server_tool_association, back_populates="tools")

    # Relationship with ToolMetric records
    metrics: Mapped[List["ToolMetric"]] = relationship("ToolMetric", back_populates="tool", cascade="all, delete-orphan")

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    # Relationship for loading team names (only active teams)
    # Uses default lazy loading - team name is only loaded when accessed
    # For list/admin views, use explicit joinedload(DbTool.email_team) for single-query loading
    # This avoids adding overhead to hot paths like tool invocation that don't need team names
    email_team: Mapped[Optional["EmailTeam"]] = relationship(
        "EmailTeam",
        primaryjoin="and_(Tool.team_id == EmailTeam.id, EmailTeam.is_active == True)",
        foreign_keys=[team_id],
    )

    @property
    def team(self) -> Optional[str]:
        """Return the team name from the eagerly-loaded email_team relationship.

        Returns:
            Optional[str]: The team name if the tool belongs to an active team, otherwise None.
        """
        return self.email_team.name if self.email_team else None

    # @property
    # def gateway_slug(self) -> str:
    #     return self.gateway.slug

    # Backing column for the `name` hybrid property below; mapped to the "name"
    # DB column so queries can filter/sort on the stored value directly.
    _computed_name: Mapped[str] = mapped_column("name", String(255), nullable=False)  # Stored column

    @hybrid_property
    def name(self) -> str:
        """Return the display/lookup name computed from gateway and custom slug.

        If an explicit name was stored, it wins; otherwise the name is derived
        as "<gateway-slug><separator><custom-name-slug>" for federated tools,
        or just the custom name slug for local tools.

        Returns:
            str: Display/lookup name to use for this tool.
        """
        # Instance access resolves Column to Python value; cast ensures static acceptance
        if getattr(self, "_computed_name", None):
            return cast(str, getattr(self, "_computed_name"))
        custom_name_slug = slugify(getattr(self, "custom_name_slug"))
        if getattr(self, "gateway_id", None):
            gateway_slug = slugify(self.gateway.name)  # type: ignore[attr-defined]
            return f"{gateway_slug}{settings.gateway_tool_name_separator}{custom_name_slug}"
        return custom_name_slug

    @name.setter
    def name(self, value: str) -> None:
        """Setter for the stored name column.

        Args:
            value: Explicit name to persist to the underlying column.
        """
        setattr(self, "_computed_name", value)

    @name.expression
    @classmethod
    def name(cls) -> Any:
        """SQL expression for name used in queries (backs onto stored column).

        Returns:
            Any: SQLAlchemy expression referencing the stored name column.
        """
        return cls._computed_name

    __table_args__ = (
        UniqueConstraint("gateway_id", "original_name", name="uq_gateway_id__original_name"),
        UniqueConstraint("team_id", "owner_email", "name", name="uq_team_owner_email_name_tool"),
        Index("idx_tools_created_at_id", "created_at", "id"),
    )

    @hybrid_property
    def gateway_slug(self) -> Optional[str]:
        """Python accessor returning the related gateway's slug if available.

        Returns:
            Optional[str]: The gateway slug, or None if no gateway relation.
        """
        return self.gateway.slug if self.gateway else None

    @gateway_slug.expression
    @classmethod
    def gateway_slug(cls) -> Any:
        """SQL expression to select current gateway slug for this tool.

        Returns:
            Any: SQLAlchemy scalar subquery selecting the gateway slug.
        """
        return select(Gateway.slug).where(Gateway.id == cls.gateway_id).scalar_subquery()

    def _metrics_loaded(self) -> bool:
        """Check if metrics relationship is loaded without triggering lazy load.

        Inspects the instance state dict so that merely asking the question
        never fires a lazy-load SQL query.

        Returns:
            bool: True if metrics are loaded, False otherwise.
        """
        return "metrics" in sa_inspect(self).dict

    def _get_metric_counts(self) -> tuple[int, int, int]:
        """Get total, successful, and failed metric counts in a single operation.

        When metrics are already loaded, computes from memory in O(n).
        When not loaded, uses a single SQL query with conditional aggregation.

        Note: For bulk operations, use metrics_summary which computes all fields
        in a single pass, or ensure metrics are preloaded via selectinload.

        Returns:
            tuple[int, int, int]: (total, successful, failed) counts.
        """
        # If metrics are loaded, compute from memory in a single pass
        if self._metrics_loaded():
            total = 0
            successful = 0
            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
            return (total, successful, total - successful)

        # Use single SQL query with conditional aggregation
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        # Detached instances have no session to query; report zero counts.
        session = object_session(self)
        if session is None:
            return (0, 0, 0)

        result = (
            session.query(
                func.count(ToolMetric.id),  # pylint: disable=not-callable
                func.sum(case((ToolMetric.is_success.is_(True), 1), else_=0)),
            )
            .filter(ToolMetric.tool_id == self.id)
            .one()
        )

        # SUM over zero rows yields NULL; coalesce to 0 in Python.
        total = result[0] or 0
        successful = result[1] or 0
        return (total, successful, total - successful)

    @hybrid_property
    def execution_count(self) -> int:
        """Number of ToolMetric records associated with this tool instance.

        Note: Each property access may trigger a SQL query if metrics aren't loaded.
        For reading multiple metric fields, use metrics_summary or preload metrics.

        Returns:
            int: Count of ToolMetric records for this tool.
        """
        return self._get_metric_counts()[0]

    @execution_count.expression
    @classmethod
    def execution_count(cls) -> Any:
        """SQL expression that counts ToolMetric rows for this tool.

        Returns:
            Any: SQLAlchemy labeled count expression for tool metrics.
        """
        return select(func.count(ToolMetric.id)).where(ToolMetric.tool_id == cls.id).correlate(cls).scalar_subquery().label("execution_count")  # pylint: disable=not-callable

    @property
    def successful_executions(self) -> int:
        """Count of successful tool executions.

        Returns:
            int: The count of successful tool executions.
        """
        return self._get_metric_counts()[1]

    @property
    def failed_executions(self) -> int:
        """Count of failed tool executions.

        Returns:
            int: The count of failed tool executions.
        """
        return self._get_metric_counts()[2]

    @property
    def failure_rate(self) -> float:
        """Failure rate as a float between 0 and 1.

        Returns:
            float: The failure rate as a value between 0 and 1.
        """
        total, _, failed = self._get_metric_counts()
        return failed / total if total > 0 else 0.0

    @property
    def min_response_time(self) -> Optional[float]:
        """Minimum response time among all tool executions.

        Returns None if metrics are not loaded (use metrics_summary for SQL fallback).

        Returns:
            Optional[float]: The minimum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return min(times) if times else None

    @property
    def max_response_time(self) -> Optional[float]:
        """Maximum response time among all tool executions.

        Returns None if metrics are not loaded (use metrics_summary for SQL fallback).

        Returns:
            Optional[float]: The maximum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return max(times) if times else None

    @property
    def avg_response_time(self) -> Optional[float]:
        """Average response time among all tool executions.

        Returns None if metrics are not loaded (use metrics_summary for SQL fallback).

        Returns:
            Optional[float]: The average response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return sum(times) / len(times) if times else None

    @property
    def last_execution_time(self) -> Optional[datetime]:
        """Timestamp of the most recent tool execution.

        Returns None if metrics are not loaded (use metrics_summary for SQL fallback).

        Returns:
            Optional[datetime]: The timestamp of the most recent execution, or None.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return max(m.timestamp for m in self.metrics)

    @property
    def metrics_summary(self) -> Dict[str, Any]:
        """Aggregated metrics for the tool.

        When metrics are loaded: computes all values from memory in a single pass.
        When not loaded: uses a single SQL query with aggregation for all fields.

        Returns:
            Dict[str, Any]: Dictionary containing aggregated metrics:
                - total_executions, successful_executions, failed_executions
                - failure_rate, min/max/avg_response_time, last_execution_time
        """
        # If metrics are loaded, compute everything in a single pass
        if self._metrics_loaded():
            total = 0
            successful = 0
            min_rt: Optional[float] = None
            max_rt: Optional[float] = None
            sum_rt = 0.0
            last_time: Optional[datetime] = None

            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
                rt = m.response_time
                if min_rt is None or rt < min_rt:
                    min_rt = rt
                if max_rt is None or rt > max_rt:
                    max_rt = rt
                sum_rt += rt
                if last_time is None or m.timestamp > last_time:
                    last_time = m.timestamp

            failed = total - successful
            return {
                "total_executions": total,
                "successful_executions": successful,
                "failed_executions": failed,
                "failure_rate": failed / total if total > 0 else 0.0,
                "min_response_time": min_rt,
                "max_response_time": max_rt,
                "avg_response_time": sum_rt / total if total > 0 else None,
                "last_execution_time": last_time,
            }

        # Use single SQL query with full aggregation
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        # Detached instances have no session; return an all-zero/None summary.
        session = object_session(self)
        if session is None:
            return {
                "total_executions": 0,
                "successful_executions": 0,
                "failed_executions": 0,
                "failure_rate": 0.0,
                "min_response_time": None,
                "max_response_time": None,
                "avg_response_time": None,
                "last_execution_time": None,
            }

        result = (
            session.query(
                func.count(ToolMetric.id),  # pylint: disable=not-callable
                func.sum(case((ToolMetric.is_success.is_(True), 1), else_=0)),
                func.min(ToolMetric.response_time),  # pylint: disable=not-callable
                func.max(ToolMetric.response_time),  # pylint: disable=not-callable
                func.avg(ToolMetric.response_time),  # pylint: disable=not-callable
                func.max(ToolMetric.timestamp),  # pylint: disable=not-callable
            )
            .filter(ToolMetric.tool_id == self.id)
            .one()
        )

        total = result[0] or 0
        successful = result[1] or 0
        failed = total - successful

        return {
            "total_executions": total,
            "successful_executions": successful,
            "failed_executions": failed,
            "failure_rate": failed / total if total > 0 else 0.0,
            "min_response_time": result[2],
            "max_response_time": result[3],
            # AVG may come back as Decimal depending on the backend; normalize to float.
            "avg_response_time": float(result[4]) if result[4] is not None else None,
            "last_execution_time": result[5],
        }

3374 

3375 

class Resource(Base):
    """
    ORM model for a registered Resource.

    Resources represent content that can be read by clients.
    Supports subscriptions for real-time updates.
    Additionally, this model provides a relationship with ResourceMetric records
    to capture invocation metrics (such as execution counts, response times, and failures).
    """

    __tablename__ = "resources"

    # Primary key is a random 32-char hex UUID string.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    uri: Mapped[str] = mapped_column(String(767), nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    mime_type: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    size: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    uri_template: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # URI template for parameterized resources
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    # is_active: Mapped[bool] = mapped_column(default=True)
    enabled: Mapped[bool] = mapped_column(default=True)
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    metrics: Mapped[List["ResourceMetric"]] = relationship("ResourceMetric", back_populates="resource", cascade="all, delete-orphan")

    # Content storage - can be text or binary
    text_content: Mapped[Optional[str]] = mapped_column(Text)
    binary_content: Mapped[Optional[bytes]]

    # Subscription tracking
    subscriptions: Mapped[List["ResourceSubscription"]] = relationship("ResourceSubscription", back_populates="resource", cascade="all, delete-orphan")

    gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
    gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="resources")
    # federated_with = relationship("Gateway", secondary=resource_gateway_table, back_populates="federated_resources")

    # Many-to-many relationship with Servers
    servers: Mapped[List["Server"]] = relationship("Server", secondary=server_resource_association, back_populates="resources")
    # Uniqueness: federated resources are unique per (team, owner, gateway, uri);
    # local resources (gateway_id IS NULL) get a separate partial unique index
    # because NULL values don't collide under the plain unique constraint.
    __table_args__ = (
        UniqueConstraint("team_id", "owner_email", "gateway_id", "uri", name="uq_team_owner_gateway_uri_resource"),
        Index("uq_team_owner_uri_resource_local", "team_id", "owner_email", "uri", unique=True, postgresql_where=text("gateway_id IS NULL"), sqlite_where=text("gateway_id IS NULL")),
        Index("idx_resources_created_at_id", "created_at", "id"),
    )

    @property
    def content(self) -> "ResourceContent":
        """
        Returns the resource content in the appropriate format.

        If text content exists, returns a ResourceContent with text.
        Otherwise, if binary content exists, returns a ResourceContent with blob data.
        Raises a ValueError if no content is available.

        Returns:
            ResourceContent: The resource content with appropriate format (text or blob).

        Raises:
            ValueError: If the resource has no content available.

        Examples:
            >>> resource = Resource(uri="test://example", name="test")
            >>> resource.text_content = "Hello, World!"
            >>> content = resource.content
            >>> content.text
            'Hello, World!'
            >>> content.type
            'resource'

            >>> binary_resource = Resource(uri="test://binary", name="binary")
            >>> binary_resource.binary_content = b"\\x00\\x01\\x02"
            >>> binary_content = binary_resource.content
            >>> binary_content.blob
            b'\\x00\\x01\\x02'

            >>> empty_resource = Resource(uri="test://empty", name="empty")
            >>> try:
            ...     empty_resource.content
            ... except ValueError as e:
            ...     str(e)
            'Resource has no content'
        """

        # Local import to avoid circular import
        # First-Party
        from mcpgateway.common.models import ResourceContent  # pylint: disable=import-outside-toplevel

        if self.text_content is not None:
            return ResourceContent(
                type="resource",
                id=str(self.id),
                uri=self.uri,
                mime_type=self.mime_type,
                text=self.text_content,
            )
        if self.binary_content is not None:
            return ResourceContent(
                type="resource",
                id=str(self.id),
                uri=self.uri,
                # Binary content with no declared MIME type falls back to a generic one.
                mime_type=self.mime_type or "application/octet-stream",
                blob=self.binary_content,
            )
        raise ValueError("Resource has no content")

    def _metrics_loaded(self) -> bool:
        """Check if metrics relationship is loaded without triggering lazy load.

        Inspects the instance state dict so that merely asking the question
        never fires a lazy-load SQL query.

        Returns:
            bool: True if metrics are loaded, False otherwise.
        """
        return "metrics" in sa_inspect(self).dict

    def _get_metric_counts(self) -> tuple[int, int, int]:
        """Get total, successful, and failed metric counts in a single operation.

        When metrics are already loaded, computes from memory in O(n).
        When not loaded, uses a single SQL query with conditional aggregation.

        Returns:
            tuple[int, int, int]: (total, successful, failed) counts.
        """
        if self._metrics_loaded():
            total = 0
            successful = 0
            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
            return (total, successful, total - successful)

        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        # Detached instances have no session to query; report zero counts.
        session = object_session(self)
        if session is None:
            return (0, 0, 0)

        result = (
            session.query(
                func.count(ResourceMetric.id),  # pylint: disable=not-callable
                func.sum(case((ResourceMetric.is_success.is_(True), 1), else_=0)),
            )
            .filter(ResourceMetric.resource_id == self.id)
            .one()
        )

        # SUM over zero rows yields NULL; coalesce to 0 in Python.
        total = result[0] or 0
        successful = result[1] or 0
        return (total, successful, total - successful)

    @hybrid_property
    def execution_count(self) -> int:
        """Number of ResourceMetric records associated with this resource instance.

        Returns:
            int: Count of ResourceMetric records for this resource.
        """
        return self._get_metric_counts()[0]

    @execution_count.expression
    @classmethod
    def execution_count(cls) -> Any:
        """SQL expression that counts ResourceMetric rows for this resource.

        Returns:
            Any: SQLAlchemy labeled count expression for resource metrics.
        """
        return select(func.count(ResourceMetric.id)).where(ResourceMetric.resource_id == cls.id).correlate(cls).scalar_subquery().label("execution_count")  # pylint: disable=not-callable

    @property
    def successful_executions(self) -> int:
        """Count of successful resource invocations.

        Returns:
            int: The count of successful resource invocations.
        """
        return self._get_metric_counts()[1]

    @property
    def failed_executions(self) -> int:
        """Count of failed resource invocations.

        Returns:
            int: The count of failed resource invocations.
        """
        return self._get_metric_counts()[2]

    @property
    def failure_rate(self) -> float:
        """Failure rate as a float between 0 and 1.

        Returns:
            float: The failure rate as a value between 0 and 1.
        """
        total, _, failed = self._get_metric_counts()
        return failed / total if total > 0 else 0.0

    @property
    def min_response_time(self) -> Optional[float]:
        """Minimum response time among all resource invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The minimum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return min(times) if times else None

    @property
    def max_response_time(self) -> Optional[float]:
        """Maximum response time among all resource invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The maximum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return max(times) if times else None

    @property
    def avg_response_time(self) -> Optional[float]:
        """Average response time among all resource invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The average response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return sum(times) / len(times) if times else None

    @property
    def last_execution_time(self) -> Optional[datetime]:
        """Timestamp of the most recent resource invocation.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[datetime]: The timestamp of the most recent invocation, or None.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return max(m.timestamp for m in self.metrics)

    @property
    def metrics_summary(self) -> Dict[str, Any]:
        """Aggregated metrics for the resource.

        When metrics are loaded: computes all values from memory in a single pass.
        When not loaded: uses a single SQL query with aggregation for all fields.

        Returns:
            Dict[str, Any]: Dictionary containing aggregated metrics:
                - total_executions, successful_executions, failed_executions
                - failure_rate, min/max/avg_response_time, last_execution_time
        """
        if self._metrics_loaded():
            total = 0
            successful = 0
            min_rt: Optional[float] = None
            max_rt: Optional[float] = None
            sum_rt = 0.0
            last_time: Optional[datetime] = None

            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
                rt = m.response_time
                if min_rt is None or rt < min_rt:
                    min_rt = rt
                if max_rt is None or rt > max_rt:
                    max_rt = rt
                sum_rt += rt
                if last_time is None or m.timestamp > last_time:
                    last_time = m.timestamp

            failed = total - successful
            return {
                "total_executions": total,
                "successful_executions": successful,
                "failed_executions": failed,
                "failure_rate": failed / total if total > 0 else 0.0,
                "min_response_time": min_rt,
                "max_response_time": max_rt,
                "avg_response_time": sum_rt / total if total > 0 else None,
                "last_execution_time": last_time,
            }

        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        # Detached instances have no session; return an all-zero/None summary.
        session = object_session(self)
        if session is None:
            return {
                "total_executions": 0,
                "successful_executions": 0,
                "failed_executions": 0,
                "failure_rate": 0.0,
                "min_response_time": None,
                "max_response_time": None,
                "avg_response_time": None,
                "last_execution_time": None,
            }

        result = (
            session.query(
                func.count(ResourceMetric.id),  # pylint: disable=not-callable
                func.sum(case((ResourceMetric.is_success.is_(True), 1), else_=0)),
                func.min(ResourceMetric.response_time),  # pylint: disable=not-callable
                func.max(ResourceMetric.response_time),  # pylint: disable=not-callable
                func.avg(ResourceMetric.response_time),  # pylint: disable=not-callable
                func.max(ResourceMetric.timestamp),  # pylint: disable=not-callable
            )
            .filter(ResourceMetric.resource_id == self.id)
            .one()
        )

        total = result[0] or 0
        successful = result[1] or 0
        failed = total - successful

        return {
            "total_executions": total,
            "successful_executions": successful,
            "failed_executions": failed,
            "failure_rate": failed / total if total > 0 else 0.0,
            "min_response_time": result[2],
            "max_response_time": result[3],
            # AVG may come back as Decimal depending on the backend; normalize to float.
            "avg_response_time": float(result[4]) if result[4] is not None else None,
            "last_execution_time": result[5],
        }

    # Team scoping fields for resource organization
    # (declared after the methods, but still part of the mapped table; the
    # __table_args__ above reference these columns by name)
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

3750 

3751 

class ResourceSubscription(Base):
    """Tracks subscriptions to resource updates.

    Each row links an external subscriber (identified by a free-form client
    string) to a Resource, so change notifications can be delivered and the
    time of the most recent notification recorded.
    """

    __tablename__ = "resource_subscriptions"

    # Surrogate integer primary key.
    id: Mapped[int] = mapped_column(primary_key=True)
    # The resource being watched.
    resource_id: Mapped[str] = mapped_column(ForeignKey("resources.id"))
    subscriber_id: Mapped[str] = mapped_column(String(255), nullable=False)  # Client identifier
    # Subscription creation time (timezone-aware; default supplied by utc_now).
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    # Timestamp of the last notification sent for this subscription, if any.
    last_notification: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)

    # Back-reference to the subscribed Resource.
    resource: Mapped["Resource"] = relationship(back_populates="subscriptions")

3764 

3765 

class ToolOpsTestCases(Base):
    """
    ORM model for a registered Tool's test cases.

    Represents a tool and its generated test cases.
    Includes:
        - tool_id: unique tool identifier (primary key; one row per tool).
        - test_cases: generated test cases, stored as a JSON document.
        - run_status: status of test case generation.
    """

    __tablename__ = "toolops_test_cases"

    # The tool identifier doubles as the primary key, so each tool has at most one row.
    tool_id: Mapped[str] = mapped_column(String(255), primary_key=True)
    # Arbitrary JSON payload holding the generated test cases.
    test_cases: Mapped[Dict[str, Any]] = mapped_column(JSON)
    # Free-form status string describing the generation run.
    run_status: Mapped[str] = mapped_column(String(255), nullable=False)

3782 

3783 

class Prompt(Base):
    """
    ORM model for a registered Prompt template.

    Represents a prompt template along with its argument schema.
    Supports rendering and invocation of prompts.
    Additionally, this model provides computed properties for aggregated metrics based
    on the associated PromptMetric records. These include:
        - execution_count: Total number of prompt invocations.
        - successful_executions: Count of successful invocations.
        - failed_executions: Count of failed invocations.
        - failure_rate: Ratio of failed invocations to total invocations.
        - min_response_time: Fastest recorded response time.
        - max_response_time: Slowest recorded response time.
        - avg_response_time: Mean response time.
        - last_execution_time: Timestamp of the most recent invocation.
    """

    __tablename__ = "prompts"

    # Hex UUID (no dashes) primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    original_name: Mapped[str] = mapped_column(String(255), nullable=False)
    custom_name: Mapped[str] = mapped_column(String(255), nullable=False)
    custom_name_slug: Mapped[str] = mapped_column(String(255), nullable=False)
    display_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # The prompt template text itself.
    template: Mapped[str] = mapped_column(Text)
    # JSON Schema consumed by validate_arguments() to check rendering arguments.
    argument_schema: Mapped[Dict[str, Any]] = mapped_column(JSON)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    # is_active: Mapped[bool] = mapped_column(default=True)  # NOTE(review): legacy flag, presumably superseded by `enabled`
    enabled: Mapped[bool] = mapped_column(default=True)
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Per-invocation metric rows; removed together with the prompt (delete-orphan cascade).
    metrics: Mapped[List["PromptMetric"]] = relationship("PromptMetric", back_populates="prompt", cascade="all, delete-orphan")

    # Set when the prompt is provided by a federated peer gateway; the row is
    # removed with that gateway (ondelete CASCADE).
    gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
    gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="prompts")
    # federated_with = relationship("Gateway", secondary=prompt_gateway_table, back_populates="federated_prompts")

    # Many-to-many relationship with Servers
    servers: Mapped[List["Server"]] = relationship("Server", secondary=server_prompt_association, back_populates="prompts")

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    __table_args__ = (
        # Federated prompts: name must be unique per (team, owner, gateway).
        UniqueConstraint("team_id", "owner_email", "gateway_id", "name", name="uq_team_owner_gateway_name_prompt"),
        # A gateway may expose a given original_name only once.
        UniqueConstraint("gateway_id", "original_name", name="uq_gateway_id__original_name_prompt"),
        # Local prompts (no gateway): partial unique index on (team, owner, name),
        # expressed for both PostgreSQL and SQLite dialects.
        Index("uq_team_owner_name_prompt_local", "team_id", "owner_email", "name", unique=True, postgresql_where=text("gateway_id IS NULL"), sqlite_where=text("gateway_id IS NULL")),
        # Composite index on (created_at, id) for time-ordered listing queries.
        Index("idx_prompts_created_at_id", "created_at", "id"),
    )

    @hybrid_property
    def gateway_slug(self) -> Optional[str]:
        """Return the related gateway's slug if available.

        Returns:
            Optional[str]: Gateway slug or None when no gateway is attached.
        """
        return self.gateway.slug if self.gateway else None

    @gateway_slug.expression
    @classmethod
    def gateway_slug(cls) -> Any:
        """SQL expression to select current gateway slug for this prompt.

        Returns:
            Any: SQLAlchemy scalar subquery selecting the gateway slug.
        """
        return select(Gateway.slug).where(Gateway.id == cls.gateway_id).scalar_subquery()

    def validate_arguments(self, args: Dict[str, str]) -> None:
        """
        Validate prompt arguments against the argument schema.

        Args:
            args (Dict[str, str]): Dictionary of arguments to validate.

        Raises:
            ValueError: If the arguments do not conform to the schema.

        Examples:
            >>> prompt = Prompt(
            ...     name="test_prompt",
            ...     template="Hello {name}",
            ...     argument_schema={
            ...         "type": "object",
            ...         "properties": {
            ...             "name": {"type": "string"}
            ...         },
            ...         "required": ["name"]
            ...     }
            ... )
            >>> prompt.validate_arguments({"name": "Alice"})  # No exception
            >>> try:
            ...     prompt.validate_arguments({"age": 25})  # Missing required field
            ... except ValueError as e:
            ...     "name" in str(e)
            True
        """
        # Re-raise schema violations as ValueError so callers need not depend on jsonschema's exception types.
        try:
            jsonschema.validate(args, self.argument_schema)
        except jsonschema.exceptions.ValidationError as e:
            raise ValueError(f"Invalid prompt arguments: {str(e)}") from e

    def _metrics_loaded(self) -> bool:
        """Check if metrics relationship is loaded without triggering lazy load.

        Returns:
            bool: True if metrics are loaded, False otherwise.
        """
        # sa_inspect(...).dict holds only already-populated attributes, so this
        # membership test never issues a query.
        return "metrics" in sa_inspect(self).dict

    def _get_metric_counts(self) -> tuple[int, int, int]:
        """Get total, successful, and failed metric counts in a single operation.

        When metrics are already loaded, computes from memory in O(n).
        When not loaded, uses a single SQL query with conditional aggregation.

        Returns:
            tuple[int, int, int]: (total, successful, failed) counts.
        """
        # In-memory path: iterate the already-loaded collection once.
        if self._metrics_loaded():
            total = 0
            successful = 0
            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
            return (total, successful, total - successful)

        # SQL path: local imports keep these symbols out of module scope.
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Detached instance with no session: nothing can be queried.
            return (0, 0, 0)

        # Single aggregate query: COUNT(*) plus a conditional SUM of successes.
        result = (
            session.query(
                func.count(PromptMetric.id),  # pylint: disable=not-callable
                func.sum(case((PromptMetric.is_success.is_(True), 1), else_=0)),
            )
            .filter(PromptMetric.prompt_id == self.id)
            .one()
        )

        # SUM over zero rows yields NULL, hence the `or 0` coalescing.
        total = result[0] or 0
        successful = result[1] or 0
        return (total, successful, total - successful)

    @hybrid_property
    def execution_count(self) -> int:
        """Number of PromptMetric records associated with this prompt instance.

        Returns:
            int: Count of PromptMetric records for this prompt.
        """
        return self._get_metric_counts()[0]

    @execution_count.expression
    @classmethod
    def execution_count(cls) -> Any:
        """SQL expression that counts PromptMetric rows for this prompt.

        Returns:
            Any: SQLAlchemy labeled count expression for prompt metrics.
        """
        return select(func.count(PromptMetric.id)).where(PromptMetric.prompt_id == cls.id).correlate(cls).scalar_subquery().label("execution_count")  # pylint: disable=not-callable

    @property
    def successful_executions(self) -> int:
        """Count of successful prompt invocations.

        Returns:
            int: The count of successful prompt invocations.
        """
        return self._get_metric_counts()[1]

    @property
    def failed_executions(self) -> int:
        """Count of failed prompt invocations.

        Returns:
            int: The count of failed prompt invocations.
        """
        return self._get_metric_counts()[2]

    @property
    def failure_rate(self) -> float:
        """Failure rate as a float between 0 and 1.

        Returns:
            float: The failure rate as a value between 0 and 1.
        """
        total, _, failed = self._get_metric_counts()
        return failed / total if total > 0 else 0.0

    @property
    def min_response_time(self) -> Optional[float]:
        """Minimum response time among all prompt invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The minimum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return min(times) if times else None

    @property
    def max_response_time(self) -> Optional[float]:
        """Maximum response time among all prompt invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The maximum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return max(times) if times else None

    @property
    def avg_response_time(self) -> Optional[float]:
        """Average response time among all prompt invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The average response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return sum(times) / len(times) if times else None

    @property
    def last_execution_time(self) -> Optional[datetime]:
        """Timestamp of the most recent prompt invocation.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[datetime]: The timestamp of the most recent invocation, or None if no invocations exist.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return max(m.timestamp for m in self.metrics)

    @property
    def metrics_summary(self) -> Dict[str, Any]:
        """Aggregated metrics for the prompt.

        When metrics are loaded: computes all values from memory in a single pass.
        When not loaded: uses a single SQL query with aggregation for all fields.

        Returns:
            Dict[str, Any]: Dictionary containing aggregated metrics:
                - total_executions, successful_executions, failed_executions
                - failure_rate, min/max/avg_response_time, last_execution_time
        """
        # In-memory path: one pass over the loaded collection computes all aggregates.
        if self._metrics_loaded():
            total = 0
            successful = 0
            min_rt: Optional[float] = None
            max_rt: Optional[float] = None
            sum_rt = 0.0
            last_time: Optional[datetime] = None

            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
                rt = m.response_time
                if min_rt is None or rt < min_rt:
                    min_rt = rt
                if max_rt is None or rt > max_rt:
                    max_rt = rt
                sum_rt += rt
                if last_time is None or m.timestamp > last_time:
                    last_time = m.timestamp

            failed = total - successful
            return {
                "total_executions": total,
                "successful_executions": successful,
                "failed_executions": failed,
                "failure_rate": failed / total if total > 0 else 0.0,
                "min_response_time": min_rt,
                "max_response_time": max_rt,
                "avg_response_time": sum_rt / total if total > 0 else None,
                "last_execution_time": last_time,
            }

        # SQL path: local imports keep these symbols out of module scope.
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Detached instance with no session: report an empty summary.
            return {
                "total_executions": 0,
                "successful_executions": 0,
                "failed_executions": 0,
                "failure_rate": 0.0,
                "min_response_time": None,
                "max_response_time": None,
                "avg_response_time": None,
                "last_execution_time": None,
            }

        # One aggregate query covering all summary fields.
        result = (
            session.query(
                func.count(PromptMetric.id),  # pylint: disable=not-callable
                func.sum(case((PromptMetric.is_success.is_(True), 1), else_=0)),
                func.min(PromptMetric.response_time),  # pylint: disable=not-callable
                func.max(PromptMetric.response_time),  # pylint: disable=not-callable
                func.avg(PromptMetric.response_time),  # pylint: disable=not-callable
                func.max(PromptMetric.timestamp),  # pylint: disable=not-callable
            )
            .filter(PromptMetric.prompt_id == self.id)
            .one()
        )

        # SUM/COUNT over zero rows can be NULL; coalesce to 0.
        total = result[0] or 0
        successful = result[1] or 0
        failed = total - successful

        return {
            "total_executions": total,
            "successful_executions": successful,
            "failed_executions": failed,
            "failure_rate": failed / total if total > 0 else 0.0,
            "min_response_time": result[2],
            "max_response_time": result[3],
            # AVG may come back as Decimal depending on backend; normalize to float.
            "avg_response_time": float(result[4]) if result[4] is not None else None,
            "last_execution_time": result[5],
        }

4156 

4157 

class Server(Base):
    """
    ORM model for MCP Servers Catalog.

    Represents a server that composes catalog items (tools, resources, prompts).
    Additionally, this model provides computed properties for aggregated metrics based
    on the associated ServerMetric records. These include:
        - execution_count: Total number of invocations.
        - successful_executions: Count of successful invocations.
        - failed_executions: Count of failed invocations.
        - failure_rate: Ratio of failed invocations to total invocations.
        - min_response_time: Fastest recorded response time.
        - max_response_time: Slowest recorded response time.
        - avg_response_time: Mean response time.
        - last_execution_time: Timestamp of the most recent invocation.
    """

    __tablename__ = "servers"

    # Hex UUID (no dashes) primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Icon reference (e.g. a URL or data URI); 767 chars fits common index-size limits.
    icon: Mapped[Optional[str]] = mapped_column(String(767), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    # is_active: Mapped[bool] = mapped_column(default=True)  # NOTE(review): legacy flag, presumably superseded by `enabled`
    enabled: Mapped[bool] = mapped_column(default=True)
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Per-invocation metric rows; removed together with the server (delete-orphan cascade).
    metrics: Mapped[List["ServerMetric"]] = relationship("ServerMetric", back_populates="server", cascade="all, delete-orphan")

    # Many-to-many relationships for associated items
    tools: Mapped[List["Tool"]] = relationship("Tool", secondary=server_tool_association, back_populates="servers")
    resources: Mapped[List["Resource"]] = relationship("Resource", secondary=server_resource_association, back_populates="servers")
    prompts: Mapped[List["Prompt"]] = relationship("Prompt", secondary=server_prompt_association, back_populates="servers")
    a2a_agents: Mapped[List["A2AAgent"]] = relationship("A2AAgent", secondary=server_a2a_association, back_populates="servers")

    # API token relationships
    scoped_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="server")

    def _metrics_loaded(self) -> bool:
        """Check if metrics relationship is loaded without triggering lazy load.

        Returns:
            bool: True if metrics are loaded, False otherwise.
        """
        # sa_inspect(...).dict holds only already-populated attributes, so this
        # membership test never issues a query.
        return "metrics" in sa_inspect(self).dict

    def _get_metric_counts(self) -> tuple[int, int, int]:
        """Get total, successful, and failed metric counts in a single operation.

        When metrics are already loaded, computes from memory in O(n).
        When not loaded, uses a single SQL query with conditional aggregation.

        Returns:
            tuple[int, int, int]: (total, successful, failed) counts.
        """
        # In-memory path: iterate the already-loaded collection once.
        if self._metrics_loaded():
            total = 0
            successful = 0
            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
            return (total, successful, total - successful)

        # SQL path: local imports keep these symbols out of module scope.
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Detached instance with no session: nothing can be queried.
            return (0, 0, 0)

        # Single aggregate query: COUNT(*) plus a conditional SUM of successes.
        result = (
            session.query(
                func.count(ServerMetric.id),  # pylint: disable=not-callable
                func.sum(case((ServerMetric.is_success.is_(True), 1), else_=0)),
            )
            .filter(ServerMetric.server_id == self.id)
            .one()
        )

        # SUM over zero rows yields NULL, hence the `or 0` coalescing.
        total = result[0] or 0
        successful = result[1] or 0
        return (total, successful, total - successful)

    @hybrid_property
    def execution_count(self) -> int:
        """Number of ServerMetric records associated with this server instance.

        Returns:
            int: Count of ServerMetric records for this server.
        """
        return self._get_metric_counts()[0]

    @execution_count.expression
    @classmethod
    def execution_count(cls) -> Any:
        """SQL expression that counts ServerMetric rows for this server.

        Returns:
            Any: SQLAlchemy labeled count expression for server metrics.
        """
        return select(func.count(ServerMetric.id)).where(ServerMetric.server_id == cls.id).correlate(cls).scalar_subquery().label("execution_count")  # pylint: disable=not-callable

    @property
    def successful_executions(self) -> int:
        """Count of successful server invocations.

        Returns:
            int: The count of successful server invocations.
        """
        return self._get_metric_counts()[1]

    @property
    def failed_executions(self) -> int:
        """Count of failed server invocations.

        Returns:
            int: The count of failed server invocations.
        """
        return self._get_metric_counts()[2]

    @property
    def failure_rate(self) -> float:
        """Failure rate as a float between 0 and 1.

        Returns:
            float: The failure rate as a value between 0 and 1.
        """
        total, _, failed = self._get_metric_counts()
        return failed / total if total > 0 else 0.0

    @property
    def min_response_time(self) -> Optional[float]:
        """Minimum response time among all server invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The minimum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return min(times) if times else None

    @property
    def max_response_time(self) -> Optional[float]:
        """Maximum response time among all server invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The maximum response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return max(times) if times else None

    @property
    def avg_response_time(self) -> Optional[float]:
        """Average response time among all server invocations.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[float]: The average response time, or None.
        """
        if not self._metrics_loaded():
            return None
        times: List[float] = [m.response_time for m in self.metrics]
        return sum(times) / len(times) if times else None

    @property
    def last_execution_time(self) -> Optional[datetime]:
        """Timestamp of the most recent server invocation.

        Returns None if metrics are not loaded. Note: counts may be non-zero
        (via SQL) while timing is None. Use service layer converters for
        consistent metrics, or preload metrics via selectinload.

        Returns:
            Optional[datetime]: The timestamp of the most recent invocation, or None.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return max(m.timestamp for m in self.metrics)

    @property
    def metrics_summary(self) -> Dict[str, Any]:
        """Aggregated metrics for the server.

        When metrics are loaded: computes all values from memory in a single pass.
        When not loaded: uses a single SQL query with aggregation for all fields.

        Returns:
            Dict[str, Any]: Dictionary containing aggregated metrics:
                - total_executions, successful_executions, failed_executions
                - failure_rate, min/max/avg_response_time, last_execution_time
        """
        # In-memory path: one pass over the loaded collection computes all aggregates.
        if self._metrics_loaded():
            total = 0
            successful = 0
            min_rt: Optional[float] = None
            max_rt: Optional[float] = None
            sum_rt = 0.0
            last_time: Optional[datetime] = None

            for m in self.metrics:
                total += 1
                if m.is_success:
                    successful += 1
                rt = m.response_time
                if min_rt is None or rt < min_rt:
                    min_rt = rt
                if max_rt is None or rt > max_rt:
                    max_rt = rt
                sum_rt += rt
                if last_time is None or m.timestamp > last_time:
                    last_time = m.timestamp

            failed = total - successful
            return {
                "total_executions": total,
                "successful_executions": successful,
                "failed_executions": failed,
                "failure_rate": failed / total if total > 0 else 0.0,
                "min_response_time": min_rt,
                "max_response_time": max_rt,
                "avg_response_time": sum_rt / total if total > 0 else None,
                "last_execution_time": last_time,
            }

        # SQL path: local imports keep these symbols out of module scope.
        # Third-Party
        from sqlalchemy import case  # pylint: disable=import-outside-toplevel
        from sqlalchemy.orm import object_session  # pylint: disable=import-outside-toplevel

        session = object_session(self)
        if session is None:
            # Detached instance with no session: report an empty summary.
            return {
                "total_executions": 0,
                "successful_executions": 0,
                "failed_executions": 0,
                "failure_rate": 0.0,
                "min_response_time": None,
                "max_response_time": None,
                "avg_response_time": None,
                "last_execution_time": None,
            }

        # One aggregate query covering all summary fields.
        result = (
            session.query(
                func.count(ServerMetric.id),  # pylint: disable=not-callable
                func.sum(case((ServerMetric.is_success.is_(True), 1), else_=0)),
                func.min(ServerMetric.response_time),  # pylint: disable=not-callable
                func.max(ServerMetric.response_time),  # pylint: disable=not-callable
                func.avg(ServerMetric.response_time),  # pylint: disable=not-callable
                func.max(ServerMetric.timestamp),  # pylint: disable=not-callable
            )
            .filter(ServerMetric.server_id == self.id)
            .one()
        )

        # SUM/COUNT over zero rows can be NULL; coalesce to 0.
        total = result[0] or 0
        successful = result[1] or 0
        failed = total - successful

        return {
            "total_executions": total,
            "successful_executions": successful,
            "failed_executions": failed,
            "failure_rate": failed / total if total > 0 else 0.0,
            "min_response_time": result[2],
            "max_response_time": result[3],
            # AVG may come back as Decimal depending on backend; normalize to float.
            "avg_response_time": float(result[4]) if result[4] is not None else None,
            "last_execution_time": result[5],
        }

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    # OAuth 2.0 configuration for RFC 9728 Protected Resource Metadata
    # When enabled, MCP clients can authenticate using OAuth with browser-based IDP SSO
    oauth_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Relationship for loading team names (only active teams)
    # Uses default lazy loading - team name is only loaded when accessed
    # For list/admin views, use explicit joinedload(DbServer.email_team) for single-query loading
    # This avoids adding overhead to hot paths that don't need team names
    email_team: Mapped[Optional["EmailTeam"]] = relationship(
        "EmailTeam",
        # Join only active teams so inactive teams read as "no team".
        primaryjoin="and_(Server.team_id == EmailTeam.id, EmailTeam.is_active == True)",
        foreign_keys=[team_id],
    )

    @property
    def team(self) -> Optional[str]:
        """Return the team name from the `email_team` relationship.

        Returns:
            Optional[str]: The team name if the server belongs to an active team, otherwise None.
        """
        return self.email_team.name if self.email_team else None

    __table_args__ = (
        # Server names are unique per (team, owner).
        UniqueConstraint("team_id", "owner_email", "name", name="uq_team_owner_name_server"),
        # Composite index on (created_at, id) for time-ordered listing queries.
        Index("idx_servers_created_at_id", "created_at", "id"),
    )

4495 

4496 

class Gateway(Base):
    """ORM model for a federated peer Gateway.

    Stores the connection details (URL, transport), advertised capabilities,
    authentication configuration, audit metadata, and team scoping for a
    remote peer gateway, plus relationships to the tools, prompts, and
    resources that the gateway provides.
    """

    __tablename__ = "gateways"

    # Primary key: 32-char hex UUID (uuid4 with dashes stripped).
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False)
    url: Mapped[str] = mapped_column(String(767), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    transport: Mapped[str] = mapped_column(String(20), default="SSE")
    capabilities: Mapped[Dict[str, Any]] = mapped_column(JSON)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    # enabled: admin on/off switch; reachable: last observed health status.
    enabled: Mapped[bool] = mapped_column(default=True)
    reachable: Mapped[bool] = mapped_column(default=True)
    last_seen: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    # NOTE(review): typed List[Dict[str, str]] here while A2AAgent.tags is
    # List[str] — confirm which shape the application actually stores.
    tags: Mapped[List[Dict[str, str]]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking (who/where/how created).
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # 45 chars fits IPv6
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    # Optimistic-versioning counter; starts at 1.
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Header passthrough configuration
    passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)  # Store list of strings as JSON array

    # CA certificate
    # NOTE(review): annotated as bytes but mapped to a Text column — confirm
    # the certificate is persisted as an encoded string upstream.
    ca_certificate: Mapped[Optional[bytes]] = mapped_column(Text, nullable=True)
    ca_certificate_sig: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    signing_algorithm: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, default="ed25519")  # e.g., "sha256"

    # Relationship with local tools this gateway provides
    tools: Mapped[List["Tool"]] = relationship(back_populates="gateway", foreign_keys="Tool.gateway_id", cascade="all, delete-orphan", passive_deletes=True)

    # Relationship with local prompts this gateway provides
    prompts: Mapped[List["Prompt"]] = relationship(back_populates="gateway", cascade="all, delete-orphan", passive_deletes=True)

    # Relationship with local resources this gateway provides
    resources: Mapped[List["Resource"]] = relationship(back_populates="gateway", cascade="all, delete-orphan", passive_deletes=True)

    # # Tools federated from this gateway
    # federated_tools: Mapped[List["Tool"]] = relationship(secondary=tool_gateway_table, back_populates="federated_with")

    # # Prompts federated from this resource
    # federated_resources: Mapped[List["Resource"]] = relationship(secondary=resource_gateway_table, back_populates="federated_with")

    # # Prompts federated from this gateway
    # federated_prompts: Mapped[List["Prompt"]] = relationship(secondary=prompt_gateway_table, back_populates="federated_with")

    # Authorizations
    auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None)  # "basic", "bearer", "authheaders", "oauth", "query_param" or None
    auth_value: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
    auth_query_params: Mapped[Optional[Dict[str, str]]] = mapped_column(
        JSON,
        nullable=True,
        default=None,
        comment="Encrypted query parameters for auth. Format: {'param_name': 'encrypted_value'}",
    )

    # OAuth configuration
    oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    # Relationship for loading team names (only active teams)
    # Uses default lazy loading - team name is only loaded when accessed
    # For list/admin views, use explicit joinedload(DbGateway.email_team) for single-query loading
    # This avoids adding overhead to hot paths that don't need team names
    email_team: Mapped[Optional["EmailTeam"]] = relationship(
        "EmailTeam",
        primaryjoin="and_(Gateway.team_id == EmailTeam.id, EmailTeam.is_active == True)",
        foreign_keys=[team_id],
    )

    @property
    def team(self) -> Optional[str]:
        """Return the team name from the `email_team` relationship.

        Returns:
            Optional[str]: The team name if the gateway belongs to an active team, otherwise None.
        """
        return self.email_team.name if self.email_team else None

    # Per-gateway refresh configuration
    refresh_interval_seconds: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, comment="Per-gateway refresh interval in seconds; NULL uses global default")
    last_refresh_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True, comment="Timestamp of the last successful tools/resources/prompts refresh")

    # Gateway mode: 'cache' (default) or 'direct_proxy'
    # - 'cache': Tools/resources/prompts are cached in database upon gateway registration (current behavior)
    # - 'direct_proxy': All RPC calls are proxied directly to remote MCP server with no database caching
    gateway_mode: Mapped[str] = mapped_column(String(20), nullable=False, default="cache", comment="Gateway mode: 'cache' (database caching) or 'direct_proxy' (pass-through mode)")

    # Relationship with OAuth tokens
    oauth_tokens: Mapped[List["OAuthToken"]] = relationship("OAuthToken", back_populates="gateway", cascade="all, delete-orphan")

    # Relationship with registered OAuth clients (DCR)
    registered_oauth_clients: Mapped[List["RegisteredOAuthClient"]] = relationship("RegisteredOAuthClient", back_populates="gateway", cascade="all, delete-orphan")

    # Uniqueness is per (team, owner, slug); composite index supports
    # created_at-ordered, id-tiebroken pagination.
    __table_args__ = (
        UniqueConstraint("team_id", "owner_email", "slug", name="uq_team_owner_slug_gateway"),
        Index("idx_gateways_created_at_id", "created_at", "id"),
    )

4614 

4615 

@event.listens_for(Gateway, "after_update")
def update_tool_names_on_gateway_update(_mapper, connection, target):
    """Propagate a Gateway rename to all of its Tools with one bulk UPDATE.

    Runs in the "after_update" phase of the flush; when the gateway's
    ``name`` changed, every child tool's composite name is recomputed
    inside the database as ``<gateway-slug><separator><custom_name_slug>``.

    Args:
        _mapper: SQLAlchemy mapper for the Gateway model (unused).
        connection: Connection bound to the in-flight transaction.
        target: The Gateway instance that was just updated.
    """
    # Bail out unless the 'name' attribute actually changed in this flush.
    name_history = get_history(target, "name")
    if not name_history.has_changes():
        return

    logger.info("Gateway name changed for ID %s. Issuing bulk update for tools.", target.id)

    gateway_slug = slugify(target.name)
    separator = settings.gateway_tool_name_separator
    tool_columns = Tool.__table__.c

    # A single set-based UPDATE keeps all the work in the database instead
    # of loading and rewriting each Tool row in Python.
    bulk_rename = (
        cast(Any, Tool.__table__)
        .update()
        .where(tool_columns.gateway_id == target.id)
        .values(name=gateway_slug + separator + tool_columns.custom_name_slug)
        .execution_options(synchronize_session=False)
    )

    # Execute on the connection of the ongoing transaction so the rename
    # commits (or rolls back) atomically with the gateway update.
    connection.execute(bulk_rename)

4653 

4654 

@event.listens_for(Gateway, "after_update")
def update_prompt_names_on_gateway_update(_mapper, connection, target):
    """Propagate a Gateway rename to all of its Prompts with one bulk UPDATE.

    Mirrors the tool-rename listener: when the gateway's ``name`` changed,
    each prompt's composite name is rebuilt in the database as
    ``<gateway-slug><separator><custom_name_slug>``.

    Args:
        _mapper: SQLAlchemy mapper for the Gateway model.
        connection: Database connection for the update transaction.
        target: Gateway instance being updated.
    """
    # No-op unless the 'name' attribute changed in this flush.
    if not get_history(target, "name").has_changes():
        return

    logger.info("Gateway name changed for ID %s. Issuing bulk update for prompts.", target.id)

    gateway_slug = slugify(target.name)
    separator = settings.gateway_tool_name_separator
    prompt_columns = Prompt.__table__.c

    # Set-based UPDATE: renames every child prompt inside the database.
    bulk_rename = (
        cast(Any, Prompt.__table__)
        .update()
        .where(prompt_columns.gateway_id == target.id)
        .values(name=gateway_slug + separator + prompt_columns.custom_name_slug)
        .execution_options(synchronize_session=False)
    )

    # Use the transaction's connection so this commits with the gateway row.
    connection.execute(bulk_rename)

4682 

4683 

class A2AAgent(Base):
    """
    ORM model for A2A (Agent-to-Agent) compatible agents.

    A2A agents represent external AI agents that can be integrated into the gateway
    and exposed as tools within virtual servers. They support standardized
    Agent-to-Agent communication protocols for interoperability.

    Metrics-derived properties (execution_count, failure_rate, ...) only read
    the ``metrics`` relationship when it is already loaded, so they never
    trigger a lazy load on hot paths.
    """

    __tablename__ = "a2a_agents"

    # Primary key: 32-char hex UUID (uuid4 with dashes stripped).
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text)
    endpoint_url: Mapped[str] = mapped_column(String(767), nullable=False)
    agent_type: Mapped[str] = mapped_column(String(50), nullable=False, default="generic")  # e.g., "openai", "anthropic", "custom"
    protocol_version: Mapped[str] = mapped_column(String(10), nullable=False, default="1.0")
    capabilities: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict)
    # Configuration
    config: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict)

    # Authorizations
    auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None)  # "basic", "bearer", "authheaders", "oauth", "query_param" or None
    auth_value: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
    auth_query_params: Mapped[Optional[Dict[str, str]]] = mapped_column(
        JSON,
        nullable=True,
        default=None,
        comment="Encrypted query parameters for auth. Format: {'param_name': 'encrypted_value'}",
    )

    # OAuth configuration
    oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")

    # Header passthrough configuration
    passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)  # Store list of strings as JSON array

    # Status and metadata (enabled = admin switch, reachable = health status)
    enabled: Mapped[bool] = mapped_column(Boolean, default=True)
    reachable: Mapped[bool] = mapped_column(Boolean, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
    last_interaction: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Tags for categorization
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Comprehensive metadata for audit tracking (who/where/how created)
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # 45 chars fits IPv6
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    # Associated tool ID (A2A agents are automatically registered as tools)
    tool_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("tools.id", ondelete="SET NULL"), nullable=True)

    # Relationships
    servers: Mapped[List["Server"]] = relationship("Server", secondary=server_a2a_association, back_populates="a2a_agents")
    tool: Mapped[Optional["Tool"]] = relationship("Tool", foreign_keys=[tool_id])
    metrics: Mapped[List["A2AAgentMetric"]] = relationship("A2AAgentMetric", back_populates="a2a_agent", cascade="all, delete-orphan")
    __table_args__ = (
        UniqueConstraint("team_id", "owner_email", "slug", name="uq_team_owner_slug_a2a_agent"),
        Index("idx_a2a_agents_created_at_id", "created_at", "id"),
        Index("idx_a2a_agents_tool_id", "tool_id"),
    )

    # Relationship with OAuth tokens
    # oauth_tokens: Mapped[List["OAuthToken"]] = relationship("OAuthToken", back_populates="gateway", cascade="all, delete-orphan")

    # Relationship with registered OAuth clients (DCR)
    # registered_oauth_clients: Mapped[List["RegisteredOAuthClient"]] = relationship("RegisteredOAuthClient", back_populates="gateway", cascade="all, delete-orphan")

    def _metrics_loaded(self) -> bool:
        """Check if metrics relationship is loaded without triggering lazy load.

        Inspects the instance's loaded-attribute dict rather than touching
        ``self.metrics``, which would issue a query for an unloaded relation.

        Returns:
            bool: True if metrics are loaded, False otherwise.
        """
        return "metrics" in sa_inspect(self).dict

    @property
    def execution_count(self) -> int:
        """Total number of interactions with this agent.
        Returns 0 if metrics are not loaded (avoids lazy loading).

        Returns:
            int: The total count of interactions.
        """
        if not self._metrics_loaded():
            return 0
        return len(self.metrics)

    @property
    def successful_executions(self) -> int:
        """Number of successful interactions.
        Returns 0 if metrics are not loaded (avoids lazy loading).

        Returns:
            int: The count of successful interactions.
        """
        if not self._metrics_loaded():
            return 0
        return sum(1 for m in self.metrics if m.is_success)

    @property
    def failed_executions(self) -> int:
        """Number of failed interactions.
        Returns 0 if metrics are not loaded (avoids lazy loading).

        Returns:
            int: The count of failed interactions.
        """
        if not self._metrics_loaded():
            return 0
        return sum(1 for m in self.metrics if not m.is_success)

    @property
    def failure_rate(self) -> float:
        """Failure rate as a percentage.
        Returns 0.0 if metrics are not loaded (avoids lazy loading).

        Returns:
            float: The failure rate percentage.
        """
        if not self._metrics_loaded():
            return 0.0
        # Empty metrics list: avoid division by zero.
        if not self.metrics:
            return 0.0
        return (self.failed_executions / len(self.metrics)) * 100

    @property
    def avg_response_time(self) -> Optional[float]:
        """Average response time in seconds.
        Returns None if metrics are not loaded (avoids lazy loading).

        Returns:
            Optional[float]: The average response time, or None if no metrics.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return sum(m.response_time for m in self.metrics) / len(self.metrics)

    @property
    def last_execution_time(self) -> Optional[datetime]:
        """Timestamp of the most recent interaction.
        Returns None if metrics are not loaded (avoids lazy loading).

        Returns:
            Optional[datetime]: The timestamp of the last interaction, or None if no metrics.
        """
        if not self._metrics_loaded():
            return None
        if not self.metrics:
            return None
        return max(m.timestamp for m in self.metrics)

    def __repr__(self) -> str:
        """Return a string representation of the A2AAgent instance.

        Returns:
            str: A formatted string containing the agent's ID, name, and type.

        Examples:
            >>> agent = A2AAgent(id='123', name='test-agent', agent_type='custom')
            >>> repr(agent)
            "<A2AAgent(id='123', name='test-agent', agent_type='custom')>"
        """
        return f"<A2AAgent(id='{self.id}', name='{self.name}', agent_type='{self.agent_type}')>"

4869 

4870 

class GrpcService(Base):
    """
    ORM model for gRPC services with reflection-based discovery.

    gRPC services represent external gRPC servers that can be automatically discovered
    via server reflection and exposed as MCP tools. The gateway translates between
    gRPC/Protobuf and MCP/JSON protocols.
    """

    __tablename__ = "grpc_services"

    # Primary key: 32-char hex UUID; name and slug are globally unique here
    # (unlike team-scoped models such as Gateway/A2AAgent).
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    description: Mapped[Optional[str]] = mapped_column(Text)
    target: Mapped[str] = mapped_column(String(767), nullable=False)  # host:port format

    # Configuration
    reflection_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    tls_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    tls_cert_path: Mapped[Optional[str]] = mapped_column(String(767))
    tls_key_path: Mapped[Optional[str]] = mapped_column(String(767))
    grpc_metadata: Mapped[Dict[str, str]] = mapped_column(JSON, default=dict)  # gRPC metadata headers

    # Status (reachable defaults to False until first successful contact)
    enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    reachable: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Discovery results from reflection
    service_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    method_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    discovered_services: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict)  # Service descriptors
    last_reflection: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Tags for categorization
    tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)

    # Comprehensive metadata for audit tracking (who/where/how created)
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # 45 chars fits IPv6
    created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
    federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)

    # Team scoping fields for resource organization
    team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
    owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")

    def __repr__(self) -> str:
        """Return a string representation of the GrpcService instance.

        Returns:
            str: A formatted string containing the service's ID, name, and target.
        """
        return f"<GrpcService(id='{self.id}', name='{self.name}', target='{self.target}')>"

4939 

4940 

class SessionRecord(Base):
    """ORM model for sessions from SSE client.

    Tracks a single SSE client session; ``last_accessed`` refreshes on every
    update, and deleting a session cascades to its messages.
    """

    __tablename__ = "mcp_sessions"

    # Client-supplied session identifier used as the primary key.
    session_id: Mapped[str] = mapped_column(String(255), primary_key=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)  # pylint: disable=not-callable
    last_accessed: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)  # pylint: disable=not-callable
    # Opaque session payload; nullable free-form text.
    data: Mapped[str] = mapped_column(Text, nullable=True)

    # Child messages are deleted with the session (delete-orphan cascade).
    messages: Mapped[List["SessionMessageRecord"]] = relationship("SessionMessageRecord", back_populates="session", cascade="all, delete-orphan")

4952 

4953 

class SessionMessageRecord(Base):
    """ORM model for messages from SSE client.

    One row per message within an SSE session; linked back to
    :class:`SessionRecord` via ``session_id``.
    """

    __tablename__ = "mcp_messages"

    # Auto-incrementing surrogate key.
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    session_id: Mapped[str] = mapped_column(String(255), ForeignKey("mcp_sessions.session_id"))
    # Message payload; nullable free-form text.
    message: Mapped[str] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)  # pylint: disable=not-callable
    last_accessed: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)  # pylint: disable=not-callable

    session: Mapped["SessionRecord"] = relationship("SessionRecord", back_populates="messages")

4966 

4967 

class OAuthToken(Base):
    """ORM model for OAuth access and refresh tokens with user association.

    Access and refresh tokens are stored through the ``EncryptedText`` column
    type, so they are encrypted at rest. Exactly one token row exists per
    (gateway, application user) pair, enforced by a unique constraint.
    """

    __tablename__ = "oauth_tokens"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False)
    user_id: Mapped[str] = mapped_column(String(255), nullable=False)  # OAuth provider's user ID
    app_user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email", ondelete="CASCADE"), nullable=False)  # ContextForge user
    # Encrypted at rest via the EncryptedText type decorator.
    access_token: Mapped[str] = mapped_column(EncryptedText(), nullable=False)
    refresh_token: Mapped[Optional[str]] = mapped_column(EncryptedText(), nullable=True)
    token_type: Mapped[str] = mapped_column(String(50), default="Bearer")
    expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    scopes: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)

    # Relationships
    gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="oauth_tokens")
    app_user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[app_user_email])

    # Unique constraint: one token per user per gateway
    __table_args__ = (UniqueConstraint("gateway_id", "app_user_email", name="uq_oauth_gateway_user"),)

4991 

4992 

class OAuthState(Base):
    """ORM model for OAuth authorization states with TTL for CSRF protection.

    Each row records one authorization attempt's ``state`` parameter (and an
    optional PKCE verifier). States expire at ``expires_at`` and are marked
    ``used`` after redemption so they cannot be replayed.
    """

    __tablename__ = "oauth_states"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
    gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False)
    state: Mapped[str] = mapped_column(String(500), nullable=False, unique=True)  # The state parameter
    code_verifier: Mapped[Optional[str]] = mapped_column(String(128), nullable=True)  # PKCE code verifier (RFC 7636)
    app_user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)  # Requesting user context for token association
    # Hard TTL; rows past this instant must be rejected (and may be purged).
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Single-use flag: set after the state is redeemed to prevent replay.
    used: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)

    # Relationships
    gateway: Mapped["Gateway"] = relationship("Gateway")

    # Index for efficient lookups by (gateway, state)
    __table_args__ = (Index("idx_oauth_state_lookup", "gateway_id", "state"),)

5012 

5013 

class RegisteredOAuthClient(Base):
    """Stores dynamically registered OAuth clients (RFC 7591 client mode).

    This model maintains client credentials obtained through Dynamic Client
    Registration with upstream Authorization Servers. One registration is
    kept per (gateway, issuer) pair, enforced by a unique index.
    """

    __tablename__ = "registered_oauth_clients"

    # Primary key: dashed UUID string (note: unlike models using uuid4().hex).
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False, index=True)

    # Registration details
    issuer: Mapped[str] = mapped_column(String(500), nullable=False)  # AS issuer URL
    client_id: Mapped[str] = mapped_column(String(500), nullable=False)
    client_secret_encrypted: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # Encrypted

    # RFC 7591 fields — list-valued fields are stored as JSON-encoded text.
    redirect_uris: Mapped[str] = mapped_column(Text, nullable=False)  # JSON array
    grant_types: Mapped[str] = mapped_column(Text, nullable=False)  # JSON array
    response_types: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # JSON array
    scope: Mapped[Optional[str]] = mapped_column(String(1000), nullable=True)
    token_endpoint_auth_method: Mapped[str] = mapped_column(String(50), default="client_secret_basic")

    # Registration management (RFC 7591 section 4)
    registration_client_uri: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    registration_access_token_encrypted: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Metadata
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
    expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)

    # Relationships
    gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="registered_oauth_clients")

    # Unique constraint: one registration per gateway+issuer
    __table_args__ = (Index("idx_gateway_issuer", "gateway_id", "issuer", unique=True),)

5052 

5053 

5054class EmailApiToken(Base): 

5055 """Email user API token model for token catalog management. 

5056 

5057 This model provides comprehensive API token management with scoping, 

5058 revocation, and usage tracking for email-based users. 

5059 

5060 Attributes: 

5061 id (str): Unique token identifier 

5062 user_email (str): Owner's email address 

5063 team_id (str): Team the token is associated with (required for team-based access) 

5064 name (str): Human-readable token name 

5065 jti (str): JWT ID for revocation checking 

5066 token_hash (str): Hashed token value for security 

5067 server_id (str): Optional server scope limitation 

5068 resource_scopes (List[str]): Permission scopes like ['tools.read'] 

5069 ip_restrictions (List[str]): IP address/CIDR restrictions 

5070 time_restrictions (dict): Time-based access restrictions 

5071 usage_limits (dict): Rate limiting and usage quotas 

5072 created_at (datetime): Token creation timestamp 

5073 expires_at (datetime): Optional expiry timestamp 

5074 last_used (datetime): Last usage timestamp 

5075 is_active (bool): Active status flag 

5076 description (str): Token description 

5077 tags (List[str]): Organizational tags 

5078 

5079 Examples: 

5080 >>> token = EmailApiToken( 

5081 ... user_email="alice@example.com", 

5082 ... name="Production API Access", 

5083 ... server_id="prod-server-123", 

5084 ... resource_scopes=["tools.read", "resources.read"], 

5085 ... description="Read-only access to production tools" 

5086 ... ) 

5087 >>> token.is_scoped_to_server("prod-server-123") 

5088 True 

5089 >>> token.has_permission("tools.read") 

5090 True 

5091 """ 

5092 

__tablename__ = "email_api_tokens"

# Core identity fields
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
# Token owner; row is removed when the owning user is deleted (CASCADE).
user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email", ondelete="CASCADE"), nullable=False, index=True)
# Optional team scope; detached (SET NULL) rather than deleted with the team.
team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True, index=True)
name: Mapped[str] = mapped_column(String(255), nullable=False)
# JWT ID claim — unique handle used for revocation and usage-log lookups.
jti: Mapped[str] = mapped_column(String(36), unique=True, nullable=False, default=lambda: str(uuid.uuid4()))
# Hashed token value (the column name indicates a hash is stored, not the raw token).
token_hash: Mapped[str] = mapped_column(String(255), nullable=False)

# Scoping fields
server_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("servers.id", ondelete="CASCADE"), nullable=True)
resource_scopes: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)
ip_restrictions: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)
time_restrictions: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True, default=dict)
usage_limits: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True, default=dict)

# Lifecycle fields
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
# NULL expires_at means the token never expires (see is_expired()).
expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
last_used: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

# Metadata fields
description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
tags: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)

# Unique constraint for user+name+team_id combination (per-team scope).
# The composite UniqueConstraint handles non-NULL team_id rows. SQL NULL != NULL
# semantics mean it cannot protect global-scope tokens (team_id IS NULL), so we add
# a partial unique index for that case — matching the pattern used by resources/prompts.
__table_args__ = (
    UniqueConstraint("user_email", "name", "team_id", name="uq_email_api_tokens_user_name_team"),
    Index("uq_email_api_tokens_user_name_global", "user_email", "name", unique=True, postgresql_where=text("team_id IS NULL"), sqlite_where=text("team_id IS NULL")),
    Index("idx_email_api_tokens_user_email", "user_email"),
    Index("idx_email_api_tokens_jti", "jti"),
    Index("idx_email_api_tokens_expires_at", "expires_at"),
    Index("idx_email_api_tokens_is_active", "is_active"),
)

# Relationships
user: Mapped["EmailUser"] = relationship("EmailUser", back_populates="api_tokens")
team: Mapped[Optional["EmailTeam"]] = relationship("EmailTeam", back_populates="api_tokens")
server: Mapped[Optional["Server"]] = relationship("Server", back_populates="scoped_tokens")

5137 

def is_scoped_to_server(self, server_id: str) -> bool:
    """Determine whether this token is restricted to a specific server.

    Tokens without a server scope (server_id unset) are never considered
    scoped to any server.

    Args:
        server_id: Server ID to check against.

    Returns:
        bool: True if token is scoped to the server, False otherwise.
    """
    if not self.server_id:
        return False
    return self.server_id == server_id

5148 

def has_permission(self, permission: str) -> bool:
    """Report whether this token carries a given permission.

    Args:
        permission: Permission string to check for.

    Returns:
        bool: True if token has the permission, False otherwise.
    """
    scopes = self.resource_scopes
    if not scopes:
        # No scopes recorded (None or empty) -> no permissions at all.
        return False
    return permission in scopes

5159 

def is_team_token(self) -> bool:
    """Report whether this token is associated with a team.

    Returns:
        bool: True if token has a team_id set, False otherwise.
    """
    if self.team_id is None:
        return False
    return True

5167 

def get_effective_permissions(self) -> List[str]:
    """Get effective permissions for this token.

    Team-based permission inheritance is not implemented yet, so both
    team tokens and personal tokens currently resolve to their own
    ``resource_scopes``. (The previous implementation branched on
    ``is_team_token()`` but both branches returned the same value, so
    this is behavior-preserving.)

    Returns:
        List[str]: List of effective permissions for this token
        (empty list when resource_scopes is unset).
    """
    # TODO: inherit team permissions for team tokens once the RBAC
    # integration exists; until then fall back to the token's own scopes.
    return self.resource_scopes or []

5182 

def is_expired(self) -> bool:
    """Check if token is expired.

    Tokens without an ``expires_at`` value never expire. Comparison is
    timezone-robust: a naive ``expires_at`` (as some SQLite round-trips
    produce) is assumed to be UTC, mirroring the handling already used by
    ``SSOAuthSession.is_expired`` — previously a naive value would raise
    ``TypeError`` when compared against the aware ``utc_now()``.

    Returns:
        bool: True if token is expired, False otherwise.
    """
    if not self.expires_at:
        return False
    now = utc_now()
    expires = self.expires_at
    if expires.tzinfo is None:
        # expires_at is timezone-naive; assume it's UTC.
        expires = expires.replace(tzinfo=timezone.utc)
    elif now.tzinfo is None:
        # Shouldn't happen with utc_now, but normalize just in case.
        now = now.replace(tzinfo=timezone.utc)
    return now > expires

5192 

def is_valid(self) -> bool:
    """Report whether this token may still be used.

    A token is valid only while it is marked active and has not passed
    its expiry time.

    Returns:
        bool: True if token is valid, False otherwise.
    """
    if not self.is_active:
        return False
    return not self.is_expired()

5200 

5201 

class TokenUsageLog(Base):
    """Token usage logging for analytics and security monitoring.

    This model tracks every API request made with email API tokens
    for security auditing and usage analytics.

    Attributes:
        id (int): Auto-incrementing log ID
        token_jti (str): Token JWT ID reference
        user_email (str): Token owner's email
        timestamp (datetime): Request timestamp
        endpoint (str): API endpoint accessed
        method (str): HTTP method used
        ip_address (str): Client IP address
        user_agent (str): Client user agent
        status_code (int): HTTP response status
        response_time_ms (int): Response time in milliseconds
        blocked (bool): Whether request was blocked
        block_reason (str): Reason for blocking if applicable

    Examples:
        >>> log = TokenUsageLog(
        ...     token_jti="token-uuid-123",
        ...     user_email="alice@example.com",
        ...     endpoint="/tools",
        ...     method="GET",
        ...     ip_address="192.168.1.100",
        ...     status_code=200,
        ...     response_time_ms=45
        ... )
    """

    __tablename__ = "token_usage_logs"

    # Primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Token reference
    # NOTE(review): token_jti/user_email are plain indexed columns, not foreign
    # keys — presumably so log rows survive token/user deletion; confirm.
    token_jti: Mapped[str] = mapped_column(String(36), nullable=False, index=True)
    user_email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)

    # Timestamp
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False, index=True)

    # Request details
    endpoint: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
    ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # IPv6 max length
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Response details
    status_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    response_time_ms: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)

    # Security fields
    blocked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    block_reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Composite indexes for the common "per token / per user over time" queries
    __table_args__ = (
        Index("idx_token_usage_logs_token_jti_timestamp", "token_jti", "timestamp"),
        Index("idx_token_usage_logs_user_email_timestamp", "user_email", "timestamp"),
    )

5265 

5266 

class TokenRevocation(Base):
    """Token revocation blacklist for immediate token invalidation.

    This model maintains a blacklist of revoked JWT tokens to provide
    immediate token invalidation capabilities.

    Attributes:
        jti (str): JWT ID (primary key)
        revoked_at (datetime): Revocation timestamp
        revoked_by (str): Email of user who revoked the token
        reason (str): Optional reason for revocation

    Examples:
        >>> revocation = TokenRevocation(
        ...     jti="token-uuid-123",
        ...     revoked_by="admin@example.com",
        ...     reason="Security compromise"
        ... )
    """

    __tablename__ = "token_revocations"

    # JWT ID as primary key — presence of a row means the token is revoked.
    jti: Mapped[str] = mapped_column(String(36), primary_key=True)

    # Revocation details
    revoked_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    revoked_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
    reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Relationship to the user who performed the revocation
    revoker: Mapped["EmailUser"] = relationship("EmailUser")

5299 

5300 

class SSOProvider(Base):
    """SSO identity provider configuration for OAuth2/OIDC authentication.

    Stores configuration and credentials for external identity providers
    like GitHub, Google, IBM Security Verify, Okta, Microsoft Entra ID,
    and any generic OIDC-compliant provider (Keycloak, Auth0, Authentik, etc.).

    Attributes:
        id (str): Unique provider ID (e.g., 'github', 'google', 'ibm_verify')
        name (str): Human-readable provider name
        display_name (str): Display name for UI
        provider_type (str): Protocol type ('oauth2', 'oidc')
        is_enabled (bool): Whether provider is active
        client_id (str): OAuth client ID
        client_secret_encrypted (str): Encrypted client secret
        authorization_url (str): OAuth authorization endpoint
        token_url (str): OAuth token endpoint
        userinfo_url (str): User info endpoint
        issuer (str): OIDC issuer (optional)
        jwks_uri (str): OIDC JWKS endpoint for token signature verification (optional)
        trusted_domains (List[str]): Auto-approved email domains
        scope (str): OAuth scope string
        auto_create_users (bool): Auto-create users on first login
        team_mapping (dict): Organization/domain to team mapping rules
        created_at (datetime): Provider creation timestamp
        updated_at (datetime): Last configuration update

    Examples:
        >>> provider = SSOProvider(
        ...     id="github",
        ...     name="github",
        ...     display_name="GitHub",
        ...     provider_type="oauth2",
        ...     client_id="gh_client_123",
        ...     authorization_url="https://github.com/login/oauth/authorize",
        ...     token_url="https://github.com/login/oauth/access_token",
        ...     userinfo_url="https://api.github.com/user",
        ...     scope="user:email"
        ... )
    """

    __tablename__ = "sso_providers"

    # Provider identification
    id: Mapped[str] = mapped_column(String(50), primary_key=True)  # github, google, ibm_verify, okta, keycloak, entra, or any custom ID
    name: Mapped[str] = mapped_column(String(100), nullable=False, unique=True)
    display_name: Mapped[str] = mapped_column(String(100), nullable=False)
    provider_type: Mapped[str] = mapped_column(String(20), nullable=False)  # oauth2, oidc
    is_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # OAuth2/OIDC Configuration
    client_id: Mapped[str] = mapped_column(String(255), nullable=False)
    client_secret_encrypted: Mapped[str] = mapped_column(Text, nullable=False)  # Encrypted storage — never persisted in plaintext
    authorization_url: Mapped[str] = mapped_column(String(500), nullable=False)
    token_url: Mapped[str] = mapped_column(String(500), nullable=False)
    userinfo_url: Mapped[str] = mapped_column(String(500), nullable=False)
    issuer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)  # For OIDC
    jwks_uri: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)  # OIDC JWKS endpoint for token signature verification

    # Provider Settings
    trusted_domains: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
    scope: Mapped[str] = mapped_column(String(200), default="openid profile email", nullable=False)
    auto_create_users: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    team_mapping: Mapped[dict] = mapped_column(JSON, default=dict, nullable=False)

    # Provider-specific metadata (e.g., role mappings, claim configurations)
    provider_metadata: Mapped[dict] = mapped_column(JSON, default=dict, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)

    def __repr__(self):
        """String representation of SSO provider.

        Returns:
            String representation of the SSO provider instance
        """
        return f"<SSOProvider(id='{self.id}', name='{self.name}', enabled={self.is_enabled})>"

5380 

5381 

class SSOAuthSession(Base):
    """Tracks SSO authentication sessions and state.

    Maintains OAuth state parameters and callback information during
    the SSO authentication flow for security and session management.

    Attributes:
        id (str): Unique session ID (UUID)
        provider_id (str): Reference to SSO provider
        state (str): OAuth state parameter for CSRF protection
        code_verifier (str): PKCE code verifier (for OAuth 2.1)
        nonce (str): OIDC nonce parameter
        redirect_uri (str): OAuth callback URI
        expires_at (datetime): Session expiration time
        user_email (str): User email after successful auth (optional)
        created_at (datetime): Session creation timestamp

    Examples:
        >>> session = SSOAuthSession(
        ...     provider_id="github",
        ...     state="csrf-state-token",
        ...     redirect_uri="https://gateway.example.com/auth/sso-callback/github"
        ... )
    """

    __tablename__ = "sso_auth_sessions"

    # Session identification
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    provider_id: Mapped[str] = mapped_column(String(50), ForeignKey("sso_providers.id"), nullable=False)

    # OAuth/OIDC parameters
    state: Mapped[str] = mapped_column(String(128), nullable=False, unique=True)  # CSRF protection
    code_verifier: Mapped[Optional[str]] = mapped_column(String(128), nullable=True)  # PKCE
    nonce: Mapped[Optional[str]] = mapped_column(String(128), nullable=True)  # OIDC
    redirect_uri: Mapped[str] = mapped_column(String(500), nullable=False)

    # Session lifecycle
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: utc_now() + timedelta(minutes=10), nullable=False)  # 10-minute expiration
    user_email: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Relationships
    provider: Mapped["SSOProvider"] = relationship("SSOProvider")
    user: Mapped[Optional["EmailUser"]] = relationship("EmailUser")

    @property
    def is_expired(self) -> bool:
        """Check if SSO auth session has expired.

        Returns:
            True if the session has expired, False otherwise
        """
        now = utc_now()
        expires = self.expires_at

        # Handle timezone mismatch by converting naive datetime to UTC if needed.
        # Comparing naive and aware datetimes would raise TypeError otherwise.
        if expires.tzinfo is None:
            # expires_at is timezone-naive, assume it's UTC
            expires = expires.replace(tzinfo=timezone.utc)
        elif now.tzinfo is None:
            # now is timezone-naive (shouldn't happen with utc_now, but just in case)
            now = now.replace(tzinfo=timezone.utc)

        return now > expires

    def __repr__(self):
        """String representation of SSO auth session.

        Returns:
            str: String representation of the session object
        """
        return f"<SSOAuthSession(id='{self.id}', provider='{self.provider_id}', expired={self.is_expired})>"

5455 

5456 

5457# Event listeners for validation 

def validate_tool_schema(mapper, connection, target):
    """Validate a tool's input schema before it is inserted or updated.

    Args:
        mapper: The mapper being used for the operation.
        connection: The database connection.
        target: The target object being validated.

    Raises:
        ValueError: If the tool input schema is invalid.
    """
    # mapper/connection are part of the SQLAlchemy listener signature; unused for now.
    _ = mapper
    _ = connection

    supported_drafts = {
        "Draft4Validator",
        "Draft6Validator",
        "Draft7Validator",
        "Draft201909Validator",
        "Draft202012Validator",
    }

    if not hasattr(target, "input_schema"):
        return
    schema = target.input_schema
    if schema is None:
        return

    try:
        # If $schema is missing, default to Draft 2020-12 as per MCP spec.
        validator_cls = jsonschema.Draft202012Validator if schema.get("$schema") is None else jsonschema.validators.validator_for(schema)

        if validator_cls.__name__ not in supported_drafts:
            logger.warning(f"Unsupported JSON Schema draft: {validator_cls.__name__}")

        validator_cls.check_schema(schema)
    except jsonschema.exceptions.SchemaError as e:
        # In non-strict mode a bad schema is logged but tolerated.
        logger.warning(f"Invalid tool input schema: {str(e)}")
        if settings.json_schema_validation_strict:
            raise ValueError(f"Invalid tool input schema: {str(e)}") from e

5503 

5504 

def validate_tool_name(mapper, connection, target):
    """Validate a tool's name before insert/update against the required pattern.

    Args:
        mapper: The mapper being used for the operation.
        connection: The database connection.
        target: The target object being validated.

    Raises:
        ValueError: If the tool name contains invalid characters.
    """
    # mapper/connection are part of the SQLAlchemy listener signature; unused for now.
    _ = mapper
    _ = connection
    if not hasattr(target, "name"):
        return
    try:
        SecurityValidator.validate_tool_name(target.name)
    except ValueError as e:
        raise ValueError(f"Invalid tool name: {str(e)}") from e

5525 

5526 

def validate_prompt_schema(mapper, connection, target):
    """Validate a prompt's argument schema before it is inserted or updated.

    Args:
        mapper: The mapper being used for the operation.
        connection: The database connection.
        target: The target object being validated.

    Raises:
        ValueError: If the prompt argument schema is invalid.
    """
    # mapper/connection are part of the SQLAlchemy listener signature; unused for now.
    _ = mapper
    _ = connection

    supported_drafts = {
        "Draft4Validator",
        "Draft6Validator",
        "Draft7Validator",
        "Draft201909Validator",
        "Draft202012Validator",
    }

    if not hasattr(target, "argument_schema"):
        return
    schema = target.argument_schema
    if schema is None:
        return

    try:
        # If $schema is missing, default to Draft 2020-12 as per MCP spec.
        validator_cls = jsonschema.Draft202012Validator if schema.get("$schema") is None else jsonschema.validators.validator_for(schema)

        if validator_cls.__name__ not in supported_drafts:
            logger.warning(f"Unsupported JSON Schema draft: {validator_cls.__name__}")

        validator_cls.check_schema(schema)
    except jsonschema.exceptions.SchemaError as e:
        # In non-strict mode a bad schema is logged but tolerated.
        logger.warning(f"Invalid prompt argument schema: {str(e)}")
        if settings.json_schema_validation_strict:
            raise ValueError(f"Invalid prompt argument schema: {str(e)}") from e

5571 

5572 

# Register validation listeners

# Tools: validate both the JSON input schema and the tool name on every write.
listen(Tool, "before_insert", validate_tool_schema)
listen(Tool, "before_update", validate_tool_schema)
listen(Tool, "before_insert", validate_tool_name)
listen(Tool, "before_update", validate_tool_name)
# Prompts: validate the argument schema on every write.
listen(Prompt, "before_insert", validate_prompt_schema)
listen(Prompt, "before_update", validate_prompt_schema)

5581 

5582 

def get_db() -> Generator[Session, Any, None]:
    """Yield a database session (FastAPI-style dependency).

    Commits the transaction when the caller finishes without error, so
    read-only operations do not trigger implicit rollbacks. On exception
    the transaction is rolled back explicitly before re-raising; if the
    rollback itself fails the connection is invalidated best-effort.

    Yields:
        Session: A SQLAlchemy database session.

    Raises:
        Exception: Re-raises any exception after rolling back the transaction.

    Examples:
        >>> from mcpgateway.db import get_db
        >>> gen = get_db()
        >>> session = next(gen)
        >>> hasattr(session, 'query') and hasattr(session, 'commit')
        True
        >>> gen.close()
    """
    session = SessionLocal()
    try:
        yield session
        session.commit()
    except Exception:
        try:
            session.rollback()
        except Exception:
            # Rollback can fail on a dead connection; discard it instead.
            try:
                session.invalidate()
            except Exception:
                pass  # nosec B110 - Best effort cleanup on connection failure
        raise
    finally:
        session.close()

5621 

5622 

def get_for_update(
    db: Session,
    model,
    entity_id=None,
    where: Optional[Any] = None,
    skip_locked: bool = False,
    nowait: bool = False,
    lock_timeout_ms: Optional[int] = None,
    options: Optional[List] = None,
):
    """Get entity with row lock for update operations.

    Args:
        db: SQLAlchemy Session
        model: ORM model class
        entity_id: Primary key value (optional if `where` provided)
        where: Optional SQLAlchemy WHERE clause to locate rows for conflict detection
        skip_locked: If False (default), wait for locked rows. If True, skip locked
            rows (returns None if row is locked). Use False for conflict checks and
            entity updates to ensure consistency. Use True only for job-queue patterns.
        nowait: If True, fail immediately if row is locked (raises OperationalError).
            Use this for operations that should not block. Default False.
        lock_timeout_ms: Optional lock timeout in milliseconds for PostgreSQL.
            If set, the query will wait at most this long for locks before failing.
            Only applies to PostgreSQL. Default None (use database default).
        options: Optional list of loader options (e.g., selectinload(...))

    Returns:
        The model instance or None

    Raises:
        sqlalchemy.exc.OperationalError: If nowait=True and row is locked, or if
            lock_timeout_ms is exceeded.
        ValueError: If lock_timeout_ms cannot be interpreted as an integer.

    Notes:
        - On PostgreSQL this acquires a FOR UPDATE row lock.
        - On SQLite (or other backends that don't support FOR UPDATE) it
          falls back to a regular select; when ``options`` is None it uses
          ``db.get`` for efficiency, otherwise it executes a select with
          the provided loader options.
    """
    try:
        dialect = db.bind.dialect.name
    except Exception:
        # Session may be unbound (e.g. in tests); treat as a non-PostgreSQL backend.
        dialect = ""

    # Build base select statement. Prefer `where` when provided, otherwise use primary key `entity_id`.
    if where is not None:
        stmt = select(model).where(where)
    elif entity_id is not None:
        stmt = select(model).where(model.id == entity_id)
    else:
        # Nothing to look up.
        return None

    if options:
        stmt = stmt.options(*options)

    if dialect != "postgresql":
        # SQLite and others: no FOR UPDATE support.
        # Use db.get optimization only when querying by primary key without loader options.
        if not options and where is None and entity_id is not None:
            return db.get(model, entity_id)
        return db.execute(stmt).scalar_one_or_none()

    # PostgreSQL: set lock timeout if specified. int() hardens the f-string
    # interpolation against SQL injection should a non-integer ever be passed.
    if lock_timeout_ms is not None:
        db.execute(text(f"SET LOCAL lock_timeout = '{int(lock_timeout_ms)}ms'"))

    # PostgreSQL: apply FOR UPDATE with optional nowait.
    stmt = stmt.with_for_update(skip_locked=skip_locked, nowait=nowait)
    return db.execute(stmt).scalar_one_or_none()

5695 

5696 

# Reuse the get_db generator as a context manager so code outside the
# dependency-injection path can open a fresh session with `with fresh_db_session() as db:`.
fresh_db_session = contextmanager(get_db)  # type: ignore

5699 

5700 

def patch_string_columns_for_mariadb(base, engine_) -> None:
    """Give length-less String columns an explicit VARCHAR(255) for MariaDB.

    MariaDB requires VARCHAR to carry an explicit length, so every
    ``String()`` column declared without one is rewritten in place to
    ``VARCHAR(255)``. On any other dialect this is a no-op.

    Args:
        base (DeclarativeBase): SQLAlchemy Declarative Base containing metadata.
        engine_ (Engine): SQLAlchemy engine, used to detect MariaDB dialect.
    """
    if engine_.dialect.name != "mariadb":
        return

    all_columns = (column for table in base.metadata.tables.values() for column in table.columns)
    for column in all_columns:
        if isinstance(column.type, String) and column.type.length is None:
            # Replace with VARCHAR(255)
            column.type = VARCHAR(255)

5718 

5719 

def extract_json_field(column, json_path: str, dialect_name: Optional[str] = None):
    """Extract a JSON field in a database-agnostic way.

    This function provides cross-database compatibility for JSON field extraction,
    supporting both SQLite and PostgreSQL backends.

    Args:
        column: SQLAlchemy column containing JSON data
        json_path: JSON path in SQLite format (e.g., '$.\"tool.name\"')
        dialect_name: Optional database dialect name to override global backend.
            If not provided, uses the global backend from DATABASE_URL.
            Use this when querying a different database than the default.

    Returns:
        SQLAlchemy expression for extracting the JSON field as text

    Note:
        - For SQLite: Uses json_extract(column, '$.\"key\"')
        - For PostgreSQL: Uses column ->> 'key' operator
        - Backend-specific behavior is tested via unit tests in test_db.py
    """
    effective_backend = backend if dialect_name is None else dialect_name

    if effective_backend != "postgresql":
        # SQLite and other databases: json_extract with the original '$."key"' path.
        return func.json_extract(column, json_path)

    # PostgreSQL extracts text with the ->> operator; strip the SQLite-style
    # '$."..."' wrapping down to the bare key (dots inside keys are preserved).
    path_key = json_path.replace('$."', "").replace('"', "")
    return column.op("->>")(path_key)

5753 

5754 

5755# Create all tables 

def init_db():
    """Initialize database tables.

    Applies the MariaDB VARCHAR-length compatibility patch and then creates
    every table declared on ``Base.metadata`` (existing tables are left as-is).

    Raises:
        Exception: If database initialization fails. The underlying
            SQLAlchemyError is chained as ``__cause__`` so the root cause
            is preserved in tracebacks (previously it was dropped).
    """
    try:
        # Apply MariaDB compatibility fix before any DDL is emitted.
        patch_string_columns_for_mariadb(Base, engine)

        # Base.metadata.drop_all(bind=engine)
        Base.metadata.create_all(bind=engine)
    except SQLAlchemyError as e:
        raise Exception(f"Failed to initialize database: {str(e)}") from e

5771 

5772 

5773# ============================================================================ 

5774# Structured Logging Models 

5775# ============================================================================ 

5776 

5777 

class StructuredLogEntry(Base):
    """Structured log entry for comprehensive logging and analysis.

    Stores all log entries with correlation IDs, performance metrics,
    and security context for advanced search and analytics.
    """

    __tablename__ = "structured_log_entries"

    # Primary key (hex UUID, no dashes)
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Timestamps
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)

    # Correlation and request tracking
    correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
    request_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)

    # Log metadata
    level: Mapped[str] = mapped_column(String(20), nullable=False, index=True)  # DEBUG, INFO, WARNING, ERROR, CRITICAL
    component: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
    message: Mapped[str] = mapped_column(Text, nullable=False)
    logger: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # User and request context
    user_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    client_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)  # IPv6 max length
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    request_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    request_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)

    # Performance data
    duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    operation_type: Mapped[Optional[str]] = mapped_column(String(100), index=True, nullable=True)

    # Security context
    is_security_event: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)
    security_severity: Mapped[Optional[str]] = mapped_column(String(20), index=True, nullable=True)  # LOW, MEDIUM, HIGH, CRITICAL
    threat_indicators: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Structured context data
    context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    error_details: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    performance_metrics: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # System information
    hostname: Mapped[str] = mapped_column(String(255), nullable=False)
    process_id: Mapped[int] = mapped_column(Integer, nullable=False)
    thread_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    version: Mapped[str] = mapped_column(String(50), nullable=False)
    environment: Mapped[str] = mapped_column(String(50), nullable=False, default="production")

    # OpenTelemetry trace context
    trace_id: Mapped[Optional[str]] = mapped_column(String(32), index=True, nullable=True)
    span_id: Mapped[Optional[str]] = mapped_column(String(16), nullable=True)

    # Composite indexes covering the common time-bounded filter queries
    __table_args__ = (
        Index("idx_log_correlation_time", "correlation_id", "timestamp"),
        Index("idx_log_user_time", "user_id", "timestamp"),
        Index("idx_log_level_time", "level", "timestamp"),
        Index("idx_log_component_time", "component", "timestamp"),
        Index("idx_log_security", "is_security_event", "security_severity", "timestamp"),
        Index("idx_log_operation", "operation_type", "timestamp"),
        Index("idx_log_trace", "trace_id", "timestamp"),
    )

5846 

5847 

class PerformanceMetric(Base):
    """Aggregated performance metrics from log analysis.

    Stores time-windowed aggregations of operation performance
    for analytics and trend analysis.
    """

    __tablename__ = "performance_metrics"

    # Primary key (hex UUID, no dashes)
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Timestamp (when the aggregate row was recorded)
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)

    # Metric identification
    operation_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
    component: Mapped[str] = mapped_column(String(100), nullable=False, index=True)

    # Aggregated metrics
    request_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    error_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    error_rate: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)

    # Duration metrics (in milliseconds)
    avg_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
    min_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
    max_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
    p50_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
    p95_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
    p99_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)

    # Time window covered by this aggregate
    window_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
    window_end: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    window_duration_seconds: Mapped[int] = mapped_column(Integer, nullable=False)

    # Additional context
    metric_metadata: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Composite indexes for per-operation / per-component time-series queries
    __table_args__ = (
        Index("idx_perf_operation_time", "operation_type", "window_start"),
        Index("idx_perf_component_time", "component", "window_start"),
        Index("idx_perf_window", "window_start", "window_end"),
    )

5893 

5894 

class SecurityEvent(Base):
    """Security event logging for threat detection and audit trails.

    Specialized table for security events with enhanced context
    and threat analysis capabilities.

    Attributes:
        id: UUID hex primary key.
        timestamp: When the event occurred (UTC, defaults to utc_now).
        detected_at: When the event was detected (UTC, defaults to utc_now).
        correlation_id: Optional request-correlation identifier.
        log_entry_id: Optional FK to the structured log entry behind this event.
        event_type: Event classification (auth_failure, suspicious_activity, rate_limit, ...).
        severity: LOW, MEDIUM, HIGH or CRITICAL.
        category: Broad category (authentication, authorization, data_access, ...).
        user_id: Acting user identifier, when known.
        user_email: Acting user email, when known.
        client_ip: Source IP address (required; String(45) fits IPv6).
        user_agent: Raw client user-agent string.
        description: Human-readable event description.
        action_taken: Response action (blocked, allowed, flagged, ...).
        threat_score: Normalized threat score in the 0.0-1.0 range.
        threat_indicators: JSON details supporting the threat score.
        failed_attempts_count: Count of related failed attempts.
        resolved: Whether the event has been resolved.
        resolved_at: When it was resolved.
        resolved_by: Who resolved it.
        resolution_notes: Free-form resolution notes.
        alert_sent: Whether an alert was dispatched.
        alert_sent_at: When the alert was dispatched.
        alert_recipients: JSON list of alert recipients.
        context: Free-form JSON context.
    """

    __tablename__ = "security_events"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Timestamps
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)
    detected_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)

    # Correlation tracking
    correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
    log_entry_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("structured_log_entries.id"), index=True, nullable=True)

    # Event classification
    event_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True)  # auth_failure, suspicious_activity, rate_limit, etc.
    severity: Mapped[str] = mapped_column(String(20), nullable=False, index=True)  # LOW, MEDIUM, HIGH, CRITICAL
    category: Mapped[str] = mapped_column(String(50), nullable=False, index=True)  # authentication, authorization, data_access, etc.

    # User and request context
    user_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    client_ip: Mapped[str] = mapped_column(String(45), nullable=False, index=True)
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Event details
    description: Mapped[str] = mapped_column(Text, nullable=False)
    action_taken: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)  # blocked, allowed, flagged, etc.

    # Threat analysis
    threat_score: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)  # 0.0-1.0
    threat_indicators: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False, default=dict)
    failed_attempts_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)

    # Resolution tracking
    resolved: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)
    resolved_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    resolved_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    resolution_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Alert tracking
    alert_sent: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    alert_sent_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    alert_recipients: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)

    # Additional context
    context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Composite indexes for dashboard/triage queries (by type, severity, user, IP, open items)
    __table_args__ = (
        Index("idx_security_type_time", "event_type", "timestamp"),
        Index("idx_security_severity_time", "severity", "timestamp"),
        Index("idx_security_user_time", "user_id", "timestamp"),
        Index("idx_security_ip_time", "client_ip", "timestamp"),
        Index("idx_security_unresolved", "resolved", "severity", "timestamp"),
    )

5956 

5957 

5958# --------------------------------------------------------------------------- 

5959# LLM Provider Configuration Models 

5960# --------------------------------------------------------------------------- 

5961 

5962 

class LLMProviderType:
    """String constants identifying the supported LLM provider types."""

    OPENAI = "openai"
    AZURE_OPENAI = "azure_openai"
    ANTHROPIC = "anthropic"
    BEDROCK = "bedrock"
    GOOGLE_VERTEX = "google_vertex"
    WATSONX = "watsonx"
    OLLAMA = "ollama"
    OPENAI_COMPATIBLE = "openai_compatible"
    COHERE = "cohere"
    MISTRAL = "mistral"
    GROQ = "groq"
    TOGETHER = "together"

    @classmethod
    def get_all_types(cls) -> List[str]:
        """Get list of all supported provider types.

        Returns:
            List of provider type strings.
        """
        ordered = (
            cls.OPENAI,
            cls.AZURE_OPENAI,
            cls.ANTHROPIC,
            cls.BEDROCK,
            cls.GOOGLE_VERTEX,
            cls.WATSONX,
            cls.OLLAMA,
            cls.OPENAI_COMPATIBLE,
            cls.COHERE,
            cls.MISTRAL,
            cls.GROQ,
            cls.TOGETHER,
        )
        return list(ordered)

    @classmethod
    def get_provider_defaults(cls) -> Dict[str, Dict[str, Any]]:
        """Get default configuration for each provider type.

        Returns:
            Dictionary mapping provider type to default config. Each config
            carries api_base, default_model, supports_model_list,
            requires_api_key, description, and — for providers whose API can
            enumerate models — a models_endpoint of "/models".
        """

        def _entry(api_base: str, default_model: str, *, listable: bool, requires_api_key: bool, description: str) -> Dict[str, Any]:
            """Build one provider-default config dict in the canonical key order."""
            cfg: Dict[str, Any] = {
                "api_base": api_base,
                "default_model": default_model,
                "supports_model_list": listable,
            }
            if listable:
                # All listable providers expose the OpenAI-style /models endpoint.
                cfg["models_endpoint"] = "/models"
            cfg["requires_api_key"] = requires_api_key
            cfg["description"] = description
            return cfg

        return {
            cls.OPENAI: _entry(
                "https://api.openai.com/v1",
                "gpt-4o",
                listable=True,
                requires_api_key=True,
                description="OpenAI GPT models (GPT-4, GPT-4o, etc.)",
            ),
            cls.AZURE_OPENAI: _entry(
                "https://{resource}.openai.azure.com/openai/deployments/{deployment}",
                "",
                listable=False,
                requires_api_key=True,
                description="Azure OpenAI Service",
            ),
            cls.ANTHROPIC: _entry(
                "https://api.anthropic.com",
                "claude-sonnet-4-20250514",
                listable=False,
                requires_api_key=True,
                description="Anthropic Claude models",
            ),
            cls.OLLAMA: _entry(
                "http://localhost:11434/v1",
                "llama3.2",
                listable=True,
                requires_api_key=False,
                description="Local Ollama server (OpenAI-compatible)",
            ),
            cls.OPENAI_COMPATIBLE: _entry(
                "http://localhost:8080/v1",
                "",
                listable=True,
                requires_api_key=False,
                description="Any OpenAI-compatible API server",
            ),
            cls.COHERE: _entry(
                "https://api.cohere.ai/v1",
                "command-r-plus",
                listable=True,
                requires_api_key=True,
                description="Cohere Command models",
            ),
            cls.MISTRAL: _entry(
                "https://api.mistral.ai/v1",
                "mistral-large-latest",
                listable=True,
                requires_api_key=True,
                description="Mistral AI models",
            ),
            cls.GROQ: _entry(
                "https://api.groq.com/openai/v1",
                "llama-3.3-70b-versatile",
                listable=True,
                requires_api_key=True,
                description="Groq high-speed inference",
            ),
            cls.TOGETHER: _entry(
                "https://api.together.xyz/v1",
                "meta-llama/Llama-3.3-70B-Instruct-Turbo",
                listable=True,
                requires_api_key=True,
                description="Together AI inference",
            ),
            cls.BEDROCK: _entry(
                "",
                "anthropic.claude-3-sonnet-20240229-v1:0",
                listable=False,
                requires_api_key=False,
                description="AWS Bedrock (uses IAM credentials)",
            ),
            cls.GOOGLE_VERTEX: _entry(
                "",
                "gemini-1.5-pro",
                listable=False,
                requires_api_key=False,
                description="Google Vertex AI (uses service account)",
            ),
            cls.WATSONX: _entry(
                "https://us-south.ml.cloud.ibm.com",
                "ibm/granite-13b-chat-v2",
                listable=False,
                requires_api_key=True,
                description="IBM watsonx.ai",
            ),
        }

6101 

6102 

class LLMProvider(Base):
    """ORM model for LLM provider configurations.

    Stores credentials and settings for external LLM providers
    used by the internal LLM Chat feature.

    Attributes:
        id: Unique identifier (UUID)
        name: Display name (unique)
        slug: URL-safe identifier (unique; derived from name by the
            before_insert listener when inserted through the ORM)
        description: Optional free-form description
        provider_type: Provider type (openai, anthropic, etc.)
        api_key: Encrypted API key
        api_base: Base URL for API requests
        api_version: API version (for Azure OpenAI)
        config: Provider-specific settings (JSON)
        default_model: Default model ID
        default_temperature: Default temperature (0.0-2.0)
        default_max_tokens: Default max tokens
        enabled: Whether provider is enabled
        health_status: Current health status (healthy/unhealthy/unknown)
        last_health_check: Last health check timestamp
        plugin_ids: Attached plugin IDs (JSON)
        created_at: Creation timestamp (UTC)
        updated_at: Last-modified timestamp (auto-updated on flush)
        created_by: Identifier of the creator, when recorded
        modified_by: Identifier of the last modifier, when recorded
        models: Child LLMModel rows (deleted with the provider)
    """

    __tablename__ = "llm_providers"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Basic info
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Provider type
    provider_type: Mapped[str] = mapped_column(String(50), nullable=False)

    # Credentials (encrypted)
    api_key: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    api_base: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
    api_version: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)

    # Provider-specific configuration
    config: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict, nullable=False)

    # Default settings
    default_model: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    default_temperature: Mapped[float] = mapped_column(Float, default=0.7, nullable=False)
    default_max_tokens: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)

    # Status
    enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    health_status: Mapped[str] = mapped_column(String(20), default="unknown", nullable=False)
    last_health_check: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)

    # Plugin integration
    plugin_ids: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)

    # Audit fields
    created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Relationships: provider owns its models; deleting a provider removes them
    models: Mapped[List["LLMModel"]] = relationship("LLMModel", back_populates="provider", cascade="all, delete-orphan")

    __table_args__ = (
        UniqueConstraint("name", name="uq_llm_providers_name"),
        UniqueConstraint("slug", name="uq_llm_providers_slug"),
        Index("idx_llm_providers_enabled", "enabled"),
        Index("idx_llm_providers_type", "provider_type"),
        Index("idx_llm_providers_health", "health_status"),
    )

    def __repr__(self) -> str:
        """Return string representation.

        Returns:
            String representation of the provider.
        """
        return f"<LLMProvider(id='{self.id}', name='{self.name}', type='{self.provider_type}')>"

6187 

6188 

class LLMModel(Base):
    """ORM model for LLM model definitions.

    Stores model metadata and capabilities for each provider.

    Attributes:
        id: Unique identifier (UUID)
        provider_id: Foreign key to llm_providers (CASCADE on delete)
        model_id: Provider's model ID (e.g., gpt-4o)
        model_name: Display name
        model_alias: Optional routing alias
        description: Optional free-form description
        supports_chat: Whether model supports chat completions
        supports_streaming: Whether model supports streaming
        supports_function_calling: Whether model supports function/tool calling
        supports_vision: Whether model supports vision/images
        context_window: Maximum context tokens
        max_output_tokens: Maximum output tokens
        enabled: Whether model is enabled
        deprecated: Whether model is deprecated
        created_at: Creation timestamp (UTC)
        updated_at: Last-modified timestamp (auto-updated on flush)
        provider: Owning LLMProvider row
    """

    __tablename__ = "llm_models"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Provider relationship
    provider_id: Mapped[str] = mapped_column(String(36), ForeignKey("llm_providers.id", ondelete="CASCADE"), nullable=False)

    # Model identification
    model_id: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_alias: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Capabilities
    supports_chat: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    supports_streaming: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    supports_function_calling: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    supports_vision: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Limits
    context_window: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    max_output_tokens: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)

    # Status
    enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    deprecated: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Timestamps
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)

    # Relationship back to the owning provider
    provider: Mapped["LLMProvider"] = relationship("LLMProvider", back_populates="models")

    # A model_id must be unique per provider
    __table_args__ = (
        UniqueConstraint("provider_id", "model_id", name="uq_llm_models_provider_model"),
        Index("idx_llm_models_provider", "provider_id"),
        Index("idx_llm_models_enabled", "enabled"),
        Index("idx_llm_models_deprecated", "deprecated"),
    )

    def __repr__(self) -> str:
        """Return string representation.

        Returns:
            String representation of the model.
        """
        return f"<LLMModel(id='{self.id}', model_id='{self.model_id}', provider_id='{self.provider_id}')>"

6259 

6260 

class AuditTrail(Base):
    """Comprehensive audit trail for data access and changes.

    Tracks all significant system changes and data access for
    compliance and security auditing.

    Attributes:
        id: UUID hex primary key.
        timestamp: When the audited action happened (UTC, defaults to utc_now).
        correlation_id: Optional request-correlation identifier.
        request_id: Optional request identifier.
        action: Action performed (create, read, update, delete, execute, ...).
        resource_type: Kind of resource acted on (tool, resource, prompt, user, ...).
        resource_id: Identifier of the affected resource, when known.
        resource_name: Display name of the affected resource, when known.
        user_id: Acting user (required).
        user_email: Acting user email, when known.
        team_id: Team context, when known.
        client_ip: Source IP address.
        user_agent: Raw client user-agent string.
        request_path: HTTP path of the originating request.
        request_method: HTTP method of the originating request.
        old_values: JSON snapshot before the change.
        new_values: JSON snapshot after the change.
        changes: JSON diff of the change.
        data_classification: public, internal, confidential or restricted.
        requires_review: Whether a human review is required.
        success: Whether the action succeeded.
        error_message: Failure details when success is False.
        context: Free-form JSON context.
    """

    __tablename__ = "audit_trails"

    # Primary key
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)

    # Timestamps
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)

    # Correlation tracking
    correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
    request_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)

    # Action details
    action: Mapped[str] = mapped_column(String(100), nullable=False, index=True)  # create, read, update, delete, execute, etc.
    resource_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True)  # tool, resource, prompt, user, etc.
    resource_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    resource_name: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)

    # User context
    user_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
    team_id: Mapped[Optional[str]] = mapped_column(String(36), index=True, nullable=True)

    # Request context
    client_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
    user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    request_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    request_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)

    # Change tracking
    old_values: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    new_values: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
    changes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Data classification
    data_classification: Mapped[Optional[str]] = mapped_column(String(50), index=True, nullable=True)  # public, internal, confidential, restricted
    requires_review: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)

    # Result
    success: Mapped[bool] = mapped_column(Boolean, nullable=False, index=True)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Additional context
    context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)

    # Composite indexes for audit queries (by action, resource, user, classification, review queue)
    __table_args__ = (
        Index("idx_audit_action_time", "action", "timestamp"),
        Index("idx_audit_resource_time", "resource_type", "resource_id", "timestamp"),
        Index("idx_audit_user_time", "user_id", "timestamp"),
        Index("idx_audit_classification", "data_classification", "timestamp"),
        Index("idx_audit_review", "requires_review", "timestamp"),
    )

6320 

6321 

if __name__ == "__main__":
    # Running this module directly bootstraps the database schema.
    # Wait for database to be ready before initializing, retrying per settings.
    wait_for_db_ready(max_tries=int(settings.db_max_retries), interval=int(settings.db_retry_interval_ms) / 1000, sync=True)  # Converting ms to s

    init_db()

6327 

6328 

@event.listens_for(Gateway, "before_insert")
def set_gateway_slug(_mapper, _conn, target) -> None:
    """Derive and assign the URL-safe slug for a Gateway before insert.

    NOTE(review): unlike the EmailTeam listener, this unconditionally
    overwrites any slug already set on the instance.

    Args:
        _mapper: Mapper (unused)
        _conn: Connection (unused)
        target: Target Gateway instance
    """

    target.slug = slugify(target.name)

6340 

6341 

@event.listens_for(A2AAgent, "before_insert")
def set_a2a_agent_slug(_mapper, _conn, target) -> None:
    """Derive and assign the URL-safe slug for an A2AAgent before insert.

    Overwrites any slug already set on the instance.

    Args:
        _mapper: Mapper (unused)
        _conn: Connection (unused)
        target: Target A2AAgent instance
    """
    target.slug = slugify(target.name)

6352 

6353 

@event.listens_for(GrpcService, "before_insert")
def set_grpc_service_slug(_mapper, _conn, target) -> None:
    """Derive and assign the URL-safe slug for a GrpcService before insert.

    Overwrites any slug already set on the instance.

    Args:
        _mapper: Mapper (unused)
        _conn: Connection (unused)
        target: Target GrpcService instance
    """
    target.slug = slugify(target.name)

6364 

6365 

@event.listens_for(LLMProvider, "before_insert")
def set_llm_provider_slug(_mapper, _conn, target) -> None:
    """Derive and assign the URL-safe slug for an LLMProvider before insert.

    Overwrites any slug already set on the instance.

    Args:
        _mapper: Mapper (unused)
        _conn: Connection (unused)
        target: Target LLMProvider instance
    """
    target.slug = slugify(target.name)

6376 

6377 

@event.listens_for(EmailTeam, "before_insert")
def set_email_team_slug(_mapper, _conn, target) -> None:
    """Assign a slug for an EmailTeam before insert, unless one is already set.

    Args:
        _mapper: Mapper (unused)
        _conn: Connection (unused)
        target: Target EmailTeam instance
    """
    # Preserve a caller-provided slug; only derive from name when missing.
    if not target.slug:
        target.slug = slugify(target.name)

6389 

6390 

@event.listens_for(Tool, "before_insert")
@event.listens_for(Tool, "before_update")
def set_custom_name_and_slug(mapper, connection, target):  # pylint: disable=unused-argument
    """Populate custom_name, custom_name_slug, display_name, and name for a Tool.

    Runs before every insert and update of a Tool:

    - custom_name falls back to original_name when unset.
    - display_name falls back to custom_name when unset.
    - custom_name_slug is always recomputed from custom_name.
    - name becomes "<gateway_slug><separator><custom_name_slug>" when a gateway
      can be resolved, otherwise just custom_name_slug.

    The gateway slug is resolved from, in order: the loaded ``gateway``
    relationship, a ``gateway_name_cache`` attribute set by the caller, or a
    direct SQL lookup by ``gateway_id``. Lookup failures are deliberately
    swallowed so the tool is saved without a prefix rather than failing the
    flush.

    Args:
        mapper: SQLAlchemy mapper for the Tool model.
        connection: Database connection.
        target: The Tool instance being inserted or updated.
    """
    if not target.custom_name:
        target.custom_name = target.original_name
    if not target.display_name:
        target.display_name = target.custom_name
    target.custom_name_slug = slugify(target.custom_name)

    def _resolve_gateway_slug() -> str:
        """Best-effort lookup of the owning gateway's slug; '' when unavailable."""
        if target.gateway:
            # Relationship already loaded - no query needed.
            return slugify(target.gateway.name)
        if not target.gateway_id:
            return ""
        cached_name = getattr(target, "gateway_name_cache", None)
        if cached_name:
            # Caller pre-supplied the gateway name to avoid a round trip.
            return slugify(cached_name)
        try:
            row = connection.execute(text("SELECT name FROM gateways WHERE id = :gw_id"), {"gw_id": target.gateway_id}).fetchone()
            if row:
                return slugify(row[0])
        except Exception:  # nosec B110 - intentionally proceed without prefix on failure
            pass
        return ""

    gateway_slug = _resolve_gateway_slug()
    if gateway_slug:
        target.name = f"{gateway_slug}{settings.gateway_tool_name_separator}{target.custom_name_slug}"
    else:
        target.name = target.custom_name_slug

6446 

6447 

@event.listens_for(Prompt, "before_insert")
@event.listens_for(Prompt, "before_update")
def set_prompt_name_and_slug(mapper, connection, target):  # pylint: disable=unused-argument
    """Populate name fields for a Prompt before insert/update.

    Runs before every insert and update of a Prompt:

    - original_name falls back to name when unset (legacy compatibility).
    - custom_name falls back to original_name when unset.
    - display_name falls back to custom_name when unset.
    - custom_name_slug is always recomputed from custom_name.
    - name becomes "<gateway_slug><separator><custom_name_slug>" when a gateway
      can be resolved, otherwise just custom_name_slug.

    The gateway slug is resolved from, in order: the loaded ``gateway``
    relationship, a ``gateway_name_cache`` attribute set by the caller, or a
    direct SQL lookup by ``gateway_id``. Lookup failures are deliberately
    swallowed so the prompt is saved without a prefix rather than failing the
    flush.

    Args:
        mapper: SQLAlchemy mapper for the Prompt model.
        connection: Database connection for the insert/update.
        target: Prompt instance being inserted or updated.
    """
    if not target.original_name:
        target.original_name = target.name
    if not target.custom_name:
        target.custom_name = target.original_name
    if not target.display_name:
        target.display_name = target.custom_name
    target.custom_name_slug = slugify(target.custom_name)

    def _owning_gateway_slug() -> str:
        """Best-effort lookup of the owning gateway's slug; '' when unavailable."""
        if target.gateway:
            # Relationship already loaded - no query needed.
            return slugify(target.gateway.name)
        if not target.gateway_id:
            return ""
        cached_name = getattr(target, "gateway_name_cache", None)
        if cached_name:
            # Caller pre-supplied the gateway name to avoid a round trip.
            return slugify(cached_name)
        try:
            row = connection.execute(text("SELECT name FROM gateways WHERE id = :gw_id"), {"gw_id": target.gateway_id}).fetchone()
            if row:
                return slugify(row[0])
        except Exception:  # nosec B110 - intentionally proceed without prefix on failure
            pass
        return ""

    prefix = _owning_gateway_slug()
    target.name = f"{prefix}{settings.gateway_tool_name_separator}{target.custom_name_slug}" if prefix else target.custom_name_slug