Coverage for mcpgateway / db.py: 99%
2164 statements
coverage.py v7.13.1, created at 2026-02-11 07:10 +0000
1# -*- coding: utf-8 -*-
2"""Location: ./mcpgateway/db.py
3Copyright 2025
4SPDX-License-Identifier: Apache-2.0
5Authors: Mihai Criveti
7MCP Gateway Database Models.
8This module defines SQLAlchemy models for storing MCP entities including:
9- Tools with input schema validation
10- Resources with subscription tracking
11- Prompts with argument templates
12- Federated gateways with capability tracking
13- Updated to record server associations independently using many-to-many relationships,
14- and to record tool execution metrics.
16Examples:
17 >>> from mcpgateway.db import connect_args
18 >>> isinstance(connect_args, dict)
19 True
20 >>> 'keepalives' in connect_args or 'check_same_thread' in connect_args or len(connect_args) == 0
21 True
22"""
24# Standard
25from contextlib import contextmanager
26from datetime import datetime, timedelta, timezone
27import logging
28import os
29from typing import Any, cast, Dict, Generator, List, Optional, TYPE_CHECKING
30import uuid
32# Third-Party
33import jsonschema
34from sqlalchemy import Boolean, Column, create_engine, DateTime, event, Float, ForeignKey, func, Index
35from sqlalchemy import inspect as sa_inspect
36from sqlalchemy import Integer, JSON, make_url, MetaData, select, String, Table, text, Text, UniqueConstraint, VARCHAR
37from sqlalchemy.engine import Engine
38from sqlalchemy.event import listen
39from sqlalchemy.exc import OperationalError, ProgrammingError, SQLAlchemyError
40from sqlalchemy.ext.hybrid import hybrid_property
41from sqlalchemy.orm import DeclarativeBase, joinedload, Mapped, mapped_column, relationship, Session, sessionmaker
42from sqlalchemy.orm.attributes import get_history
43from sqlalchemy.pool import NullPool, QueuePool
45# First-Party
46from mcpgateway.common.validators import SecurityValidator
47from mcpgateway.config import settings
48from mcpgateway.utils.create_slug import slugify
49from mcpgateway.utils.db_isready import wait_for_db_ready
51logger = logging.getLogger(__name__)
53if TYPE_CHECKING:
54 # First-Party
55 from mcpgateway.common.models import ResourceContent
57# ResourceContent will be imported locally where needed to avoid circular imports
58# EmailUser models moved to this file to avoid circular imports
60# ---------------------------------------------------------------------------
61# 1. Parse the URL so we can inspect backend ("postgresql", "sqlite", ...)
62# and the specific driver ("psycopg", "asyncpg", empty string = default).
63# ---------------------------------------------------------------------------
64url = make_url(settings.database_url)
65backend = url.get_backend_name() # e.g. 'postgresql', 'sqlite'
66driver = url.get_driver_name() or "default"
68# Start with an empty dict and add options only when the driver can accept
69# them; this prevents unexpected TypeError at connect time.
70connect_args: dict[str, object] = {}
72# ---------------------------------------------------------------------------
73# 2. PostgreSQL (synchronous psycopg3)
74# The keep-alive parameters below are recognised by libpq and let the
75# kernel detect broken network links quickly.
76#
77# Additionally, support PostgreSQL-specific options like search_path
78# via the 'options' query parameter in DATABASE_URL.
79# Example: postgresql+psycopg://user:pass@host/db?options=-c%20search_path=mcp_gateway
80#
81# IMPORTANT: Use postgresql+psycopg:// (not postgresql://) for psycopg3.
82# ---------------------------------------------------------------------------
83if backend == "postgresql" and driver in ("psycopg", "default", ""):
84 connect_args.update(
85 keepalives=1, # enable TCP keep-alive probes
86 keepalives_idle=30, # seconds of idleness before first probe
87 keepalives_interval=5, # seconds between probes
88 keepalives_count=5, # drop the link after N failed probes
89 # psycopg3: prepare_threshold controls automatic server-side prepared statements
90 # After N executions of the same query, psycopg3 prepares it server-side
91 # This significantly improves performance for frequently-executed queries
92 prepare_threshold=settings.db_prepare_threshold,
93 )
95 # Extract and apply PostgreSQL options from URL query parameters
96 # This allows users to specify search_path for custom schema support (Issue #1535)
97 url_options = url.query.get("options")
98 if url_options:
99 connect_args["options"] = url_options
100 logger.info(f"PostgreSQL connection options applied: {url_options}")
102 logger.info(f"psycopg3 prepare_threshold set to {settings.db_prepare_threshold}")
104# ---------------------------------------------------------------------------
105# 3. SQLite (optional) - only one extra flag and it is *SQLite-specific*.
106# ---------------------------------------------------------------------------
107elif backend == "sqlite":
108 # Allow pooled connections to hop across threads.
109 connect_args["check_same_thread"] = False
111# 4. Other backends (MySQL, MSSQL, etc.) leave `connect_args` empty.
113# ---------------------------------------------------------------------------
114# 5. Build the Engine with a single, clean connect_args mapping.
115# ---------------------------------------------------------------------------
117# Check for SQLALCHEMY_ECHO environment variable for query debugging
118# This is useful for N+1 detection and performance analysis
119_sqlalchemy_echo = os.getenv("SQLALCHEMY_ECHO", "").lower() in ("true", "1", "yes")
122def build_engine() -> Engine:
123 """Build the SQLAlchemy engine with appropriate settings.
125 This function constructs the SQLAlchemy engine using the database URL
126 and connection arguments determined by the backend type. It also configures
127 the connection pool size and timeout based on application settings.
129 Environment variables:
130 SQLALCHEMY_ECHO: Set to 'true' to log all SQL queries (useful for N+1 detection)
132 Returns:
133 SQLAlchemy Engine instance configured for the specified database.
134 """
135 if _sqlalchemy_echo:
136 logger.info("SQLALCHEMY_ECHO enabled - all SQL queries will be logged")
138 if backend == "sqlite":
139 # SQLite supports connection pooling with proper configuration
140 # For SQLite, we use a smaller pool size since it's file-based
141 sqlite_pool_size = min(settings.db_pool_size, 50) # Cap at 50 for SQLite
142 sqlite_max_overflow = min(settings.db_max_overflow, 20) # Cap at 20 for SQLite
144 logger.info("Configuring SQLite with pool_size=%s, max_overflow=%s", sqlite_pool_size, sqlite_max_overflow)
146 return create_engine(
147 settings.database_url,
148 pool_pre_ping=True, # quick liveness check per checkout
149 pool_size=sqlite_pool_size,
150 max_overflow=sqlite_max_overflow,
151 pool_timeout=settings.db_pool_timeout,
152 pool_recycle=settings.db_pool_recycle,
153 # SQLite specific optimizations
154 poolclass=QueuePool, # Explicit pool class
155 connect_args=connect_args,
156 # Log pool events in debug mode
157 echo_pool=settings.log_level == "DEBUG",
158 # Log all SQL queries when SQLALCHEMY_ECHO=true (useful for N+1 detection)
159 echo=_sqlalchemy_echo,
160 )
162 if backend in ("mysql", "mariadb"):
163 # MariaDB/MySQL specific configuration
164 logger.info("Configuring MariaDB/MySQL with pool_size=%s, max_overflow=%s", settings.db_pool_size, settings.db_max_overflow)
166 return create_engine(
167 settings.database_url,
168 pool_pre_ping=True,
169 pool_size=settings.db_pool_size,
170 max_overflow=settings.db_max_overflow,
171 pool_timeout=settings.db_pool_timeout,
172 pool_recycle=settings.db_pool_recycle,
173 connect_args=connect_args,
174 isolation_level="READ_COMMITTED", # Fix PyMySQL sync issues
175 # Log all SQL queries when SQLALCHEMY_ECHO=true (useful for N+1 detection)
176 echo=_sqlalchemy_echo,
177 )
179 # Determine if PgBouncer is in use (detected via URL or explicit config)
180 is_pgbouncer = "pgbouncer" in settings.database_url.lower()
182 # Determine pool class based on configuration
183 # - "auto": NullPool with PgBouncer (recommended), QueuePool otherwise
184 # - "null": Always NullPool (delegate pooling to PgBouncer/external pooler)
185 # - "queue": Always QueuePool (application-side pooling)
186 use_null_pool = False
187 if settings.db_pool_class == "null":
188 use_null_pool = True
189 logger.info("Using NullPool (explicit configuration)")
190 elif settings.db_pool_class == "auto" and is_pgbouncer:
191 use_null_pool = True
192 logger.info("PgBouncer detected - using NullPool (recommended: let PgBouncer handle pooling)")
193 elif settings.db_pool_class == "queue":
194 logger.info("Using QueuePool (explicit configuration)")
195 else:
196 logger.info("Using QueuePool with pool_size=%s, max_overflow=%s", settings.db_pool_size, settings.db_max_overflow)
198 # Determine pre_ping setting
199 # - "auto": Enabled for non-PgBouncer with QueuePool, disabled otherwise
200 # - "true": Always enable (validates connections, catches stale connections)
201 # - "false": Always disable
202 if settings.db_pool_pre_ping == "true":
203 use_pre_ping = True
204 logger.info("pool_pre_ping enabled (explicit configuration)")
205 elif settings.db_pool_pre_ping == "false":
206 use_pre_ping = False
207 logger.info("pool_pre_ping disabled (explicit configuration)")
208 else: # "auto"
209 # With NullPool, pre_ping is not needed (no pooled connections to validate)
210 # With QueuePool + PgBouncer, pre_ping helps detect stale connections
211 use_pre_ping = not use_null_pool and not is_pgbouncer
212 if is_pgbouncer and not use_null_pool:
213 logger.info("PgBouncer with QueuePool - consider enabling DB_POOL_PRE_PING=true to detect stale connections")
215 # Build engine with appropriate pool configuration
216 if use_null_pool:
217 return create_engine(
218 settings.database_url,
219 poolclass=NullPool,
220 connect_args=connect_args,
221 echo=_sqlalchemy_echo,
222 )
224 return create_engine(
225 settings.database_url,
226 pool_pre_ping=use_pre_ping,
227 pool_size=settings.db_pool_size,
228 max_overflow=settings.db_max_overflow,
229 pool_timeout=settings.db_pool_timeout,
230 pool_recycle=settings.db_pool_recycle,
231 connect_args=connect_args,
232 echo=_sqlalchemy_echo,
233 )
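A minimal sketch, assuming an illustrative PgBouncer-style URL, of how the pool-class decision above plays out; the URL, credentials, and pool sizes are placeholders, not values from this project:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    database_url = "postgresql+psycopg://app:secret@pgbouncer-host:6432/mcp"  # hypothetical URL
    pool_class = "auto"                      # mirrors settings.db_pool_class
    is_pgbouncer = "pgbouncer" in database_url.lower()

    if pool_class == "null" or (pool_class == "auto" and is_pgbouncer):
        # PgBouncer owns pooling: no application-side pool, so no pre_ping is needed
        example_engine = create_engine(database_url, poolclass=NullPool)
    else:
        # Application-side QueuePool: validate connections on checkout
        example_engine = create_engine(database_url, pool_pre_ping=True, pool_size=5, max_overflow=10)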
236engine = build_engine()
238# Initialize SQLAlchemy instrumentation for observability
239if settings.observability_enabled:
240 try:
241 # First-Party
242 from mcpgateway.instrumentation import instrument_sqlalchemy
244 instrument_sqlalchemy(engine)
245 logger.info("SQLAlchemy instrumentation enabled for observability")
246 except ImportError:
247 logger.warning("Failed to import SQLAlchemy instrumentation")
250# ---------------------------------------------------------------------------
251# 6. Function to return UTC timestamp
252# ---------------------------------------------------------------------------
253def utc_now() -> datetime:
254 """Return the current Coordinated Universal Time (UTC).
256 Returns:
257 datetime: A timezone-aware `datetime` whose `tzinfo` is
258 `datetime.timezone.utc`.
260 Examples:
261 >>> from mcpgateway.db import utc_now
262 >>> now = utc_now()
263 >>> now.tzinfo is not None
264 True
265 >>> str(now.tzinfo)
266 'UTC'
267 >>> isinstance(now, datetime)
268 True
269 """
270 return datetime.now(timezone.utc)
273# Configure SQLite for better concurrency if using SQLite
274if backend == "sqlite":
276 @event.listens_for(engine, "connect")
277 def set_sqlite_pragma(dbapi_conn, _connection_record):
278 """Set SQLite pragmas for better concurrency.
280 This is critical for running with multiple gunicorn workers.
281 WAL mode allows multiple readers and a single writer concurrently.
283 Args:
284 dbapi_conn: The raw DBAPI connection.
285 _connection_record: A SQLAlchemy-specific object that maintains
286 information about the connection's context.
287 """
288 cursor = dbapi_conn.cursor()
289 # Enable WAL mode for better concurrency
290 cursor.execute("PRAGMA journal_mode=WAL")
291 # Configure SQLite lock wait upper bound (ms) to prevent prolonged blocking under contention
292 cursor.execute(f"PRAGMA busy_timeout={settings.db_sqlite_busy_timeout}")
293 # Synchronous=NORMAL is safe with WAL mode and improves performance
294 cursor.execute("PRAGMA synchronous=NORMAL")
295 # Increase cache size for better performance (negative value = KB)
296 cursor.execute("PRAGMA cache_size=-64000") # 64MB cache
297 # Enable foreign key constraints for ON DELETE CASCADE support
298 cursor.execute("PRAGMA foreign_keys=ON")
299 cursor.close()
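A quick verification sketch, assuming a SQLite DATABASE_URL, that the pragmas set by the listener above are in effect on connections handed out by `engine`:

    from mcpgateway.db import engine

    with engine.connect() as conn:
        print(conn.exec_driver_sql("PRAGMA journal_mode").scalar())  # expected: 'wal'
        print(conn.exec_driver_sql("PRAGMA foreign_keys").scalar())  # expected: 1
        print(conn.exec_driver_sql("PRAGMA synchronous").scalar())   # expected: 1 (NORMAL)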
302# ---------------------------------------------------------------------------
303# Resilient Session class for graceful error recovery
304# ---------------------------------------------------------------------------
305class ResilientSession(Session):
306 """A Session subclass that auto-rollbacks on connection errors.
308 When a database operation fails due to a connection error (e.g., PgBouncer
309 query_wait_timeout), this session automatically rolls back to clear the
310 invalid transaction state. This prevents cascading PendingRollbackError
311 failures when multiple queries run within the same request.
313 Without this, the first failed query leaves the session in a "needs rollback"
314 state, and all subsequent queries fail with PendingRollbackError before
315 even attempting to use the database.
316 """
318 # Error types that indicate connection issues requiring rollback
319 _connection_error_patterns = (
320 "query_wait_timeout",
321 "server closed the connection unexpectedly",
322 "connection reset by peer",
323 "connection timed out",
324 "could not receive data from server",
325 "could not send data to server",
326 "terminating connection",
327 "no connection to the server",
328 )
330 def _is_connection_error(self, exception: Exception) -> bool:
331 """Check if an exception indicates a broken database connection.
333 Args:
334 exception: The exception to check.
336 Returns:
337 True if the exception indicates a connection error, False otherwise.
338 """
339 exc_name = type(exception).__name__
340 exc_msg = str(exception).lower()
342 # Check for known connection error types
343 if exc_name in ("ProtocolViolation", "OperationalError", "InterfaceError"):
344 return True
346 # Check for connection error patterns in message
347 for pattern in self._connection_error_patterns:
348 if pattern in exc_msg:
349 return True
351 return False
353 def _safe_rollback(self) -> None:
354 """Attempt to rollback, invalidating the session if rollback fails."""
355 try:
356 self.rollback()
357 except Exception:
358 try:
359 self.invalidate()
360 except Exception:
361 pass # nosec B110 - Best effort cleanup on connection failure
363 def execute(self, statement, params=None, **kw):
364 """Execute a statement with automatic rollback on connection errors.
366 Wraps the parent execute method to catch connection errors and
367 automatically rollback the session to prevent PendingRollbackError cascade.
369 Args:
370 statement: The SQL statement to execute.
371 params: Optional parameters for the statement.
372 **kw: Additional keyword arguments passed to Session.execute().
374 Returns:
375 The result of the execute operation.
377 Raises:
378 Exception: Re-raises any exception after rolling back on connection errors.
379 """
380 try:
381 return super().execute(statement, params, **kw)
382 except Exception as e:
383 if self._is_connection_error(e):
384 logger.warning(
385 "Connection error during execute, auto-rolling back session: %s",
386 type(e).__name__,
387 )
388 self._safe_rollback()
389 raise
391 def scalar(self, statement, params=None, **kw):
392 """Execute and return a scalar with automatic rollback on connection errors.
394 Wraps the parent scalar method to catch connection errors and
395 automatically rollback the session to prevent PendingRollbackError cascade.
397 Args:
398 statement: The SQL statement to execute.
399 params: Optional parameters for the statement.
400 **kw: Additional keyword arguments passed to Session.scalar().
402 Returns:
403 The scalar result of the query.
405 Raises:
406 Exception: Re-raises any exception after rolling back on connection errors.
407 """
408 try:
409 return super().scalar(statement, params, **kw)
410 except Exception as e:
411 if self._is_connection_error(e):
412 logger.warning(
413 "Connection error during scalar, auto-rolling back session: %s",
414 type(e).__name__,
415 )
416 self._safe_rollback()
417 raise
419 def scalars(self, statement, params=None, **kw):
420 """Execute and return scalars with automatic rollback on connection errors.
422 Wraps the parent scalars method to catch connection errors and
423 automatically rollback the session to prevent PendingRollbackError cascade.
425 Args:
426 statement: The SQL statement to execute.
427 params: Optional parameters for the statement.
428 **kw: Additional keyword arguments passed to Session.scalars().
430 Returns:
431 The scalars result of the query.
433 Raises:
434 Exception: Re-raises any exception after rolling back on connection errors.
435 """
436 try:
437 return super().scalars(statement, params, **kw)
438 except Exception as e:
439 if self._is_connection_error(e):
440 logger.warning(
441 "Connection error during scalars, auto-rolling back session: %s",
442 type(e).__name__,
443 )
444 self._safe_rollback()
445 raise
448# Session factory using ResilientSession
449# expire_on_commit=False prevents SQLAlchemy from expiring ORM objects after commit,
450# allowing continued access to attributes without re-querying the database.
451# This is essential when commits happen during read operations (e.g., to release transactions).
452SessionLocal = sessionmaker(class_=ResilientSession, autocommit=False, autoflush=False, expire_on_commit=False, bind=engine)
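A minimal usage sketch of the factory above; the email address is a hypothetical primary key. It shows why `expire_on_commit=False` matters here: attributes remain readable after the commit without triggering a re-query, and `ResilientSession` rolls the session back automatically if the connection drops mid-request:

    from mcpgateway.db import EmailUser, SessionLocal

    with SessionLocal() as session:
        user = session.get(EmailUser, "alice@example.com")  # hypothetical row
        session.commit()  # releases the transaction promptly (important behind PgBouncer)
        if user is not None:
            # Still loaded: expire_on_commit=False means no implicit refresh happens here
            print(user.get_display_name())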
455@event.listens_for(ResilientSession, "after_transaction_end")
456def end_transaction_cleanup(_session, _transaction):
457 """Ensure connection is properly released after transaction ends.
459 This event fires after COMMIT or ROLLBACK, ensuring the connection
460 is returned to PgBouncer cleanly with no open transaction.
462 Args:
463 _session: The SQLAlchemy session that ended the transaction.
464 _transaction: The transaction that was ended.
465 """
466 # The transaction has already ended - nothing to do here
467 # This is just for monitoring/logging if needed
470@event.listens_for(ResilientSession, "before_commit")
471def before_commit_handler(session):
472 """Handler before commit to ensure transaction is in good state.
474 This is called before COMMIT, ensuring any pending work is flushed.
476 Args:
477 session: The SQLAlchemy session about to commit.
478 """
479 try:
480 session.flush()
481 except Exception: # nosec B110
482 # If flush fails, the commit will also fail and trigger rollback
483 pass
486# ---------------------------------------------------------------------------
487# Pool event listeners for connection resilience
488# These handlers ensure broken connections are properly invalidated and
489# discarded from the pool, preventing "poisoned" connections from causing
490# cascading failures (e.g., PendingRollbackError after PgBouncer timeout).
491#
492# Key issue: PgBouncer returns ProtocolViolation (SQL error 08P01) for
493# query_wait_timeout, but SQLAlchemy doesn't recognize this as a disconnect
494# by default. We must explicitly mark these errors as disconnects so the
495# connection pool properly invalidates these connections.
496#
497# References:
498# - https://github.com/zodb/relstorage/issues/412
499# - https://docs.sqlalchemy.org/en/20/core/pooling.html#custom-legacy-pessimistic-ping
500# ---------------------------------------------------------------------------
501@event.listens_for(engine, "handle_error")
502def handle_pool_error(exception_context):
503 """Mark PgBouncer and connection errors as disconnects for proper pool invalidation.
505 This event fires when an error occurs during query execution. By marking
506 certain errors as disconnects (is_disconnect=True), SQLAlchemy will:
507 1. Invalidate the current connection (discard from pool)
508 2. Invalidate all other pooled connections older than current time
510 Without this, PgBouncer errors like query_wait_timeout result in
511 a ProtocolViolation, which is classified as DatabaseError, not a disconnect.
512 The connection stays in the pool and causes PendingRollbackError on reuse.
514 Args:
515 exception_context: SQLAlchemy ExceptionContext with error details.
516 """
517 original = exception_context.original_exception
518 if original is None:
519 return
521 # Get the exception class name and message for pattern matching
522 exc_class = type(original).__name__
523 exc_msg = str(original).lower()
525 # List of error patterns that indicate the connection is broken
526 # and should be treated as a disconnect for pool invalidation
527 disconnect_patterns = [
528 # PgBouncer errors
529 "query_wait_timeout",
530 "server_login_retry",
531 "client_login_timeout",
532 "client_idle_timeout",
533 "idle_transaction_timeout",
534 "server closed the connection unexpectedly",
535 "connection reset by peer",
536 "connection timed out",
537 "no connection to the server",
538 "terminating connection",
539 "connection has been closed unexpectedly",
540 # PostgreSQL errors indicating dead connection
541 "could not receive data from server",
542 "could not send data to server",
543 "ssl connection has been closed unexpectedly",
544 "canceling statement due to conflict with recovery",
545 ]
547 # Check for ProtocolViolation or OperationalError with disconnect patterns
548 is_connection_error = exc_class in ("ProtocolViolation", "OperationalError", "InterfaceError", "DatabaseError")
550 if is_connection_error:
551 for pattern in disconnect_patterns:
552 if pattern in exc_msg:
553 exception_context.is_disconnect = True
554 logger.warning(
555 "Connection error detected, marking as disconnect for pool invalidation: %s: %s",
556 exc_class,
557 pattern,
558 )
559 return
561 # Also treat ProtocolViolation from PgBouncer as disconnect even without message match
562 # PgBouncer sends 08P01 PROTOCOL_VIOLATION for various connection issues
563 if exc_class == "ProtocolViolation":
564 exception_context.is_disconnect = True
565 logger.warning(
566 "ProtocolViolation detected (likely PgBouncer), marking as disconnect: %s",
567 exc_msg[:200],
568 )
571@event.listens_for(engine, "checkin")
572def reset_connection_on_checkin(dbapi_connection, _connection_record):
573 """Reset connection state when returned to pool.
575 This ensures transactions are properly closed before the connection
576 is returned to PgBouncer, preventing 'idle in transaction' buildup.
577 With PgBouncer in transaction mode, connections stay reserved until
578 the transaction ends - this rollback releases them immediately.
580 Args:
581 dbapi_connection: The raw DBAPI connection being checked in.
582 _connection_record: The connection record tracking this connection.
583 """
584 try:
585 # Issue a rollback to close any open transaction
586 # This is safe for both read and write operations:
587 # - For reads: rollback has no effect but closes the transaction
588 # - For writes: they should already be committed by the application
589 dbapi_connection.rollback()
590 except Exception as e:
591 # Connection may be invalid - log and try to force close
592 logger.debug("Connection checkin rollback failed: %s", e)
593 try:
594 # Try to close the raw connection to release it from PgBouncer
595 dbapi_connection.close()
596 except Exception: # nosec B110
597 pass # Nothing more we can do
600@event.listens_for(engine, "reset")
601def reset_connection_on_reset(dbapi_connection, _connection_record, _reset_state):
602 """Reset connection state when the pool resets a connection.
604 This handles the case where a connection is being reset before reuse.
606 Args:
607 dbapi_connection: The raw DBAPI connection being reset.
608 _connection_record: The connection record tracking this connection.
609 """
610 try:
611 dbapi_connection.rollback()
612 except Exception: # nosec B110
613 pass # Connection may be invalid
616def _refresh_gateway_slugs_batched(session: Session, batch_size: int) -> None:
617 """Refresh gateway slugs in small batches to reduce memory usage.
619 Args:
620 session: Active SQLAlchemy session.
621 batch_size: Maximum number of rows to process per batch.
622 """
624 last_id: Optional[str] = None
626 while True:
627 query = session.query(Gateway).order_by(Gateway.id)
628 if last_id is not None:
629 query = query.filter(Gateway.id > last_id)
631 gateways = query.limit(batch_size).all()
632 if not gateways:
633 break
635 updated = False
636 for gateway in gateways:
637 new_slug = slugify(gateway.name)
638 if gateway.slug != new_slug:
639 gateway.slug = new_slug
640 updated = True
642 if updated:
643 session.commit()
645 # Free ORM state from memory between batches
646 session.expire_all()
647 last_id = gateways[-1].id
650def _refresh_tool_names_batched(session: Session, batch_size: int) -> None:
651 """Refresh tool names in batches with eager-loaded gateways.
653 Uses joinedload(Tool.gateway) to avoid N+1 queries when accessing the
654 gateway relationship while regenerating tool names.
656 Args:
657 session: Active SQLAlchemy session.
658 batch_size: Maximum number of rows to process per batch.
659 """
661 last_id: Optional[str] = None
662 separator = settings.gateway_tool_name_separator
664 while True:
665 stmt = select(Tool).options(joinedload(Tool.gateway)).order_by(Tool.id).limit(batch_size)
666 if last_id is not None:
667 stmt = stmt.where(Tool.id > last_id)
669 tools = session.execute(stmt).scalars().all()
670 if not tools:
671 break
673 updated = False
674 for tool in tools:
675 # Prefer custom_name_slug when available; fall back to original_name
676 name_slug_source = getattr(tool, "custom_name_slug", None) or tool.original_name
677 name_slug = slugify(name_slug_source)
679 if tool.gateway:
680 gateway_slug = slugify(tool.gateway.name)
681 new_name = f"{gateway_slug}{separator}{name_slug}"
682 else:
683 new_name = name_slug
685 if tool.name != new_name:
686 tool.name = new_name
687 updated = True
689 if updated:
690 session.commit()
692 # Free ORM state from memory between batches
693 session.expire_all()
694 last_id = tools[-1].id
697def _refresh_prompt_names_batched(session: Session, batch_size: int) -> None:
698 """Refresh prompt names in batches with eager-loaded gateways.
700 Uses joinedload(Prompt.gateway) to avoid N+1 queries when accessing the
701 gateway relationship while regenerating prompt names.
703 Args:
704 session: Active SQLAlchemy session.
705 batch_size: Maximum number of rows to process per batch.
706 """
707 last_id: Optional[str] = None
708 separator = settings.gateway_tool_name_separator
710 while True:
711 stmt = select(Prompt).options(joinedload(Prompt.gateway)).order_by(Prompt.id).limit(batch_size)
712 if last_id is not None:
713 stmt = stmt.where(Prompt.id > last_id)
715 prompts = session.execute(stmt).scalars().all()
716 if not prompts:
717 break
719 updated = False
720 for prompt in prompts:
721 name_slug_source = getattr(prompt, "custom_name_slug", None) or prompt.original_name
722 name_slug = slugify(name_slug_source)
724 if prompt.gateway:
725 gateway_slug = slugify(prompt.gateway.name)
726 new_name = f"{gateway_slug}{separator}{name_slug}"
727 else:
728 new_name = name_slug
730 if prompt.name != new_name:
731 prompt.name = new_name
732 updated = True
734 if updated:
735 session.commit()
737 session.expire_all()
738 last_id = prompts[-1].id
741def refresh_slugs_on_startup(batch_size: Optional[int] = None) -> None:
742 """Refresh slugs for all gateways and tool names on startup.
744 This implementation avoids loading all rows into memory at once by
745 streaming through the tables in batches and eager-loading tool.gateway
746 relationships to prevent N+1 query patterns.
748 Args:
749 batch_size: Optional maximum number of rows to process per batch. If
750 not provided, the value is taken from
751 ``settings.slug_refresh_batch_size`` with a default of ``1000``.
752 """
754 effective_batch_size = batch_size or getattr(settings, "slug_refresh_batch_size", 1000)
756 try:
757 with cast(Any, SessionLocal)() as session:
758 # Skip if tables don't exist yet (fresh database)
759 try:
760 _refresh_gateway_slugs_batched(session, effective_batch_size)
761 except (OperationalError, ProgrammingError) as e:
762 # Table doesn't exist yet - expected on fresh database
763 logger.info("Gateway table not found, skipping slug refresh: %s", e)
764 return
766 try:
767 _refresh_tool_names_batched(session, effective_batch_size)
768 except (OperationalError, ProgrammingError) as e:
769 # Table doesn't exist yet - expected on fresh database
770 logger.info("Tool table not found, skipping tool name refresh: %s", e)
772 try:
773 _refresh_prompt_names_batched(session, effective_batch_size)
774 except (OperationalError, ProgrammingError) as e:
775 # Table doesn't exist yet - expected on fresh database
776 logger.info("Prompt table not found, skipping prompt name refresh: %s", e)
778 except SQLAlchemyError as e:
779 logger.warning("Failed to refresh slugs on startup (database error): %s", e)
780 except Exception as e:
781 logger.warning("Failed to refresh slugs on startup (unexpected error): %s", e)
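For reference, a hedged example of invoking this startup hook with an explicit batch size (the value 500 is illustrative):

    from mcpgateway.db import refresh_slugs_on_startup

    # Omitting batch_size falls back to settings.slug_refresh_batch_size (default 1000)
    refresh_slugs_on_startup(batch_size=500)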
784class Base(DeclarativeBase):
785 """Base class for all models."""
787 # MariaDB-compatible naming convention for foreign keys
788 metadata = MetaData(
789 naming_convention={
790 "fk": "fk_%(table_name)s_%(column_0_name)s",
791 "pk": "pk_%(table_name)s",
792 "ix": "ix_%(table_name)s_%(column_0_name)s",
793 "uq": "uq_%(table_name)s_%(column_0_name)s",
794 "ck": "ck_%(table_name)s_%(constraint_name)s",
795 }
796 )
799# ---------------------------------------------------------------------------
800# RBAC Models - SQLAlchemy Database Models
801# ---------------------------------------------------------------------------
804class Role(Base):
805 """Role model for RBAC system."""
807 __tablename__ = "roles"
809 # Primary key
810 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
812 # Role metadata
813 name: Mapped[str] = mapped_column(String(255), nullable=False)
814 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
815 scope: Mapped[str] = mapped_column(String(20), nullable=False) # 'global', 'team', 'personal'
817 # Permissions and inheritance
818 permissions: Mapped[List[str]] = mapped_column(JSON, nullable=False, default=list)
819 inherits_from: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("roles.id"), nullable=True)
821 # Metadata
822 created_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
823 is_system_role: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
824 is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
826 # Timestamps
827 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
828 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, onupdate=utc_now)
830 # Relationships
831 parent_role: Mapped[Optional["Role"]] = relationship("Role", remote_side=[id], backref="child_roles")
832 user_assignments: Mapped[List["UserRole"]] = relationship("UserRole", back_populates="role", cascade="all, delete-orphan")
834 def get_effective_permissions(self) -> List[str]:
835 """Get all permissions including inherited ones.
837 Returns:
838 List of permission strings including inherited permissions
839 """
840 effective_permissions = set(self.permissions)
841 if self.parent_role:
842 effective_permissions.update(self.parent_role.get_effective_permissions())
843 return sorted(list(effective_permissions))
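A transient (unsaved) sketch of permission inheritance via `parent_role`; the role names and permission strings are illustrative:

    base = Role(name="base", scope="global", permissions=["tools.read"], created_by="admin@example.com")
    dev = Role(name="developer", scope="team", permissions=["tools.execute"], created_by="admin@example.com")
    dev.parent_role = base
    dev.get_effective_permissions()  # ['tools.execute', 'tools.read'] - own plus inherited, sorted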
846class UserRole(Base):
847 """User role assignment model."""
849 __tablename__ = "user_roles"
851 # Primary key
852 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
854 # Assignment details
855 user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
856 role_id: Mapped[str] = mapped_column(String(36), ForeignKey("roles.id"), nullable=False)
857 scope: Mapped[str] = mapped_column(String(20), nullable=False) # 'global', 'team', 'personal'
858 scope_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True) # Team ID if team-scoped
860 # Grant metadata
861 granted_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
862 granted_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
863 expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
864 is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
866 # Relationships
867 role: Mapped["Role"] = relationship("Role", back_populates="user_assignments")
869 def is_expired(self) -> bool:
870 """Check if the role assignment has expired.
872 Returns:
873 True if assignment has expired, False otherwise
874 """
875 if not self.expires_at:
876 return False
877 return utc_now() > self.expires_at
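A small transient sketch of the expiry check (all identifiers are placeholder values):

    assignment = UserRole(user_email="alice@example.com", role_id="role-123", scope="team",
                          scope_id="team-123", granted_by="admin@example.com")
    assignment.is_expired()                                 # False - no expiry set
    assignment.expires_at = utc_now() - timedelta(days=1)
    assignment.is_expired()                                 # True - expiry is in the past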
880class PermissionAuditLog(Base):
881 """Permission audit log model."""
883 __tablename__ = "permission_audit_log"
885 # Primary key
886 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
888 # Audit metadata
889 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
890 user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
892 # Permission details
893 permission: Mapped[str] = mapped_column(String(100), nullable=False)
894 resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
895 resource_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
896 team_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
898 # Result
899 granted: Mapped[bool] = mapped_column(Boolean, nullable=False)
900 roles_checked: Mapped[Optional[Dict]] = mapped_column(JSON, nullable=True)
902 # Request metadata
903 ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) # IPv6 max length
904 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
907# Permission constants for the system
908class Permissions:
909 """System permission constants."""
911 # User permissions
912 USERS_CREATE = "users.create"
913 USERS_READ = "users.read"
914 USERS_UPDATE = "users.update"
915 USERS_DELETE = "users.delete"
916 USERS_INVITE = "users.invite"
918 # Team permissions
919 TEAMS_CREATE = "teams.create"
920 TEAMS_READ = "teams.read"
921 TEAMS_UPDATE = "teams.update"
922 TEAMS_DELETE = "teams.delete"
923 TEAMS_JOIN = "teams.join"
924 TEAMS_MANAGE_MEMBERS = "teams.manage_members"
926 # Tool permissions
927 TOOLS_CREATE = "tools.create"
928 TOOLS_READ = "tools.read"
929 TOOLS_UPDATE = "tools.update"
930 TOOLS_DELETE = "tools.delete"
931 TOOLS_EXECUTE = "tools.execute"
933 # Resource permissions
934 RESOURCES_CREATE = "resources.create"
935 RESOURCES_READ = "resources.read"
936 RESOURCES_UPDATE = "resources.update"
937 RESOURCES_DELETE = "resources.delete"
938 RESOURCES_SHARE = "resources.share"
940 # Gateway permissions
941 GATEWAYS_CREATE = "gateways.create"
942 GATEWAYS_READ = "gateways.read"
943 GATEWAYS_UPDATE = "gateways.update"
944 GATEWAYS_DELETE = "gateways.delete"
946 # Prompt permissions
947 PROMPTS_CREATE = "prompts.create"
948 PROMPTS_READ = "prompts.read"
949 PROMPTS_UPDATE = "prompts.update"
950 PROMPTS_DELETE = "prompts.delete"
951 PROMPTS_EXECUTE = "prompts.execute"
953 # Server permissions
954 SERVERS_CREATE = "servers.create"
955 SERVERS_READ = "servers.read"
956 SERVERS_UPDATE = "servers.update"
957 SERVERS_DELETE = "servers.delete"
958 SERVERS_MANAGE = "servers.manage"
960 # Token permissions
961 TOKENS_CREATE = "tokens.create"
962 TOKENS_READ = "tokens.read"
963 TOKENS_UPDATE = "tokens.update"
964 TOKENS_REVOKE = "tokens.revoke"
966 # Admin permissions
967 ADMIN_SYSTEM_CONFIG = "admin.system_config"
968 ADMIN_USER_MANAGEMENT = "admin.user_management"
969 ADMIN_SECURITY_AUDIT = "admin.security_audit"
970 ADMIN_OVERVIEW = "admin.overview"
971 ADMIN_DASHBOARD = "admin.dashboard"
972 ADMIN_EVENTS = "admin.events"
973 ADMIN_GRPC = "admin.grpc"
974 ADMIN_PLUGINS = "admin.plugins"
976 # A2A Agent permissions
977 A2A_CREATE = "a2a.create"
978 A2A_READ = "a2a.read"
979 A2A_UPDATE = "a2a.update"
980 A2A_DELETE = "a2a.delete"
981 A2A_INVOKE = "a2a.invoke"
983 # Tag permissions
984 TAGS_READ = "tags.read"
985 TAGS_CREATE = "tags.create"
986 TAGS_UPDATE = "tags.update"
987 TAGS_DELETE = "tags.delete"
989 # Special permissions
990 ALL_PERMISSIONS = "*" # Wildcard for all permissions
992 @classmethod
993 def get_all_permissions(cls) -> List[str]:
994 """Get list of all defined permissions.
996 Returns:
997 List of all permission strings defined in the class
998 """
999 permissions = []
1000 for attr_name in dir(cls):
1001 if not attr_name.startswith("_") and attr_name.isupper() and attr_name != "ALL_PERMISSIONS":
1002 attr_value = getattr(cls, attr_name)
1003 if isinstance(attr_value, str) and "." in attr_value:
1004 permissions.append(attr_value)
1005 return sorted(permissions)
1007 @classmethod
1008 def get_permissions_by_resource(cls) -> Dict[str, List[str]]:
1009 """Get permissions organized by resource type.
1011 Returns:
1012 Dictionary mapping resource types to their permissions
1013 """
1014 resource_permissions = {}
1015 for permission in cls.get_all_permissions():
1016 resource_type = permission.split(".")[0]
1017 if resource_type not in resource_permissions:
1018 resource_permissions[resource_type] = []
1019 resource_permissions[resource_type].append(permission)
1020 return resource_permissions
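Illustrative calls against the constants defined above; the results follow directly from the class attributes:

    Permissions.get_all_permissions()[:3]        # ['a2a.create', 'a2a.delete', 'a2a.invoke']
    by_resource = Permissions.get_permissions_by_resource()
    sorted(by_resource)[:4]                      # ['a2a', 'admin', 'gateways', 'prompts']
    by_resource["tools"]                         # ['tools.create', 'tools.delete', 'tools.execute', 'tools.read', 'tools.update']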
1023# ---------------------------------------------------------------------------
1024# Email-based User Authentication Models
1025# ---------------------------------------------------------------------------
1028class EmailUser(Base):
1029 """Email-based user model for authentication.
1031 This model provides email-based authentication as the foundation
1032 for all multi-user features. Users are identified by email addresses
1033 instead of usernames.
1035 Attributes:
1036 email (str): Primary key, unique email identifier
1037 password_hash (str): Argon2id hashed password
1038 full_name (str): Optional display name for professional appearance
1039 is_admin (bool): Admin privileges flag
1040 is_active (bool): Account status flag
1041 auth_provider (str): Authentication provider ('local', 'github', etc.)
1042 password_hash_type (str): Type of password hash used
1043 failed_login_attempts (int): Count of failed login attempts
1044 locked_until (datetime): Account lockout expiration
1045 created_at (datetime): Account creation timestamp
1046 updated_at (datetime): Last account update timestamp
1047 last_login (datetime): Last successful login timestamp
1048 email_verified_at (datetime): Email verification timestamp
1050 Examples:
1051 >>> user = EmailUser(
1052 ... email="alice@example.com",
1053 ... password_hash="$argon2id$v=19$m=65536,t=3,p=1$...",
1054 ... full_name="Alice Smith",
1055 ... is_admin=False
1056 ... )
1057 >>> user.email
1058 'alice@example.com'
1059 >>> user.is_email_verified()
1060 False
1061 >>> user.is_account_locked()
1062 False
1063 """
1065 __tablename__ = "email_users"
1067 # Core identity fields
1068 email: Mapped[str] = mapped_column(String(255), primary_key=True, index=True)
1069 password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
1070 full_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
1071 is_admin: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
1072 # Track how admin status was granted: "sso" (synced from IdP), "manual" (Admin UI), "api" (API grant), or None (legacy)
1073 admin_origin: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
1075 # Status fields
1076 is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
1077 email_verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
1079 # Security fields
1080 auth_provider: Mapped[str] = mapped_column(String(50), default="local", nullable=False)
1081 password_hash_type: Mapped[str] = mapped_column(String(20), default="argon2id", nullable=False)
1082 failed_login_attempts: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
1083 locked_until: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
1084 password_change_required: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
1085 password_changed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=True)
1087 # Timestamps
1088 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1089 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
1090 last_login: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
1092 def __repr__(self) -> str:
1093 """String representation of the user.
1095 Returns:
1096 str: String representation of EmailUser instance
1097 """
1098 return f"<EmailUser(email='{self.email}', full_name='{self.full_name}', is_admin={self.is_admin})>"
1100 def is_email_verified(self) -> bool:
1101 """Check if the user's email is verified.
1103 Returns:
1104 bool: True if email is verified, False otherwise
1106 Examples:
1107 >>> user = EmailUser(email="test@example.com")
1108 >>> user.is_email_verified()
1109 False
1110 >>> user.email_verified_at = utc_now()
1111 >>> user.is_email_verified()
1112 True
1113 """
1114 return self.email_verified_at is not None
1116 def is_account_locked(self) -> bool:
1117 """Check if the account is currently locked.
1119 Returns:
1120 bool: True if account is locked, False otherwise
1122 Examples:
1123 >>> from datetime import timedelta
1124 >>> user = EmailUser(email="test@example.com")
1125 >>> user.is_account_locked()
1126 False
1127 >>> user.locked_until = utc_now() + timedelta(hours=1)
1128 >>> user.is_account_locked()
1129 True
1130 """
1131 if self.locked_until is None:
1132 return False
1133 return utc_now() < self.locked_until
1135 def get_display_name(self) -> str:
1136 """Get the user's display name.
1138 Returns the full_name if available, otherwise extracts
1139 the local part from the email address.
1141 Returns:
1142 str: Display name for the user
1144 Examples:
1145 >>> user = EmailUser(email="john@example.com", full_name="John Doe")
1146 >>> user.get_display_name()
1147 'John Doe'
1148 >>> user_no_name = EmailUser(email="jane@example.com")
1149 >>> user_no_name.get_display_name()
1150 'jane'
1151 """
1152 if self.full_name:
1153 return self.full_name
1154 return self.email.split("@")[0]
1156 def reset_failed_attempts(self) -> None:
1157 """Reset failed login attempts counter.
1159 Called after successful authentication to reset the
1160 failed attempts counter and clear any account lockout.
1162 Examples:
1163 >>> user = EmailUser(email="test@example.com", failed_login_attempts=3)
1164 >>> user.reset_failed_attempts()
1165 >>> user.failed_login_attempts
1166 0
1167 >>> user.locked_until is None
1168 True
1169 """
1170 self.failed_login_attempts = 0
1171 self.locked_until = None
1172 self.last_login = utc_now()
1174 def increment_failed_attempts(self, max_attempts: int = 5, lockout_duration_minutes: int = 30) -> bool:
1175 """Increment failed login attempts and potentially lock account.
1177 Args:
1178 max_attempts: Maximum allowed failed attempts before lockout
1179 lockout_duration_minutes: Duration of lockout in minutes
1181 Returns:
1182 bool: True if account is now locked, False otherwise
1184 Examples:
1185 >>> user = EmailUser(email="test@example.com", password_hash="test", failed_login_attempts=0)
1186 >>> user.increment_failed_attempts(max_attempts=3)
1187 False
1188 >>> user.failed_login_attempts
1189 1
1190 >>> for _ in range(2):
1191 ... user.increment_failed_attempts(max_attempts=3)
1192 False
1193 True
1194 >>> user.is_account_locked()
1195 True
1196 """
1197 self.failed_login_attempts += 1
1199 if self.failed_login_attempts >= max_attempts:
1200 self.locked_until = utc_now() + timedelta(minutes=lockout_duration_minutes)
1201 return True
1203 return False
1205 # Team relationships
1206 team_memberships: Mapped[List["EmailTeamMember"]] = relationship("EmailTeamMember", foreign_keys="EmailTeamMember.user_email", back_populates="user")
1207 created_teams: Mapped[List["EmailTeam"]] = relationship("EmailTeam", foreign_keys="EmailTeam.created_by", back_populates="creator")
1208 sent_invitations: Mapped[List["EmailTeamInvitation"]] = relationship("EmailTeamInvitation", foreign_keys="EmailTeamInvitation.invited_by", back_populates="inviter")
1210 # API token relationships
1211 api_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="user", cascade="all, delete-orphan")
1213 def get_teams(self) -> List["EmailTeam"]:
1214 """Get all teams this user is a member of.
1216 Returns:
1217 List[EmailTeam]: List of teams the user belongs to
1219 Examples:
1220 >>> user = EmailUser(email="user@example.com")
1221 >>> teams = user.get_teams()
1222 >>> isinstance(teams, list)
1223 True
1224 """
1225 return [membership.team for membership in self.team_memberships if membership.is_active]
1227 def get_personal_team(self) -> Optional["EmailTeam"]:
1228 """Get the user's personal team.
1230 Returns:
1231 EmailTeam: The user's personal team or None if not found
1233 Examples:
1234 >>> user = EmailUser(email="user@example.com")
1235 >>> personal_team = user.get_personal_team()
1236 """
1237 for team in self.created_teams:
1238 if team.is_personal and team.is_active:
1239 return team
1240 return None
1242 def is_team_member(self, team_id: str) -> bool:
1243 """Check if user is a member of the specified team.
1245 Args:
1246 team_id: ID of the team to check
1248 Returns:
1249 bool: True if user is a member, False otherwise
1251 Examples:
1252 >>> user = EmailUser(email="user@example.com")
1253 >>> user.is_team_member("team-123")
1254 False
1255 """
1256 return any(membership.team_id == team_id and membership.is_active for membership in self.team_memberships)
1258 def get_team_role(self, team_id: str) -> Optional[str]:
1259 """Get user's role in a specific team.
1261 Args:
1262 team_id: ID of the team to check
1264 Returns:
1265 str: User's role or None if not a member
1267 Examples:
1268 >>> user = EmailUser(email="user@example.com")
1269 >>> role = user.get_team_role("team-123")
1270 """
1271 for membership in self.team_memberships:
1272 if membership.team_id == team_id and membership.is_active:
1273 return membership.role
1274 return None
1277class EmailAuthEvent(Base):
1278 """Authentication event logging for email users.
1280 This model tracks all authentication attempts for auditing,
1281 security monitoring, and compliance purposes.
1283 Attributes:
1284 id (int): Primary key
1285 timestamp (datetime): Event timestamp
1286 user_email (str): Email of the user
1287 event_type (str): Type of authentication event
1288 success (bool): Whether the authentication was successful
1289 ip_address (str): Client IP address
1290 user_agent (str): Client user agent string
1291 failure_reason (str): Reason for authentication failure
1292 details (dict): Additional event details as JSON
1294 Examples:
1295 >>> event = EmailAuthEvent(
1296 ... user_email="alice@example.com",
1297 ... event_type="login",
1298 ... success=True,
1299 ... ip_address="192.168.1.100"
1300 ... )
1301 >>> event.event_type
1302 'login'
1303 >>> event.success
1304 True
1305 """
1307 __tablename__ = "email_auth_events"
1309 # Primary key
1310 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
1312 # Event details
1313 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1314 user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
1315 event_type: Mapped[str] = mapped_column(String(50), nullable=False)
1316 success: Mapped[bool] = mapped_column(Boolean, nullable=False)
1318 # Client information
1319 ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) # IPv6 compatible
1320 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1322 # Failure information
1323 failure_reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
1324 details: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # JSON string
1326 def __repr__(self) -> str:
1327 """String representation of the auth event.
1329 Returns:
1330 str: String representation of EmailAuthEvent instance
1331 """
1332 return f"<EmailAuthEvent(user_email='{self.user_email}', event_type='{self.event_type}', success={self.success})>"
1334 @classmethod
1335 def create_login_attempt(
1336 cls,
1337 user_email: str,
1338 success: bool,
1339 ip_address: Optional[str] = None,
1340 user_agent: Optional[str] = None,
1341 failure_reason: Optional[str] = None,
1342 ) -> "EmailAuthEvent":
1343 """Create a login attempt event.
1345 Args:
1346 user_email: Email address of the user
1347 success: Whether the login was successful
1348 ip_address: Client IP address
1349 user_agent: Client user agent
1350 failure_reason: Reason for failure (if applicable)
1352 Returns:
1353 EmailAuthEvent: New authentication event
1355 Examples:
1356 >>> event = EmailAuthEvent.create_login_attempt(
1357 ... user_email="user@example.com",
1358 ... success=True,
1359 ... ip_address="192.168.1.1"
1360 ... )
1361 >>> event.event_type
1362 'login'
1363 >>> event.success
1364 True
1365 """
1366 return cls(user_email=user_email, event_type="login", success=success, ip_address=ip_address, user_agent=user_agent, failure_reason=failure_reason)
1368 @classmethod
1369 def create_registration_event(
1370 cls,
1371 user_email: str,
1372 success: bool,
1373 ip_address: Optional[str] = None,
1374 user_agent: Optional[str] = None,
1375 failure_reason: Optional[str] = None,
1376 ) -> "EmailAuthEvent":
1377 """Create a registration event.
1379 Args:
1380 user_email: Email address of the user
1381 success: Whether the registration was successful
1382 ip_address: Client IP address
1383 user_agent: Client user agent
1384 failure_reason: Reason for failure (if applicable)
1386 Returns:
1387 EmailAuthEvent: New authentication event
1388 """
1389 return cls(user_email=user_email, event_type="registration", success=success, ip_address=ip_address, user_agent=user_agent, failure_reason=failure_reason)
1391 @classmethod
1392 def create_password_change_event(
1393 cls,
1394 user_email: str,
1395 success: bool,
1396 ip_address: Optional[str] = None,
1397 user_agent: Optional[str] = None,
1398 ) -> "EmailAuthEvent":
1399 """Create a password change event.
1401 Args:
1402 user_email: Email address of the user
1403 success: Whether the password change was successful
1404 ip_address: Client IP address
1405 user_agent: Client user agent
1407 Returns:
1408 EmailAuthEvent: New authentication event
1409 """
1410 return cls(user_email=user_email, event_type="password_change", success=success, ip_address=ip_address, user_agent=user_agent)
1413class EmailTeam(Base):
1414 """Email-based team model for multi-team collaboration.
1416 This model represents teams that users can belong to, with automatic
1417 personal team creation and role-based access control.
1419 Attributes:
1420 id (str): Primary key UUID
1421 name (str): Team display name
1422 slug (str): URL-friendly team identifier
1423 description (str): Team description
1424 created_by (str): Email of the user who created the team
1425 is_personal (bool): Whether this is a personal team
1426 visibility (str): Team visibility (private, public)
1427 max_members (int): Maximum number of team members allowed
1428 created_at (datetime): Team creation timestamp
1429 updated_at (datetime): Last update timestamp
1430 is_active (bool): Whether the team is active
1432 Examples:
1433 >>> team = EmailTeam(
1434 ... name="Engineering Team",
1435 ... slug="engineering-team",
1436 ... created_by="admin@example.com",
1437 ... is_personal=False
1438 ... )
1439 >>> team.name
1440 'Engineering Team'
1441 >>> team.is_personal
1442 False
1443 """
1445 __tablename__ = "email_teams"
1447 # Primary key
1448 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
1450 # Basic team information
1451 name: Mapped[str] = mapped_column(String(255), nullable=False)
1452 slug: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)
1453 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1454 created_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
1456 # Team settings
1457 is_personal: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
1458 visibility: Mapped[str] = mapped_column(String(20), default="public", nullable=False)
1459 max_members: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
1461 # Timestamps
1462 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1463 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
1464 is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
1466 # Relationships
1467 members: Mapped[List["EmailTeamMember"]] = relationship("EmailTeamMember", back_populates="team", cascade="all, delete-orphan")
1468 invitations: Mapped[List["EmailTeamInvitation"]] = relationship("EmailTeamInvitation", back_populates="team", cascade="all, delete-orphan")
1469 api_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="team", cascade="all, delete-orphan")
1470 creator: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[created_by])
1472 # Index for search and pagination performance
1473 __table_args__ = (Index("ix_email_teams_name_id", "name", "id"),)
1475 def __repr__(self) -> str:
1476 """String representation of the team.
1478 Returns:
1479 str: String representation of EmailTeam instance
1480 """
1481 return f"<EmailTeam(id='{self.id}', name='{self.name}', is_personal={self.is_personal})>"
1483 def get_member_count(self) -> int:
1484 """Get the current number of team members.
1486 Uses direct SQL COUNT to avoid loading all members into memory.
1488 Returns:
1489 int: Number of active team members
1491 Examples:
1492 >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
1493 >>> team.get_member_count()
1494 0
1495 """
1496 # Third-Party
1497 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
1499 session = object_session(self)
1500 if session is None:
1501 # Fallback for detached objects (e.g., in doctests)
1502 return len([m for m in self.members if m.is_active])
1504 count = session.query(func.count(EmailTeamMember.id)).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.is_active.is_(True)).scalar() # pylint: disable=not-callable
1505 return count or 0
1507 def is_member(self, user_email: str) -> bool:
1508 """Check if a user is a member of this team.
1510 Uses direct SQL EXISTS to avoid loading all members into memory.
1512 Args:
1513 user_email: Email address to check
1515 Returns:
1516 bool: True if user is an active member, False otherwise
1518 Examples:
1519 >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
1520 >>> team.is_member("admin@example.com")
1521 False
1522 """
1523 # Third-Party
1524 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
1526 session = object_session(self)
1527 if session is None:
1528 # Fallback for detached objects (e.g., in doctests)
1529 return any(m.user_email == user_email and m.is_active for m in self.members)
1531 exists = session.query(EmailTeamMember.id).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.user_email == user_email, EmailTeamMember.is_active.is_(True)).first()
1532 return exists is not None
1534 def get_member_role(self, user_email: str) -> Optional[str]:
1535 """Get the role of a user in this team.
1537 Uses direct SQL query to avoid loading all members into memory.
1539 Args:
1540 user_email: Email address to check
1542 Returns:
1543 str: User's role or None if not a member
1545 Examples:
1546 >>> team = EmailTeam(name="Test Team", slug="test-team", created_by="admin@example.com")
1547 >>> team.get_member_role("admin@example.com")
1548 """
1549 # Third-Party
1550 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
1552 session = object_session(self)
1553 if session is None:
1554 # Fallback for detached objects (e.g., in doctests)
1555 for member in self.members:
1556 if member.user_email == user_email and member.is_active:
1557 return member.role
1558 return None
1560 member = session.query(EmailTeamMember.role).filter(EmailTeamMember.team_id == self.id, EmailTeamMember.user_email == user_email, EmailTeamMember.is_active.is_(True)).first()
1561 return member[0] if member else None
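# Illustrative sketch (not part of the original module): when the instance is
# attached to a session, get_member_role() runs a query equivalent to the one
# below; `session` and `team` are hypothetical stand-ins.
#
#     row = (
#         session.query(EmailTeamMember.role)
#         .filter(
#             EmailTeamMember.team_id == team.id,
#             EmailTeamMember.user_email == "user@example.com",
#             EmailTeamMember.is_active.is_(True),
#         )
#         .first()
#     )
#     role = row[0] if row else None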
1564class EmailTeamMember(Base):
1565 """Team membership model linking users to teams with roles.
1567 This model represents the many-to-many relationship between users and teams
1568 with additional role information and audit trails.
1570 Attributes:
1571 id (str): Primary key UUID
1572 team_id (str): Foreign key to email_teams
1573 user_email (str): Foreign key to email_users
1574 role (str): Member role (owner, member)
1575 joined_at (datetime): When the user joined the team
1576 invited_by (str): Email of the user who invited this member
1577 is_active (bool): Whether the membership is active
1579 Examples:
1580 >>> member = EmailTeamMember(
1581 ... team_id="team-123",
1582 ... user_email="user@example.com",
1583 ... role="member",
1584 ... invited_by="admin@example.com"
1585 ... )
1586 >>> member.role
1587 'member'
1588 """
1590 __tablename__ = "email_team_members"
1592 # Primary key
1593 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
1595 # Foreign keys
1596 team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)
1597 user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
1599 # Membership details
1600 role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
1601 joined_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1602 invited_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
1603 is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
1605 # Relationships
1606 team: Mapped["EmailTeam"] = relationship("EmailTeam", back_populates="members")
1607 user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
1608 inviter: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[invited_by])
1610 # Unique constraint to prevent duplicate memberships
1611 __table_args__ = (UniqueConstraint("team_id", "user_email", name="uq_team_member"),)
1613 def __repr__(self) -> str:
1614 """String representation of the team member.
1616 Returns:
1617 str: String representation of EmailTeamMember instance
1618 """
1619 return f"<EmailTeamMember(team_id='{self.team_id}', user_email='{self.user_email}', role='{self.role}')>"
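# Illustrative sketch (assumption): the uq_team_member constraint means a second
# membership row for the same (team_id, user_email) pair fails at flush time, so
# re-adding a former member is usually done by flipping is_active back to True.
# `session` and `team` are hypothetical.
#
#     from sqlalchemy.exc import IntegrityError
#     member = EmailTeamMember(team_id=team.id, user_email="user@example.com", role="member")
#     session.add(member)
#     try:
#         session.commit()
#     except IntegrityError:
#         session.rollback()  # duplicate (team_id, user_email) pair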
1622# Team member history model
1623class EmailTeamMemberHistory(Base):
1624 """
1625 History of team member actions (add, remove, reactivate, role change).
1627 This model records every membership-related event for audit and compliance.
1628 Each record tracks the team, user, role, action type, actor, and timestamp.
1630 Attributes:
1631 id (str): Primary key UUID
1632 team_id (str): Foreign key to email_teams
1633 user_email (str): Foreign key to email_users
1634 role (str): Role at the time of action
1635 action (str): Action type ("added", "removed", "reactivated", "role_changed")
1636 action_by (str): Email of the user who performed the action
1637 action_timestamp (datetime): When the action occurred
1639 Examples:
1640 >>> from mcpgateway.db import EmailTeamMemberHistory, utc_now
1641 >>> history = EmailTeamMemberHistory(
1642 ... team_id="team-123",
1643 ... user_email="user@example.com",
1644 ... role="member",
1645 ... action="added",
1646 ... action_by="admin@example.com",
1647 ... action_timestamp=utc_now()
1648 ... )
1649 >>> history.action
1650 'added'
1651 >>> history.role
1652 'member'
1653 >>> isinstance(history.action_timestamp, type(utc_now()))
1654 True
1655 """
1657 __tablename__ = "email_team_member_history"
1659 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
1660 team_member_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_team_members.id", ondelete="CASCADE"), nullable=False)
1661 team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id"), nullable=False)
1662 user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
1663 role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
1664 action: Mapped[str] = mapped_column(String(50), nullable=False) # e.g. "added", "removed", "reactivated", "role_changed"
1665 action_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
1666 action_timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1668 team_member: Mapped["EmailTeamMember"] = relationship("EmailTeamMember")
1669 team: Mapped["EmailTeam"] = relationship("EmailTeam")
1670 user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
1671 actor: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[action_by])
1673 def __repr__(self) -> str:
1674 """
1675 Return a string representation of the EmailTeamMemberHistory instance.
1677 Returns:
1678 str: A string summarizing the team member history record.
1680 Examples:
1681 >>> from mcpgateway.db import EmailTeamMemberHistory, utc_now
1682 >>> history = EmailTeamMemberHistory(
1683 ... team_member_id="tm-123",
1684 ... team_id="team-123",
1685 ... user_email="user@example.com",
1686 ... role="member",
1687 ... action="added",
1688 ... action_by="admin@example.com",
1689 ... action_timestamp=utc_now()
1690 ... )
1691 >>> isinstance(repr(history), str)
1692 True
1693 """
1694 return f"<EmailTeamMemberHistory(team_member_id='{self.team_member_id}', team_id='{self.team_id}', user_email='{self.user_email}', role='{self.role}', action='{self.action}', action_by='{self.action_by}', action_timestamp='{self.action_timestamp}')>"
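# Illustrative sketch (assumption): an audit row is typically written in the same
# transaction as the membership change it records; `session` and `member` are
# hypothetical.
#
#     member.role = "owner"
#     session.add(EmailTeamMemberHistory(
#         team_member_id=member.id,
#         team_id=member.team_id,
#         user_email=member.user_email,
#         role="owner",
#         action="role_changed",
#         action_by="admin@example.com",
#     ))
#     session.commit()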
1697class EmailTeamInvitation(Base):
1698 """Team invitation model for managing team member invitations.
1700 This model tracks invitations sent to users to join teams, including
1701 expiration dates and invitation tokens.
1703 Attributes:
1704 id (str): Primary key UUID
1705 team_id (str): Foreign key to email_teams
1706 email (str): Email address of the invited user
1707 role (str): Role the user will have when they accept
1708 invited_by (str): Email of the user who sent the invitation
1709 invited_at (datetime): When the invitation was sent
1710 expires_at (datetime): When the invitation expires
1711 token (str): Unique invitation token
1712 is_active (bool): Whether the invitation is still active
1714 Examples:
1715 >>> invitation = EmailTeamInvitation(
1716 ... team_id="team-123",
1717 ... email="newuser@example.com",
1718 ... role="member",
1719 ... invited_by="admin@example.com"
1720 ... )
1721 >>> invitation.role
1722 'member'
1723 """
1725 __tablename__ = "email_team_invitations"
1727 # Primary key
1728 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
1730 # Foreign keys
1731 team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)
1733 # Invitation details
1734 email: Mapped[str] = mapped_column(String(255), nullable=False)
1735 role: Mapped[str] = mapped_column(String(50), default="member", nullable=False)
1736 invited_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
1738 # Timing
1739 invited_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1740 expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
1742 # Security
1743 token: Mapped[str] = mapped_column(String(500), unique=True, nullable=False)
1744 is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
1746 # Relationships
1747 team: Mapped["EmailTeam"] = relationship("EmailTeam", back_populates="invitations")
1748 inviter: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[invited_by])
1750 def __repr__(self) -> str:
1751 """String representation of the team invitation.
1753 Returns:
1754 str: String representation of EmailTeamInvitation instance
1755 """
1756 return f"<EmailTeamInvitation(team_id='{self.team_id}', email='{self.email}', role='{self.role}')>"
1758 def is_expired(self) -> bool:
1759 """Check if the invitation has expired.
1761 Returns:
1762 bool: True if the invitation has expired, False otherwise
1764 Examples:
1765 >>> from datetime import timedelta
1766 >>> invitation = EmailTeamInvitation(
1767 ... team_id="team-123",
1768 ... email="user@example.com",
1769 ... role="member",
1770 ... invited_by="admin@example.com",
1771 ... expires_at=utc_now() + timedelta(days=7)
1772 ... )
1773 >>> invitation.is_expired()
1774 False
1775 """
1776 now = utc_now()
1777 expires_at = self.expires_at
1779 # Handle timezone awareness mismatch
1780 if now.tzinfo is not None and expires_at.tzinfo is None:
1781 expires_at = expires_at.replace(tzinfo=timezone.utc)
1782 elif now.tzinfo is None and expires_at.tzinfo is not None:
1783 now = now.replace(tzinfo=timezone.utc)
1785 return now > expires_at
1787 def is_valid(self) -> bool:
1788 """Check if the invitation is valid (active and not expired).
1790 Returns:
1791 bool: True if the invitation is valid, False otherwise
1793 Examples:
1794 >>> from datetime import timedelta
1795 >>> invitation = EmailTeamInvitation(
1796 ... team_id="team-123",
1797 ... email="user@example.com",
1798 ... role="member",
1799 ... invited_by="admin@example.com",
1800 ... expires_at=utc_now() + timedelta(days=7),
1801 ... is_active=True
1802 ... )
1803 >>> invitation.is_valid()
1804 True
1805 """
1806 return self.is_active and not self.is_expired()
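# Illustrative sketch mirroring the doctests above: an invitation whose
# expires_at lies in the past is reported as expired and therefore invalid.
#
# >>> from datetime import timedelta
# >>> stale = EmailTeamInvitation(
# ...     team_id="team-123",
# ...     email="user@example.com",
# ...     role="member",
# ...     invited_by="admin@example.com",
# ...     expires_at=utc_now() - timedelta(days=1),
# ...     is_active=True,
# ... )
# >>> stale.is_expired()
# True
# >>> stale.is_valid()
# False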
1809class EmailTeamJoinRequest(Base):
1810 """Team join request model for managing public team join requests.
1812 This model tracks user requests to join public teams, including
1813 approval workflow and expiration dates.
1815 Attributes:
1816 id (str): Primary key UUID
1817 team_id (str): Foreign key to email_teams
1818 user_email (str): Email of the user requesting to join
1819 message (str): Optional message from the user
1820 status (str): Request status (pending, approved, rejected, expired)
1821 requested_at (datetime): When the request was made
1822 expires_at (datetime): When the request expires
1823 reviewed_at (datetime): When the request was reviewed
1824 reviewed_by (str): Email of user who reviewed the request
1825 notes (str): Optional admin notes
1826 """
1828 __tablename__ = "email_team_join_requests"
1830 # Primary key
1831 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
1833 # Foreign keys
1834 team_id: Mapped[str] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="CASCADE"), nullable=False)
1835 user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
1837 # Request details
1838 message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1839 status: Mapped[str] = mapped_column(String(20), default="pending", nullable=False)
1841 # Timing
1842 requested_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1843 expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
1844 reviewed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
1845 reviewed_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
1846 notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1848 # Relationships
1849 team: Mapped["EmailTeam"] = relationship("EmailTeam")
1850 user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[user_email])
1851 reviewer: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[reviewed_by])
1853 # Unique constraint to prevent duplicate requests
1854 __table_args__ = (UniqueConstraint("team_id", "user_email", name="uq_team_join_request"),)
1856 def __repr__(self) -> str:
1857 """String representation of the team join request.
1859 Returns:
1860 str: String representation of the team join request.
1861 """
1862 return f"<EmailTeamJoinRequest(team_id='{self.team_id}', user_email='{self.user_email}', status='{self.status}')>"
1864 def is_expired(self) -> bool:
1865 """Check if the join request has expired.
1867 Returns:
1868 bool: True if the request has expired, False otherwise.
1869 """
1870 now = utc_now()
1871 expires_at = self.expires_at
1873 # Handle timezone awareness mismatch
1874 if now.tzinfo is not None and expires_at.tzinfo is None:
1875 expires_at = expires_at.replace(tzinfo=timezone.utc)
1876 elif now.tzinfo is None and expires_at.tzinfo is not None:
1877 now = now.replace(tzinfo=timezone.utc)
1879 return now > expires_at
1881 def is_pending(self) -> bool:
1882 """Check if the join request is still pending.
1884 Returns:
1885 bool: True if the request is pending and not expired, False otherwise.
1886 """
1887 return self.status == "pending" and not self.is_expired()
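# Illustrative sketch (assumption): approving a pending join request fills the
# review fields and creates the membership in one transaction; `session` and
# `request` are hypothetical.
#
#     if request.is_pending():
#         request.status = "approved"
#         request.reviewed_at = utc_now()
#         request.reviewed_by = "admin@example.com"
#         session.add(EmailTeamMember(team_id=request.team_id, user_email=request.user_email, role="member"))
#         session.commit()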
1890class PendingUserApproval(Base):
1891 """Model for pending SSO user registrations awaiting admin approval.
1893 This model stores information about users who have authenticated via SSO
1894 but require admin approval before their account is fully activated.
1896 Attributes:
1897 id (str): Primary key
1898 email (str): Email address of the pending user
1899 full_name (str): Full name from SSO provider
1900 auth_provider (str): SSO provider (github, google, etc.)
1901 sso_metadata (dict): Additional metadata from SSO provider
1902 requested_at (datetime): When the approval was requested
1903 expires_at (datetime): When the approval request expires
1904 approved_by (str): Email of admin who approved (if approved)
1905 approved_at (datetime): When the approval was granted
1906 status (str): Current status (pending, approved, rejected, expired)
1907 rejection_reason (str): Reason for rejection (if applicable)
1908 admin_notes (str): Notes from admin review
1910 Examples:
1911 >>> from datetime import timedelta
1912 >>> approval = PendingUserApproval(
1913 ... email="newuser@example.com",
1914 ... full_name="New User",
1915 ... auth_provider="github",
1916 ... expires_at=utc_now() + timedelta(days=30),
1917 ... status="pending"
1918 ... )
1919 >>> approval.status
1920 'pending'
1921 """
1923 __tablename__ = "pending_user_approvals"
1925 # Primary key
1926 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
1928 # User details
1929 email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
1930 full_name: Mapped[str] = mapped_column(String(255), nullable=False)
1931 auth_provider: Mapped[str] = mapped_column(String(50), nullable=False)
1932 sso_metadata: Mapped[Optional[Dict]] = mapped_column(JSON, nullable=True)
1934 # Request details
1935 requested_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
1936 expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
1938 # Approval details
1939 approved_by: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
1940 approved_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
1941 status: Mapped[str] = mapped_column(String(20), default="pending", nullable=False) # pending, approved, rejected, expired
1942 rejection_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1943 admin_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
1945 # Relationships
1946 approver: Mapped[Optional["EmailUser"]] = relationship("EmailUser", foreign_keys=[approved_by])
1948 def __repr__(self) -> str:
1949 """String representation of the pending approval.
1951 Returns:
1952 str: String representation of PendingUserApproval instance
1953 """
1954 return f"<PendingUserApproval(email='{self.email}', status='{self.status}', provider='{self.auth_provider}')>"
1956 def is_expired(self) -> bool:
1957 """Check if the approval request has expired.
1959 Returns:
1960 bool: True if the approval request has expired
1961 """
1962 now = utc_now()
1963 expires_at = self.expires_at
1965 # Handle timezone awareness mismatch
1966 if now.tzinfo is not None and expires_at.tzinfo is None:
1967 expires_at = expires_at.replace(tzinfo=timezone.utc)
1968 elif now.tzinfo is None and expires_at.tzinfo is not None: 1968 ↛ 1971line 1968 didn't jump to line 1971 because the condition on line 1968 was always true
1969 now = now.replace(tzinfo=timezone.utc)
1971 return now > expires_at
1973 def approve(self, admin_email: str, notes: Optional[str] = None) -> None:
1974 """Approve the user registration.
1976 Args:
1977 admin_email: Email of the admin approving the request
1978 notes: Optional admin notes
1979 """
1980 self.status = "approved"
1981 self.approved_by = admin_email
1982 self.approved_at = utc_now()
1983 self.admin_notes = notes
1985 def reject(self, admin_email: str, reason: str, notes: Optional[str] = None) -> None:
1986 """Reject the user registration.
1988 Args:
1989 admin_email: Email of the admin rejecting the request
1990 reason: Reason for rejection
1991 notes: Optional admin notes
1992 """
1993 self.status = "rejected"
1994 self.approved_by = admin_email
1995 self.approved_at = utc_now()
1996 self.rejection_reason = reason
1997 self.admin_notes = notes
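# Illustrative sketch: approve() and reject() only mutate the row; persisting the
# decision is left to the caller's session.
#
# >>> from datetime import timedelta
# >>> pending = PendingUserApproval(
# ...     email="newuser@example.com",
# ...     full_name="New User",
# ...     auth_provider="github",
# ...     expires_at=utc_now() + timedelta(days=30),
# ... )
# >>> pending.approve("admin@example.com", notes="Verified with the team lead")
# >>> pending.status
# 'approved'
# >>> pending.approved_by
# 'admin@example.com'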
2000# Association table for servers and tools
2001server_tool_association = Table(
2002 "server_tool_association",
2003 Base.metadata,
2004 Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
2005 Column("tool_id", String(36), ForeignKey("tools.id"), primary_key=True),
2006)
2008# Association table for servers and resources
2009server_resource_association = Table(
2010 "server_resource_association",
2011 Base.metadata,
2012 Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
2013 Column("resource_id", String(36), ForeignKey("resources.id"), primary_key=True),
2014)
2016# Association table for servers and prompts
2017server_prompt_association = Table(
2018 "server_prompt_association",
2019 Base.metadata,
2020 Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
2021 Column("prompt_id", String(36), ForeignKey("prompts.id"), primary_key=True),
2022)
2024# Association table for servers and A2A agents
2025server_a2a_association = Table(
2026 "server_a2a_association",
2027 Base.metadata,
2028 Column("server_id", String(36), ForeignKey("servers.id"), primary_key=True),
2029 Column("a2a_agent_id", String(36), ForeignKey("a2a_agents.id"), primary_key=True),
2030)
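# Illustrative sketch (assumption): these tables back the secondary= joins on the
# Server relationships defined later in this module, so linking entities is a
# plain ORM append; `session`, `server`, and `tool` are hypothetical.
#
#     server.tools.append(tool)
#     session.commit()  # writes one row into server_tool_association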
2033class GlobalConfig(Base):
2034 """Global configuration settings.
2036 Attributes:
2037 id (int): Primary key
2038 passthrough_headers (List[str]): List of headers allowed to be passed through globally
2039 """
2041 __tablename__ = "global_config"
2043 id: Mapped[int] = mapped_column(Integer, primary_key=True)
2044 passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True) # Store list of strings as JSON array
2047class ToolMetric(Base):
2048 """
2049 ORM model for recording individual metrics for tool executions.
2051 Each record in this table corresponds to a single tool invocation and records:
2052 - timestamp (datetime): When the invocation occurred.
2053 - response_time (float): The execution time in seconds.
2054 - is_success (bool): True if the execution succeeded, False otherwise.
2055 - error_message (Optional[str]): Error message if the execution failed.
2057 Aggregated metrics (such as total executions, successful/failed counts, failure rate,
2058 minimum, maximum, and average response times, and last execution time) should be computed
2059 on the fly using SQL aggregate functions over the rows in this table.
2060 """
2062 __tablename__ = "tool_metrics"
2064 id: Mapped[int] = mapped_column(primary_key=True)
2065 tool_id: Mapped[str] = mapped_column(String(36), ForeignKey("tools.id"), nullable=False, index=True)
2066 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
2067 response_time: Mapped[float] = mapped_column(Float, nullable=False)
2068 is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
2069 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2071 # Relationship back to the Tool model.
2072 tool: Mapped["Tool"] = relationship("Tool", back_populates="metrics")
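# Illustrative sketch: the on-the-fly aggregation described above, computed with
# SQL aggregate functions; `session` and `tool_id` are hypothetical.
#
#     total, avg_rt, min_rt, max_rt = session.query(
#         func.count(ToolMetric.id),
#         func.avg(ToolMetric.response_time),
#         func.min(ToolMetric.response_time),
#         func.max(ToolMetric.response_time),
#     ).filter(ToolMetric.tool_id == tool_id).one()
#     successes = session.query(func.count(ToolMetric.id)).filter(
#         ToolMetric.tool_id == tool_id, ToolMetric.is_success.is_(True)
#     ).scalar()
#     failure_rate = (total - successes) / total if total else 0.0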
2075class ResourceMetric(Base):
2076 """
2077 ORM model for recording metrics for resource invocations.
2079 Attributes:
2080 id (int): Primary key.
2081 resource_id (str): Foreign key linking to the resource.
2082 timestamp (datetime): The time when the invocation occurred.
2083 response_time (float): The response time in seconds.
2084 is_success (bool): True if the invocation succeeded, False otherwise.
2085 error_message (Optional[str]): Error message if the invocation failed.
2086 """
2088 __tablename__ = "resource_metrics"
2090 id: Mapped[int] = mapped_column(primary_key=True)
2091 resource_id: Mapped[str] = mapped_column(String(36), ForeignKey("resources.id"), nullable=False, index=True)
2092 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
2093 response_time: Mapped[float] = mapped_column(Float, nullable=False)
2094 is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
2095 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2097 # Relationship back to the Resource model.
2098 resource: Mapped["Resource"] = relationship("Resource", back_populates="metrics")
2101class ServerMetric(Base):
2102 """
2103 ORM model for recording metrics for server invocations.
2105 Attributes:
2106 id (int): Primary key.
2107 server_id (str): Foreign key linking to the server.
2108 timestamp (datetime): The time when the invocation occurred.
2109 response_time (float): The response time in seconds.
2110 is_success (bool): True if the invocation succeeded, False otherwise.
2111 error_message (Optional[str]): Error message if the invocation failed.
2112 """
2114 __tablename__ = "server_metrics"
2116 id: Mapped[int] = mapped_column(primary_key=True)
2117 server_id: Mapped[str] = mapped_column(String(36), ForeignKey("servers.id"), nullable=False, index=True)
2118 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
2119 response_time: Mapped[float] = mapped_column(Float, nullable=False)
2120 is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
2121 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2123 # Relationship back to the Server model.
2124 server: Mapped["Server"] = relationship("Server", back_populates="metrics")
2127class PromptMetric(Base):
2128 """
2129 ORM model for recording metrics for prompt invocations.
2131 Attributes:
2132 id (int): Primary key.
2133 prompt_id (str): Foreign key linking to the prompt.
2134 timestamp (datetime): The time when the invocation occurred.
2135 response_time (float): The response time in seconds.
2136 is_success (bool): True if the invocation succeeded, False otherwise.
2137 error_message (Optional[str]): Error message if the invocation failed.
2138 """
2140 __tablename__ = "prompt_metrics"
2142 id: Mapped[int] = mapped_column(primary_key=True)
2143 prompt_id: Mapped[str] = mapped_column(String(36), ForeignKey("prompts.id"), nullable=False, index=True)
2144 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
2145 response_time: Mapped[float] = mapped_column(Float, nullable=False)
2146 is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
2147 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2149 # Relationship back to the Prompt model.
2150 prompt: Mapped["Prompt"] = relationship("Prompt", back_populates="metrics")
2153class A2AAgentMetric(Base):
2154 """
2155 ORM model for recording metrics for A2A agent interactions.
2157 Attributes:
2158 id (int): Primary key.
2159 a2a_agent_id (str): Foreign key linking to the A2A agent.
2160 timestamp (datetime): The time when the interaction occurred.
2161 response_time (float): The response time in seconds.
2162 is_success (bool): True if the interaction succeeded, False otherwise.
2163 error_message (Optional[str]): Error message if the interaction failed.
2164 interaction_type (str): Type of interaction (invoke, query, etc.).
2165 """
2167 __tablename__ = "a2a_agent_metrics"
2169 id: Mapped[int] = mapped_column(primary_key=True)
2170 a2a_agent_id: Mapped[str] = mapped_column(String(36), ForeignKey("a2a_agents.id"), nullable=False, index=True)
2171 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, index=True)
2172 response_time: Mapped[float] = mapped_column(Float, nullable=False)
2173 is_success: Mapped[bool] = mapped_column(Boolean, nullable=False)
2174 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2175 interaction_type: Mapped[str] = mapped_column(String(50), nullable=False, default="invoke")
2177 # Relationship back to the A2AAgent model.
2178 a2a_agent: Mapped["A2AAgent"] = relationship("A2AAgent", back_populates="metrics")
2181# ===================================
2182# Metrics Hourly Rollup Tables
2183# These tables store pre-aggregated hourly summaries for efficient historical queries.
2184# Raw metrics can be cleaned up after rollup, reducing storage while preserving trends.
2185# ===================================
2188class ToolMetricsHourly(Base):
2189 """
2190 Hourly rollup of tool metrics for efficient historical trend analysis.
2192 This table stores pre-aggregated metrics per tool per hour, enabling fast
2193 queries for dashboards and reports without scanning millions of raw metrics.
2195 Attributes:
2196 id: Primary key.
2197 tool_id: Foreign key to the tool (nullable for deleted tools).
2198 tool_name: Tool name snapshot (preserved even if tool is deleted).
2199 hour_start: Start of the aggregation hour (UTC).
2200 total_count: Total invocations during this hour.
2201 success_count: Successful invocations.
2202 failure_count: Failed invocations.
2203 min_response_time: Minimum response time in seconds.
2204 max_response_time: Maximum response time in seconds.
2205 avg_response_time: Average response time in seconds.
2206 p50_response_time: 50th percentile (median) response time.
2207 p95_response_time: 95th percentile response time.
2208 p99_response_time: 99th percentile response time.
2209 created_at: When this rollup was created.
2210 """
2212 __tablename__ = "tool_metrics_hourly"
2213 __table_args__ = (
2214 UniqueConstraint("tool_id", "hour_start", name="uq_tool_metrics_hourly_tool_hour"),
2215 Index("ix_tool_metrics_hourly_hour_start", "hour_start"),
2216 )
2218 id: Mapped[int] = mapped_column(primary_key=True)
2219 tool_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("tools.id", ondelete="SET NULL"), nullable=True, index=True)
2220 tool_name: Mapped[str] = mapped_column(String(255), nullable=False)
2221 hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2222 total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2223 success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2224 failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2225 min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2226 max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2227 avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2228 p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2229 p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2230 p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2231 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
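# Illustrative sketch (assumption): a rollup job might fold raw ToolMetric rows
# into one ToolMetricsHourly row per tool per hour, roughly as below; `session`,
# `tool`, and `hour_start` are hypothetical, and success/failure counts and
# percentiles would be computed similarly.
#
#     window = (ToolMetric.timestamp >= hour_start, ToolMetric.timestamp < hour_start + timedelta(hours=1))
#     total, min_rt, max_rt, avg_rt = session.query(
#         func.count(ToolMetric.id),
#         func.min(ToolMetric.response_time),
#         func.max(ToolMetric.response_time),
#         func.avg(ToolMetric.response_time),
#     ).filter(ToolMetric.tool_id == tool.id, *window).one()
#     session.add(ToolMetricsHourly(
#         tool_id=tool.id, tool_name=tool.original_name, hour_start=hour_start,
#         total_count=total, min_response_time=min_rt,
#         max_response_time=max_rt, avg_response_time=avg_rt,
#     ))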
2234class ResourceMetricsHourly(Base):
2235 """Hourly rollup of resource metrics for efficient historical trend analysis."""
2237 __tablename__ = "resource_metrics_hourly"
2238 __table_args__ = (
2239 UniqueConstraint("resource_id", "hour_start", name="uq_resource_metrics_hourly_resource_hour"),
2240 Index("ix_resource_metrics_hourly_hour_start", "hour_start"),
2241 )
2243 id: Mapped[int] = mapped_column(primary_key=True)
2244 resource_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("resources.id", ondelete="SET NULL"), nullable=True, index=True)
2245 resource_name: Mapped[str] = mapped_column(String(255), nullable=False)
2246 hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2247 total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2248 success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2249 failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2250 min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2251 max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2252 avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2253 p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2254 p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2255 p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2256 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
2259class PromptMetricsHourly(Base):
2260 """Hourly rollup of prompt metrics for efficient historical trend analysis."""
2262 __tablename__ = "prompt_metrics_hourly"
2263 __table_args__ = (
2264 UniqueConstraint("prompt_id", "hour_start", name="uq_prompt_metrics_hourly_prompt_hour"),
2265 Index("ix_prompt_metrics_hourly_hour_start", "hour_start"),
2266 )
2268 id: Mapped[int] = mapped_column(primary_key=True)
2269 prompt_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("prompts.id", ondelete="SET NULL"), nullable=True, index=True)
2270 prompt_name: Mapped[str] = mapped_column(String(255), nullable=False)
2271 hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2272 total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2273 success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2274 failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2275 min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2276 max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2277 avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2278 p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2279 p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2280 p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2281 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
2284class ServerMetricsHourly(Base):
2285 """Hourly rollup of server metrics for efficient historical trend analysis."""
2287 __tablename__ = "server_metrics_hourly"
2288 __table_args__ = (
2289 UniqueConstraint("server_id", "hour_start", name="uq_server_metrics_hourly_server_hour"),
2290 Index("ix_server_metrics_hourly_hour_start", "hour_start"),
2291 )
2293 id: Mapped[int] = mapped_column(primary_key=True)
2294 server_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("servers.id", ondelete="SET NULL"), nullable=True, index=True)
2295 server_name: Mapped[str] = mapped_column(String(255), nullable=False)
2296 hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2297 total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2298 success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2299 failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2300 min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2301 max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2302 avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2303 p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2304 p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2305 p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2306 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
2309class A2AAgentMetricsHourly(Base):
2310 """Hourly rollup of A2A agent metrics for efficient historical trend analysis."""
2312 __tablename__ = "a2a_agent_metrics_hourly"
2313 __table_args__ = (
2314 UniqueConstraint("a2a_agent_id", "hour_start", "interaction_type", name="uq_a2a_agent_metrics_hourly_agent_hour_type"),
2315 Index("ix_a2a_agent_metrics_hourly_hour_start", "hour_start"),
2316 )
2318 id: Mapped[int] = mapped_column(primary_key=True)
2319 a2a_agent_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("a2a_agents.id", ondelete="SET NULL"), nullable=True, index=True)
2320 agent_name: Mapped[str] = mapped_column(String(255), nullable=False)
2321 hour_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2322 interaction_type: Mapped[str] = mapped_column(String(50), nullable=False, default="invoke")
2323 total_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2324 success_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2325 failure_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
2326 min_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2327 max_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2328 avg_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2329 p50_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2330 p95_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2331 p99_response_time: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2332 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
2335# ===================================
2336# Observability Models (OpenTelemetry-style traces, spans, events)
2337# ===================================
2340class ObservabilityTrace(Base):
2341 """
2342 ORM model for observability traces (similar to OpenTelemetry traces).
2344 A trace represents a complete request flow through the system. It contains
2345 one or more spans representing individual operations.
2347 Attributes:
2348 trace_id (str): Unique trace identifier (UUID or OpenTelemetry trace ID format).
2349 name (str): Human-readable name for the trace (e.g., "POST /tools/invoke").
2350 start_time (datetime): When the trace started.
2351 end_time (datetime): When the trace ended (optional, set when completed).
2352 duration_ms (float): Total duration in milliseconds.
2353 status (str): Trace status (unset, ok, error).
2354 status_message (str): Optional status message or error description.
2355 http_method (str): HTTP method for the request (GET, POST, etc.).
2356 http_url (str): Full URL of the request.
2357 http_status_code (int): HTTP response status code.
2358 user_email (str): User who initiated the request (if authenticated).
2359 user_agent (str): Client user agent string.
2360 ip_address (str): Client IP address.
2361 attributes (dict): Additional trace attributes (JSON).
2362 resource_attributes (dict): Resource attributes (service name, version, etc.).
2363 created_at (datetime): Trace creation timestamp.
2364 """
2366 __tablename__ = "observability_traces"
2368 # Primary key
2369 trace_id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
2371 # Trace metadata
2372 name: Mapped[str] = mapped_column(String(255), nullable=False)
2373 start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
2374 end_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
2375 duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2376 status: Mapped[str] = mapped_column(String(20), nullable=False, default="unset") # unset, ok, error
2377 status_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2379 # HTTP request context
2380 http_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
2381 http_url: Mapped[Optional[str]] = mapped_column(String(767), nullable=True)
2382 http_status_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
2384 # User context
2385 user_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
2386 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2387 ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
2389 # Attributes (flexible key-value storage)
2390 attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
2391 resource_attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
2393 # Timestamps
2394 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2396 # Relationships
2397 spans: Mapped[List["ObservabilitySpan"]] = relationship("ObservabilitySpan", back_populates="trace", cascade="all, delete-orphan")
2399 # Indexes for performance
2400 __table_args__ = (
2401 Index("idx_observability_traces_start_time", "start_time"),
2402 Index("idx_observability_traces_user_email", "user_email"),
2403 Index("idx_observability_traces_status", "status"),
2404 Index("idx_observability_traces_http_status_code", "http_status_code"),
2405 )
2408class ObservabilitySpan(Base):
2409 """
2410 ORM model for observability spans (similar to OpenTelemetry spans).
2412 A span represents a single operation within a trace. Spans can be nested
2413 to represent hierarchical operations.
2415 Attributes:
2416 span_id (str): Unique span identifier.
2417 trace_id (str): Parent trace ID.
2418 parent_span_id (str): Parent span ID (for nested spans).
2419 name (str): Span name (e.g., "database_query", "tool_invocation").
2420 kind (str): Span kind (internal, server, client, producer, consumer).
2421 start_time (datetime): When the span started.
2422 end_time (datetime): When the span ended.
2423 duration_ms (float): Span duration in milliseconds.
2424 status (str): Span status (unset, ok, error).
2425 status_message (str): Optional status message.
2426 attributes (dict): Span attributes (JSON).
2427 resource_name (str): Name of the resource being operated on.
2428 resource_type (str): Type of resource (tool, resource, prompt, gateway, etc.).
2429 resource_id (str): ID of the specific resource.
2430 created_at (datetime): Span creation timestamp.
2431 """
2433 __tablename__ = "observability_spans"
2435 # Primary key
2436 span_id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
2438 # Trace relationship
2439 trace_id: Mapped[str] = mapped_column(String(36), ForeignKey("observability_traces.trace_id", ondelete="CASCADE"), nullable=False, index=True)
2440 parent_span_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("observability_spans.span_id", ondelete="CASCADE"), nullable=True, index=True)
2442 # Span metadata
2443 name: Mapped[str] = mapped_column(String(255), nullable=False)
2444 kind: Mapped[str] = mapped_column(String(20), nullable=False, default="internal") # internal, server, client, producer, consumer
2445 start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
2446 end_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
2447 duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
2448 status: Mapped[str] = mapped_column(String(20), nullable=False, default="unset")
2449 status_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2451 # Attributes
2452 attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
2454 # Resource context
2455 resource_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
2456 resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True) # tool, resource, prompt, gateway, a2a_agent
2457 resource_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True, index=True)
2459 # Timestamps
2460 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2462 # Relationships
2463 trace: Mapped["ObservabilityTrace"] = relationship("ObservabilityTrace", back_populates="spans")
2464 parent_span: Mapped[Optional["ObservabilitySpan"]] = relationship("ObservabilitySpan", remote_side=[span_id], backref="child_spans")
2465 events: Mapped[List["ObservabilityEvent"]] = relationship("ObservabilityEvent", back_populates="span", cascade="all, delete-orphan")
2467 # Indexes for performance
2468 __table_args__ = (
2469 Index("idx_observability_spans_trace_id", "trace_id"),
2470 Index("idx_observability_spans_parent_span_id", "parent_span_id"),
2471 Index("idx_observability_spans_start_time", "start_time"),
2472 Index("idx_observability_spans_resource_type", "resource_type"),
2473 Index("idx_observability_spans_resource_name", "resource_name"),
2474 )
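# Illustrative sketch: building a trace with a parent span and a nested child
# span; duration fields are filled in when each operation finishes. `session`
# is hypothetical.
#
#     trace = ObservabilityTrace(name="POST /tools/invoke", start_time=utc_now())
#     parent = ObservabilitySpan(trace=trace, name="tool_invocation", kind="server",
#                                start_time=utc_now(), resource_type="tool")
#     child = ObservabilitySpan(trace=trace, parent_span=parent, name="http_request",
#                               kind="client", start_time=utc_now())
#     session.add(trace)  # spans are persisted through the cascading relationship
#     session.commit()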
2477class ObservabilityEvent(Base):
2478 """
2479 ORM model for observability events (logs within spans).
2481 Events represent discrete occurrences within a span, such as log messages,
2482 exceptions, or state changes.
2484 Attributes:
2485 id (int): Auto-incrementing primary key.
2486 span_id (str): Parent span ID.
2487 name (str): Event name (e.g., "exception", "log", "checkpoint").
2488 timestamp (datetime): When the event occurred.
2489 attributes (dict): Event attributes (JSON).
2490 severity (str): Log severity level (debug, info, warning, error, critical).
2491 message (str): Event message.
2492 exception_type (str): Exception class name (if event is an exception).
2493 exception_message (str): Exception message.
2494 exception_stacktrace (str): Exception stacktrace.
2495 created_at (datetime): Event creation timestamp.
2496 """
2498 __tablename__ = "observability_events"
2500 # Primary key
2501 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
2503 # Span relationship
2504 span_id: Mapped[str] = mapped_column(String(36), ForeignKey("observability_spans.span_id", ondelete="CASCADE"), nullable=False, index=True)
2506 # Event metadata
2507 name: Mapped[str] = mapped_column(String(255), nullable=False)
2508 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
2509 attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
2511 # Log fields
2512 severity: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, index=True) # debug, info, warning, error, critical
2513 message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2515 # Exception fields
2516 exception_type: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2517 exception_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2518 exception_stacktrace: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2520 # Timestamps
2521 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2523 # Relationships
2524 span: Mapped["ObservabilitySpan"] = relationship("ObservabilitySpan", back_populates="events")
2526 # Indexes for performance
2527 __table_args__ = (
2528 Index("idx_observability_events_span_id", "span_id"),
2529 Index("idx_observability_events_timestamp", "timestamp"),
2530 Index("idx_observability_events_severity", "severity"),
2531 )
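# Illustrative sketch: attaching an exception event to a span; `span` and
# `session` are hypothetical.
#
#     span.events.append(ObservabilityEvent(
#         name="exception",
#         severity="error",
#         message="Tool invocation failed",
#         exception_type="TimeoutError",
#         exception_message="upstream did not respond within 30s",
#     ))
#     session.commit()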
2534class ObservabilityMetric(Base):
2535 """
2536 ORM model for observability metrics (time-series numerical data).
2538 Metrics represent numerical measurements over time, such as request rates,
2539 error rates, latencies, and custom business metrics.
2541 Attributes:
2542 id (int): Auto-incrementing primary key.
2543 name (str): Metric name (e.g., "http.request.duration", "tool.invocation.count").
2544 metric_type (str): Metric type (counter, gauge, histogram).
2545 value (float): Metric value.
2546 timestamp (datetime): When the metric was recorded.
2547 unit (str): Metric unit (ms, count, bytes, etc.).
2548 attributes (dict): Metric attributes/labels (JSON).
2549 resource_type (str): Type of resource (tool, resource, prompt, etc.).
2550 resource_id (str): ID of the specific resource.
2551 trace_id (str): Associated trace ID (optional).
2552 created_at (datetime): Metric creation timestamp.
2553 """
2555 __tablename__ = "observability_metrics"
2557 # Primary key
2558 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
2560 # Metric metadata
2561 name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
2562 metric_type: Mapped[str] = mapped_column(String(20), nullable=False) # counter, gauge, histogram
2563 value: Mapped[float] = mapped_column(Float, nullable=False)
2564 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
2565 unit: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
2567 # Attributes/labels
2568 attributes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, default=dict)
2570 # Resource context
2571 resource_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True)
2572 resource_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True, index=True)
2574 # Trace association (optional)
2575 trace_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("observability_traces.trace_id", ondelete="SET NULL"), nullable=True, index=True)
2577 # Timestamps
2578 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2580 # Indexes for performance
2581 __table_args__ = (
2582 Index("idx_observability_metrics_name_timestamp", "name", "timestamp"),
2583 Index("idx_observability_metrics_resource_type", "resource_type"),
2584 Index("idx_observability_metrics_trace_id", "trace_id"),
2585 )
2588class ObservabilitySavedQuery(Base):
2589 """
2590 ORM model for saved observability queries (filter presets).
2592 Allows users to save their filter configurations for quick access and
2593 historical query tracking. Queries can be personal or shared with the team.
2595 Attributes:
2596 id (int): Auto-incrementing primary key.
2597 name (str): User-given name for the saved query.
2598 description (str): Optional description of what this query finds.
2599 user_email (str): Email of the user who created this query.
2600 filter_config (dict): JSON containing all filter values (time_range, status_filter, etc.).
2601 is_shared (bool): Whether this query is visible to other users.
2602 created_at (datetime): When the query was created.
2603 updated_at (datetime): When the query was last modified.
2604 last_used_at (datetime): When the query was last executed.
2605 use_count (int): How many times this query has been used.
2606 """
2608 __tablename__ = "observability_saved_queries"
2610 # Primary key
2611 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
2613 # Query metadata
2614 name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
2615 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2616 user_email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
2618 # Filter configuration (stored as JSON)
2619 filter_config: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False)
2621 # Sharing settings
2622 is_shared: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
2624 # Timestamps and usage tracking
2625 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2626 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
2627 last_used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
2628 use_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
2630 # Indexes for performance
2631 __table_args__ = (
2632 Index("idx_observability_saved_queries_user_email", "user_email"),
2633 Index("idx_observability_saved_queries_is_shared", "is_shared"),
2634 Index("idx_observability_saved_queries_created_at", "created_at"),
2635 )
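# Illustrative sketch (assumption): filter_config is a free-form JSON document;
# the keys below are examples only, not a schema defined by this module.
#
#     saved = ObservabilitySavedQuery(
#         name="Errors, last 24h",
#         user_email="admin@example.com",
#         filter_config={"time_range": "24h", "status_filter": "error"},
#         is_shared=True,
#     )
#     session.add(saved)
#     session.commit()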
2638# ---------------------------------------------------------------------------
2639# Performance Monitoring Models
2640# ---------------------------------------------------------------------------
2643class PerformanceSnapshot(Base):
2644 """
2645 ORM model for point-in-time performance snapshots.
2647 Stores comprehensive system, request, and worker metrics at regular intervals
2648 for historical analysis and trend detection.
2650 Attributes:
2651 id (int): Auto-incrementing primary key.
2652 timestamp (datetime): When the snapshot was taken.
2653 host (str): Hostname of the machine.
2654 worker_id (str): Worker identifier (PID or UUID).
2655 metrics_json (dict): JSON blob containing all metrics data.
2656 created_at (datetime): Record creation timestamp.
2657 """
2659 __tablename__ = "performance_snapshots"
2661 # Primary key
2662 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
2664 # Snapshot metadata
2665 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False, index=True)
2666 host: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
2667 worker_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True)
2669 # Metrics data (JSON blob)
2670 metrics_json: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False)
2672 # Timestamps
2673 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2675 # Indexes for efficient querying
2676 __table_args__ = (
2677 Index("idx_performance_snapshots_timestamp", "timestamp"),
2678 Index("idx_performance_snapshots_host_timestamp", "host", "timestamp"),
2679 Index("idx_performance_snapshots_created_at", "created_at"),
2680 )
2683class PerformanceAggregate(Base):
2684 """
2685 ORM model for aggregated performance metrics.
2687 Stores hourly and daily aggregations of performance data for efficient
2688 historical reporting and trend analysis.
2690 Attributes:
2691 id (int): Auto-incrementing primary key.
2692 period_start (datetime): Start of the aggregation period.
2693 period_end (datetime): End of the aggregation period.
2694 period_type (str): Type of aggregation (hourly, daily).
2695 host (str): Hostname (None for cluster-wide aggregates).
2696 Various aggregate metrics for requests and resources.
2697 created_at (datetime): Record creation timestamp.
2698 """
2700 __tablename__ = "performance_aggregates"
2702 # Primary key
2703 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
2705 # Period metadata
2706 period_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
2707 period_end: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
2708 period_type: Mapped[str] = mapped_column(String(20), nullable=False, index=True) # hourly, daily
2709 host: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
2711 # Request aggregates
2712 requests_total: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
2713 requests_2xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
2714 requests_4xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
2715 requests_5xx: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
2716 avg_response_time_ms: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2717 p95_response_time_ms: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2718 peak_requests_per_second: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2720 # Resource aggregates
2721 avg_cpu_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2722 avg_memory_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2723 peak_cpu_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2724 peak_memory_percent: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
2726 # Timestamps
2727 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
2729 # Indexes and constraints
2730 __table_args__ = (
2731 Index("idx_performance_aggregates_period", "period_type", "period_start"),
2732 Index("idx_performance_aggregates_host_period", "host", "period_type", "period_start"),
2733 UniqueConstraint("period_type", "period_start", "host", name="uq_performance_aggregate_period_host"),
2734 )
2737class Tool(Base):
2738 """
2739 ORM model for a registered Tool.
2741 Supports both local tools and federated tools from other gateways.
2742 The integration_type field indicates the tool format:
2743 - "MCP" for MCP-compliant tools (default)
2744 - "REST" for REST tools
2746 Additionally, this model provides computed properties for aggregated metrics based
2747 on the associated ToolMetric records. These include:
2748 - execution_count: Total number of invocations.
2749 - successful_executions: Count of successful invocations.
2750 - failed_executions: Count of failed invocations.
2751 - failure_rate: Ratio of failed invocations to total invocations.
2752 - min_response_time: Fastest recorded response time.
2753 - max_response_time: Slowest recorded response time.
2754 - avg_response_time: Mean response time.
2755 - last_execution_time: Timestamp of the most recent invocation.
2757 The property `metrics_summary` returns a dictionary with these aggregated values.
2759 Team association is handled via the `email_team` relationship (default lazy loading)
2760 which only includes active teams. For list operations, use explicit joinedload()
2761 to eager load team names. The `team` property provides convenient access to
2762 the team name:
2763 - team: Returns the team name if the tool belongs to an active team, otherwise None.
2765 The following fields have been added to support tool invocation configuration:
2766 - request_type: HTTP method to use when invoking the tool.
2767 - auth_type: Type of authentication ("basic", "bearer", or None).
2768 - auth_username: Username for basic authentication.
2769 - auth_password: Password for basic authentication.
2770 - auth_token: Token for bearer token authentication.
2771 - auth_header_key: Header key for authentication.
2772 - auth_header_value: Header value for authentication.
2773 """
2775 __tablename__ = "tools"
2777 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
2778 original_name: Mapped[str] = mapped_column(String(255), nullable=False)
2779 url: Mapped[Optional[str]] = mapped_column(String(767), nullable=True)
2780 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2781 integration_type: Mapped[str] = mapped_column(String(20), default="MCP")
2782 request_type: Mapped[str] = mapped_column(String(20), default="SSE")
2783 headers: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
2784 input_schema: Mapped[Dict[str, Any]] = mapped_column(JSON)
2785 output_schema: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
2786 annotations: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, default=lambda: {})
2787 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
2788 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
2789 enabled: Mapped[bool] = mapped_column(default=True)
2790 reachable: Mapped[bool] = mapped_column(default=True)
2791 jsonpath_filter: Mapped[str] = mapped_column(Text, default="")
2792 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
2794 # Comprehensive metadata for audit tracking
2795 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2796 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
2797 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
2798 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2800 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2801 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
2802 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
2803 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
2805 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
2806 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2807 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
2809 # Request type and authentication fields
2810 auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None) # "basic", "bearer", or None
2811 auth_value: Mapped[Optional[str]] = mapped_column(Text, default=None)
2813 # custom_name, custom_name_slug, display_name
2814 custom_name: Mapped[str] = mapped_column(String(255), nullable=False)
2815 custom_name_slug: Mapped[str] = mapped_column(String(255), nullable=False)
2816 display_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2818 # Passthrough REST fields
2819 base_url: Mapped[Optional[str]] = mapped_column(String, nullable=True)
2820 path_template: Mapped[Optional[str]] = mapped_column(String, nullable=True)
2821 query_mapping: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
2822 header_mapping: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
2823 timeout_ms: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, default=None)
2824 expose_passthrough: Mapped[bool] = mapped_column(Boolean, default=True)
2825 allowlist: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
2826 plugin_chain_pre: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
2827 plugin_chain_post: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
2829 # Federation relationship with a local gateway
2830 gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
2831 # gateway_slug: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.slug"))
2832 gateway: Mapped["Gateway"] = relationship("Gateway", primaryjoin="Tool.gateway_id == Gateway.id", foreign_keys=[gateway_id], back_populates="tools")
2833 # federated_with = relationship("Gateway", secondary=tool_gateway_table, back_populates="federated_tools")
2835 # Many-to-many relationship with Servers
2836 servers: Mapped[List["Server"]] = relationship("Server", secondary=server_tool_association, back_populates="tools")
2838 # Relationship with ToolMetric records
2839 metrics: Mapped[List["ToolMetric"]] = relationship("ToolMetric", back_populates="tool", cascade="all, delete-orphan")
2841 # Team scoping fields for resource organization
2842 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
2843 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
2844 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
2846 # Relationship for loading team names (only active teams)
2847 # Uses default lazy loading - team name is only loaded when accessed
2848 # For list/admin views, use explicit joinedload(DbTool.email_team) for single-query loading
2849 # This avoids adding overhead to hot paths like tool invocation that don't need team names
2850 email_team: Mapped[Optional["EmailTeam"]] = relationship(
2851 "EmailTeam",
2852 primaryjoin="and_(Tool.team_id == EmailTeam.id, EmailTeam.is_active == True)",
2853 foreign_keys=[team_id],
2854 )
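# Illustrative usage (hypothetical caller code, not part of this model): for list and
# admin views, eager-load team names in a single query instead of per-row lazy loads:
#
#     tools = session.query(Tool).options(joinedload(Tool.email_team)).all()
#     team_names = [tool.team for tool in tools]
#
# Hot paths such as tool invocation simply skip the option and never touch `email_team`.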
2856 @property
2857 def team(self) -> Optional[str]:
2858 """Return the team name from the eagerly-loaded email_team relationship.
2860 Returns:
2861 Optional[str]: The team name if the tool belongs to an active team, otherwise None.
2862 """
2863 return self.email_team.name if self.email_team else None
2865 # @property
2866 # def gateway_slug(self) -> str:
2867 # return self.gateway.slug
2869 _computed_name: Mapped[str] = mapped_column("name", String(255), nullable=False) # Stored column
2871 @hybrid_property
2872 def name(self) -> str:
2873 """Return the display/lookup name computed from gateway and custom slug.
2875 Returns:
2876 str: Display/lookup name to use for this tool.
2877 """
2878 # Instance access resolves Column to Python value; cast ensures static acceptance
2879 if getattr(self, "_computed_name", None):
2880 return cast(str, getattr(self, "_computed_name"))
2881 custom_name_slug = slugify(getattr(self, "custom_name_slug"))
2882 if getattr(self, "gateway_id", None):
2883 gateway_slug = slugify(self.gateway.name) # type: ignore[attr-defined]
2884 return f"{gateway_slug}{settings.gateway_tool_name_separator}{custom_name_slug}"
2885 return custom_name_slug
2887 @name.setter
2888 def name(self, value: str) -> None:
2889 """Setter for the stored name column.
2891 Args:
2892 value: Explicit name to persist to the underlying column.
2893 """
2894 setattr(self, "_computed_name", value)
2896 @name.expression
2897 @classmethod
2898 def name(cls) -> Any:
2899 """SQL expression for name used in queries (backs onto stored column).
2901 Returns:
2902 Any: SQLAlchemy expression referencing the stored name column.
2903 """
2904 return cls._computed_name
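# Illustrative usage (hypothetical caller code): because `name` is a hybrid property
# backed by the stored "name" column, the same attribute works on instances and in SQL:
#
#     tool.name                                            # computed or stored value
#     session.query(Tool).filter(Tool.name == "my-tool")   # "my-tool" is a placeholder value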
2906 __table_args__ = (
2907 UniqueConstraint("gateway_id", "original_name", name="uq_gateway_id__original_name"),
2908 UniqueConstraint("team_id", "owner_email", "name", name="uq_team_owner_email_name_tool"),
2909 Index("idx_tools_created_at_id", "created_at", "id"),
2910 )
2912 @hybrid_property
2913 def gateway_slug(self) -> Optional[str]:
2914 """Python accessor returning the related gateway's slug if available.
2916 Returns:
2917 Optional[str]: The gateway slug, or None if no gateway relation.
2918 """
2919 return self.gateway.slug if self.gateway else None
2921 @gateway_slug.expression
2922 @classmethod
2923 def gateway_slug(cls) -> Any:
2924 """SQL expression to select current gateway slug for this tool.
2926 Returns:
2927 Any: SQLAlchemy scalar subquery selecting the gateway slug.
2928 """
2929 return select(Gateway.slug).where(Gateway.id == cls.gateway_id).scalar_subquery()
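# Illustrative usage (hypothetical caller code): the expression form emits a correlated
# scalar subquery against the gateways table, so the slug can be selected or sorted on:
#
#     session.query(Tool.id, Tool.gateway_slug).order_by(Tool.gateway_slug).all()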
2931 def _metrics_loaded(self) -> bool:
2932 """Check if metrics relationship is loaded without triggering lazy load.
2934 Returns:
2935 bool: True if metrics are loaded, False otherwise.
2936 """
2937 return "metrics" in sa_inspect(self).dict
2939 def _get_metric_counts(self) -> tuple[int, int, int]:
2940 """Get total, successful, and failed metric counts in a single operation.
2942 When metrics are already loaded, computes from memory in O(n).
2943 When not loaded, uses a single SQL query with conditional aggregation.
2945 Note: For bulk operations, use metrics_summary which computes all fields
2946 in a single pass, or ensure metrics are preloaded via selectinload.
2948 Returns:
2949 tuple[int, int, int]: (total, successful, failed) counts.
2950 """
2951 # If metrics are loaded, compute from memory in a single pass
2952 if self._metrics_loaded():
2953 total = 0
2954 successful = 0
2955 for m in self.metrics:
2956 total += 1
2957 if m.is_success:
2958 successful += 1
2959 return (total, successful, total - successful)
2961 # Use single SQL query with conditional aggregation
2962 # Third-Party
2963 from sqlalchemy import case # pylint: disable=import-outside-toplevel
2964 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
2966 session = object_session(self)
2967 if session is None:
2968 return (0, 0, 0)
2970 result = (
2971 session.query(
2972 func.count(ToolMetric.id), # pylint: disable=not-callable
2973 func.sum(case((ToolMetric.is_success.is_(True), 1), else_=0)),
2974 )
2975 .filter(ToolMetric.tool_id == self.id)
2976 .one()
2977 )
2979 total = result[0] or 0
2980 successful = result[1] or 0
2981 return (total, successful, total - successful)
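# Illustrative usage (hypothetical caller code): preload metrics when several metric
# properties will be read, so the in-memory path above runs instead of one SQL query
# per property access:
#
#     from sqlalchemy.orm import selectinload
#     tools = session.query(Tool).options(selectinload(Tool.metrics)).all()
#     rates = {tool.name: tool.failure_rate for tool in tools}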
2983 @hybrid_property
2984 def execution_count(self) -> int:
2985 """Number of ToolMetric records associated with this tool instance.
2987 Note: Each property access may trigger a SQL query if metrics aren't loaded.
2988 For reading multiple metric fields, use metrics_summary or preload metrics.
2990 Returns:
2991 int: Count of ToolMetric records for this tool.
2992 """
2993 return self._get_metric_counts()[0]
2995 @execution_count.expression
2996 @classmethod
2997 def execution_count(cls) -> Any:
2998 """SQL expression that counts ToolMetric rows for this tool.
3000 Returns:
3001 Any: SQLAlchemy labeled count expression for tool metrics.
3002 """
3003 return select(func.count(ToolMetric.id)).where(ToolMetric.tool_id == cls.id).correlate(cls).scalar_subquery().label("execution_count") # pylint: disable=not-callable
3005 @property
3006 def successful_executions(self) -> int:
3007 """Count of successful tool executions.
3009 Returns:
3010 int: The count of successful tool executions.
3011 """
3012 return self._get_metric_counts()[1]
3014 @property
3015 def failed_executions(self) -> int:
3016 """Count of failed tool executions.
3018 Returns:
3019 int: The count of failed tool executions.
3020 """
3021 return self._get_metric_counts()[2]
3023 @property
3024 def failure_rate(self) -> float:
3025 """Failure rate as a float between 0 and 1.
3027 Returns:
3028 float: The failure rate as a value between 0 and 1.
3029 """
3030 total, _, failed = self._get_metric_counts()
3031 return failed / total if total > 0 else 0.0
3033 @property
3034 def min_response_time(self) -> Optional[float]:
3035 """Minimum response time among all tool executions.
3037 Returns None if metrics are not loaded (use metrics_summary for SQL fallback).
3039 Returns:
3040 Optional[float]: The minimum response time, or None.
3041 """
3042 if not self._metrics_loaded():
3043 return None
3044 times: List[float] = [m.response_time for m in self.metrics]
3045 return min(times) if times else None
3047 @property
3048 def max_response_time(self) -> Optional[float]:
3049 """Maximum response time among all tool executions.
3051 Returns None if metrics are not loaded (use metrics_summary for SQL fallback).
3053 Returns:
3054 Optional[float]: The maximum response time, or None.
3055 """
3056 if not self._metrics_loaded():
3057 return None
3058 times: List[float] = [m.response_time for m in self.metrics]
3059 return max(times) if times else None
3061 @property
3062 def avg_response_time(self) -> Optional[float]:
3063 """Average response time among all tool executions.
3065 Returns None if metrics are not loaded (use metrics_summary for SQL fallback).
3067 Returns:
3068 Optional[float]: The average response time, or None.
3069 """
3070 if not self._metrics_loaded():
3071 return None
3072 times: List[float] = [m.response_time for m in self.metrics]
3073 return sum(times) / len(times) if times else None
3075 @property
3076 def last_execution_time(self) -> Optional[datetime]:
3077 """Timestamp of the most recent tool execution.
3079 Returns None if metrics are not loaded (use metrics_summary for SQL fallback).
3081 Returns:
3082 Optional[datetime]: The timestamp of the most recent execution, or None.
3083 """
3084 if not self._metrics_loaded():
3085 return None
3086 if not self.metrics:
3087 return None
3088 return max(m.timestamp for m in self.metrics)
3090 @property
3091 def metrics_summary(self) -> Dict[str, Any]:
3092 """Aggregated metrics for the tool.
3094 When metrics are loaded: computes all values from memory in a single pass.
3095 When not loaded: uses a single SQL query with aggregation for all fields.
3097 Returns:
3098 Dict[str, Any]: Dictionary containing aggregated metrics:
3099 - total_executions, successful_executions, failed_executions
3100 - failure_rate, min/max/avg_response_time, last_execution_time
3101 """
3102 # If metrics are loaded, compute everything in a single pass
3103 if self._metrics_loaded():
3104 total = 0
3105 successful = 0
3106 min_rt: Optional[float] = None
3107 max_rt: Optional[float] = None
3108 sum_rt = 0.0
3109 last_time: Optional[datetime] = None
3111 for m in self.metrics:
3112 total += 1
3113 if m.is_success:
3114 successful += 1
3115 rt = m.response_time
3116 if min_rt is None or rt < min_rt:
3117 min_rt = rt
3118 if max_rt is None or rt > max_rt: 3118 ↛ 3120line 3118 didn't jump to line 3120 because the condition on line 3118 was always true
3119 max_rt = rt
3120 sum_rt += rt
3121 if last_time is None or m.timestamp > last_time: 3121 ↛ 3111line 3121 didn't jump to line 3111 because the condition on line 3121 was always true
3122 last_time = m.timestamp
3124 failed = total - successful
3125 return {
3126 "total_executions": total,
3127 "successful_executions": successful,
3128 "failed_executions": failed,
3129 "failure_rate": failed / total if total > 0 else 0.0,
3130 "min_response_time": min_rt,
3131 "max_response_time": max_rt,
3132 "avg_response_time": sum_rt / total if total > 0 else None,
3133 "last_execution_time": last_time,
3134 }
3136 # Use single SQL query with full aggregation
3137 # Third-Party
3138 from sqlalchemy import case # pylint: disable=import-outside-toplevel
3139 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
3141 session = object_session(self)
3142 if session is None:
3143 return {
3144 "total_executions": 0,
3145 "successful_executions": 0,
3146 "failed_executions": 0,
3147 "failure_rate": 0.0,
3148 "min_response_time": None,
3149 "max_response_time": None,
3150 "avg_response_time": None,
3151 "last_execution_time": None,
3152 }
3154 result = (
3155 session.query(
3156 func.count(ToolMetric.id), # pylint: disable=not-callable
3157 func.sum(case((ToolMetric.is_success.is_(True), 1), else_=0)),
3158 func.min(ToolMetric.response_time), # pylint: disable=not-callable
3159 func.max(ToolMetric.response_time), # pylint: disable=not-callable
3160 func.avg(ToolMetric.response_time), # pylint: disable=not-callable
3161 func.max(ToolMetric.timestamp), # pylint: disable=not-callable
3162 )
3163 .filter(ToolMetric.tool_id == self.id)
3164 .one()
3165 )
3167 total = result[0] or 0
3168 successful = result[1] or 0
3169 failed = total - successful
3171 return {
3172 "total_executions": total,
3173 "successful_executions": successful,
3174 "failed_executions": failed,
3175 "failure_rate": failed / total if total > 0 else 0.0,
3176 "min_response_time": result[2],
3177 "max_response_time": result[3],
3178 "avg_response_time": float(result[4]) if result[4] is not None else None,
3179 "last_execution_time": result[5],
3180 }
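# Illustrative sketch (hypothetical helper, not defined elsewhere in this module):
# read a tool's aggregated metrics with the relationship preloaded, so metrics_summary
# computes in memory instead of issuing its SQL fallback.
def _example_tool_metrics_summary(session: Session, tool_id: str) -> Dict[str, Any]:
    """Sketch: load one Tool with its metrics and return the aggregated summary."""
    # Third-Party
    from sqlalchemy.orm import selectinload  # only joinedload is imported at module level

    tool = session.query(Tool).options(selectinload(Tool.metrics)).filter(Tool.id == tool_id).one()
    return tool.metrics_summary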
3183class Resource(Base):
3184 """
3185 ORM model for a registered Resource.
3187 Resources represent content that can be read by clients.
3188 Supports subscriptions for real-time updates.
3189 Additionally, this model provides a relationship with ResourceMetric records
3190 to capture invocation metrics (such as execution counts, response times, and failures).
3191 """
3193 __tablename__ = "resources"
3195 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
3196 uri: Mapped[str] = mapped_column(String(767), nullable=False)
3197 name: Mapped[str] = mapped_column(String(255), nullable=False)
3198 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3199 mime_type: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3200 size: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
3201 uri_template: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # URI template for parameterized resources
3202 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
3203 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
3204 # is_active: Mapped[bool] = mapped_column(default=True)
3205 enabled: Mapped[bool] = mapped_column(default=True)
3206 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
3208 # Comprehensive metadata for audit tracking
3209 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3210 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
3211 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
3212 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3214 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3215 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
3216 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
3217 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3219 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
3220 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3221 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
3223 metrics: Mapped[List["ResourceMetric"]] = relationship("ResourceMetric", back_populates="resource", cascade="all, delete-orphan")
3225 # Content storage - can be text or binary
3226 text_content: Mapped[Optional[str]] = mapped_column(Text)
3227 binary_content: Mapped[Optional[bytes]]
3229 # Subscription tracking
3230 subscriptions: Mapped[List["ResourceSubscription"]] = relationship("ResourceSubscription", back_populates="resource", cascade="all, delete-orphan")
3232 gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
3233 gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="resources")
3234 # federated_with = relationship("Gateway", secondary=resource_gateway_table, back_populates="federated_resources")
3236 # Many-to-many relationship with Servers
3237 servers: Mapped[List["Server"]] = relationship("Server", secondary=server_resource_association, back_populates="resources")
3238 __table_args__ = (
3239 UniqueConstraint("team_id", "owner_email", "gateway_id", "uri", name="uq_team_owner_gateway_uri_resource"),
3240 Index("uq_team_owner_uri_resource_local", "team_id", "owner_email", "uri", unique=True, postgresql_where=text("gateway_id IS NULL"), sqlite_where=text("gateway_id IS NULL")),
3241 Index("idx_resources_created_at_id", "created_at", "id"),
3242 )
3244 @property
3245 def content(self) -> "ResourceContent":
3246 """
3247 Returns the resource content in the appropriate format.
3249 If text content exists, returns a ResourceContent with text.
3250 Otherwise, if binary content exists, returns a ResourceContent with blob data.
3251 Raises a ValueError if no content is available.
3253 Returns:
3254 ResourceContent: The resource content with appropriate format (text or blob).
3256 Raises:
3257 ValueError: If the resource has no content available.
3259 Examples:
3260 >>> resource = Resource(uri="test://example", name="test")
3261 >>> resource.text_content = "Hello, World!"
3262 >>> content = resource.content
3263 >>> content.text
3264 'Hello, World!'
3265 >>> content.type
3266 'resource'
3268 >>> binary_resource = Resource(uri="test://binary", name="binary")
3269 >>> binary_resource.binary_content = b"\\x00\\x01\\x02"
3270 >>> binary_content = binary_resource.content
3271 >>> binary_content.blob
3272 b'\\x00\\x01\\x02'
3274 >>> empty_resource = Resource(uri="test://empty", name="empty")
3275 >>> try:
3276 ... empty_resource.content
3277 ... except ValueError as e:
3278 ... str(e)
3279 'Resource has no content'
3280 """
3282 # Local import to avoid circular import
3283 # First-Party
3284 from mcpgateway.common.models import ResourceContent # pylint: disable=import-outside-toplevel
3286 if self.text_content is not None:
3287 return ResourceContent(
3288 type="resource",
3289 id=str(self.id),
3290 uri=self.uri,
3291 mime_type=self.mime_type,
3292 text=self.text_content,
3293 )
3294 if self.binary_content is not None:
3295 return ResourceContent(
3296 type="resource",
3297 id=str(self.id),
3298 uri=self.uri,
3299 mime_type=self.mime_type or "application/octet-stream",
3300 blob=self.binary_content,
3301 )
3302 raise ValueError("Resource has no content")
3304 def _metrics_loaded(self) -> bool:
3305 """Check if metrics relationship is loaded without triggering lazy load.
3307 Returns:
3308 bool: True if metrics are loaded, False otherwise.
3309 """
3310 return "metrics" in sa_inspect(self).dict
3312 def _get_metric_counts(self) -> tuple[int, int, int]:
3313 """Get total, successful, and failed metric counts in a single operation.
3315 When metrics are already loaded, computes from memory in O(n).
3316 When not loaded, uses a single SQL query with conditional aggregation.
3318 Returns:
3319 tuple[int, int, int]: (total, successful, failed) counts.
3320 """
3321 if self._metrics_loaded():
3322 total = 0
3323 successful = 0
3324 for m in self.metrics:
3325 total += 1
3326 if m.is_success:
3327 successful += 1
3328 return (total, successful, total - successful)
3330 # Third-Party
3331 from sqlalchemy import case # pylint: disable=import-outside-toplevel
3332 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
3334 session = object_session(self)
3335 if session is None:
3336 return (0, 0, 0)
3338 result = (
3339 session.query(
3340 func.count(ResourceMetric.id), # pylint: disable=not-callable
3341 func.sum(case((ResourceMetric.is_success.is_(True), 1), else_=0)),
3342 )
3343 .filter(ResourceMetric.resource_id == self.id)
3344 .one()
3345 )
3347 total = result[0] or 0
3348 successful = result[1] or 0
3349 return (total, successful, total - successful)
3351 @hybrid_property
3352 def execution_count(self) -> int:
3353 """Number of ResourceMetric records associated with this resource instance.
3355 Returns:
3356 int: Count of ResourceMetric records for this resource.
3357 """
3358 return self._get_metric_counts()[0]
3360 @execution_count.expression
3361 @classmethod
3362 def execution_count(cls) -> Any:
3363 """SQL expression that counts ResourceMetric rows for this resource.
3365 Returns:
3366 Any: SQLAlchemy labeled count expression for resource metrics.
3367 """
3368 return select(func.count(ResourceMetric.id)).where(ResourceMetric.resource_id == cls.id).correlate(cls).scalar_subquery().label("execution_count") # pylint: disable=not-callable
3370 @property
3371 def successful_executions(self) -> int:
3372 """Count of successful resource invocations.
3374 Returns:
3375 int: The count of successful resource invocations.
3376 """
3377 return self._get_metric_counts()[1]
3379 @property
3380 def failed_executions(self) -> int:
3381 """Count of failed resource invocations.
3383 Returns:
3384 int: The count of failed resource invocations.
3385 """
3386 return self._get_metric_counts()[2]
3388 @property
3389 def failure_rate(self) -> float:
3390 """Failure rate as a float between 0 and 1.
3392 Returns:
3393 float: The failure rate as a value between 0 and 1.
3394 """
3395 total, _, failed = self._get_metric_counts()
3396 return failed / total if total > 0 else 0.0
3398 @property
3399 def min_response_time(self) -> Optional[float]:
3400 """Minimum response time among all resource invocations.
3402 Returns None if metrics are not loaded. Note: counts may be non-zero
3403 (via SQL) while timing is None. Use service layer converters for
3404 consistent metrics, or preload metrics via selectinload.
3406 Returns:
3407 Optional[float]: The minimum response time, or None.
3408 """
3409 if not self._metrics_loaded():
3410 return None
3411 times: List[float] = [m.response_time for m in self.metrics]
3412 return min(times) if times else None
3414 @property
3415 def max_response_time(self) -> Optional[float]:
3416 """Maximum response time among all resource invocations.
3418 Returns None if metrics are not loaded. Note: counts may be non-zero
3419 (via SQL) while timing is None. Use service layer converters for
3420 consistent metrics, or preload metrics via selectinload.
3422 Returns:
3423 Optional[float]: The maximum response time, or None.
3424 """
3425 if not self._metrics_loaded():
3426 return None
3427 times: List[float] = [m.response_time for m in self.metrics]
3428 return max(times) if times else None
3430 @property
3431 def avg_response_time(self) -> Optional[float]:
3432 """Average response time among all resource invocations.
3434 Returns None if metrics are not loaded. Note: counts may be non-zero
3435 (via SQL) while timing is None. Use service layer converters for
3436 consistent metrics, or preload metrics via selectinload.
3438 Returns:
3439 Optional[float]: The average response time, or None.
3440 """
3441 if not self._metrics_loaded():
3442 return None
3443 times: List[float] = [m.response_time for m in self.metrics]
3444 return sum(times) / len(times) if times else None
3446 @property
3447 def last_execution_time(self) -> Optional[datetime]:
3448 """Timestamp of the most recent resource invocation.
3450 Returns None if metrics are not loaded. Note: counts may be non-zero
3451 (via SQL) while timing is None. Use service layer converters for
3452 consistent metrics, or preload metrics via selectinload.
3454 Returns:
3455 Optional[datetime]: The timestamp of the most recent invocation, or None.
3456 """
3457 if not self._metrics_loaded():
3458 return None
3459 if not self.metrics:
3460 return None
3461 return max(m.timestamp for m in self.metrics)
3463 @property
3464 def metrics_summary(self) -> Dict[str, Any]:
3465 """Aggregated metrics for the resource.
3467 When metrics are loaded: computes all values from memory in a single pass.
3468 When not loaded: uses a single SQL query with aggregation for all fields.
3470 Returns:
3471 Dict[str, Any]: Dictionary containing aggregated metrics:
3472 - total_executions, successful_executions, failed_executions
3473 - failure_rate, min/max/avg_response_time, last_execution_time
3474 """
3475 if self._metrics_loaded():
3476 total = 0
3477 successful = 0
3478 min_rt: Optional[float] = None
3479 max_rt: Optional[float] = None
3480 sum_rt = 0.0
3481 last_time: Optional[datetime] = None
3483 for m in self.metrics:
3484 total += 1
3485 if m.is_success:
3486 successful += 1
3487 rt = m.response_time
3488 if min_rt is None or rt < min_rt:
3489 min_rt = rt
3490 if max_rt is None or rt > max_rt: 3490 ↛ 3492line 3490 didn't jump to line 3492 because the condition on line 3490 was always true
3491 max_rt = rt
3492 sum_rt += rt
3493 if last_time is None or m.timestamp > last_time: 3493 ↛ 3483line 3493 didn't jump to line 3483 because the condition on line 3493 was always true
3494 last_time = m.timestamp
3496 failed = total - successful
3497 return {
3498 "total_executions": total,
3499 "successful_executions": successful,
3500 "failed_executions": failed,
3501 "failure_rate": failed / total if total > 0 else 0.0,
3502 "min_response_time": min_rt,
3503 "max_response_time": max_rt,
3504 "avg_response_time": sum_rt / total if total > 0 else None,
3505 "last_execution_time": last_time,
3506 }
3508 # Third-Party
3509 from sqlalchemy import case # pylint: disable=import-outside-toplevel
3510 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
3512 session = object_session(self)
3513 if session is None:
3514 return {
3515 "total_executions": 0,
3516 "successful_executions": 0,
3517 "failed_executions": 0,
3518 "failure_rate": 0.0,
3519 "min_response_time": None,
3520 "max_response_time": None,
3521 "avg_response_time": None,
3522 "last_execution_time": None,
3523 }
3525 result = (
3526 session.query(
3527 func.count(ResourceMetric.id), # pylint: disable=not-callable
3528 func.sum(case((ResourceMetric.is_success.is_(True), 1), else_=0)),
3529 func.min(ResourceMetric.response_time), # pylint: disable=not-callable
3530 func.max(ResourceMetric.response_time), # pylint: disable=not-callable
3531 func.avg(ResourceMetric.response_time), # pylint: disable=not-callable
3532 func.max(ResourceMetric.timestamp), # pylint: disable=not-callable
3533 )
3534 .filter(ResourceMetric.resource_id == self.id)
3535 .one()
3536 )
3538 total = result[0] or 0
3539 successful = result[1] or 0
3540 failed = total - successful
3542 return {
3543 "total_executions": total,
3544 "successful_executions": successful,
3545 "failed_executions": failed,
3546 "failure_rate": failed / total if total > 0 else 0.0,
3547 "min_response_time": result[2],
3548 "max_response_time": result[3],
3549 "avg_response_time": float(result[4]) if result[4] is not None else None,
3550 "last_execution_time": result[5],
3551 }
3553 # Team scoping fields for resource organization
3554 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
3555 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3556 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
3559class ResourceSubscription(Base):
3560 """Tracks subscriptions to resource updates."""
3562 __tablename__ = "resource_subscriptions"
3564 id: Mapped[int] = mapped_column(primary_key=True)
3565 resource_id: Mapped[str] = mapped_column(ForeignKey("resources.id"))
3566 subscriber_id: Mapped[str] = mapped_column(String(255), nullable=False) # Client identifier
3567 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
3568 last_notification: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
3570 resource: Mapped["Resource"] = relationship(back_populates="subscriptions")
3573class ToolOpsTestCases(Base):
3574 """
3575 ORM model for a registered tool's test cases.
3577 Represents a tool and the generated test cases.
3578 Includes:
3579 - tool_id: unique tool identifier
3580 - test_cases: generated test cases.
3581 - run_status: status of test case generation
3582 """
3584 __tablename__ = "toolops_test_cases"
3586 tool_id: Mapped[str] = mapped_column(String(255), primary_key=True)
3587 test_cases: Mapped[Dict[str, Any]] = mapped_column(JSON)
3588 run_status: Mapped[str] = mapped_column(String(255), nullable=False)
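# Illustrative sketch (hypothetical helper, not defined elsewhere in this module):
# store generated test cases for a tool; merge() upserts on the tool_id primary key.
# The run_status value below is a made-up example, not a documented status code.
def _example_store_test_cases(session: Session, tool_id: str, cases: Dict[str, Any]) -> None:
    """Sketch: upsert the generated test cases and mark the run as finished."""
    session.merge(ToolOpsTestCases(tool_id=tool_id, test_cases=cases, run_status="completed"))
    session.flush()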
3591class Prompt(Base):
3592 """
3593 ORM model for a registered Prompt template.
3595 Represents a prompt template along with its argument schema.
3596 Supports rendering and invocation of prompts.
3597 Additionally, this model provides computed properties for aggregated metrics based
3598 on the associated PromptMetric records. These include:
3599 - execution_count: Total number of prompt invocations.
3600 - successful_executions: Count of successful invocations.
3601 - failed_executions: Count of failed invocations.
3602 - failure_rate: Ratio of failed invocations to total invocations.
3603 - min_response_time: Fastest recorded response time.
3604 - max_response_time: Slowest recorded response time.
3605 - avg_response_time: Mean response time.
3606 - last_execution_time: Timestamp of the most recent invocation.
3607 """
3609 __tablename__ = "prompts"
3611 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
3612 original_name: Mapped[str] = mapped_column(String(255), nullable=False)
3613 custom_name: Mapped[str] = mapped_column(String(255), nullable=False)
3614 custom_name_slug: Mapped[str] = mapped_column(String(255), nullable=False)
3615 display_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3616 name: Mapped[str] = mapped_column(String(255), nullable=False)
3617 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3618 template: Mapped[str] = mapped_column(Text)
3619 argument_schema: Mapped[Dict[str, Any]] = mapped_column(JSON)
3620 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
3621 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
3622 # is_active: Mapped[bool] = mapped_column(default=True)
3623 enabled: Mapped[bool] = mapped_column(default=True)
3624 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
3626 # Comprehensive metadata for audit tracking
3627 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3628 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
3629 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
3630 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3632 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3633 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
3634 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
3635 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3637 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
3638 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3639 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
3641 metrics: Mapped[List["PromptMetric"]] = relationship("PromptMetric", back_populates="prompt", cascade="all, delete-orphan")
3643 gateway_id: Mapped[Optional[str]] = mapped_column(ForeignKey("gateways.id", ondelete="CASCADE"))
3644 gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="prompts")
3645 # federated_with = relationship("Gateway", secondary=prompt_gateway_table, back_populates="federated_prompts")
3647 # Many-to-many relationship with Servers
3648 servers: Mapped[List["Server"]] = relationship("Server", secondary=server_prompt_association, back_populates="prompts")
3650 # Team scoping fields for resource organization
3651 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
3652 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3653 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
3655 __table_args__ = (
3656 UniqueConstraint("team_id", "owner_email", "gateway_id", "name", name="uq_team_owner_gateway_name_prompt"),
3657 UniqueConstraint("gateway_id", "original_name", name="uq_gateway_id__original_name_prompt"),
3658 Index("uq_team_owner_name_prompt_local", "team_id", "owner_email", "name", unique=True, postgresql_where=text("gateway_id IS NULL"), sqlite_where=text("gateway_id IS NULL")),
3659 Index("idx_prompts_created_at_id", "created_at", "id"),
3660 )
3662 @hybrid_property
3663 def gateway_slug(self) -> Optional[str]:
3664 """Return the related gateway's slug if available.
3666 Returns:
3667 Optional[str]: Gateway slug or None when no gateway is attached.
3668 """
3669 return self.gateway.slug if self.gateway else None
3671 @gateway_slug.expression
3672 @classmethod
3673 def gateway_slug(cls) -> Any:
3674 """SQL expression to select current gateway slug for this prompt.
3676 Returns:
3677 Any: SQLAlchemy scalar subquery selecting the gateway slug.
3678 """
3679 return select(Gateway.slug).where(Gateway.id == cls.gateway_id).scalar_subquery()
3681 def validate_arguments(self, args: Dict[str, str]) -> None:
3682 """
3683 Validate prompt arguments against the argument schema.
3685 Args:
3686 args (Dict[str, str]): Dictionary of arguments to validate.
3688 Raises:
3689 ValueError: If the arguments do not conform to the schema.
3691 Examples:
3692 >>> prompt = Prompt(
3693 ... name="test_prompt",
3694 ... template="Hello {name}",
3695 ... argument_schema={
3696 ... "type": "object",
3697 ... "properties": {
3698 ... "name": {"type": "string"}
3699 ... },
3700 ... "required": ["name"]
3701 ... }
3702 ... )
3703 >>> prompt.validate_arguments({"name": "Alice"}) # No exception
3704 >>> try:
3705 ... prompt.validate_arguments({"age": 25}) # Missing required field
3706 ... except ValueError as e:
3707 ... "name" in str(e)
3708 True
3709 """
3710 try:
3711 jsonschema.validate(args, self.argument_schema)
3712 except jsonschema.exceptions.ValidationError as e:
3713 raise ValueError(f"Invalid prompt arguments: {str(e)}") from e
3715 def _metrics_loaded(self) -> bool:
3716 """Check if metrics relationship is loaded without triggering lazy load.
3718 Returns:
3719 bool: True if metrics are loaded, False otherwise.
3720 """
3721 return "metrics" in sa_inspect(self).dict
3723 def _get_metric_counts(self) -> tuple[int, int, int]:
3724 """Get total, successful, and failed metric counts in a single operation.
3726 When metrics are already loaded, computes from memory in O(n).
3727 When not loaded, uses a single SQL query with conditional aggregation.
3729 Returns:
3730 tuple[int, int, int]: (total, successful, failed) counts.
3731 """
3732 if self._metrics_loaded():
3733 total = 0
3734 successful = 0
3735 for m in self.metrics:
3736 total += 1
3737 if m.is_success:
3738 successful += 1
3739 return (total, successful, total - successful)
3741 # Third-Party
3742 from sqlalchemy import case # pylint: disable=import-outside-toplevel
3743 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
3745 session = object_session(self)
3746 if session is None:
3747 return (0, 0, 0)
3749 result = (
3750 session.query(
3751 func.count(PromptMetric.id), # pylint: disable=not-callable
3752 func.sum(case((PromptMetric.is_success.is_(True), 1), else_=0)),
3753 )
3754 .filter(PromptMetric.prompt_id == self.id)
3755 .one()
3756 )
3758 total = result[0] or 0
3759 successful = result[1] or 0
3760 return (total, successful, total - successful)
3762 @hybrid_property
3763 def execution_count(self) -> int:
3764 """Number of PromptMetric records associated with this prompt instance.
3766 Returns:
3767 int: Count of PromptMetric records for this prompt.
3768 """
3769 return self._get_metric_counts()[0]
3771 @execution_count.expression
3772 @classmethod
3773 def execution_count(cls) -> Any:
3774 """SQL expression that counts PromptMetric rows for this prompt.
3776 Returns:
3777 Any: SQLAlchemy labeled count expression for prompt metrics.
3778 """
3779 return select(func.count(PromptMetric.id)).where(PromptMetric.prompt_id == cls.id).correlate(cls).scalar_subquery().label("execution_count") # pylint: disable=not-callable
3781 @property
3782 def successful_executions(self) -> int:
3783 """Count of successful prompt invocations.
3785 Returns:
3786 int: The count of successful prompt invocations.
3787 """
3788 return self._get_metric_counts()[1]
3790 @property
3791 def failed_executions(self) -> int:
3792 """Count of failed prompt invocations.
3794 Returns:
3795 int: The count of failed prompt invocations.
3796 """
3797 return self._get_metric_counts()[2]
3799 @property
3800 def failure_rate(self) -> float:
3801 """Failure rate as a float between 0 and 1.
3803 Returns:
3804 float: The failure rate as a value between 0 and 1.
3805 """
3806 total, _, failed = self._get_metric_counts()
3807 return failed / total if total > 0 else 0.0
3809 @property
3810 def min_response_time(self) -> Optional[float]:
3811 """Minimum response time among all prompt invocations.
3813 Returns None if metrics are not loaded. Note: counts may be non-zero
3814 (via SQL) while timing is None. Use service layer converters for
3815 consistent metrics, or preload metrics via selectinload.
3817 Returns:
3818 Optional[float]: The minimum response time, or None.
3819 """
3820 if not self._metrics_loaded():
3821 return None
3822 times: List[float] = [m.response_time for m in self.metrics]
3823 return min(times) if times else None
3825 @property
3826 def max_response_time(self) -> Optional[float]:
3827 """Maximum response time among all prompt invocations.
3829 Returns None if metrics are not loaded. Note: counts may be non-zero
3830 (via SQL) while timing is None. Use service layer converters for
3831 consistent metrics, or preload metrics via selectinload.
3833 Returns:
3834 Optional[float]: The maximum response time, or None.
3835 """
3836 if not self._metrics_loaded():
3837 return None
3838 times: List[float] = [m.response_time for m in self.metrics]
3839 return max(times) if times else None
3841 @property
3842 def avg_response_time(self) -> Optional[float]:
3843 """Average response time among all prompt invocations.
3845 Returns None if metrics are not loaded. Note: counts may be non-zero
3846 (via SQL) while timing is None. Use service layer converters for
3847 consistent metrics, or preload metrics via selectinload.
3849 Returns:
3850 Optional[float]: The average response time, or None.
3851 """
3852 if not self._metrics_loaded():
3853 return None
3854 times: List[float] = [m.response_time for m in self.metrics]
3855 return sum(times) / len(times) if times else None
3857 @property
3858 def last_execution_time(self) -> Optional[datetime]:
3859 """Timestamp of the most recent prompt invocation.
3861 Returns None if metrics are not loaded. Note: counts may be non-zero
3862 (via SQL) while timing is None. Use service layer converters for
3863 consistent metrics, or preload metrics via selectinload.
3865 Returns:
3866 Optional[datetime]: The timestamp of the most recent invocation, or None if no invocations exist.
3867 """
3868 if not self._metrics_loaded():
3869 return None
3870 if not self.metrics:
3871 return None
3872 return max(m.timestamp for m in self.metrics)
3874 @property
3875 def metrics_summary(self) -> Dict[str, Any]:
3876 """Aggregated metrics for the prompt.
3878 When metrics are loaded: computes all values from memory in a single pass.
3879 When not loaded: uses a single SQL query with aggregation for all fields.
3881 Returns:
3882 Dict[str, Any]: Dictionary containing aggregated metrics:
3883 - total_executions, successful_executions, failed_executions
3884 - failure_rate, min/max/avg_response_time, last_execution_time
3885 """
3886 if self._metrics_loaded():
3887 total = 0
3888 successful = 0
3889 min_rt: Optional[float] = None
3890 max_rt: Optional[float] = None
3891 sum_rt = 0.0
3892 last_time: Optional[datetime] = None
3894 for m in self.metrics:
3895 total += 1
3896 if m.is_success:
3897 successful += 1
3898 rt = m.response_time
3899 if min_rt is None or rt < min_rt:
3900 min_rt = rt
3901 if max_rt is None or rt > max_rt: 3901 ↛ 3903line 3901 didn't jump to line 3903 because the condition on line 3901 was always true
3902 max_rt = rt
3903 sum_rt += rt
3904 if last_time is None or m.timestamp > last_time: 3904 ↛ 3894line 3904 didn't jump to line 3894 because the condition on line 3904 was always true
3905 last_time = m.timestamp
3907 failed = total - successful
3908 return {
3909 "total_executions": total,
3910 "successful_executions": successful,
3911 "failed_executions": failed,
3912 "failure_rate": failed / total if total > 0 else 0.0,
3913 "min_response_time": min_rt,
3914 "max_response_time": max_rt,
3915 "avg_response_time": sum_rt / total if total > 0 else None,
3916 "last_execution_time": last_time,
3917 }
3919 # Third-Party
3920 from sqlalchemy import case # pylint: disable=import-outside-toplevel
3921 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
3923 session = object_session(self)
3924 if session is None:
3925 return {
3926 "total_executions": 0,
3927 "successful_executions": 0,
3928 "failed_executions": 0,
3929 "failure_rate": 0.0,
3930 "min_response_time": None,
3931 "max_response_time": None,
3932 "avg_response_time": None,
3933 "last_execution_time": None,
3934 }
3936 result = (
3937 session.query(
3938 func.count(PromptMetric.id), # pylint: disable=not-callable
3939 func.sum(case((PromptMetric.is_success.is_(True), 1), else_=0)),
3940 func.min(PromptMetric.response_time), # pylint: disable=not-callable
3941 func.max(PromptMetric.response_time), # pylint: disable=not-callable
3942 func.avg(PromptMetric.response_time), # pylint: disable=not-callable
3943 func.max(PromptMetric.timestamp), # pylint: disable=not-callable
3944 )
3945 .filter(PromptMetric.prompt_id == self.id)
3946 .one()
3947 )
3949 total = result[0] or 0
3950 successful = result[1] or 0
3951 failed = total - successful
3953 return {
3954 "total_executions": total,
3955 "successful_executions": successful,
3956 "failed_executions": failed,
3957 "failure_rate": failed / total if total > 0 else 0.0,
3958 "min_response_time": result[2],
3959 "max_response_time": result[3],
3960 "avg_response_time": float(result[4]) if result[4] is not None else None,
3961 "last_execution_time": result[5],
3962 }
3965class Server(Base):
3966 """
3967 ORM model for the MCP Servers Catalog.
3969 Represents a server that composes catalog items (tools, resources, prompts).
3970 Additionally, this model provides computed properties for aggregated metrics based
3971 on the associated ServerMetric records. These include:
3972 - execution_count: Total number of invocations.
3973 - successful_executions: Count of successful invocations.
3974 - failed_executions: Count of failed invocations.
3975 - failure_rate: Ratio of failed invocations to total invocations.
3976 - min_response_time: Fastest recorded response time.
3977 - max_response_time: Slowest recorded response time.
3978 - avg_response_time: Mean response time.
3979 - last_execution_time: Timestamp of the most recent invocation.
3980 """
3982 __tablename__ = "servers"
3984 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
3985 name: Mapped[str] = mapped_column(String(255), nullable=False)
3986 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
3987 icon: Mapped[Optional[str]] = mapped_column(String(767), nullable=True)
3988 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
3989 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
3990 # is_active: Mapped[bool] = mapped_column(default=True)
3991 enabled: Mapped[bool] = mapped_column(default=True)
3992 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
3994 # Comprehensive metadata for audit tracking
3995 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
3996 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
3997 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
3998 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4000 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4001 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4002 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4003 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4005 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
4006 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4007 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
4009 metrics: Mapped[List["ServerMetric"]] = relationship("ServerMetric", back_populates="server", cascade="all, delete-orphan")
4011 # Many-to-many relationships for associated items
4012 tools: Mapped[List["Tool"]] = relationship("Tool", secondary=server_tool_association, back_populates="servers")
4013 resources: Mapped[List["Resource"]] = relationship("Resource", secondary=server_resource_association, back_populates="servers")
4014 prompts: Mapped[List["Prompt"]] = relationship("Prompt", secondary=server_prompt_association, back_populates="servers")
4015 a2a_agents: Mapped[List["A2AAgent"]] = relationship("A2AAgent", secondary=server_a2a_association, back_populates="servers")
4017 # API token relationships
4018 scoped_tokens: Mapped[List["EmailApiToken"]] = relationship("EmailApiToken", back_populates="server")
4020 def _metrics_loaded(self) -> bool:
4021 """Check if metrics relationship is loaded without triggering lazy load.
4023 Returns:
4024 bool: True if metrics are loaded, False otherwise.
4025 """
4026 return "metrics" in sa_inspect(self).dict
4028 def _get_metric_counts(self) -> tuple[int, int, int]:
4029 """Get total, successful, and failed metric counts in a single operation.
4031 When metrics are already loaded, computes from memory in O(n).
4032 When not loaded, uses a single SQL query with conditional aggregation.
4034 Returns:
4035 tuple[int, int, int]: (total, successful, failed) counts.
4036 """
4037 if self._metrics_loaded():
4038 total = 0
4039 successful = 0
4040 for m in self.metrics:
4041 total += 1
4042 if m.is_success:
4043 successful += 1
4044 return (total, successful, total - successful)
4046 # Third-Party
4047 from sqlalchemy import case # pylint: disable=import-outside-toplevel
4048 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
4050 session = object_session(self)
4051 if session is None:
4052 return (0, 0, 0)
4054 result = (
4055 session.query(
4056 func.count(ServerMetric.id), # pylint: disable=not-callable
4057 func.sum(case((ServerMetric.is_success.is_(True), 1), else_=0)),
4058 )
4059 .filter(ServerMetric.server_id == self.id)
4060 .one()
4061 )
4063 total = result[0] or 0
4064 successful = result[1] or 0
4065 return (total, successful, total - successful)
4067 @hybrid_property
4068 def execution_count(self) -> int:
4069 """Number of ServerMetric records associated with this server instance.
4071 Returns:
4072 int: Count of ServerMetric records for this server.
4073 """
4074 return self._get_metric_counts()[0]
4076 @execution_count.expression
4077 @classmethod
4078 def execution_count(cls) -> Any:
4079 """SQL expression that counts ServerMetric rows for this server.
4081 Returns:
4082 Any: SQLAlchemy labeled count expression for server metrics.
4083 """
4084 return select(func.count(ServerMetric.id)).where(ServerMetric.server_id == cls.id).correlate(cls).scalar_subquery().label("execution_count") # pylint: disable=not-callable
4086 @property
4087 def successful_executions(self) -> int:
4088 """Count of successful server invocations.
4090 Returns:
4091 int: The count of successful server invocations.
4092 """
4093 return self._get_metric_counts()[1]
4095 @property
4096 def failed_executions(self) -> int:
4097 """Count of failed server invocations.
4099 Returns:
4100 int: The count of failed server invocations.
4101 """
4102 return self._get_metric_counts()[2]
4104 @property
4105 def failure_rate(self) -> float:
4106 """Failure rate as a float between 0 and 1.
4108 Returns:
4109 float: The failure rate as a value between 0 and 1.
4110 """
4111 total, _, failed = self._get_metric_counts()
4112 return failed / total if total > 0 else 0.0
4114 @property
4115 def min_response_time(self) -> Optional[float]:
4116 """Minimum response time among all server invocations.
4118 Returns None if metrics are not loaded. Note: counts may be non-zero
4119 (via SQL) while timing is None. Use service layer converters for
4120 consistent metrics, or preload metrics via selectinload.
4122 Returns:
4123 Optional[float]: The minimum response time, or None.
4124 """
4125 if not self._metrics_loaded():
4126 return None
4127 times: List[float] = [m.response_time for m in self.metrics]
4128 return min(times) if times else None
4130 @property
4131 def max_response_time(self) -> Optional[float]:
4132 """Maximum response time among all server invocations.
4134 Returns None if metrics are not loaded. Note: counts may be non-zero
4135 (via SQL) while timing is None. Use service layer converters for
4136 consistent metrics, or preload metrics via selectinload.
4138 Returns:
4139 Optional[float]: The maximum response time, or None.
4140 """
4141 if not self._metrics_loaded():
4142 return None
4143 times: List[float] = [m.response_time for m in self.metrics]
4144 return max(times) if times else None
4146 @property
4147 def avg_response_time(self) -> Optional[float]:
4148 """Average response time among all server invocations.
4150 Returns None if metrics are not loaded. Note: counts may be non-zero
4151 (via SQL) while timing is None. Use service layer converters for
4152 consistent metrics, or preload metrics via selectinload.
4154 Returns:
4155 Optional[float]: The average response time, or None.
4156 """
4157 if not self._metrics_loaded():
4158 return None
4159 times: List[float] = [m.response_time for m in self.metrics]
4160 return sum(times) / len(times) if times else None
4162 @property
4163 def last_execution_time(self) -> Optional[datetime]:
4164 """Timestamp of the most recent server invocation.
4166 Returns None if metrics are not loaded. Note: counts may be non-zero
4167 (via SQL) while timing is None. Use service layer converters for
4168 consistent metrics, or preload metrics via selectinload.
4170 Returns:
4171 Optional[datetime]: The timestamp of the most recent invocation, or None.
4172 """
4173 if not self._metrics_loaded():
4174 return None
4175 if not self.metrics:
4176 return None
4177 return max(m.timestamp for m in self.metrics)
4179 @property
4180 def metrics_summary(self) -> Dict[str, Any]:
4181 """Aggregated metrics for the server.
4183 When metrics are loaded: computes all values from memory in a single pass.
4184 When not loaded: uses a single SQL query with aggregation for all fields.
4186 Returns:
4187 Dict[str, Any]: Dictionary containing aggregated metrics:
4188 - total_executions, successful_executions, failed_executions
4189 - failure_rate, min/max/avg_response_time, last_execution_time
4190 """
4191 if self._metrics_loaded():
4192 total = 0
4193 successful = 0
4194 min_rt: Optional[float] = None
4195 max_rt: Optional[float] = None
4196 sum_rt = 0.0
4197 last_time: Optional[datetime] = None
4199 for m in self.metrics:
4200 total += 1
4201 if m.is_success:
4202 successful += 1
4203 rt = m.response_time
4204 if min_rt is None or rt < min_rt:
4205 min_rt = rt
4206 if max_rt is None or rt > max_rt: 4206 ↛ 4208line 4206 didn't jump to line 4208 because the condition on line 4206 was always true
4207 max_rt = rt
4208 sum_rt += rt
4209 if last_time is None or m.timestamp > last_time: 4209 ↛ 4199line 4209 didn't jump to line 4199 because the condition on line 4209 was always true
4210 last_time = m.timestamp
4212 failed = total - successful
4213 return {
4214 "total_executions": total,
4215 "successful_executions": successful,
4216 "failed_executions": failed,
4217 "failure_rate": failed / total if total > 0 else 0.0,
4218 "min_response_time": min_rt,
4219 "max_response_time": max_rt,
4220 "avg_response_time": sum_rt / total if total > 0 else None,
4221 "last_execution_time": last_time,
4222 }
4224 # Third-Party
4225 from sqlalchemy import case # pylint: disable=import-outside-toplevel
4226 from sqlalchemy.orm import object_session # pylint: disable=import-outside-toplevel
4228 session = object_session(self)
4229 if session is None:
4230 return {
4231 "total_executions": 0,
4232 "successful_executions": 0,
4233 "failed_executions": 0,
4234 "failure_rate": 0.0,
4235 "min_response_time": None,
4236 "max_response_time": None,
4237 "avg_response_time": None,
4238 "last_execution_time": None,
4239 }
4241 result = (
4242 session.query(
4243 func.count(ServerMetric.id), # pylint: disable=not-callable
4244 func.sum(case((ServerMetric.is_success.is_(True), 1), else_=0)),
4245 func.min(ServerMetric.response_time), # pylint: disable=not-callable
4246 func.max(ServerMetric.response_time), # pylint: disable=not-callable
4247 func.avg(ServerMetric.response_time), # pylint: disable=not-callable
4248 func.max(ServerMetric.timestamp), # pylint: disable=not-callable
4249 )
4250 .filter(ServerMetric.server_id == self.id)
4251 .one()
4252 )
4254 total = result[0] or 0
4255 successful = result[1] or 0
4256 failed = total - successful
4258 return {
4259 "total_executions": total,
4260 "successful_executions": successful,
4261 "failed_executions": failed,
4262 "failure_rate": failed / total if total > 0 else 0.0,
4263 "min_response_time": result[2],
4264 "max_response_time": result[3],
4265 "avg_response_time": float(result[4]) if result[4] is not None else None,
4266 "last_execution_time": result[5],
4267 }
4269 # Team scoping fields for resource organization
4270 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
4271 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4272 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
4274 # OAuth 2.0 configuration for RFC 9728 Protected Resource Metadata
4275 # When enabled, MCP clients can authenticate using OAuth with browser-based IDP SSO
4276 oauth_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
4277 oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
4279 # Relationship for loading team names (only active teams)
4280 # Uses default lazy loading - team name is only loaded when accessed
4281 # For list/admin views, use explicit joinedload(DbServer.email_team) for single-query loading
4282 # This avoids adding overhead to hot paths that don't need team names
4283 email_team: Mapped[Optional["EmailTeam"]] = relationship(
4284 "EmailTeam",
4285 primaryjoin="and_(Server.team_id == EmailTeam.id, EmailTeam.is_active == True)",
4286 foreign_keys=[team_id],
4287 )
4289 @property
4290 def team(self) -> Optional[str]:
4291 """Return the team name from the `email_team` relationship.
4293 Returns:
4294 Optional[str]: The team name if the server belongs to an active team, otherwise None.
4295 """
4296 return self.email_team.name if self.email_team else None
4298 __table_args__ = (
4299 UniqueConstraint("team_id", "owner_email", "name", name="uq_team_owner_name_server"),
4300 Index("idx_servers_created_at_id", "created_at", "id"),
4301 )
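# Illustrative usage sketch (not part of the original module): eager-load the
# relationships mentioned in the comments above so Server.metrics_summary is
# computed from memory and the team name resolves without an extra query.
# The lookup-by-id shape is an assumption about how callers identify servers.
def _example_load_server_with_metrics_and_team(db: Session, server_id: str) -> Optional[Dict[str, Any]]:
    """Return the metrics summary for one server, with the team name attached."""
    # Third-Party
    from sqlalchemy.orm import selectinload  # pylint: disable=import-outside-toplevel

    server = db.execute(
        select(Server)
        .options(selectinload(Server.metrics), joinedload(Server.email_team))
        .where(Server.id == server_id)
    ).scalar_one_or_none()
    if server is None:
        return None
    summary = server.metrics_summary  # in-memory branch: metrics are preloaded
    summary["team"] = server.team  # already loaded via joinedload, no lazy query
    return summary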
4304class Gateway(Base):
4305 """ORM model for a federated peer Gateway."""
4307 __tablename__ = "gateways"
4309 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
4310 name: Mapped[str] = mapped_column(String(255), nullable=False)
4311 slug: Mapped[str] = mapped_column(String(255), nullable=False)
4312 url: Mapped[str] = mapped_column(String(767), nullable=False)
4313 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4314 transport: Mapped[str] = mapped_column(String(20), default="SSE")
4315 capabilities: Mapped[Dict[str, Any]] = mapped_column(JSON)
4316 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4317 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
4318 enabled: Mapped[bool] = mapped_column(default=True)
4319 reachable: Mapped[bool] = mapped_column(default=True)
4320 last_seen: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
4321 tags: Mapped[List[Dict[str, str]]] = mapped_column(JSON, default=list, nullable=False)
4323 # Comprehensive metadata for audit tracking
4324 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4325 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4326 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4327 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4329 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4330 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4331 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4332 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4334 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
4335 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4336 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
4338 # Header passthrough configuration
4339 passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True) # Store list of strings as JSON array
4341 # CA certificate
4342 ca_certificate: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4343 ca_certificate_sig: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
4344 signing_algorithm: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, default="ed25519") # e.g., "ed25519", "sha256"
4346 # Relationship with local tools this gateway provides
4347 tools: Mapped[List["Tool"]] = relationship(back_populates="gateway", foreign_keys="Tool.gateway_id", cascade="all, delete-orphan", passive_deletes=True)
4349 # Relationship with local prompts this gateway provides
4350 prompts: Mapped[List["Prompt"]] = relationship(back_populates="gateway", cascade="all, delete-orphan", passive_deletes=True)
4352 # Relationship with local resources this gateway provides
4353 resources: Mapped[List["Resource"]] = relationship(back_populates="gateway", cascade="all, delete-orphan", passive_deletes=True)
4355 # # Tools federated from this gateway
4356 # federated_tools: Mapped[List["Tool"]] = relationship(secondary=tool_gateway_table, back_populates="federated_with")
4358 # # Resources federated from this gateway
4359 # federated_resources: Mapped[List["Resource"]] = relationship(secondary=resource_gateway_table, back_populates="federated_with")
4361 # # Prompts federated from this gateway
4362 # federated_prompts: Mapped[List["Prompt"]] = relationship(secondary=prompt_gateway_table, back_populates="federated_with")
4364 # Authorizations
4365 auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None) # "basic", "bearer", "headers", "oauth", "query_param" or None
4366 auth_value: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
4367 auth_query_params: Mapped[Optional[Dict[str, str]]] = mapped_column(
4368 JSON,
4369 nullable=True,
4370 default=None,
4371 comment="Encrypted query parameters for auth. Format: {'param_name': 'encrypted_value'}",
4372 )
4374 # OAuth configuration
4375 oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")
4377 # Team scoping fields for resource organization
4378 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
4379 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4380 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
4382 # Relationship for loading team names (only active teams)
4383 # Uses default lazy loading - team name is only loaded when accessed
4384 # For list/admin views, use explicit joinedload(DbGateway.email_team) for single-query loading
4385 # This avoids adding overhead to hot paths that don't need team names
4386 email_team: Mapped[Optional["EmailTeam"]] = relationship(
4387 "EmailTeam",
4388 primaryjoin="and_(Gateway.team_id == EmailTeam.id, EmailTeam.is_active == True)",
4389 foreign_keys=[team_id],
4390 )
4392 @property
4393 def team(self) -> Optional[str]:
4394 """Return the team name from the `email_team` relationship.
4396 Returns:
4397 Optional[str]: The team name if the gateway belongs to an active team, otherwise None.
4398 """
4399 return self.email_team.name if self.email_team else None
4401 # Per-gateway refresh configuration
4402 refresh_interval_seconds: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, comment="Per-gateway refresh interval in seconds; NULL uses global default")
4403 last_refresh_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True, comment="Timestamp of the last successful tools/resources/prompts refresh")
4405 # Relationship with OAuth tokens
4406 oauth_tokens: Mapped[List["OAuthToken"]] = relationship("OAuthToken", back_populates="gateway", cascade="all, delete-orphan")
4408 # Relationship with registered OAuth clients (DCR)
4410 registered_oauth_clients: Mapped[List["RegisteredOAuthClient"]] = relationship("RegisteredOAuthClient", back_populates="gateway", cascade="all, delete-orphan")
4412 __table_args__ = (
4413 UniqueConstraint("team_id", "owner_email", "slug", name="uq_team_owner_slug_gateway"),
4414 Index("idx_gateways_created_at_id", "created_at", "id"),
4415 )
4418@event.listens_for(Gateway, "after_update")
4419def update_tool_names_on_gateway_update(_mapper, connection, target):
4420 """
4421 If a Gateway's name is updated, efficiently update all of its
4422 child Tools' names with a single SQL statement.
4424 Args:
4425 _mapper: SQLAlchemy mapper for the Gateway model.
4426 connection: Database connection for the update transaction.
4427 target: Gateway instance being updated.
4428 """
4429 # 1. Check if the 'name' field was actually part of the update.
4430 # This is a concise way to see if the value has changed.
4431 if not get_history(target, "name").has_changes():
4432 return
4434 logger.info("Gateway name changed for ID %s. Issuing bulk update for tools.", target.id)
4436 # 2. Get a reference to the underlying database table for Tools
4437 tools_table = Tool.__table__
4439 # 3. Prepare the new values
4440 new_gateway_slug = slugify(target.name)
4441 separator = settings.gateway_tool_name_separator
4443 # 4. Construct a single, powerful UPDATE statement using SQLAlchemy Core.
4444 # This is highly efficient as it all happens in the database.
4445 stmt = (
4446 cast(Any, tools_table)
4447 .update()
4448 .where(tools_table.c.gateway_id == target.id)
4449 .values(name=new_gateway_slug + separator + tools_table.c.custom_name_slug)
4450 .execution_options(synchronize_session=False)
4451 )
4453 # 5. Execute the statement using the connection from the ongoing transaction.
4454 connection.execute(stmt)
4457@event.listens_for(Gateway, "after_update")
4458def update_prompt_names_on_gateway_update(_mapper, connection, target):
4459 """Update prompt names when a gateway name changes.
4461 Args:
4462 _mapper: SQLAlchemy mapper for the Gateway model.
4463 connection: Database connection for the update transaction.
4464 target: Gateway instance being updated.
4465 """
4466 if not get_history(target, "name").has_changes():
4467 return
4469 logger.info("Gateway name changed for ID %s. Issuing bulk update for prompts.", target.id)
4471 prompts_table = Prompt.__table__
4472 new_gateway_slug = slugify(target.name)
4473 separator = settings.gateway_tool_name_separator
4475 stmt = (
4476 cast(Any, prompts_table)
4477 .update()
4478 .where(prompts_table.c.gateway_id == target.id)
4479 .values(name=new_gateway_slug + separator + prompts_table.c.custom_name_slug)
4480 .execution_options(synchronize_session=False)
4481 )
4483 connection.execute(stmt)
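# Illustrative usage sketch (not part of the original module): renaming a
# gateway inside a session fires the two after_update listeners above, so
# child tool and prompt names are each rewritten with a single bulk UPDATE at
# flush time. Updating the slug here mirrors what the listeners derive from
# the new name; the service layer may do more than this.
def _example_rename_gateway(db: Session, gateway_id: str, new_name: str) -> None:
    """Rename a gateway and let the listeners rewrite dependent names."""
    gateway = db.get(Gateway, gateway_id)
    if gateway is None:
        return
    gateway.name = new_name
    gateway.slug = slugify(new_name)
    db.commit()  # after_update listeners issue the bulk UPDATEs on tools/prompts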
4486class A2AAgent(Base):
4487 """
4488 ORM model for A2A (Agent-to-Agent) compatible agents.
4490 A2A agents represent external AI agents that can be integrated into the gateway
4491 and exposed as tools within virtual servers. They support standardized
4492 Agent-to-Agent communication protocols for interoperability.
4493 """
4495 __tablename__ = "a2a_agents"
4497 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
4498 name: Mapped[str] = mapped_column(String(255), nullable=False)
4499 slug: Mapped[str] = mapped_column(String(255), nullable=False)
4500 description: Mapped[Optional[str]] = mapped_column(Text)
4501 endpoint_url: Mapped[str] = mapped_column(String(767), nullable=False)
4502 agent_type: Mapped[str] = mapped_column(String(50), nullable=False, default="generic") # e.g., "openai", "anthropic", "custom"
4503 protocol_version: Mapped[str] = mapped_column(String(10), nullable=False, default="1.0")
4504 capabilities: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict)
4505 # Configuration
4506 config: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict)
4508 # Authorizations
4509 auth_type: Mapped[Optional[str]] = mapped_column(String(20), default=None) # "basic", "bearer", "headers", "oauth", "query_param" or None
4510 auth_value: Mapped[Optional[Dict[str, str]]] = mapped_column(JSON)
4511 auth_query_params: Mapped[Optional[Dict[str, str]]] = mapped_column(
4512 JSON,
4513 nullable=True,
4514 default=None,
4515 comment="Encrypted query parameters for auth. Format: {'param_name': 'encrypted_value'}",
4516 )
4518 # OAuth configuration
4519 oauth_config: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")
4521 # Header passthrough configuration
4522 passthrough_headers: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True) # Store list of strings as JSON array
4524 # Status and metadata
4525 enabled: Mapped[bool] = mapped_column(Boolean, default=True)
4526 reachable: Mapped[bool] = mapped_column(Boolean, default=True)
4527 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4528 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
4529 last_interaction: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
4531 # Tags for categorization
4532 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
4534 # Comprehensive metadata for audit tracking
4535 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4536 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4537 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4538 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4540 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4541 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4542 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4543 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4545 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
4546 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4547 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
4549 # Team scoping fields for resource organization
4550 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
4551 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4552 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
4554 # Associated tool ID (A2A agents are automatically registered as tools)
4555 tool_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("tools.id", ondelete="SET NULL"), nullable=True)
4557 # Relationships
4558 servers: Mapped[List["Server"]] = relationship("Server", secondary=server_a2a_association, back_populates="a2a_agents")
4559 tool: Mapped[Optional["Tool"]] = relationship("Tool", foreign_keys=[tool_id])
4560 metrics: Mapped[List["A2AAgentMetric"]] = relationship("A2AAgentMetric", back_populates="a2a_agent", cascade="all, delete-orphan")
4561 __table_args__ = (
4562 UniqueConstraint("team_id", "owner_email", "slug", name="uq_team_owner_slug_a2a_agent"),
4563 Index("idx_a2a_agents_created_at_id", "created_at", "id"),
4564 Index("idx_a2a_agents_tool_id", "tool_id"),
4565 )
4567 # Relationship with OAuth tokens
4568 # oauth_tokens: Mapped[List["OAuthToken"]] = relationship("OAuthToken", back_populates="gateway", cascade="all, delete-orphan")
4570 # Relationship with registered OAuth clients (DCR)
4571 # registered_oauth_clients: Mapped[List["RegisteredOAuthClient"]] = relationship("RegisteredOAuthClient", back_populates="gateway", cascade="all, delete-orphan")
4573 def _metrics_loaded(self) -> bool:
4574 """Check if metrics relationship is loaded without triggering lazy load.
4576 Returns:
4577 bool: True if metrics are loaded, False otherwise.
4578 """
4579 return "metrics" in sa_inspect(self).dict
4581 @property
4582 def execution_count(self) -> int:
4583 """Total number of interactions with this agent.
4584 Returns 0 if metrics are not loaded (avoids lazy loading).
4586 Returns:
4587 int: The total count of interactions.
4588 """
4589 if not self._metrics_loaded():
4590 return 0
4591 return len(self.metrics)
4593 @property
4594 def successful_executions(self) -> int:
4595 """Number of successful interactions.
4596 Returns 0 if metrics are not loaded (avoids lazy loading).
4598 Returns:
4599 int: The count of successful interactions.
4600 """
4601 if not self._metrics_loaded():
4602 return 0
4603 return sum(1 for m in self.metrics if m.is_success)
4605 @property
4606 def failed_executions(self) -> int:
4607 """Number of failed interactions.
4608 Returns 0 if metrics are not loaded (avoids lazy loading).
4610 Returns:
4611 int: The count of failed interactions.
4612 """
4613 if not self._metrics_loaded():
4614 return 0
4615 return sum(1 for m in self.metrics if not m.is_success)
4617 @property
4618 def failure_rate(self) -> float:
4619 """Failure rate as a percentage.
4620 Returns 0.0 if metrics are not loaded (avoids lazy loading).
4622 Returns:
4623 float: The failure rate percentage.
4624 """
4625 if not self._metrics_loaded():
4626 return 0.0
4627 if not self.metrics:
4628 return 0.0
4629 return (self.failed_executions / len(self.metrics)) * 100
4631 @property
4632 def avg_response_time(self) -> Optional[float]:
4633 """Average response time in seconds.
4634 Returns None if metrics are not loaded (avoids lazy loading).
4636 Returns:
4637 Optional[float]: The average response time, or None if no metrics.
4638 """
4639 if not self._metrics_loaded():
4640 return None
4641 if not self.metrics:
4642 return None
4643 return sum(m.response_time for m in self.metrics) / len(self.metrics)
4645 @property
4646 def last_execution_time(self) -> Optional[datetime]:
4647 """Timestamp of the most recent interaction.
4648 Returns None if metrics are not loaded (avoids lazy loading).
4650 Returns:
4651 Optional[datetime]: The timestamp of the last interaction, or None if no metrics.
4652 """
4653 if not self._metrics_loaded():
4654 return None
4655 if not self.metrics:
4656 return None
4657 return max(m.timestamp for m in self.metrics)
4659 def __repr__(self) -> str:
4660 """Return a string representation of the A2AAgent instance.
4662 Returns:
4663 str: A formatted string containing the agent's ID, name, and type.
4665 Examples:
4666 >>> agent = A2AAgent(id='123', name='test-agent', agent_type='custom')
4667 >>> repr(agent)
4668 "<A2AAgent(id='123', name='test-agent', agent_type='custom')>"
4669 """
4670 return f"<A2AAgent(id='{self.id}', name='{self.name}', agent_type='{self.agent_type}')>"
4673class GrpcService(Base):
4674 """
4675 ORM model for gRPC services with reflection-based discovery.
4677 gRPC services represent external gRPC servers that can be automatically discovered
4678 via server reflection and exposed as MCP tools. The gateway translates between
4679 gRPC/Protobuf and MCP/JSON protocols.
4680 """
4682 __tablename__ = "grpc_services"
4684 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
4685 name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
4686 slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
4687 description: Mapped[Optional[str]] = mapped_column(Text)
4688 target: Mapped[str] = mapped_column(String(767), nullable=False) # host:port format
4690 # Configuration
4691 reflection_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
4692 tls_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
4693 tls_cert_path: Mapped[Optional[str]] = mapped_column(String(767))
4694 tls_key_path: Mapped[Optional[str]] = mapped_column(String(767))
4695 grpc_metadata: Mapped[Dict[str, str]] = mapped_column(JSON, default=dict) # gRPC metadata headers
4697 # Status
4698 enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
4699 reachable: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
4701 # Discovery results from reflection
4702 service_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
4703 method_count: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
4704 discovered_services: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict) # Service descriptors
4705 last_reflection: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
4707 # Tags for categorization
4708 tags: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
4710 # Timestamps
4711 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4712 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
4714 # Comprehensive metadata for audit tracking
4715 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4716 created_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4717 created_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4718 created_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4720 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4721 modified_from_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
4722 modified_via: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
4723 modified_user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4725 import_batch_id: Mapped[Optional[str]] = mapped_column(String(36), nullable=True)
4726 federation_source: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4727 version: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
4729 # Team scoping fields for resource organization
4730 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True)
4731 owner_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
4732 visibility: Mapped[str] = mapped_column(String(20), nullable=False, default="public")
4734 def __repr__(self) -> str:
4735 """Return a string representation of the GrpcService instance.
4737 Returns:
4738 str: A formatted string containing the service's ID, name, and target.
4739 """
4740 return f"<GrpcService(id='{self.id}', name='{self.name}', target='{self.target}')>"
4743class SessionRecord(Base):
4744 """ORM model for sessions from SSE client."""
4746 __tablename__ = "mcp_sessions"
4748 session_id: Mapped[str] = mapped_column(String(255), primary_key=True)
4749 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now) # pylint: disable=not-callable
4750 last_accessed: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # pylint: disable=not-callable
4751 data: Mapped[str] = mapped_column(Text, nullable=True)
4753 messages: Mapped[List["SessionMessageRecord"]] = relationship("SessionMessageRecord", back_populates="session", cascade="all, delete-orphan")
4756class SessionMessageRecord(Base):
4757 """ORM model for messages from SSE client."""
4759 __tablename__ = "mcp_messages"
4761 id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
4762 session_id: Mapped[str] = mapped_column(String(255), ForeignKey("mcp_sessions.session_id"))
4763 message: Mapped[str] = mapped_column(Text, nullable=True)
4764 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now) # pylint: disable=not-callable
4765 last_accessed: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # pylint: disable=not-callable
4767 session: Mapped["SessionRecord"] = relationship("SessionRecord", back_populates="messages")
4770class OAuthToken(Base):
4771 """ORM model for OAuth access and refresh tokens with user association."""
4773 __tablename__ = "oauth_tokens"
4775 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
4776 gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False)
4777 user_id: Mapped[str] = mapped_column(String(255), nullable=False) # OAuth provider's user ID
4778 app_user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email", ondelete="CASCADE"), nullable=False) # MCP Gateway user
4779 access_token: Mapped[str] = mapped_column(Text, nullable=False)
4780 refresh_token: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4781 token_type: Mapped[str] = mapped_column(String(50), default="Bearer")
4782 expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
4783 scopes: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
4784 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4785 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
4787 # Relationships
4788 gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="oauth_tokens")
4789 app_user: Mapped["EmailUser"] = relationship("EmailUser", foreign_keys=[app_user_email])
4791 # Unique constraint: one token per user per gateway
4792 __table_args__ = (UniqueConstraint("gateway_id", "app_user_email", name="uq_oauth_gateway_user"),)
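# Illustrative usage sketch (not part of the original module): the unique
# constraint above guarantees at most one token per (gateway, gateway user),
# so a single lookup plus an expiry check decides whether a refresh is needed.
# The naive/aware handling mirrors the approach used by SSOAuthSession.is_expired.
def _example_get_unexpired_oauth_token(db: Session, gateway_id: str, user_email: str) -> Optional["OAuthToken"]:
    """Return the stored OAuth token unless it has already expired."""
    token = db.execute(
        select(OAuthToken).where(
            OAuthToken.gateway_id == gateway_id,
            OAuthToken.app_user_email == user_email,
        )
    ).scalar_one_or_none()
    if token is None:
        return None
    expires = token.expires_at
    if expires is not None:
        if expires.tzinfo is None:
            expires = expires.replace(tzinfo=timezone.utc)
        if expires <= utc_now():
            return None
    return token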
4795class OAuthState(Base):
4796 """ORM model for OAuth authorization states with TTL for CSRF protection."""
4798 __tablename__ = "oauth_states"
4800 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
4801 gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False)
4802 state: Mapped[str] = mapped_column(String(500), nullable=False, unique=True) # The state parameter
4803 code_verifier: Mapped[Optional[str]] = mapped_column(String(128), nullable=True) # PKCE code verifier (RFC 7636)
4804 expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
4805 used: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
4806 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4808 # Relationships
4809 gateway: Mapped["Gateway"] = relationship("Gateway")
4811 # Index for efficient lookups
4812 __table_args__ = (Index("idx_oauth_state_lookup", "gateway_id", "state"),)
4815class RegisteredOAuthClient(Base):
4816 """Stores dynamically registered OAuth clients (RFC 7591 client mode).
4818 This model maintains client credentials obtained through Dynamic Client
4819 Registration with upstream Authorization Servers.
4820 """
4822 __tablename__ = "registered_oauth_clients"
4824 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
4825 gateway_id: Mapped[str] = mapped_column(String(36), ForeignKey("gateways.id", ondelete="CASCADE"), nullable=False, index=True)
4827 # Registration details
4828 issuer: Mapped[str] = mapped_column(String(500), nullable=False) # AS issuer URL
4829 client_id: Mapped[str] = mapped_column(String(500), nullable=False)
4830 client_secret_encrypted: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # Encrypted
4832 # RFC 7591 fields
4833 redirect_uris: Mapped[str] = mapped_column(Text, nullable=False) # JSON array
4834 grant_types: Mapped[str] = mapped_column(Text, nullable=False) # JSON array
4835 response_types: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # JSON array
4836 scope: Mapped[Optional[str]] = mapped_column(String(1000), nullable=True)
4837 token_endpoint_auth_method: Mapped[str] = mapped_column(String(50), default="client_secret_basic")
4839 # Registration management (RFC 7591 section 4)
4840 registration_client_uri: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
4841 registration_access_token_encrypted: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4843 # Metadata
4844 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now)
4845 expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
4846 is_active: Mapped[bool] = mapped_column(Boolean, default=True)
4848 # Relationships
4849 gateway: Mapped["Gateway"] = relationship("Gateway", back_populates="registered_oauth_clients")
4851 # Unique constraint: one registration per gateway+issuer
4852 __table_args__ = (Index("idx_gateway_issuer", "gateway_id", "issuer", unique=True),)
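# Illustrative usage sketch (not part of the original module): the RFC 7591
# list fields on RegisteredOAuthClient are Text columns holding JSON arrays,
# so they are serialized explicitly before persisting. The redirect URI and
# grant type values below are placeholders, not values used by the gateway.
def _example_store_registered_client(db: Session, gateway_id: str, issuer: str, client_id: str) -> RegisteredOAuthClient:
    """Persist a dynamically registered OAuth client for a gateway."""
    # Standard
    import json  # pylint: disable=import-outside-toplevel

    client = RegisteredOAuthClient(
        gateway_id=gateway_id,
        issuer=issuer,
        client_id=client_id,
        redirect_uris=json.dumps(["https://gateway.example.com/oauth/callback"]),
        grant_types=json.dumps(["authorization_code"]),
    )
    db.add(client)
    db.commit()
    return client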
4855class EmailApiToken(Base):
4856 """Email user API token model for token catalog management.
4858 This model provides comprehensive API token management with scoping,
4859 revocation, and usage tracking for email-based users.
4861 Attributes:
4862 id (str): Unique token identifier
4863 user_email (str): Owner's email address
4864 team_id (str): Team the token is associated with (required for team-based access)
4865 name (str): Human-readable token name
4866 jti (str): JWT ID for revocation checking
4867 token_hash (str): Hashed token value for security
4868 server_id (str): Optional server scope limitation
4869 resource_scopes (List[str]): Permission scopes like ['tools.read']
4870 ip_restrictions (List[str]): IP address/CIDR restrictions
4871 time_restrictions (dict): Time-based access restrictions
4872 usage_limits (dict): Rate limiting and usage quotas
4873 created_at (datetime): Token creation timestamp
4874 expires_at (datetime): Optional expiry timestamp
4875 last_used (datetime): Last usage timestamp
4876 is_active (bool): Active status flag
4877 description (str): Token description
4878 tags (List[str]): Organizational tags
4880 Examples:
4881 >>> token = EmailApiToken(
4882 ... user_email="alice@example.com",
4883 ... name="Production API Access",
4884 ... server_id="prod-server-123",
4885 ... resource_scopes=["tools.read", "resources.read"],
4886 ... description="Read-only access to production tools"
4887 ... )
4888 >>> token.is_scoped_to_server("prod-server-123")
4889 True
4890 >>> token.has_permission("tools.read")
4891 True
4892 """
4894 __tablename__ = "email_api_tokens"
4896 # Core identity fields
4897 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
4898 user_email: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email", ondelete="CASCADE"), nullable=False, index=True)
4899 team_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("email_teams.id", ondelete="SET NULL"), nullable=True, index=True)
4900 name: Mapped[str] = mapped_column(String(255), nullable=False)
4901 jti: Mapped[str] = mapped_column(String(36), unique=True, nullable=False, default=lambda: str(uuid.uuid4()))
4902 token_hash: Mapped[str] = mapped_column(String(255), nullable=False)
4904 # Scoping fields
4905 server_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("servers.id", ondelete="CASCADE"), nullable=True)
4906 resource_scopes: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)
4907 ip_restrictions: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)
4908 time_restrictions: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True, default=dict)
4909 usage_limits: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True, default=dict)
4911 # Lifecycle fields
4912 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
4913 expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
4914 last_used: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
4915 is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
4917 # Metadata fields
4918 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
4919 tags: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True, default=list)
4921 # Unique constraint for user+name combination
4922 __table_args__ = (
4923 UniqueConstraint("user_email", "name", name="uq_email_api_tokens_user_name"),
4924 Index("idx_email_api_tokens_user_email", "user_email"),
4925 Index("idx_email_api_tokens_jti", "jti"),
4926 Index("idx_email_api_tokens_expires_at", "expires_at"),
4927 Index("idx_email_api_tokens_is_active", "is_active"),
4928 )
4930 # Relationships
4931 user: Mapped["EmailUser"] = relationship("EmailUser", back_populates="api_tokens")
4932 team: Mapped[Optional["EmailTeam"]] = relationship("EmailTeam", back_populates="api_tokens")
4933 server: Mapped[Optional["Server"]] = relationship("Server", back_populates="scoped_tokens")
4935 def is_scoped_to_server(self, server_id: str) -> bool:
4936 """Check if token is scoped to a specific server.
4938 Args:
4939 server_id: Server ID to check against.
4941 Returns:
4942 bool: True if token is scoped to the server, False otherwise.
4943 """
4944 return self.server_id == server_id if self.server_id else False
4946 def has_permission(self, permission: str) -> bool:
4947 """Check if token has a specific permission.
4949 Args:
4950 permission: Permission string to check for.
4952 Returns:
4953 bool: True if token has the permission, False otherwise.
4954 """
4955 return permission in (self.resource_scopes or [])
4957 def is_team_token(self) -> bool:
4958 """Check if this is a team-based token.
4960 Returns:
4961 bool: True if token is associated with a team, False otherwise.
4962 """
4963 return self.team_id is not None
4965 def get_effective_permissions(self) -> List[str]:
4966 """Get effective permissions for this token.
4968 For team tokens, this should inherit team permissions.
4969 For personal tokens, this uses the resource_scopes.
4971 Returns:
4972 List[str]: List of effective permissions for this token.
4973 """
4974 if self.is_team_token() and self.team:
4975 # For team tokens, we would inherit team permissions
4976 # This would need to be implemented based on your RBAC system
4977 return self.resource_scopes or []
4978 return self.resource_scopes or []
4980 def is_expired(self) -> bool:
4981 """Check if token is expired.
4983 Returns:
4984 bool: True if token is expired, False otherwise.
4985 """
4986 if not self.expires_at:
4987 return False
4988 return utc_now() > self.expires_at
4990 def is_valid(self) -> bool:
4991 """Check if token is valid (active and not expired).
4993 Returns:
4994 bool: True if token is valid, False otherwise.
4995 """
4996 return self.is_active and not self.is_expired()
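# Illustrative usage sketch (not part of the original module): combining the
# helper methods above to decide whether a presented token may call a
# server-scoped, read-only tools endpoint. The "tools.read" scope string
# follows the example given in the class docstring.
def _example_token_allows(token: EmailApiToken, server_id: str) -> bool:
    """Return True when the token is active, unexpired, and scoped correctly."""
    if not token.is_valid():
        return False
    if token.server_id and not token.is_scoped_to_server(server_id):
        return False
    return token.has_permission("tools.read")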
4999class TokenUsageLog(Base):
5000 """Token usage logging for analytics and security monitoring.
5002 This model tracks every API request made with email API tokens
5003 for security auditing and usage analytics.
5005 Attributes:
5006 id (int): Auto-incrementing log ID
5007 token_jti (str): Token JWT ID reference
5008 user_email (str): Token owner's email
5009 timestamp (datetime): Request timestamp
5010 endpoint (str): API endpoint accessed
5011 method (str): HTTP method used
5012 ip_address (str): Client IP address
5013 user_agent (str): Client user agent
5014 status_code (int): HTTP response status
5015 response_time_ms (int): Response time in milliseconds
5016 blocked (bool): Whether request was blocked
5017 block_reason (str): Reason for blocking if applicable
5019 Examples:
5020 >>> log = TokenUsageLog(
5021 ... token_jti="token-uuid-123",
5022 ... user_email="alice@example.com",
5023 ... endpoint="/tools",
5024 ... method="GET",
5025 ... ip_address="192.168.1.100",
5026 ... status_code=200,
5027 ... response_time_ms=45
5028 ... )
5029 """
5031 __tablename__ = "token_usage_logs"
5033 # Primary key
5034 id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
5036 # Token reference
5037 token_jti: Mapped[str] = mapped_column(String(36), nullable=False, index=True)
5038 user_email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
5040 # Timestamp
5041 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False, index=True)
5043 # Request details
5044 endpoint: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5045 method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
5046 ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) # IPv6 max length
5047 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5049 # Response details
5050 status_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
5051 response_time_ms: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
5053 # Security fields
5054 blocked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
5055 block_reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5057 # Indexes for performance
5058 __table_args__ = (
5059 Index("idx_token_usage_logs_token_jti_timestamp", "token_jti", "timestamp"),
5060 Index("idx_token_usage_logs_user_email_timestamp", "user_email", "timestamp"),
5061 )
5064class TokenRevocation(Base):
5065 """Token revocation blacklist for immediate token invalidation.
5067 This model maintains a blacklist of revoked JWT tokens to provide
5068 immediate token invalidation capabilities.
5070 Attributes:
5071 jti (str): JWT ID (primary key)
5072 revoked_at (datetime): Revocation timestamp
5073 revoked_by (str): Email of user who revoked the token
5074 reason (str): Optional reason for revocation
5076 Examples:
5077 >>> revocation = TokenRevocation(
5078 ... jti="token-uuid-123",
5079 ... revoked_by="admin@example.com",
5080 ... reason="Security compromise"
5081 ... )
5082 """
5084 __tablename__ = "token_revocations"
5086 # JWT ID as primary key
5087 jti: Mapped[str] = mapped_column(String(36), primary_key=True)
5089 # Revocation details
5090 revoked_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
5091 revoked_by: Mapped[str] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=False)
5092 reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5094 # Relationship
5095 revoker: Mapped["EmailUser"] = relationship("EmailUser")
5098class SSOProvider(Base):
5099 """SSO identity provider configuration for OAuth2/OIDC authentication.
5101 Stores configuration and credentials for external identity providers
5102 like GitHub, Google, IBM Security Verify, Okta, Microsoft Entra ID,
5103 and any generic OIDC-compliant provider (Keycloak, Auth0, Authentik, etc.).
5105 Attributes:
5106 id (str): Unique provider ID (e.g., 'github', 'google', 'ibm_verify')
5107 name (str): Human-readable provider name
5108 display_name (str): Display name for UI
5109 provider_type (str): Protocol type ('oauth2', 'oidc')
5110 is_enabled (bool): Whether provider is active
5111 client_id (str): OAuth client ID
5112 client_secret_encrypted (str): Encrypted client secret
5113 authorization_url (str): OAuth authorization endpoint
5114 token_url (str): OAuth token endpoint
5115 userinfo_url (str): User info endpoint
5116 issuer (str): OIDC issuer (optional)
5117 trusted_domains (List[str]): Auto-approved email domains
5118 scope (str): OAuth scope string
5119 auto_create_users (bool): Auto-create users on first login
5120 team_mapping (dict): Organization/domain to team mapping rules
5121 created_at (datetime): Provider creation timestamp
5122 updated_at (datetime): Last configuration update
5124 Examples:
5125 >>> provider = SSOProvider(
5126 ... id="github",
5127 ... name="github",
5128 ... display_name="GitHub",
5129 ... provider_type="oauth2",
5130 ... client_id="gh_client_123",
5131 ... authorization_url="https://github.com/login/oauth/authorize",
5132 ... token_url="https://github.com/login/oauth/access_token",
5133 ... userinfo_url="https://api.github.com/user",
5134 ... scope="user:email"
5135 ... )
5136 """
5138 __tablename__ = "sso_providers"
5140 # Provider identification
5141 id: Mapped[str] = mapped_column(String(50), primary_key=True) # github, google, ibm_verify, okta, keycloak, entra, or any custom ID
5142 name: Mapped[str] = mapped_column(String(100), nullable=False, unique=True)
5143 display_name: Mapped[str] = mapped_column(String(100), nullable=False)
5144 provider_type: Mapped[str] = mapped_column(String(20), nullable=False) # oauth2, oidc
5145 is_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
5147 # OAuth2/OIDC Configuration
5148 client_id: Mapped[str] = mapped_column(String(255), nullable=False)
5149 client_secret_encrypted: Mapped[str] = mapped_column(Text, nullable=False) # Encrypted storage
5150 authorization_url: Mapped[str] = mapped_column(String(500), nullable=False)
5151 token_url: Mapped[str] = mapped_column(String(500), nullable=False)
5152 userinfo_url: Mapped[str] = mapped_column(String(500), nullable=False)
5153 issuer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) # For OIDC
5155 # Provider Settings
5156 trusted_domains: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
5157 scope: Mapped[str] = mapped_column(String(200), default="openid profile email", nullable=False)
5158 auto_create_users: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
5159 team_mapping: Mapped[dict] = mapped_column(JSON, default=dict, nullable=False)
5161 # Provider-specific metadata (e.g., role mappings, claim configurations)
5162 provider_metadata: Mapped[dict] = mapped_column(JSON, default=dict, nullable=False)
5164 # Timestamps
5165 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
5166 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
5168 def __repr__(self):
5169 """String representation of SSO provider.
5171 Returns:
5172 String representation of the SSO provider instance
5173 """
5174 return f"<SSOProvider(id='{self.id}', name='{self.name}', enabled={self.is_enabled})>"
5177class SSOAuthSession(Base):
5178 """Tracks SSO authentication sessions and state.
5180 Maintains OAuth state parameters and callback information during
5181 the SSO authentication flow for security and session management.
5183 Attributes:
5184 id (str): Unique session ID (UUID)
5185 provider_id (str): Reference to SSO provider
5186 state (str): OAuth state parameter for CSRF protection
5187 code_verifier (str): PKCE code verifier (for OAuth 2.1)
5188 nonce (str): OIDC nonce parameter
5189 redirect_uri (str): OAuth callback URI
5190 expires_at (datetime): Session expiration time
5191 user_email (str): User email after successful auth (optional)
5192 created_at (datetime): Session creation timestamp
5194 Examples:
5195 >>> session = SSOAuthSession(
5196 ... provider_id="github",
5197 ... state="csrf-state-token",
5198 ... redirect_uri="https://gateway.example.com/auth/sso-callback/github"
5199 ... )
5200 """
5202 __tablename__ = "sso_auth_sessions"
5204 # Session identification
5205 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
5206 provider_id: Mapped[str] = mapped_column(String(50), ForeignKey("sso_providers.id"), nullable=False)
5208 # OAuth/OIDC parameters
5209 state: Mapped[str] = mapped_column(String(128), nullable=False, unique=True) # CSRF protection
5210 code_verifier: Mapped[Optional[str]] = mapped_column(String(128), nullable=True) # PKCE
5211 nonce: Mapped[Optional[str]] = mapped_column(String(128), nullable=True) # OIDC
5212 redirect_uri: Mapped[str] = mapped_column(String(500), nullable=False)
5214 # Session lifecycle
5215 expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: utc_now() + timedelta(minutes=10), nullable=False) # 10-minute expiration
5216 user_email: Mapped[Optional[str]] = mapped_column(String(255), ForeignKey("email_users.email"), nullable=True)
5217 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
5219 # Relationships
5220 provider: Mapped["SSOProvider"] = relationship("SSOProvider")
5221 user: Mapped[Optional["EmailUser"]] = relationship("EmailUser")
5223 @property
5224 def is_expired(self) -> bool:
5225 """Check if SSO auth session has expired.
5227 Returns:
5228 True if the session has expired, False otherwise
5229 """
5230 now = utc_now()
5231 expires = self.expires_at
5233 # Handle timezone mismatch by converting naive datetime to UTC if needed
5234 if expires.tzinfo is None:
5235 # expires_at is timezone-naive, assume it's UTC
5236 expires = expires.replace(tzinfo=timezone.utc)
5237 elif now.tzinfo is None: 5237 ↛ 5241  line 5237 didn't jump to line 5241 because the condition on line 5237 was always true
5238 # now is timezone-naive (shouldn't happen with utc_now, but just in case)
5239 now = now.replace(tzinfo=timezone.utc)
5241 return now > expires
5243 def __repr__(self):
5244 """String representation of SSO auth session.
5246 Returns:
5247 str: String representation of the session object
5248 """
5249 return f"<SSOAuthSession(id='{self.id}', provider='{self.provider_id}', expired={self.is_expired})>"
5252# Event listeners for validation
5253def validate_tool_schema(mapper, connection, target):
5254 """
5255 Validate tool schema before insert/update.
5257 Args:
5258 mapper: The mapper being used for the operation.
5259 connection: The database connection.
5260 target: The target object being validated.
5262 Raises:
5263 ValueError: If the tool input schema is invalid.
5265 """
5266 # You can use mapper and connection later, if required.
5267 _ = mapper
5268 _ = connection
5270 allowed_validator_names = {
5271 "Draft4Validator",
5272 "Draft6Validator",
5273 "Draft7Validator",
5274 "Draft201909Validator",
5275 "Draft202012Validator",
5276 }
5278 if hasattr(target, "input_schema"):
5279 schema = target.input_schema
5280 if schema is None:
5281 return
5283 try:
5284 # If $schema is missing, default to Draft 2020-12 as per MCP spec.
5285 if schema.get("$schema") is None:
5286 validator_cls = jsonschema.Draft202012Validator
5287 else:
5288 validator_cls = jsonschema.validators.validator_for(schema)
5290 if validator_cls.__name__ not in allowed_validator_names:
5291 logger.warning(f"Unsupported JSON Schema draft: {validator_cls.__name__}")
5293 validator_cls.check_schema(schema)
5294 except jsonschema.exceptions.SchemaError as e:
5295 logger.warning(f"Invalid tool input schema: {str(e)}")
5296 if settings.json_schema_validation_strict:
5297 raise ValueError(f"Invalid tool input schema: {str(e)}") from e
5300def validate_tool_name(mapper, connection, target):
5301 """
5302 Validate tool name before insert/update. Check if the name matches the required pattern.
5304 Args:
5305 mapper: The mapper being used for the operation.
5306 connection: The database connection.
5307 target: The target object being validated.
5309 Raises:
5310 ValueError: If the tool name contains invalid characters.
5311 """
5312 # You can use mapper and connection later, if required.
5313 _ = mapper
5314 _ = connection
5315 if hasattr(target, "name"):
5316 try:
5317 SecurityValidator.validate_tool_name(target.name)
5318 except ValueError as e:
5319 raise ValueError(f"Invalid tool name: {str(e)}") from e
5322def validate_prompt_schema(mapper, connection, target):
5323 """
5324 Validate prompt argument schema before insert/update.
5326 Args:
5327 mapper: The mapper being used for the operation.
5328 connection: The database connection.
5329 target: The target object being validated.
5331 Raises:
5332 ValueError: If the prompt argument schema is invalid.
5333 """
5334 # You can use mapper and connection later, if required.
5335 _ = mapper
5336 _ = connection
5338 allowed_validator_names = {
5339 "Draft4Validator",
5340 "Draft6Validator",
5341 "Draft7Validator",
5342 "Draft201909Validator",
5343 "Draft202012Validator",
5344 }
5346 if hasattr(target, "argument_schema"):
5347 schema = target.argument_schema
5348 if schema is None:
5349 return
5351 try:
5352 # If $schema is missing, default to Draft 2020-12 as per MCP spec.
5353 if schema.get("$schema") is None:
5354 validator_cls = jsonschema.Draft202012Validator
5355 else:
5356 validator_cls = jsonschema.validators.validator_for(schema)
5358 if validator_cls.__name__ not in allowed_validator_names:
5359 logger.warning(f"Unsupported JSON Schema draft: {validator_cls.__name__}")
5361 validator_cls.check_schema(schema)
5362 except jsonschema.exceptions.SchemaError as e:
5363 logger.warning(f"Invalid prompt argument schema: {str(e)}")
5364 if settings.json_schema_validation_strict:
5365 raise ValueError(f"Invalid prompt argument schema: {str(e)}") from e
5368# Register validation listeners
5370listen(Tool, "before_insert", validate_tool_schema)
5371listen(Tool, "before_update", validate_tool_schema)
5372listen(Tool, "before_insert", validate_tool_name)
5373listen(Tool, "before_update", validate_tool_name)
5374listen(Prompt, "before_insert", validate_prompt_schema)
5375listen(Prompt, "before_update", validate_prompt_schema)
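# Illustrative usage sketch (not part of the original module): with the
# listeners above registered, an invalid tool name raises ValueError at flush
# time, and an invalid input schema does the same when
# settings.json_schema_validation_strict is enabled (otherwise it is only logged).
def _example_add_tool_with_validation(db: Session, tool: Tool) -> None:
    """Persist a tool; the before_insert validators run during the flush."""
    db.add(tool)
    try:
        db.commit()
    except ValueError:
        db.rollback()
        raise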
5378def get_db() -> Generator[Session, Any, None]:
5379 """
5380 Dependency to get database session.
5382 Commits the transaction on successful completion to avoid implicit rollbacks
5383 for read-only operations. Rolls back explicitly on exception.
5385 Yields:
5386 SessionLocal: A SQLAlchemy database session.
5388 Raises:
5389 Exception: Re-raises any exception after rolling back the transaction.
5391 Examples:
5392 >>> from mcpgateway.db import get_db
5393 >>> gen = get_db()
5394 >>> db = next(gen)
5395 >>> hasattr(db, 'query')
5396 True
5397 >>> hasattr(db, 'commit')
5398 True
5399 >>> gen.close()
5400 """
5401 db = SessionLocal()
5402 try:
5403 yield db
5404 db.commit()
5405 except Exception:
5406 try:
5407 db.rollback()
5408 except Exception:
5409 try:
5410 db.invalidate()
5411 except Exception:
5412 pass # nosec B110 - Best effort cleanup on connection failure
5413 raise
5414 finally:
5415 db.close()
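# Illustrative usage sketch (not part of the original module): get_db is
# written as a generator dependency, e.g. for FastAPI's Depends. The route
# path and FastAPI wiring below are assumptions used purely for illustration.
def _example_fastapi_wiring() -> None:
    """Show how get_db is typically injected into a request handler."""
    # Third-Party
    from fastapi import Depends, FastAPI  # pylint: disable=import-outside-toplevel

    app = FastAPI()

    @app.get("/servers/count")
    def count_servers(db: Session = Depends(get_db)) -> dict:
        return {"count": db.query(func.count(Server.id)).scalar()}  # pylint: disable=not-callable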
5418def get_for_update(
5419 db: Session,
5420 model,
5421 entity_id=None,
5422 where: Optional[Any] = None,
5423 skip_locked: bool = False,
5424 nowait: bool = False,
5425 lock_timeout_ms: Optional[int] = None,
5426 options: Optional[List] = None,
5427):
5428 """Get entity with row lock for update operations.
5430 Args:
5431 db: SQLAlchemy Session
5432 model: ORM model class
5433 entity_id: Primary key value (optional if `where` provided)
5434 where: Optional SQLAlchemy WHERE clause to locate rows for conflict detection
5435 skip_locked: If False (default), wait for locked rows. If True, skip locked
5436 rows (returns None if row is locked). Use False for conflict checks and
5437 entity updates to ensure consistency. Use True only for job-queue patterns.
5438 nowait: If True, fail immediately if row is locked (raises OperationalError).
5439 Use this for operations that should not block. Default False.
5440 lock_timeout_ms: Optional lock timeout in milliseconds for PostgreSQL.
5441 If set, the query will wait at most this long for locks before failing.
5442 Only applies to PostgreSQL. Default None (use database default).
5443 options: Optional list of loader options (e.g., selectinload(...))
5445 Returns:
5446 The model instance or None
5448 Raises:
5449 sqlalchemy.exc.OperationalError: If nowait=True and row is locked, or if
5450 lock_timeout_ms is exceeded.
5452 Notes:
5453 - On PostgreSQL this acquires a FOR UPDATE row lock.
5454 - On SQLite (or other backends that don't support FOR UPDATE) it
5455 falls back to a regular select; when ``options`` is None it uses
5456 ``db.get`` for efficiency, otherwise it executes a select with
5457 the provided loader options.
5458 """
5459 dialect = ""
5460 try:
5461 dialect = db.bind.dialect.name
5462 except Exception:
5463 dialect = ""
5465 # Build base select statement. Prefer `where` when provided, otherwise use primary key `entity_id`.
5466 if where is not None:
5467 stmt = select(model).where(where)
5468 elif entity_id is not None:
5469 stmt = select(model).where(model.id == entity_id)
5470 else:
5471 return None
5473 if options:
5474 stmt = stmt.options(*options)
5476 if dialect != "postgresql":
5477 # SQLite and others: no FOR UPDATE support
5478 # Use db.get optimization only when querying by primary key without loader options
5479 if not options and where is None and entity_id is not None:
5480 return db.get(model, entity_id)
5481 return db.execute(stmt).scalar_one_or_none()
5483 # PostgreSQL: set lock timeout if specified
5484 if lock_timeout_ms is not None:
5485 db.execute(text(f"SET LOCAL lock_timeout = '{lock_timeout_ms}ms'"))
5487 # PostgreSQL: apply FOR UPDATE with optional nowait
5488 stmt = stmt.with_for_update(skip_locked=skip_locked, nowait=nowait)
5489 return db.execute(stmt).scalar_one_or_none()
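# Illustrative usage sketch (not part of the original module): locking a
# gateway row before mutating it so concurrent writers on PostgreSQL serialize
# on the row instead of racing; on SQLite this degrades to a plain lookup as
# described in the Notes above. The 5-second lock timeout is an arbitrary choice.
def _example_touch_gateway(db: Session, gateway_id: str) -> None:
    """Update last_seen under a row lock where the backend supports it."""
    gateway = get_for_update(db, Gateway, entity_id=gateway_id, lock_timeout_ms=5000)
    if gateway is None:
        return
    gateway.last_seen = utc_now()
    db.commit()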
5492# Using the existing get_db generator to create a context manager for fresh sessions
5493fresh_db_session = contextmanager(get_db) # type: ignore
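# Illustrative usage sketch (not part of the original module): fresh_db_session
# wraps get_db in a context manager for code paths outside request handling,
# e.g. a background refresh job that needs its own short-lived session.
def _example_background_lookup(gateway_id: str) -> Optional[datetime]:
    """Read a gateway's last refresh timestamp in a standalone session."""
    with fresh_db_session() as db:
        gateway = db.get(Gateway, gateway_id)
        return gateway.last_refresh_at if gateway else None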
5496def patch_string_columns_for_mariadb(base, engine_) -> None:
5497 """
5498 MariaDB requires VARCHAR to have an explicit length.
5499 Auto-assign VARCHAR(255) to any String() columns without a length.
5501 Args:
5502 base (DeclarativeBase): SQLAlchemy Declarative Base containing metadata.
5503 engine_ (Engine): SQLAlchemy engine, used to detect MariaDB dialect.
5504 """
5505 if engine_.dialect.name != "mariadb":
5506 return
5508 for table in base.metadata.tables.values():
5509 for column in table.columns:
5510 if isinstance(column.type, String) and column.type.length is None:
5511 # Replace with VARCHAR(255)
5512 column.type = VARCHAR(255)
5515def extract_json_field(column, json_path: str, dialect_name: Optional[str] = None):
5516 """Extract a JSON field in a database-agnostic way.
5518 This function provides cross-database compatibility for JSON field extraction,
5519 supporting both SQLite and PostgreSQL backends.
5521 Args:
5522 column: SQLAlchemy column containing JSON data
5523 json_path: JSON path in SQLite format (e.g., '$.\"tool.name\"')
5524 dialect_name: Optional database dialect name to override global backend.
5525 If not provided, uses the global backend from DATABASE_URL.
5526 Use this when querying a different database than the default.
5528 Returns:
5529 SQLAlchemy expression for extracting the JSON field as text
5531 Note:
5532 - For SQLite: Uses json_extract(column, '$.\"key\"')
5533 - For PostgreSQL: Uses column ->> 'key' operator
5534 - Backend-specific behavior is tested via unit tests in test_db.py
5535 """
5536 effective_backend = dialect_name if dialect_name is not None else backend
5538 if effective_backend == "postgresql":
5539 # PostgreSQL uses ->> operator for text extraction
5540 # Convert $.\"key\" or $.\"nested.key\" format to just the key
5541 # Handle both simple keys and nested keys with dots
5542 path_key = json_path.replace('$."', "").replace('"', "")
5543 return column.op("->>")(path_key)
5545 # SQLite and other databases use json_extract function
5546 # Keep the original $.\"key\" format
5547 return func.json_extract(column, json_path)
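# Illustrative usage sketch (not part of the original module): filtering on a
# JSON key in a backend-agnostic way. Gateway.capabilities is a JSON column
# defined above; the "tools" key is an assumption about the capabilities shape.
def _example_gateways_with_tools_capability(db: Session) -> List[Gateway]:
    """Return gateways whose capabilities JSON advertises tool support."""
    expr = extract_json_field(Gateway.capabilities, '$."tools"')
    return db.execute(select(Gateway).where(expr.isnot(None))).scalars().all()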
5550# Create all tables
5551def init_db():
5552 """
5553 Initialize database tables.
5555 Raises:
5556 Exception: If database initialization fails.
5557 """
5558 try:
5559 # Apply MariaDB compatibility fix
5560 patch_string_columns_for_mariadb(Base, engine)
5562 # Base.metadata.drop_all(bind=engine)
5563 Base.metadata.create_all(bind=engine)
5564 except SQLAlchemyError as e:
5565 raise Exception(f"Failed to initialize database: {str(e)}") from e
5568# ============================================================================
5569# Structured Logging Models
5570# ============================================================================
5573class StructuredLogEntry(Base):
5574 """Structured log entry for comprehensive logging and analysis.
5576 Stores all log entries with correlation IDs, performance metrics,
5577 and security context for advanced search and analytics.
5578 """
5580 __tablename__ = "structured_log_entries"
5582 # Primary key
5583 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
5585 # Timestamps
5586 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)
5588 # Correlation and request tracking
5589 correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
5590 request_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
5592 # Log metadata
5593 level: Mapped[str] = mapped_column(String(20), nullable=False, index=True) # DEBUG, INFO, WARNING, ERROR, CRITICAL
5594 component: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
5595 message: Mapped[str] = mapped_column(Text, nullable=False)
5596 logger: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5598 # User and request context
5599 user_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
5600 user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
5601 client_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) # IPv6 max length
5602 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5603 request_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
5604 request_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
5606 # Performance data
5607 duration_ms: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
5608 operation_type: Mapped[Optional[str]] = mapped_column(String(100), index=True, nullable=True)
5610 # Security context
5611 is_security_event: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)
5612 security_severity: Mapped[Optional[str]] = mapped_column(String(20), index=True, nullable=True) # LOW, MEDIUM, HIGH, CRITICAL
5613 threat_indicators: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5615 # Structured context data
5616 context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5617 error_details: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5618 performance_metrics: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5620 # System information
5621 hostname: Mapped[str] = mapped_column(String(255), nullable=False)
5622 process_id: Mapped[int] = mapped_column(Integer, nullable=False)
5623 thread_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
5624 version: Mapped[str] = mapped_column(String(50), nullable=False)
5625 environment: Mapped[str] = mapped_column(String(50), nullable=False, default="production")
5627 # OpenTelemetry trace context
5628 trace_id: Mapped[Optional[str]] = mapped_column(String(32), index=True, nullable=True)
5629 span_id: Mapped[Optional[str]] = mapped_column(String(16), nullable=True)
5631 # Indexes for performance
5632 __table_args__ = (
5633 Index("idx_log_correlation_time", "correlation_id", "timestamp"),
5634 Index("idx_log_user_time", "user_id", "timestamp"),
5635 Index("idx_log_level_time", "level", "timestamp"),
5636 Index("idx_log_component_time", "component", "timestamp"),
5637 Index("idx_log_security", "is_security_event", "security_severity", "timestamp"),
5638 Index("idx_log_operation", "operation_type", "timestamp"),
5639 Index("idx_log_trace", "trace_id", "timestamp"),
5640 )
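# Illustrative sketch (not part of the measured source): the composite index
# idx_log_correlation_time is intended for exactly this access pattern - fetching every
# entry for one correlation id in time order. The session and id value are assumptions.
def _example_logs_for_correlation(db: Session, correlation_id: str) -> List["StructuredLogEntry"]:
    """Return all log entries for a correlation id, oldest first (sketch)."""
    stmt = select(StructuredLogEntry).where(StructuredLogEntry.correlation_id == correlation_id).order_by(StructuredLogEntry.timestamp)
    return list(db.execute(stmt).scalars())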
5643class PerformanceMetric(Base):
5644 """Aggregated performance metrics from log analysis.
5646 Stores time-windowed aggregations of operation performance
5647 for analytics and trend analysis.
5648 """
5650 __tablename__ = "performance_metrics"
5652 # Primary key
5653 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
5655 # Timestamp
5656 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)
5658 # Metric identification
5659 operation_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
5660 component: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
5662 # Aggregated metrics
5663 request_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
5664 error_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
5665 error_rate: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
5667 # Duration metrics (in milliseconds)
5668 avg_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5669 min_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5670 max_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5671 p50_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5672 p95_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5673 p99_duration_ms: Mapped[float] = mapped_column(Float, nullable=False)
5675 # Time window
5676 window_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
5677 window_end: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
5678 window_duration_seconds: Mapped[int] = mapped_column(Integer, nullable=False)
5680 # Additional context
5681 metric_metadata: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5683 __table_args__ = (
5684 Index("idx_perf_operation_time", "operation_type", "window_start"),
5685 Index("idx_perf_component_time", "component", "window_start"),
5686 Index("idx_perf_window", "window_start", "window_end"),
5687 )
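# Illustrative sketch (not part of the measured source): assembling a PerformanceMetric row
# from numbers an external aggregator already computed. The derived fields show the intended
# relationships between columns (error_rate = error_count / request_count, nearest-rank
# percentiles, window_duration_seconds from the window bounds); the label values are assumptions.
def _example_build_metric(window_start: datetime, window_end: datetime, durations_ms: List[float], error_count: int) -> PerformanceMetric:
    """Build (but do not persist) one aggregated metric row for a time window (sketch)."""
    request_count = len(durations_ms)
    ordered = sorted(durations_ms)
    return PerformanceMetric(
        operation_type="tool_invoke",  # assumed label
        component="gateway",  # assumed label
        request_count=request_count,
        error_count=error_count,
        error_rate=(error_count / request_count) if request_count else 0.0,
        avg_duration_ms=(sum(ordered) / request_count) if request_count else 0.0,
        min_duration_ms=ordered[0] if ordered else 0.0,
        max_duration_ms=ordered[-1] if ordered else 0.0,
        p50_duration_ms=ordered[int(0.50 * (request_count - 1))] if ordered else 0.0,
        p95_duration_ms=ordered[int(0.95 * (request_count - 1))] if ordered else 0.0,
        p99_duration_ms=ordered[int(0.99 * (request_count - 1))] if ordered else 0.0,
        window_start=window_start,
        window_end=window_end,
        window_duration_seconds=int((window_end - window_start).total_seconds()),
    )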
5690class SecurityEvent(Base):
5691 """Security event logging for threat detection and audit trails.
5693 Specialized table for security events with enhanced context
5694 and threat analysis capabilities.
5695 """
5697 __tablename__ = "security_events"
5699 # Primary key
5700 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
5702 # Timestamps
5703 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)
5704 detected_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=utc_now)
5706 # Correlation tracking
5707 correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
5708 log_entry_id: Mapped[Optional[str]] = mapped_column(String(36), ForeignKey("structured_log_entries.id"), index=True, nullable=True)
5710 # Event classification
5711 event_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True) # auth_failure, suspicious_activity, rate_limit, etc.
5712 severity: Mapped[str] = mapped_column(String(20), nullable=False, index=True) # LOW, MEDIUM, HIGH, CRITICAL
5713 category: Mapped[str] = mapped_column(String(50), nullable=False, index=True) # authentication, authorization, data_access, etc.
5715 # User and request context
5716 user_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
5717 user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
5718 client_ip: Mapped[str] = mapped_column(String(45), nullable=False, index=True)
5719 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5721 # Event details
5722 description: Mapped[str] = mapped_column(Text, nullable=False)
5723 action_taken: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # blocked, allowed, flagged, etc.
5725 # Threat analysis
5726 threat_score: Mapped[float] = mapped_column(Float, nullable=False, default=0.0) # 0.0-1.0
5727 threat_indicators: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False, default=dict)
5728 failed_attempts_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
5730 # Resolution tracking
5731 resolved: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)
5732 resolved_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
5733 resolved_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5734 resolution_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5736 # Alert tracking
5737 alert_sent: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
5738 alert_sent_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
5739 alert_recipients: Mapped[Optional[List[str]]] = mapped_column(JSON, nullable=True)
5741 # Additional context
5742 context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
5744 __table_args__ = (
5745 Index("idx_security_type_time", "event_type", "timestamp"),
5746 Index("idx_security_severity_time", "severity", "timestamp"),
5747 Index("idx_security_user_time", "user_id", "timestamp"),
5748 Index("idx_security_ip_time", "client_ip", "timestamp"),
5749 Index("idx_security_unresolved", "resolved", "severity", "timestamp"),
5750 )
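# Illustrative sketch (not part of the measured source): recording a failed-authentication
# event. The event_type/severity/category strings follow the examples in the column comments
# above; the scoring heuristic and remaining values are assumptions.
def _example_record_auth_failure(db: Session, client_ip: str, user_email: Optional[str], attempts: int) -> SecurityEvent:
    """Persist a MEDIUM-severity auth_failure event (sketch)."""
    event_row = SecurityEvent(
        event_type="auth_failure",
        severity="MEDIUM",
        category="authentication",
        client_ip=client_ip,
        user_email=user_email,
        description=f"{attempts} consecutive failed login attempts",
        threat_score=min(1.0, attempts / 10.0),  # assumed heuristic, clamped to the 0.0-1.0 range
        failed_attempts_count=attempts,
    )
    db.add(event_row)
    db.commit()
    return event_row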
5753# ---------------------------------------------------------------------------
5754# LLM Provider Configuration Models
5755# ---------------------------------------------------------------------------
5758class LLMProviderType:
5759 """Constants for LLM provider types."""
5761 OPENAI = "openai"
5762 AZURE_OPENAI = "azure_openai"
5763 ANTHROPIC = "anthropic"
5764 BEDROCK = "bedrock"
5765 GOOGLE_VERTEX = "google_vertex"
5766 WATSONX = "watsonx"
5767 OLLAMA = "ollama"
5768 OPENAI_COMPATIBLE = "openai_compatible"
5769 COHERE = "cohere"
5770 MISTRAL = "mistral"
5771 GROQ = "groq"
5772 TOGETHER = "together"
5774 @classmethod
5775 def get_all_types(cls) -> List[str]:
5776 """Get list of all supported provider types.
5778 Returns:
5779 List of provider type strings.
5780 """
5781 return [
5782 cls.OPENAI,
5783 cls.AZURE_OPENAI,
5784 cls.ANTHROPIC,
5785 cls.BEDROCK,
5786 cls.GOOGLE_VERTEX,
5787 cls.WATSONX,
5788 cls.OLLAMA,
5789 cls.OPENAI_COMPATIBLE,
5790 cls.COHERE,
5791 cls.MISTRAL,
5792 cls.GROQ,
5793 cls.TOGETHER,
5794 ]
5796 @classmethod
5797 def get_provider_defaults(cls) -> Dict[str, Dict[str, Any]]:
5798 """Get default configuration for each provider type.
5800 Returns:
5801 Dictionary mapping provider type to default config.
5802 """
5803 return {
5804 cls.OPENAI: {
5805 "api_base": "https://api.openai.com/v1",
5806 "default_model": "gpt-4o",
5807 "supports_model_list": True,
5808 "models_endpoint": "/models",
5809 "requires_api_key": True,
5810 "description": "OpenAI GPT models (GPT-4, GPT-4o, etc.)",
5811 },
5812 cls.AZURE_OPENAI: {
5813 "api_base": "https://{resource}.openai.azure.com/openai/deployments/{deployment}",
5814 "default_model": "",
5815 "supports_model_list": False,
5816 "requires_api_key": True,
5817 "description": "Azure OpenAI Service",
5818 },
5819 cls.ANTHROPIC: {
5820 "api_base": "https://api.anthropic.com",
5821 "default_model": "claude-sonnet-4-20250514",
5822 "supports_model_list": False,
5823 "requires_api_key": True,
5824 "description": "Anthropic Claude models",
5825 },
5826 cls.OLLAMA: {
5827 "api_base": "http://localhost:11434/v1",
5828 "default_model": "llama3.2",
5829 "supports_model_list": True,
5830 "models_endpoint": "/models",
5831 "requires_api_key": False,
5832 "description": "Local Ollama server (OpenAI-compatible)",
5833 },
5834 cls.OPENAI_COMPATIBLE: {
5835 "api_base": "http://localhost:8080/v1",
5836 "default_model": "",
5837 "supports_model_list": True,
5838 "models_endpoint": "/models",
5839 "requires_api_key": False,
5840 "description": "Any OpenAI-compatible API server",
5841 },
5842 cls.COHERE: {
5843 "api_base": "https://api.cohere.ai/v1",
5844 "default_model": "command-r-plus",
5845 "supports_model_list": True,
5846 "models_endpoint": "/models",
5847 "requires_api_key": True,
5848 "description": "Cohere Command models",
5849 },
5850 cls.MISTRAL: {
5851 "api_base": "https://api.mistral.ai/v1",
5852 "default_model": "mistral-large-latest",
5853 "supports_model_list": True,
5854 "models_endpoint": "/models",
5855 "requires_api_key": True,
5856 "description": "Mistral AI models",
5857 },
5858 cls.GROQ: {
5859 "api_base": "https://api.groq.com/openai/v1",
5860 "default_model": "llama-3.3-70b-versatile",
5861 "supports_model_list": True,
5862 "models_endpoint": "/models",
5863 "requires_api_key": True,
5864 "description": "Groq high-speed inference",
5865 },
5866 cls.TOGETHER: {
5867 "api_base": "https://api.together.xyz/v1",
5868 "default_model": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
5869 "supports_model_list": True,
5870 "models_endpoint": "/models",
5871 "requires_api_key": True,
5872 "description": "Together AI inference",
5873 },
5874 cls.BEDROCK: {
5875 "api_base": "",
5876 "default_model": "anthropic.claude-3-sonnet-20240229-v1:0",
5877 "supports_model_list": False,
5878 "requires_api_key": False,
5879 "description": "AWS Bedrock (uses IAM credentials)",
5880 },
5881 cls.GOOGLE_VERTEX: {
5882 "api_base": "",
5883 "default_model": "gemini-1.5-pro",
5884 "supports_model_list": False,
5885 "requires_api_key": False,
5886 "description": "Google Vertex AI (uses service account)",
5887 },
5888 cls.WATSONX: {
5889 "api_base": "https://us-south.ml.cloud.ibm.com",
5890 "default_model": "ibm/granite-13b-chat-v2",
5891 "supports_model_list": False,
5892 "requires_api_key": True,
5893 "description": "IBM watsonx.ai",
5894 },
5895 }
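# Illustrative sketch (not part of the measured source): reading the bundled defaults for one
# provider type before creating an LLMProvider row. The chosen type is an assumption.
def _example_openai_defaults() -> Dict[str, Any]:
    """Return the default configuration for the OpenAI provider type (sketch)."""
    defaults = LLMProviderType.get_provider_defaults()[LLMProviderType.OPENAI]
    # e.g. defaults["api_base"] == "https://api.openai.com/v1" and defaults["requires_api_key"] is True
    return defaults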
5898class LLMProvider(Base):
5899 """ORM model for LLM provider configurations.
5901 Stores credentials and settings for external LLM providers
5902 used by the internal LLM Chat feature.
5904 Attributes:
5905 id: Unique identifier (UUID)
5906 name: Display name (unique)
5907 slug: URL-safe identifier (unique)
5908 provider_type: Provider type (openai, anthropic, etc.)
5909 api_key: Encrypted API key
5910 api_base: Base URL for API requests
5911 api_version: API version (for Azure OpenAI)
5912 config: Provider-specific settings (JSON)
5913 default_model: Default model ID
5914 default_temperature: Default temperature (0.0-2.0)
5915 default_max_tokens: Default max tokens
5916 enabled: Whether provider is enabled
5917 health_status: Current health status (healthy/unhealthy/unknown)
5918 last_health_check: Last health check timestamp
5919 plugin_ids: Attached plugin IDs (JSON)
5920 """
5922 __tablename__ = "llm_providers"
5924 # Primary key
5925 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
5927 # Basic info
5928 name: Mapped[str] = mapped_column(String(255), nullable=False)
5929 slug: Mapped[str] = mapped_column(String(255), nullable=False)
5930 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5932 # Provider type
5933 provider_type: Mapped[str] = mapped_column(String(50), nullable=False)
5935 # Credentials (encrypted)
5936 api_key: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
5937 api_base: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
5938 api_version: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
5940 # Provider-specific configuration
5941 config: Mapped[Dict[str, Any]] = mapped_column(JSON, default=dict, nullable=False)
5943 # Default settings
5944 default_model: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5945 default_temperature: Mapped[float] = mapped_column(Float, default=0.7, nullable=False)
5946 default_max_tokens: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
5948 # Status
5949 enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
5950 health_status: Mapped[str] = mapped_column(String(20), default="unknown", nullable=False)
5951 last_health_check: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
5953 # Plugin integration
5954 plugin_ids: Mapped[List[str]] = mapped_column(JSON, default=list, nullable=False)
5956 # Timestamps
5957 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
5958 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
5960 # Audit fields
5961 created_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5962 modified_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
5964 # Relationships
5965 models: Mapped[List["LLMModel"]] = relationship("LLMModel", back_populates="provider", cascade="all, delete-orphan")
5967 __table_args__ = (
5968 UniqueConstraint("name", name="uq_llm_providers_name"),
5969 UniqueConstraint("slug", name="uq_llm_providers_slug"),
5970 Index("idx_llm_providers_enabled", "enabled"),
5971 Index("idx_llm_providers_type", "provider_type"),
5972 Index("idx_llm_providers_health", "health_status"),
5973 )
5975 def __repr__(self) -> str:
5976 """Return string representation.
5978 Returns:
5979 String representation of the provider.
5980 """
5981 return f"<LLMProvider(id='{self.id}', name='{self.name}', type='{self.provider_type}')>"
5984class LLMModel(Base):
5985 """ORM model for LLM model definitions.
5987 Stores model metadata and capabilities for each provider.
5989 Attributes:
5990 id: Unique identifier (UUID)
5991 provider_id: Foreign key to llm_providers
5992 model_id: Provider's model ID (e.g., gpt-4o)
5993 model_name: Display name
5994 model_alias: Optional routing alias
5995 supports_chat: Whether model supports chat completions
5996 supports_streaming: Whether model supports streaming
5997 supports_function_calling: Whether model supports function/tool calling
5998 supports_vision: Whether model supports vision/images
5999 context_window: Maximum context tokens
6000 max_output_tokens: Maximum output tokens
6001 enabled: Whether model is enabled
6002 deprecated: Whether model is deprecated
6003 """
6005 __tablename__ = "llm_models"
6007 # Primary key
6008 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
6010 # Provider relationship
6011 provider_id: Mapped[str] = mapped_column(String(36), ForeignKey("llm_providers.id", ondelete="CASCADE"), nullable=False)
6013 # Model identification
6014 model_id: Mapped[str] = mapped_column(String(255), nullable=False)
6015 model_name: Mapped[str] = mapped_column(String(255), nullable=False)
6016 model_alias: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
6017 description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
6019 # Capabilities
6020 supports_chat: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
6021 supports_streaming: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
6022 supports_function_calling: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
6023 supports_vision: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
6025 # Limits
6026 context_window: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
6027 max_output_tokens: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
6029 # Status
6030 enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
6031 deprecated: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
6033 # Timestamps
6034 created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
6035 updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, onupdate=utc_now, nullable=False)
6037 # Relationship
6038 provider: Mapped["LLMProvider"] = relationship("LLMProvider", back_populates="models")
6040 __table_args__ = (
6041 UniqueConstraint("provider_id", "model_id", name="uq_llm_models_provider_model"),
6042 Index("idx_llm_models_provider", "provider_id"),
6043 Index("idx_llm_models_enabled", "enabled"),
6044 Index("idx_llm_models_deprecated", "deprecated"),
6045 )
6047 def __repr__(self) -> str:
6048 """Return string representation.
6050 Returns:
6051 String representation of the model.
6052 """
6053 return f"<LLMModel(id='{self.id}', model_id='{self.model_id}', provider_id='{self.provider_id}')>"
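# Illustrative sketch (not part of the measured source): creating a provider together with one
# model through the back_populates relationship. Because the relationship is configured with
# cascade="all, delete-orphan", deleting the provider also removes its models. All field
# values are assumptions.
def _example_create_provider_with_model(db: Session) -> LLMProvider:
    """Persist an enabled provider with a single chat model (sketch)."""
    provider = LLMProvider(
        name="Local Ollama",
        provider_type=LLMProviderType.OLLAMA,
        api_base="http://localhost:11434/v1",
    )
    # slug is filled in by the set_llm_provider_slug before_insert listener defined below
    provider.models.append(LLMModel(model_id="llama3.2", model_name="Llama 3.2"))
    db.add(provider)
    db.commit()
    return provider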
6056class AuditTrail(Base):
6057 """Comprehensive audit trail for data access and changes.
6059 Tracks all significant system changes and data access for
6060 compliance and security auditing.
6061 """
6063 __tablename__ = "audit_trails"
6065 # Primary key
6066 id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex)
6068 # Timestamps
6069 timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True, default=utc_now)
6071 # Correlation tracking
6072 correlation_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
6073 request_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
6075 # Action details
6076 action: Mapped[str] = mapped_column(String(100), nullable=False, index=True) # create, read, update, delete, execute, etc.
6077 resource_type: Mapped[str] = mapped_column(String(100), nullable=False, index=True) # tool, resource, prompt, user, etc.
6078 resource_id: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
6079 resource_name: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
6081 # User context
6082 user_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
6083 user_email: Mapped[Optional[str]] = mapped_column(String(255), index=True, nullable=True)
6084 team_id: Mapped[Optional[str]] = mapped_column(String(36), index=True, nullable=True)
6086 # Request context
6087 client_ip: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
6088 user_agent: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
6089 request_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
6090 request_method: Mapped[Optional[str]] = mapped_column(String(10), nullable=True)
6092 # Change tracking
6093 old_values: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
6094 new_values: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
6095 changes: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
6097 # Data classification
6098 data_classification: Mapped[Optional[str]] = mapped_column(String(50), index=True, nullable=True) # public, internal, confidential, restricted
6099 requires_review: Mapped[bool] = mapped_column(Boolean, default=False, index=True, nullable=False)
6101 # Result
6102 success: Mapped[bool] = mapped_column(Boolean, nullable=False, index=True)
6103 error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
6105 # Additional context
6106 context: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSON, nullable=True)
6108 __table_args__ = (
6109 Index("idx_audit_action_time", "action", "timestamp"),
6110 Index("idx_audit_resource_time", "resource_type", "resource_id", "timestamp"),
6111 Index("idx_audit_user_time", "user_id", "timestamp"),
6112 Index("idx_audit_classification", "data_classification", "timestamp"),
6113 Index("idx_audit_review", "requires_review", "timestamp"),
6114 )
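# Illustrative sketch (not part of the measured source): writing an audit row for an update,
# with a small field-level diff derived from the old/new snapshots. The resource type and
# user identifier are assumptions.
def _example_audit_update(db: Session, user_id: str, old_values: Dict[str, Any], new_values: Dict[str, Any]) -> AuditTrail:
    """Persist an audit entry for an updated tool (sketch)."""
    changes = {key: {"old": old_values.get(key), "new": value} for key, value in new_values.items() if old_values.get(key) != value}
    entry = AuditTrail(
        action="update",
        resource_type="tool",
        user_id=user_id,
        old_values=old_values,
        new_values=new_values,
        changes=changes,
        success=True,
    )
    db.add(entry)
    db.commit()
    return entry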
6117if __name__ == "__main__":
6118 # Wait for database to be ready before initializing
6119 wait_for_db_ready(max_tries=int(settings.db_max_retries), interval=int(settings.db_retry_interval_ms) / 1000, sync=True) # Converting ms to s
6121 init_db()
6124@event.listens_for(Gateway, "before_insert")
6125def set_gateway_slug(_mapper, _conn, target):
6126 """Set the slug for a Gateway before insert.
6128 Args:
6129 _mapper: Mapper
6130 _conn: Connection
6131 target: Target Gateway instance
6132 """
6134 target.slug = slugify(target.name)
6137@event.listens_for(A2AAgent, "before_insert")
6138def set_a2a_agent_slug(_mapper, _conn, target):
6139 """Set the slug for an A2AAgent before insert.
6141 Args:
6142 _mapper: Mapper
6143 _conn: Connection
6144 target: Target A2AAgent instance
6145 """
6146 target.slug = slugify(target.name)
6149@event.listens_for(GrpcService, "before_insert")
6150def set_grpc_service_slug(_mapper, _conn, target):
6151 """Set the slug for a GrpcService before insert.
6153 Args:
6154 _mapper: Mapper
6155 _conn: Connection
6156 target: Target GrpcService instance
6157 """
6158 target.slug = slugify(target.name)
6161@event.listens_for(LLMProvider, "before_insert")
6162def set_llm_provider_slug(_mapper, _conn, target):
6163 """Set the slug for an LLMProvider before insert.
6165 Args:
6166 _mapper: Mapper
6167 _conn: Connection
6168 target: Target LLMProvider instance
6169 """
6170 target.slug = slugify(target.name)
6173@event.listens_for(EmailTeam, "before_insert")
6174def set_email_team_slug(_mapper, _conn, target):
6175 """Set the slug for an EmailTeam before insert.
6177 Args:
6178 _mapper: Mapper
6179 _conn: Connection
6180 target: Target EmailTeam instance
6181 """
6182 target.slug = slugify(target.name)
6185@event.listens_for(Tool, "before_insert")
6186@event.listens_for(Tool, "before_update")
6187def set_custom_name_and_slug(mapper, connection, target): # pylint: disable=unused-argument
6188 """
6189 Event listener to set custom_name, custom_name_slug, and name for Tool before insert/update.
6191 - Sets custom_name to original_name if not provided.
6192 - Calculates custom_name_slug from custom_name using slugify.
6193 - Updates name to gateway_slug + separator + custom_name_slug.
6194 - Sets display_name to custom_name if not provided.
6196 Note: The gateway relationship must be explicitly set (via target.gateway = gateway_obj)
6197 before adding the tool to the session if gateway namespacing is needed. If only
6198 gateway_id is set without the relationship, we look up the gateway name via a direct
6199 SQL query.
6201 Args:
6202 mapper: SQLAlchemy mapper for the Tool model.
6203 connection: Database connection.
6204 target: The Tool instance being inserted or updated.
6205 """
6206 # Set custom_name to original_name if not provided
6207 if not target.custom_name:
6208 target.custom_name = target.original_name
6209 # Set display_name to custom_name if not provided
6210 if not target.display_name:
6211 target.display_name = target.custom_name
6212 # Always update custom_name_slug from custom_name
6213 target.custom_name_slug = slugify(target.custom_name)
6215 # Get gateway_slug - check for explicitly set gateway relationship first
6216 gateway_slug = ""
6217 if target.gateway:
6218 # Gateway relationship is already loaded
6219 gateway_slug = slugify(target.gateway.name)
6220 elif target.gateway_id:
6221 # Gateway relationship not loaded but gateway_id is set
6222 # Use a cached gateway name if available from gateway_name_cache attribute
6223 if hasattr(target, "gateway_name_cache") and target.gateway_name_cache:
6224 gateway_slug = slugify(target.gateway_name_cache)
6225 else:
6226 # Fall back to querying the database
6227 try:
6228 result = connection.execute(text("SELECT name FROM gateways WHERE id = :gw_id"), {"gw_id": target.gateway_id})
6229 row = result.fetchone()
6230 if row: 6230 ↛ 6235line 6230 didn't jump to line 6235 because the condition on line 6230 was always true
6231 gateway_slug = slugify(row[0])
6232 except Exception: # nosec B110 - intentionally proceed without prefix on failure
6233 pass
6235 if gateway_slug:
6236 sep = settings.gateway_tool_name_separator
6237 target.name = f"{gateway_slug}{sep}{target.custom_name_slug}"
6238 else:
6239 target.name = target.custom_name_slug
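# Illustrative sketch (not part of the measured source): the composite name the listener above
# produces when a tool is namespaced under a gateway. The example names in the comment are
# assumptions; the separator comes from settings.gateway_tool_name_separator.
def _example_expected_tool_name(gateway_name: str, custom_name: str) -> str:
    """Mirror the naming rule applied by set_custom_name_and_slug (sketch)."""
    sep = settings.gateway_tool_name_separator
    # e.g. ("Weather API", "Get Forecast") -> "weather-api-get-forecast" when sep == "-"
    return f"{slugify(gateway_name)}{sep}{slugify(custom_name)}"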
6242@event.listens_for(Prompt, "before_insert")
6243@event.listens_for(Prompt, "before_update")
6244def set_prompt_name_and_slug(mapper, connection, target): # pylint: disable=unused-argument
6245 """Set name fields for Prompt before insert/update.
6247 - Sets original_name from name if missing (legacy compatibility).
6248 - Sets custom_name to original_name if not provided.
6249 - Sets display_name to custom_name if not provided.
6250 - Calculates custom_name_slug from custom_name.
6251 - Updates name to gateway_slug + separator + custom_name_slug.
6253 Note: The gateway relationship must be explicitly set (via target.gateway = gateway_obj)
6254 before adding the prompt to the session if gateway namespacing is needed. If only
6255 gateway_id is set without the relationship, we look up the gateway name via a direct
6256 SQL query.
6258 Args:
6259 mapper: SQLAlchemy mapper for the Prompt model.
6260 connection: Database connection for the insert/update.
6261 target: Prompt instance being inserted or updated.
6262 """
6263 if not target.original_name:
6264 target.original_name = target.name
6265 if not target.custom_name:
6266 target.custom_name = target.original_name
6267 if not target.display_name:
6268 target.display_name = target.custom_name
6269 target.custom_name_slug = slugify(target.custom_name)
6271 # Get gateway_slug - check for explicitly set gateway relationship first
6272 gateway_slug = ""
6273 if target.gateway:
6274 # Gateway relationship is already loaded
6275 gateway_slug = slugify(target.gateway.name)
6276 elif target.gateway_id:
6277 # Gateway relationship not loaded but gateway_id is set
6278 # Use a cached gateway name if available from gateway_name_cache attribute
6279 if hasattr(target, "gateway_name_cache") and target.gateway_name_cache:
6280 gateway_slug = slugify(target.gateway_name_cache)
6281 else:
6282 # Fall back to querying the database
6283 try:
6284 result = connection.execute(text("SELECT name FROM gateways WHERE id = :gw_id"), {"gw_id": target.gateway_id})
6285 row = result.fetchone()
6286 if row: 6286 ↛ 6291line 6286 didn't jump to line 6291 because the condition on line 6286 was always true
6287 gateway_slug = slugify(row[0])
6288 except Exception: # nosec B110 - intentionally proceed without prefix on failure
6289 pass
6291 if gateway_slug:
6292 sep = settings.gateway_tool_name_separator
6293 target.name = f"{gateway_slug}{sep}{target.custom_name_slug}"
6294 else:
6295 target.name = target.custom_name_slug