GT AI OS Community Edition v2.0.33

Security hardening release addressing CodeQL and Dependabot alerts:

- Fix stack trace exposure in error responses
- Add SSRF protection with DNS resolution checking
- Implement proper URL hostname validation (replaces substring matching; see the sketch after this list)
- Add centralized path sanitization to prevent path traversal
- Fix ReDoS vulnerability in email validation regex
- Improve HTML sanitization in validation utilities
- Fix capability wildcard matching in auth utilities
- Update glob dependency to address CVE
- Add CodeQL suppression comments for verified false positives
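For illustration, the hostname-validation item above usually means comparing the parsed hostname against an allowlist instead of doing substring checks. A rough sketch, not the project's actual implementation (ALLOWED_HOSTS is a hypothetical allowlist):

from urllib.parse import urlparse

ALLOWED_HOSTS = {"api.example.com"}  # hypothetical allowlist

def is_allowed_url(url: str) -> bool:
    """Return True only when the parsed hostname exactly matches an allowed host."""
    host = (urlparse(url).hostname or "").lower()
    # Substring checks would wrongly accept hosts like "api.example.com.attacker.net".
    return host in ALLOWED_HOSTS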

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
HackWeasel committed 2025-12-12 17:04:45 -05:00
commit b9dfb86260
746 changed files with 232071 additions and 0 deletions

View File

@@ -0,0 +1,41 @@
"""
GT 2.0 Tenant Backend Models
Database models for tenant-specific data with perfect isolation.
Each tenant has their own SQLite database with these models.
"""
from .agent import Agent # Complete migration - only Agent class
from .conversation import Conversation
from .message import Message
from .document import Document, RAGDataset, DatasetDocument, DocumentChunk
from .user_session import UserSession
from .workflow import (
Workflow,
WorkflowExecution,
WorkflowTrigger,
WorkflowSession,
WorkflowMessage,
WorkflowStatus,
TriggerType,
InteractionMode
)
__all__ = [
"Agent",
"Conversation",
"Message",
"Document",
"RAGDataset",
"DatasetDocument",
"DocumentChunk",
"UserSession",
"Workflow",
"WorkflowExecution",
"WorkflowTrigger",
"WorkflowSession",
"WorkflowMessage",
"WorkflowStatus",
"TriggerType",
"InteractionMode",
]
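For reference, downstream tenant services would import these re-exports from the package root; a minimal sketch, assuming the package resolves as app.models:

from app.models import Agent, Conversation, Message, Workflow, WorkflowStatus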

View File

@@ -0,0 +1,299 @@
"""
Access Group Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for access group entities using the PostgreSQL + PGVector backend.
Implements simplified Tenant → User hierarchy with access groups for resource sharing.
NO TEAM ENTITIES - using access groups instead for collaboration.
Perfect tenant isolation - each tenant has separate access data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class AccessGroup(str, Enum):
"""Resource access levels within a tenant"""
INDIVIDUAL = "individual" # Private to owner
TEAM = "team" # Shared with specific users
ORGANIZATION = "organization" # Read-only for all tenant users
class TenantStructure(BaseServiceModel):
"""
Simplified hierarchy model for GT 2.0 service-based architecture.
Direct tenant-to-user relationship with access groups for sharing.
NO TEAM ENTITIES - using access groups instead for collaboration.
"""
# Core tenant properties
tenant_domain: str = Field(..., description="Tenant domain (e.g., customer1.com)")
tenant_id: str = Field(..., description="Unique tenant identifier")
# Tenant settings
settings: Dict[str, Any] = Field(default_factory=dict, description="Tenant-wide settings")
# Statistics
user_count: int = Field(default=0, description="Number of users")
resource_count: int = Field(default=0, description="Number of resources")
# Status
is_active: bool = Field(default=True, description="Whether tenant is active")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "tenant_structures"
def activate(self) -> None:
"""Activate the tenant"""
self.is_active = True
self.update_timestamp()
def deactivate(self) -> None:
"""Deactivate the tenant"""
self.is_active = False
self.update_timestamp()
class User(BaseServiceModel):
"""
User model for GT 2.0 service-based architecture.
User within a tenant with role-based permissions.
"""
# Core user properties
user_id: str = Field(default_factory=generate_uuid, description="Unique user identifier")
email: str = Field(..., description="User email address")
full_name: str = Field(..., description="User full name")
role: str = Field(..., description="User role (admin, developer, analyst, student)")
tenant_domain: str = Field(..., description="Parent tenant domain")
# User status
is_active: bool = Field(default=True, description="Whether user is active")
last_active: Optional[datetime] = Field(None, description="Last activity timestamp")
# User settings
preferences: Dict[str, Any] = Field(default_factory=dict, description="User preferences")
# Statistics
owned_resources_count: int = Field(default=0, description="Number of owned resources")
team_resources_count: int = Field(default=0, description="Number of team resources accessible")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "users"
def update_activity(self) -> None:
"""Update last activity timestamp"""
self.last_active = datetime.utcnow()
self.update_timestamp()
def can_access_resource(self, resource_access_group: AccessGroup, resource_owner_id: str,
resource_team_members: List[str]) -> bool:
"""Check if user can access a resource"""
# Owner always has access
if resource_owner_id == self.user_id:
return True
# Organization-wide resources
if resource_access_group == AccessGroup.ORGANIZATION:
return True
# Team resources
if resource_access_group == AccessGroup.TEAM:
return self.user_id in resource_team_members
return False
def can_modify_resource(self, resource_owner_id: str) -> bool:
"""Check if user can modify a resource"""
# Only owner can modify
return resource_owner_id == self.user_id
class Resource(BaseServiceModel):
"""
Base resource model for GT 2.0 service-based architecture.
Base class for any resource (agent, dataset, automation, etc.)
with file-based storage and access control.
"""
# Core resource properties
resource_uuid: str = Field(default_factory=generate_uuid, description="Unique resource identifier")
name: str = Field(..., min_length=1, max_length=200, description="Resource name")
resource_type: str = Field(..., max_length=50, description="Type of resource")
owner_id: str = Field(..., description="Owner user ID")
tenant_domain: str = Field(..., description="Parent tenant domain")
# Access control
access_group: AccessGroup = Field(default=AccessGroup.INDIVIDUAL, description="Access level")
team_members: List[str] = Field(default_factory=list, description="Team member IDs for team access")
# File storage
file_path: Optional[str] = Field(None, description="File-based storage path")
file_permissions: str = Field(default="700", description="Unix file permissions")
# Resource metadata
metadata: Dict[str, Any] = Field(default_factory=dict, description="Resource-specific metadata")
description: Optional[str] = Field(None, max_length=1000, description="Resource description")
# Statistics
access_count: int = Field(default=0, description="Number of times accessed")
last_accessed: Optional[datetime] = Field(None, description="Last access timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "resources"
def update_access_group(self, new_group: AccessGroup, team_members: Optional[List[str]] = None) -> None:
"""Update resource access group"""
self.access_group = new_group
self.team_members = (team_members or []) if new_group == AccessGroup.TEAM else []
self.update_timestamp()
def add_team_member(self, user_id: str) -> None:
"""Add user to team access"""
if self.access_group == AccessGroup.TEAM and user_id not in self.team_members:
self.team_members.append(user_id)
self.update_timestamp()
def remove_team_member(self, user_id: str) -> None:
"""Remove user from team access"""
if user_id in self.team_members:
self.team_members.remove(user_id)
self.update_timestamp()
def record_access(self, user_id: str) -> None:
"""Record resource access"""
self.access_count += 1
self.last_accessed = datetime.utcnow()
self.update_timestamp()
def get_file_permissions(self) -> str:
"""
Get Unix file permissions based on access group.
All files created with 700 permissions (owner only).
OS User: gt-{tenant_domain}-{pod_id}
"""
return "700" # Owner read/write/execute only
# Create/Update/Response models
class AccessGroupModel(BaseCreateModel):
"""API model for access group configuration"""
access_group: AccessGroup = Field(..., description="Access level")
team_members: List[str] = Field(default_factory=list, description="Team member IDs if team access")
class ResourceCreate(BaseCreateModel):
"""Model for creating resources"""
name: str = Field(..., min_length=1, max_length=200)
resource_type: str = Field(..., max_length=50)
owner_id: str
tenant_domain: str
access_group: AccessGroup = Field(default=AccessGroup.INDIVIDUAL)
team_members: List[str] = Field(default_factory=list)
metadata: Dict[str, Any] = Field(default_factory=dict)
description: Optional[str] = Field(None, max_length=1000)
class ResourceUpdate(BaseUpdateModel):
"""Model for updating resources"""
name: Optional[str] = Field(None, min_length=1, max_length=200)
access_group: Optional[AccessGroup] = None
team_members: Optional[List[str]] = None
metadata: Optional[Dict[str, Any]] = None
description: Optional[str] = Field(None, max_length=1000)
class ResourceResponse(BaseResponseModel):
"""Model for resource API responses"""
id: str
resource_uuid: str
name: str
resource_type: str
owner_id: str
tenant_domain: str
access_group: AccessGroup
team_members: List[str]
file_path: Optional[str]
metadata: Dict[str, Any]
description: Optional[str]
access_count: int
last_accessed: Optional[datetime]
created_at: datetime
updated_at: datetime
class UserCreate(BaseCreateModel):
"""Model for creating users"""
email: str
full_name: str
role: str
tenant_domain: str
preferences: Dict[str, Any] = Field(default_factory=dict)
class UserUpdate(BaseUpdateModel):
"""Model for updating users"""
full_name: Optional[str] = None
role: Optional[str] = None
preferences: Optional[Dict[str, Any]] = None
is_active: Optional[bool] = None
class UserResponse(BaseResponseModel):
"""Model for user API responses"""
id: str
user_id: str
email: str
full_name: str
role: str
tenant_domain: str
is_active: bool
last_active: Optional[datetime]
preferences: Dict[str, Any]
owned_resources_count: int
team_resources_count: int
created_at: datetime
updated_at: datetime
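The access rules above are easiest to see end to end. A minimal usage sketch, assuming these models import as app.models.access_group and that the identifiers below are placeholders:

from app.models.access_group import AccessGroup, User, Resource

owner = User(email="alice@customer1.com", full_name="Alice Example",
             role="admin", tenant_domain="customer1.com")
colleague = User(email="bob@customer1.com", full_name="Bob Example",
                 role="analyst", tenant_domain="customer1.com")

report = Resource(name="Quarterly Threat Report", resource_type="dataset",
                  owner_id=owner.user_id, tenant_domain="customer1.com")

# INDIVIDUAL by default: only the owner can read it.
assert owner.can_access_resource(report.access_group, report.owner_id, report.team_members)
assert not colleague.can_access_resource(report.access_group, report.owner_id, report.team_members)

# Share with one user by switching the resource to the TEAM access group.
report.update_access_group(AccessGroup.TEAM, team_members=[colleague.user_id])
assert colleague.can_access_resource(report.access_group, report.owner_id, report.team_members)
assert not colleague.can_modify_resource(report.owner_id)  # modification stays owner-only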

View File

@@ -0,0 +1,184 @@
"""
GT 2.0 Agent Model - Service-Based Architecture
Pydantic models for agent entities using the PostgreSQL + PGVector backend.
Complete migration - all assistant terminology has been replaced with agent.
"""
from datetime import datetime
from typing import Optional, Dict, Any, List
from enum import Enum
from pydantic import Field, ConfigDict, field_validator
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
class AgentStatus(str, Enum):
"""Agent status enumeration"""
ACTIVE = "active"
INACTIVE = "inactive"
ARCHIVED = "archived"
class AgentVisibility(str, Enum):
"""Agent visibility levels"""
INDIVIDUAL = "individual"
TEAM = "team"
ORGANIZATION = "organization"
class Agent(BaseServiceModel):
"""
Agent model for GT 2.0 service-based architecture.
Represents an AI agent configuration with capabilities, model settings,
and access control for perfect tenant isolation.
"""
# Core agent properties
name: str = Field(..., min_length=1, max_length=255, description="Agent display name")
description: Optional[str] = Field(None, max_length=1000, description="Agent description")
instructions: Optional[str] = Field(None, description="System instructions for the agent")
# Model configuration
model_provider: str = Field(default="groq", description="AI model provider")
model_name: str = Field(default="llama3-groq-8b-8192-tool-use-preview", description="Model identifier")
model_settings: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Model-specific configuration")
# Capabilities and tools
capabilities: Optional[List[str]] = Field(default_factory=list, description="Agent capabilities")
tools: Optional[List[str]] = Field(default_factory=list, description="Available tools")
# MCP (Model Context Protocol) tool configuration
mcp_servers: Optional[List[str]] = Field(default_factory=list, description="MCP servers this agent can access")
rag_enabled: bool = Field(default=False, description="Whether agent can access RAG tools")
# Access control
owner_id: str = Field(..., description="User ID of the agent owner")
access_group: str = Field(default="individual", description="Access group for sharing")
visibility: AgentVisibility = Field(default=AgentVisibility.INDIVIDUAL, description="Agent visibility level")
# Status and metadata
status: AgentStatus = Field(default=AgentStatus.ACTIVE, description="Agent status")
featured: bool = Field(default=False, description="Whether agent is featured")
tags: Optional[List[str]] = Field(default_factory=list, description="Agent tags for categorization")
category: Optional[str] = Field(None, max_length=100, description="Agent category")
# Usage statistics
conversation_count: int = Field(default=0, description="Number of conversations")
last_used_at: Optional[datetime] = Field(None, description="Last usage timestamp")
# UI/UX Enhancement Fields
disclaimer: Optional[str] = Field(None, max_length=500, description="Disclaimer text shown in chat")
easy_prompts: Optional[List[str]] = Field(default_factory=list, max_length=10, description="Quick-access preset prompts (max 10)")
@field_validator('disclaimer')
@classmethod
def validate_disclaimer(cls, v):
"""Validate disclaimer length"""
if v and len(v) > 500:
raise ValueError('Disclaimer must be 500 characters or less')
return v
@field_validator('easy_prompts')
@classmethod
def validate_easy_prompts(cls, v):
"""Validate easy prompts count"""
if v and len(v) > 10:
raise ValueError('Maximum 10 easy prompts allowed')
return v
# Model configuration
model_config = ConfigDict(
protected_namespaces=(), # Allow model_ fields
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "agents"
def increment_usage(self):
"""Increment usage statistics"""
self.conversation_count += 1
self.last_used_at = datetime.utcnow()
self.update_timestamp()
class AgentCreate(BaseCreateModel):
"""Model for creating new agents"""
name: str = Field(..., min_length=1, max_length=255)
description: Optional[str] = Field(None, max_length=1000)
instructions: Optional[str] = None
model_provider: str = Field(default="groq")
model_name: str = Field(default="llama3-groq-8b-8192-tool-use-preview")
model_settings: Optional[Dict[str, Any]] = Field(default_factory=dict)
capabilities: Optional[List[str]] = Field(default_factory=list)
tools: Optional[List[str]] = Field(default_factory=list)
mcp_servers: Optional[List[str]] = Field(default_factory=list)
rag_enabled: bool = Field(default=False)
owner_id: str
access_group: str = Field(default="individual")
visibility: AgentVisibility = Field(default=AgentVisibility.INDIVIDUAL)
tags: Optional[List[str]] = Field(default_factory=list)
category: Optional[str] = None
disclaimer: Optional[str] = Field(None, max_length=500)
easy_prompts: Optional[List[str]] = Field(default_factory=list)
model_config = ConfigDict(protected_namespaces=())
class AgentUpdate(BaseUpdateModel):
"""Model for updating agents"""
name: Optional[str] = Field(None, min_length=1, max_length=255)
description: Optional[str] = Field(None, max_length=1000)
instructions: Optional[str] = None
model_provider: Optional[str] = None
model_name: Optional[str] = None
model_settings: Optional[Dict[str, Any]] = None
capabilities: Optional[List[str]] = None
tools: Optional[List[str]] = None
access_group: Optional[str] = None
visibility: Optional[AgentVisibility] = None
status: Optional[AgentStatus] = None
featured: Optional[bool] = None
tags: Optional[List[str]] = None
category: Optional[str] = None
disclaimer: Optional[str] = None
easy_prompts: Optional[List[str]] = None
model_config = ConfigDict(protected_namespaces=())
class AgentResponse(BaseResponseModel):
"""Model for agent API responses"""
id: str
name: str
description: Optional[str]
instructions: Optional[str]
model_provider: str
model_name: str
model_settings: Dict[str, Any]
capabilities: List[str]
tools: List[str]
owner_id: str
access_group: str
visibility: AgentVisibility
status: AgentStatus
featured: bool
tags: List[str]
category: Optional[str]
conversation_count: int
usage_count: int = 0 # Alias for conversation_count for frontend compatibility
last_used_at: Optional[datetime]
disclaimer: Optional[str]
easy_prompts: List[str]
created_at: datetime
updated_at: datetime
model_config = ConfigDict(protected_namespaces=())
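A short sketch of how the validators and the usage helper behave; the values are illustrative and assume this module is imported as-is:

from pydantic import ValidationError

triage = Agent(name="Threat Triage", owner_id="user-123",
               easy_prompts=["Summarize today's alerts", "Draft an incident report"])
triage.increment_usage()   # conversation_count -> 1, last_used_at set

try:
    Agent(name="Too Chatty", owner_id="user-123",
          easy_prompts=[f"prompt {i}" for i in range(11)])
except ValidationError:
    pass   # rejected: more than 10 easy prompts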

View File

@@ -0,0 +1,345 @@
"""
Agent Model for GT 2.0 Tenant Backend
File-based agent configuration with DuckDB reference tracking.
Perfect tenant isolation - each tenant has separate agent data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
import uuid
import os
import json
from sqlalchemy import Column, Integer, String, Text, DateTime, Boolean, JSON
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.core.database import Base
from app.core.config import get_settings
class Agent(Base):
"""Agent model for AI agent configurations"""
__tablename__ = "agents"
# Primary Key - using UUID for PostgreSQL compatibility
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
# Agent Details
name = Column(String(200), nullable=False, index=True)
description = Column(Text, nullable=True)
template_id = Column(String(100), nullable=True, index=True) # Template used to create this agent
category_id = Column(String(36), nullable=True, index=True) # Foreign key to categories table for discovery
agent_type = Column(String(50), nullable=False, default="custom", index=True) # Agent type/category
prompt_template = Column(Text, nullable=True) # System prompt template
# Visibility and Sharing (GT 2.0 Team Enhancement)
visibility = Column(String(20), nullable=False, default="private", index=True) # private, team, organization
tenant_id = Column(String(36), nullable=True, index=True) # Foreign key to teams table (null for private)
shared_with = Column(JSON, nullable=False, default=list) # List of user emails for explicit sharing
# File-based Configuration References
config_file_path = Column(String(500), nullable=False) # Path to config.json
prompt_file_path = Column(String(500), nullable=False) # Path to prompt.md
capabilities_file_path = Column(String(500), nullable=False) # Path to capabilities.json
# User Information (from JWT token)
created_by = Column(String(255), nullable=False, index=True) # User email or ID
user_id = Column(String(255), nullable=False, index=True) # User ID (alias for created_by for API compatibility)
user_name = Column(String(100), nullable=True) # User display name
# Agent Configuration (cached from files for quick access)
personality_config = Column(JSON, nullable=False, default=dict) # Tone, style, etc.
resource_preferences = Column(JSON, nullable=False, default=dict) # LLM preferences, etc.
memory_settings = Column(JSON, nullable=False, default=dict) # Conversation retention settings
# Status and Metadata
is_active = Column(Boolean, nullable=False, default=True)
is_favorite = Column(Boolean, nullable=False, default=False)
tags = Column(JSON, nullable=False, default=list) # User-defined tags
example_prompts = Column(JSON, nullable=False, default=list) # Up to 4 example prompts for discovery
# Statistics (updated by triggers or background processes)
conversation_count = Column(Integer, nullable=False, default=0)
total_messages = Column(Integer, nullable=False, default=0)
total_tokens_used = Column(Integer, nullable=False, default=0)
total_cost_cents = Column(Integer, nullable=False, default=0)
# Timestamps
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
last_used_at = Column(DateTime(timezone=True), nullable=True)
# Relationships
conversations = relationship("Conversation", back_populates="agent", cascade="all, delete-orphan")
def __repr__(self) -> str:
return f"<Agent(id={self.id}, name='{self.name}', created_by='{self.created_by}')>"
def to_dict(self) -> Dict[str, Any]:
"""Convert to dictionary for API responses"""
return {
"id": self.id,
"uuid": self.id,  # the primary key is already a UUID string
"name": self.name,
"description": self.description,
"template_id": self.template_id,
"created_by": self.created_by,
"user_name": self.user_name,
"personality_config": self.personality_config,
"resource_preferences": self.resource_preferences,
"memory_settings": self.memory_settings,
"is_active": self.is_active,
"is_favorite": self.is_favorite,
"tags": self.tags,
"conversation_count": self.conversation_count,
"total_messages": self.total_messages,
"total_tokens_used": self.total_tokens_used,
"total_cost_cents": self.total_cost_cents,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_used_at": self.last_used_at.isoformat() if self.last_used_at else None,
}
@classmethod
def from_dict(cls, data: Dict[str, Any]) -> "Agent":
"""Create from dictionary"""
created_by = data.get("created_by", data.get("user_id", ""))
return cls(
name=data.get("name", ""),
description=data.get("description"),
template_id=data.get("template_id"),
agent_type=data.get("agent_type", "custom"),
prompt_template=data.get("prompt_template", ""),
created_by=created_by,
user_id=created_by, # Keep in sync
user_name=data.get("user_name"),
personality_config=data.get("personality_config", {}),
resource_preferences=data.get("resource_preferences", {}),
memory_settings=data.get("memory_settings", {}),
tags=data.get("tags", []),
)
def get_agent_directory(self) -> str:
"""Get the file system directory for this agent"""
settings = get_settings()
tenant_data_path = os.path.dirname(settings.database_path)
return os.path.join(tenant_data_path, "agents", str(self.id))
def ensure_directory_exists(self) -> None:
"""Create agent directory with secure permissions"""
agent_dir = self.get_agent_directory()
os.makedirs(agent_dir, exist_ok=True, mode=0o700)
# Create subdirectories
subdirs = ["memory", "memory/conversations", "memory/context", "memory/preferences", "resources"]
for subdir in subdirs:
subdir_path = os.path.join(agent_dir, subdir)
os.makedirs(subdir_path, exist_ok=True, mode=0o700)
def initialize_file_paths(self) -> None:
"""Initialize file paths for this agent"""
agent_dir = self.get_agent_directory()
self.config_file_path = os.path.join(agent_dir, "config.json")
self.prompt_file_path = os.path.join(agent_dir, "prompt.md")
self.capabilities_file_path = os.path.join(agent_dir, "capabilities.json")
def load_config_from_file(self) -> Dict[str, Any]:
"""Load agent configuration from file"""
try:
with open(self.config_file_path, 'r') as f:
return json.load(f)
except (FileNotFoundError, json.JSONDecodeError):
return {}
def save_config_to_file(self, config: Dict[str, Any]) -> None:
"""Save agent configuration to file"""
self.ensure_directory_exists()
with open(self.config_file_path, 'w') as f:
json.dump(config, f, indent=2, default=str)
def load_prompt_from_file(self) -> str:
"""Load system prompt from file"""
try:
with open(self.prompt_file_path, 'r') as f:
return f.read()
except FileNotFoundError:
return ""
def save_prompt_to_file(self, prompt: str) -> None:
"""Save system prompt to file"""
self.ensure_directory_exists()
with open(self.prompt_file_path, 'w') as f:
f.write(prompt)
def load_capabilities_from_file(self) -> List[Dict[str, Any]]:
"""Load capabilities from file"""
try:
with open(self.capabilities_file_path, 'r') as f:
return json.load(f)
except (FileNotFoundError, json.JSONDecodeError):
return []
def save_capabilities_to_file(self, capabilities: List[Dict[str, Any]]) -> None:
"""Save capabilities to file"""
self.ensure_directory_exists()
with open(self.capabilities_file_path, 'w') as f:
json.dump(capabilities, f, indent=2, default=str)
def update_statistics(self, conversation_count: int = None, messages: int = None,
tokens: int = None, cost_cents: int = None) -> None:
"""Update agent statistics"""
if conversation_count is not None:
self.conversation_count = conversation_count
if messages is not None:
self.total_messages += messages
if tokens is not None:
self.total_tokens_used += tokens
if cost_cents is not None:
self.total_cost_cents += cost_cents
self.last_used_at = datetime.utcnow()
self.updated_at = datetime.utcnow()
def add_tag(self, tag: str) -> None:
"""Add a tag to the agent"""
current_tags = list(self.tags or [])
if tag not in current_tags:
current_tags.append(tag)
self.tags = current_tags  # reassign a new list so the JSON column change is tracked
def remove_tag(self, tag: str) -> None:
"""Remove a tag from the agent"""
if self.tags and tag in self.tags:
current_tags = self.tags.copy()
current_tags.remove(tag)
self.tags = current_tags
def get_full_configuration(self) -> Dict[str, Any]:
"""Get complete agent configuration including file-based data"""
config = self.load_config_from_file()
prompt = self.load_prompt_from_file()
capabilities = self.load_capabilities_from_file()
return {
**self.to_dict(),
"config": config,
"prompt": prompt,
"capabilities": capabilities,
}
def clone(self, new_name: str, user_identifier: str, modifications: Dict[str, Any] = None) -> "Agent":
"""Create a clone of this agent, applying modifications to its file-based configuration"""
# Load current configuration
config = self.load_config_from_file()
prompt = self.load_prompt_from_file()
capabilities = self.load_capabilities_from_file()
# Apply modifications if provided
if modifications:
config.update(modifications.get("config", {}))
if "prompt" in modifications:
prompt = modifications["prompt"]
if "capabilities" in modifications:
capabilities = modifications["capabilities"]
# Create new agent
new_agent = Agent(
id=str(uuid.uuid4()),
name=new_name,
description=f"Clone of {self.name}",
template_id=self.template_id,
created_by=user_identifier,
user_id=user_identifier,  # keep in sync with created_by
personality_config=self.personality_config.copy(),
resource_preferences=self.resource_preferences.copy(),
memory_settings=self.memory_settings.copy(),
tags=self.tags.copy() if self.tags else [],
)
# Persist the (possibly modified) file-based configuration for the clone
new_agent.initialize_file_paths()
new_agent.save_config_to_file(config)
new_agent.save_prompt_to_file(prompt)
new_agent.save_capabilities_to_file(capabilities)
return new_agent
def archive(self) -> None:
"""Archive the agent (soft delete)"""
self.is_active = False
self.updated_at = datetime.utcnow()
def unarchive(self) -> None:
"""Unarchive the agent"""
self.is_active = True
self.updated_at = datetime.utcnow()
def favorite(self) -> None:
"""Mark agent as favorite"""
self.is_favorite = True
self.updated_at = datetime.utcnow()
def unfavorite(self) -> None:
"""Remove favorite status"""
self.is_favorite = False
self.updated_at = datetime.utcnow()
def is_owned_by(self, user_identifier: str) -> bool:
"""Check if agent is owned by the given user"""
return self.created_by == user_identifier
def can_be_accessed_by(self, user_identifier: str, user_teams: Optional[List[str]] = None) -> bool:
"""Check if agent can be accessed by the given user
GT 2.0 Access Rules:
1. Owner always has access
2. Team members have access if visibility is 'team' and they're in the team
3. All organization members have access if visibility is 'organization'
4. Explicitly shared users have access
"""
# Owner always has access
if self.is_owned_by(user_identifier):
return True
# Check explicit sharing
if self.shared_with and user_identifier in self.shared_with:
return True
# Check team visibility
if self.visibility == "team" and self.tenant_id and user_teams:
if self.tenant_id in user_teams:
return True
# Check organization visibility
if self.visibility == "organization":
return True # All authenticated users in the tenant
return False
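# Example (sketch): with visibility="team" and tenant_id="team-42", a caller whose
# user_teams include "team-42" is granted access:
#   agent.can_be_accessed_by("bob@customer1.com", user_teams=["team-42"])   # True
# With visibility="organization", any authenticated user in the tenant may read the agent.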
@property
def average_tokens_per_message(self) -> float:
"""Calculate average tokens per message"""
if self.total_messages == 0:
return 0.0
return self.total_tokens_used / self.total_messages
@property
def total_cost_dollars(self) -> float:
"""Get total cost in dollars"""
return self.total_cost_cents / 100.0
@property
def average_cost_per_conversation(self) -> float:
"""Calculate average cost per conversation in dollars"""
if self.conversation_count == 0:
return 0.0
return self.total_cost_dollars / self.conversation_count
@property
def usage_count(self) -> int:
"""Alias for conversation_count for API compatibility"""
return self.conversation_count
@usage_count.setter
def usage_count(self, value: int) -> None:
"""Set conversation_count via usage_count alias"""
self.conversation_count = value
# Backward compatibility alias for legacy "assistant" imports
Assistant = Agent
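A hedged sketch of the file-backed configuration round trip; it assumes get_settings().database_path points at a writable tenant data directory, and it sets id explicitly because SQLAlchemy column defaults are only applied at flush time:

import uuid

agent = Agent(id=str(uuid.uuid4()), name="Research Helper",
              created_by="alice@customer1.com", user_id="alice@customer1.com")

agent.initialize_file_paths()            # config.json / prompt.md / capabilities.json
agent.save_config_to_file({"primary_llm": "gpt-4", "temperature": 0.7})
agent.save_prompt_to_file("You are a research agent.")
agent.save_capabilities_to_file([{"name": "rag:semantic_search"}])

assert agent.load_config_from_file()["temperature"] == 0.7
full = agent.get_full_configuration()    # DB columns merged with the file-based data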

View File

@@ -0,0 +1,166 @@
"""
Agent-Dataset Binding Model for GT 2.0 Tenant Backend
Links agents to RAG datasets for context-aware conversations.
Follows GT 2.0's principle of "Elegant Simplicity"
- Simple many-to-many relationships
- Configurable relevance thresholds
- Priority ordering for multiple datasets
"""
from datetime import datetime
from typing import Dict, Any
import uuid
from sqlalchemy import Column, Integer, String, DateTime, Float, ForeignKey, Boolean
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from app.core.database import Base
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class AssistantDataset(Base):
"""Links agents to RAG datasets for context retrieval
GT 2.0 Design: Simple binding table with configuration
"""
__tablename__ = "agent_datasets"
# Primary Key
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
# Foreign Keys
agent_id = Column(String(36), ForeignKey("agents.id", ondelete="CASCADE"), nullable=False, index=True)
dataset_id = Column(String(36), ForeignKey("rag_datasets.id", ondelete="CASCADE"), nullable=False, index=True)
# Configuration
relevance_threshold = Column(Float, nullable=False, default=0.7) # Minimum similarity score
max_chunks = Column(Integer, nullable=False, default=5) # Max chunks to retrieve
priority_order = Column(Integer, nullable=False, default=0) # Order when multiple datasets (lower = higher priority)
# Settings
is_active = Column(Boolean, nullable=False, default=True)
auto_include = Column(Boolean, nullable=False, default=True) # Automatically include in searches
# Usage Statistics
search_count = Column(Integer, nullable=False, default=0)
chunks_retrieved_total = Column(Integer, nullable=False, default=0)
# Timestamps
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
last_used_at = Column(DateTime(timezone=True), nullable=True)
# Relationships
agent = relationship("Agent", backref="dataset_bindings")
dataset = relationship("RAGDataset", backref="assistant_bindings")
def __repr__(self) -> str:
return f"<AssistantDataset(agent_id={self.agent_id}, dataset_id='{self.dataset_id}')>"
def to_dict(self) -> Dict[str, Any]:
"""Convert to dictionary for API responses"""
return {
"id": self.id,
"agent_id": self.agent_id,
"dataset_id": self.dataset_id,
"relevance_threshold": self.relevance_threshold,
"max_chunks": self.max_chunks,
"priority_order": self.priority_order,
"is_active": self.is_active,
"auto_include": self.auto_include,
"search_count": self.search_count,
"chunks_retrieved_total": self.chunks_retrieved_total,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_used_at": self.last_used_at.isoformat() if self.last_used_at else None,
}
def increment_usage(self, chunks_retrieved: int = 0) -> None:
"""Update usage statistics"""
self.search_count += 1
self.chunks_retrieved_total += chunks_retrieved
self.last_used_at = datetime.utcnow()
self.updated_at = datetime.utcnow()
class AssistantIntegration(Base):
"""Links agents to external integrations and tools
GT 2.0 Design: Simple binding to resource cluster integrations
"""
__tablename__ = "agent_integrations"
# Primary Key
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
# Foreign Keys
agent_id = Column(String(36), ForeignKey("agents.id", ondelete="CASCADE"), nullable=False, index=True)
integration_resource_id = Column(String(36), nullable=False, index=True) # Resource cluster integration ID
# Configuration
integration_type = Column(String(50), nullable=False) # github, slack, jira, etc.
enabled = Column(Boolean, nullable=False, default=True)
config = Column(String, nullable=False, default="{}") # JSON configuration
# Permissions
allowed_actions = Column(String, nullable=False, default="[]") # JSON array of allowed actions
# Usage Statistics
usage_count = Column(Integer, nullable=False, default=0)
last_error = Column(String, nullable=True)
# Timestamps
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
last_used_at = Column(DateTime(timezone=True), nullable=True)
# Relationships
agent = relationship("Agent", backref="integration_bindings")
def __repr__(self) -> str:
return f"<AssistantIntegration(agent_id={self.agent_id}, type='{self.integration_type}')>"
def to_dict(self) -> Dict[str, Any]:
"""Convert to dictionary for API responses"""
import json
try:
config_obj = json.loads(self.config) if isinstance(self.config, str) else self.config
allowed_actions_list = json.loads(self.allowed_actions) if isinstance(self.allowed_actions, str) else self.allowed_actions
except json.JSONDecodeError:
config_obj = {}
allowed_actions_list = []
return {
"id": self.id,
"agent_id": self.agent_id,
"integration_resource_id": self.integration_resource_id,
"integration_type": self.integration_type,
"enabled": self.enabled,
"config": config_obj,
"allowed_actions": allowed_actions_list,
"usage_count": self.usage_count,
"last_error": self.last_error,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_used_at": self.last_used_at.isoformat() if self.last_used_at else None,
}
def increment_usage(self) -> None:
"""Update usage statistics"""
self.usage_count += 1
self.last_used_at = datetime.utcnow()
self.updated_at = datetime.utcnow()
def record_error(self, error_message: str) -> None:
"""Record an error from the integration"""
self.last_error = error_message[:500] # Truncate to 500 chars
self.updated_at = datetime.utcnow()
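A brief sketch of binding an agent to a dataset and recording a retrieval; session is assumed to be an active SQLAlchemy session on the tenant database, and agent / dataset are existing rows:

binding = AssistantDataset(
    agent_id=agent.id,
    dataset_id=dataset.id,
    relevance_threshold=0.75,   # only chunks scoring >= 0.75 are injected as context
    max_chunks=3,
    priority_order=0,           # searched before higher-numbered bindings
)
session.add(binding)
session.commit()

binding.increment_usage(chunks_retrieved=3)   # search_count -> 1, last_used_at set
session.commit()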

View File

@@ -0,0 +1,439 @@
"""
Agent Template Models for GT 2.0
Defines agent templates, custom builders, and MCP integration models.
Follows the simplified hierarchy with file-based storage.
"""
from typing import List, Optional, Dict, Any, Union
from datetime import datetime
from enum import Enum
from pydantic import BaseModel, Field
import json
from pathlib import Path
from app.models.access_group import AccessGroup, Resource
class AssistantType(str, Enum):
"""Pre-defined agent types from architecture"""
RESEARCH = "research_assistant"
CODING = "coding_assistant"
CYBER_ANALYST = "cyber_analyst"
EDUCATIONAL = "educational_tutor"
CUSTOM = "custom"
class PersonalityConfig(BaseModel):
"""Agent personality configuration"""
tone: str = Field(default="balanced", description="formal | balanced | casual")
explanation_depth: str = Field(default="intermediate", description="beginner | intermediate | expert")
interaction_style: str = Field(default="collaborative", description="teaching | collaborative | direct")
class ResourcePreferences(BaseModel):
"""Agent resource preferences"""
primary_llm: str = Field(default="gpt-4", description="Primary LLM model")
fallback_models: List[str] = Field(default_factory=list, description="Fallback model list")
context_length: int = Field(default=4000, description="Maximum context length")
temperature: float = Field(default=0.7, description="Response temperature")
streaming_enabled: bool = Field(default=True, description="Enable streaming responses")
class MemorySettings(BaseModel):
"""Agent memory configuration"""
conversation_retention: str = Field(default="session", description="session | temporary | permanent")
context_window_size: int = Field(default=10, description="Number of messages to retain")
learning_from_interactions: bool = Field(default=False, description="Learn from user interactions")
max_memory_size_mb: int = Field(default=50, description="Maximum memory size in MB")
class AssistantTemplate(BaseModel):
"""
Pre-configured agent template
Stored in Resource Cluster library
"""
template_id: str
name: str
description: str
category: AssistantType
# Core configuration
system_prompt: str = Field(description="System prompt with variable substitution")
default_capabilities: List[str] = Field(default_factory=list, description="Default capability requirements")
# Configurations
personality_config: PersonalityConfig = Field(default_factory=PersonalityConfig)
resource_preferences: ResourcePreferences = Field(default_factory=ResourcePreferences)
memory_settings: MemorySettings = Field(default_factory=MemorySettings)
# Metadata
icon_path: Optional[str] = None
version: str = Field(default="1.0.0")
created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow)
# Access control
required_access_groups: List[str] = Field(default_factory=list)
minimum_role: Optional[str] = None
def to_instance(self, user_id: str, instance_name: str, tenant_domain: str) -> "AssistantInstance":
"""Create an instance from this template"""
return AssistantInstance(
id=f"{user_id}-{instance_name}-{datetime.utcnow().timestamp()}",
template_id=self.template_id,
name=instance_name,
description=f"Instance of {self.name}",
owner_id=user_id,
tenant_domain=tenant_domain,
resource_type="agent",
# Copy configurations
system_prompt=self.system_prompt,
capabilities=self.default_capabilities.copy(),
personality_config=self.personality_config.model_copy(),
resource_preferences=self.resource_preferences.model_copy(),
memory_settings=self.memory_settings.model_copy(),
# Instance specific
access_group=AccessGroup.INDIVIDUAL,
team_members=[],
created_at=datetime.utcnow(),
updated_at=datetime.utcnow()
)
class AssistantInstance(Resource):
"""
User's instance of an agent
Inherits from Resource for access control
"""
template_id: Optional[str] = Field(default=None, description="Source template if from template")
# Agent configuration
system_prompt: str
capabilities: List[str] = Field(default_factory=list)
personality_config: PersonalityConfig = Field(default_factory=PersonalityConfig)
resource_preferences: ResourcePreferences = Field(default_factory=ResourcePreferences)
memory_settings: MemorySettings = Field(default_factory=MemorySettings)
# Resource bindings
linked_datasets: List[str] = Field(default_factory=list, description="Linked RAG dataset IDs")
linked_tools: List[str] = Field(default_factory=list, description="Linked tool/integration IDs")
linked_models: List[str] = Field(default_factory=list, description="Specific model overrides")
# Usage tracking
conversation_count: int = Field(default=0)
total_messages: int = Field(default=0)
total_tokens_used: int = Field(default=0)
last_used: Optional[datetime] = None
# File storage paths (created by controller)
config_file_path: Optional[str] = None
memory_file_path: Optional[str] = None
def get_file_structure(self) -> Dict[str, str]:
"""Get expected file structure for agent storage"""
base_path = f"/data/{self.tenant_domain}/users/{self.owner_id}/agents/{self.id}"
return {
"config": f"{base_path}/config.json",
"prompt": f"{base_path}/prompt.md",
"capabilities": f"{base_path}/capabilities.json",
"memory": f"{base_path}/memory/",
"resources": f"{base_path}/resources/"
}
def update_from_template(self, template: AssistantTemplate):
"""Update instance from template (for version updates)"""
self.system_prompt = template.system_prompt
self.personality_config = template.personality_config.model_copy()
self.resource_preferences = template.resource_preferences.model_copy()
self.updated_at = datetime.utcnow()
def add_linked_dataset(self, dataset_id: str):
"""Link a RAG dataset to this agent"""
if dataset_id not in self.linked_datasets:
self.linked_datasets.append(dataset_id)
self.updated_at = datetime.utcnow()
def remove_linked_dataset(self, dataset_id: str):
"""Unlink a RAG dataset"""
if dataset_id in self.linked_datasets:
self.linked_datasets.remove(dataset_id)
self.updated_at = datetime.utcnow()
class AssistantBuilder(BaseModel):
"""Configuration for building custom agents"""
name: str
description: Optional[str] = None
base_template: Optional[AssistantType] = None
# Custom configuration
system_prompt: str
personality_config: PersonalityConfig = Field(default_factory=PersonalityConfig)
resource_preferences: ResourcePreferences = Field(default_factory=ResourcePreferences)
memory_settings: MemorySettings = Field(default_factory=MemorySettings)
# Capabilities
requested_capabilities: List[str] = Field(default_factory=list)
required_models: List[str] = Field(default_factory=list)
required_tools: List[str] = Field(default_factory=list)
def build_instance(self, user_id: str, tenant_domain: str) -> AssistantInstance:
"""Build agent instance from configuration"""
return AssistantInstance(
id=f"custom-{user_id}-{datetime.utcnow().timestamp()}",
template_id=None, # Custom build
name=self.name,
description=self.description or f"Custom agent by {user_id}",
owner_id=user_id,
tenant_domain=tenant_domain,
resource_type="agent",
# Apply configurations
system_prompt=self.system_prompt,
capabilities=self.requested_capabilities,
personality_config=self.personality_config,
resource_preferences=self.resource_preferences,
memory_settings=self.memory_settings,
# Default access
access_group=AccessGroup.INDIVIDUAL,
team_members=[],
created_at=datetime.utcnow(),
updated_at=datetime.utcnow()
)
# Pre-defined templates from architecture
BUILTIN_TEMPLATES = {
AssistantType.RESEARCH: AssistantTemplate(
template_id="research_assistant_v1",
name="Research & Analysis Agent",
description="Specialized in information synthesis and analysis with citations",
category=AssistantType.RESEARCH,
system_prompt="""You are a research agent specialized in information synthesis and analysis.
Focus on providing well-sourced, analytical responses with clear reasoning.
Always cite your sources and provide evidence for your claims.
When uncertain, clearly state the limitations of your knowledge.""",
default_capabilities=[
"llm:gpt-4",
"rag:semantic_search",
"tools:web_search",
"export:citations"
],
personality_config=PersonalityConfig(
tone="formal",
explanation_depth="expert",
interaction_style="collaborative"
),
resource_preferences=ResourcePreferences(
primary_llm="gpt-4",
fallback_models=["claude-sonnet", "gpt-3.5-turbo"],
context_length=8000,
temperature=0.7
),
required_access_groups=["research_tools"]
),
AssistantType.CODING: AssistantTemplate(
template_id="coding_assistant_v1",
name="Software Development Agent",
description="Code quality, debugging, and development best practices",
category=AssistantType.CODING,
system_prompt="""You are a software development agent focused on code quality and best practices.
Provide clear explanations, suggest improvements, and help debug issues.
Follow the principle of clean, maintainable code.
Always consider security implications in your suggestions.""",
default_capabilities=[
"llm:claude-sonnet",
"tools:github_integration",
"resources:documentation",
"export:code_snippets"
],
personality_config=PersonalityConfig(
tone="balanced",
explanation_depth="intermediate",
interaction_style="direct"
),
resource_preferences=ResourcePreferences(
primary_llm="claude-sonnet",
fallback_models=["gpt-4", "codellama"],
context_length=16000,
temperature=0.5
),
required_access_groups=["development_tools"]
),
AssistantType.CYBER_ANALYST: AssistantTemplate(
template_id="cyber_analyst_v1",
name="Cybersecurity Analysis Agent",
description="Threat detection, incident response, and security best practices",
category=AssistantType.CYBER_ANALYST,
system_prompt="""You are a cybersecurity analyst agent for threat detection and response.
Prioritize security best practices and provide actionable recommendations.
Consider defense-in-depth strategies and zero-trust principles.
Always emphasize the importance of continuous monitoring and improvement.""",
default_capabilities=[
"llm:gpt-4",
"tools:security_scanning",
"resources:threat_intelligence",
"export:security_reports"
],
personality_config=PersonalityConfig(
tone="formal",
explanation_depth="expert",
interaction_style="direct"
),
resource_preferences=ResourcePreferences(
primary_llm="gpt-4",
fallback_models=["claude-sonnet"],
context_length=8000,
temperature=0.3
),
required_access_groups=["cybersecurity_advanced"]
),
AssistantType.EDUCATIONAL: AssistantTemplate(
template_id="educational_tutor_v1",
name="AI Literacy Educational Agent",
description="Critical thinking development and AI collaboration skills",
category=AssistantType.EDUCATIONAL,
system_prompt="""You are an educational agent focused on developing critical thinking and AI literacy.
Use Socratic questioning and encourage deep analysis of problems.
Help students understand both the capabilities and limitations of AI.
Foster independent thinking while teaching effective AI collaboration.""",
default_capabilities=[
"llm:claude-sonnet",
"games:strategic_thinking",
"puzzles:logic_reasoning",
"analytics:learning_progress"
],
personality_config=PersonalityConfig(
tone="casual",
explanation_depth="beginner",
interaction_style="teaching"
),
resource_preferences=ResourcePreferences(
primary_llm="claude-sonnet",
fallback_models=["gpt-4"],
context_length=4000,
temperature=0.8
),
required_access_groups=["ai_literacy"]
)
}
class AssistantTemplateLibrary:
"""
Manages the agent template library
Templates stored in Resource Cluster, cached locally
"""
def __init__(self, resource_cluster_url: str):
self.resource_cluster_url = resource_cluster_url
self.cache_path = Path("/tmp/agent_templates_cache")
self.cache_path.mkdir(exist_ok=True)
self._templates_cache: Dict[str, AssistantTemplate] = {}
async def get_template(self, template_id: str) -> Optional[AssistantTemplate]:
"""Get template by ID, using cache if available"""
if template_id in self._templates_cache:
return self._templates_cache[template_id]
# Check built-in templates
for template_type, template in BUILTIN_TEMPLATES.items():
if template.template_id == template_id:
self._templates_cache[template_id] = template
return template
# Would fetch from Resource Cluster in production
return None
async def list_templates(
self,
category: Optional[AssistantType] = None,
access_groups: Optional[List[str]] = None
) -> List[AssistantTemplate]:
"""List available templates with filtering"""
templates = list(BUILTIN_TEMPLATES.values())
if category:
templates = [t for t in templates if t.category == category]
if access_groups:
templates = [
t for t in templates
if not t.required_access_groups
or any(g in access_groups for g in t.required_access_groups)
]
return templates
async def deploy_template(
self,
template_id: str,
user_id: str,
instance_name: str,
tenant_domain: str,
customizations: Optional[Dict[str, Any]] = None
) -> AssistantInstance:
"""Deploy template as user instance"""
template = await self.get_template(template_id)
if not template:
raise ValueError(f"Template not found: {template_id}")
# Create instance
instance = template.to_instance(user_id, instance_name, tenant_domain)
# Apply customizations
if customizations:
if "personality" in customizations:
instance.personality_config = PersonalityConfig(**customizations["personality"])
if "resources" in customizations:
instance.resource_preferences = ResourcePreferences(**customizations["resources"])
if "memory" in customizations:
instance.memory_settings = MemorySettings(**customizations["memory"])
return instance
# API Models
class AssistantTemplateResponse(BaseModel):
"""API response for agent template"""
template_id: str
name: str
description: str
category: str
required_access_groups: List[str]
version: str
created_at: datetime
class AssistantInstanceResponse(BaseModel):
"""API response for agent instance"""
id: str
name: str
description: str
template_id: Optional[str]
owner_id: str
access_group: AccessGroup
team_members: List[str]
conversation_count: int
last_used: Optional[datetime]
created_at: datetime
updated_at: datetime
class CreateAssistantRequest(BaseModel):
"""Request to create agent from template or custom"""
template_id: Optional[str] = None
name: str
description: Optional[str] = None
customizations: Optional[Dict[str, Any]] = None
# For custom agents
system_prompt: Optional[str] = None
personality_config: Optional[PersonalityConfig] = None
resource_preferences: Optional[ResourcePreferences] = None
memory_settings: Optional[MemorySettings] = None
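Deploying a built-in template end to end might look like the sketch below; the resource-cluster URL is a placeholder, and the customization keys mirror the deploy_template implementation above:

import asyncio

async def main() -> None:
    library = AssistantTemplateLibrary(resource_cluster_url="http://resource-cluster:8000")
    instance = await library.deploy_template(
        template_id="research_assistant_v1",
        user_id="alice",
        instance_name="lit-review-helper",
        tenant_domain="customer1.com",
        customizations={"personality": {"tone": "casual", "explanation_depth": "beginner"}},
    )
    # The controller is expected to create these paths with 700 permissions.
    print(instance.get_file_structure()["config"])

asyncio.run(main())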

View File

@@ -0,0 +1,126 @@
"""
GT 2.0 Base Model Classes - Service-Based Architecture
Provides Pydantic models for data serialization with the DuckDB service.
No SQLAlchemy ORM dependency - pure Python/Pydantic models.
"""
from typing import Any, Dict, Optional, List, Type, TypeVar
from datetime import datetime
import uuid
from pydantic import BaseModel, Field, ConfigDict
# Generic type for model classes
T = TypeVar('T', bound='BaseServiceModel')
class BaseServiceModel(BaseModel):
"""
Base model for all GT 2.0 entities using service-based architecture.
Replaces SQLAlchemy models with Pydantic models + DuckDB service.
"""
# Pydantic v2 configuration
model_config = ConfigDict(
from_attributes=True,
validate_assignment=True,
arbitrary_types_allowed=True,
use_enum_values=True
)
# Standard fields for all models
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique identifier")
created_at: datetime = Field(default_factory=datetime.utcnow, description="Creation timestamp")
updated_at: datetime = Field(default_factory=datetime.utcnow, description="Last update timestamp")
def to_dict(self) -> Dict[str, Any]:
"""Convert model to dictionary"""
return self.model_dump()
@classmethod
def from_dict(cls: Type[T], data: Dict[str, Any]) -> T:
"""Create model instance from dictionary"""
return cls(**data)
@classmethod
def from_row(cls: Type[T], row: Dict[str, Any]) -> T:
"""Create model instance from database row"""
# Convert database row to model, handling type conversions
model_data = {}
for field_name, field_info in cls.model_fields.items():
if field_name in row:
value = row[field_name]
# Handle datetime conversion
if field_info.annotation == datetime and isinstance(value, str):
try:
value = datetime.fromisoformat(value)
except ValueError:
value = datetime.utcnow()
model_data[field_name] = value
return cls(**model_data)
def update_timestamp(self):
"""Update the updated_at timestamp"""
self.updated_at = datetime.utcnow()
class BaseCreateModel(BaseModel):
"""Base model for creation requests"""
model_config = ConfigDict(from_attributes=True)
class BaseUpdateModel(BaseModel):
"""Base model for update requests"""
model_config = ConfigDict(from_attributes=True)
updated_at: datetime = Field(default_factory=datetime.utcnow)
class BaseResponseModel(BaseServiceModel):
"""Base model for API responses"""
pass
# Legacy compatibility - some files might still import Base
Base = BaseServiceModel # For backwards compatibility during migration
# Database service integration helpers
class DatabaseMixin:
"""Mixin providing database service integration methods"""
@classmethod
async def get_table_name(cls) -> str:
"""Get the database table name for this model"""
# Lowercase the class name and apply a simple pluralization rule
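# e.g. "Agent" -> "agents", "Category" -> "categories", "Address" -> "addresses"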
name = cls.__name__.lower()
if name.endswith('y'):
name = name[:-1] + 'ies'
elif name.endswith('s'):
name = name + 'es'
else:
name = name + 's'
return name
@classmethod
async def create_sql(cls) -> str:
"""Generate CREATE TABLE SQL for this model"""
# This would generate SQL based on Pydantic field types
# For now, return placeholder - actual schemas are in DuckDB service
table_name = await cls.get_table_name()
return f"-- CREATE TABLE {table_name} generated by DuckDB service"
async def to_sql_values(self) -> Dict[str, Any]:
"""Convert model to SQL-safe values"""
data = self.to_dict()
# Convert datetime objects to ISO strings
for key, value in data.items():
if isinstance(value, datetime):
data[key] = value.isoformat()
return data
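How a concrete model rides on these base classes, as a rough sketch (the Note model below is invented purely for illustration):

class Note(BaseServiceModel):
    title: str
    body: str = ""

    @classmethod
    def get_table_name(cls) -> str:
        return "notes"

row = {"id": "note-1", "title": "hello", "created_at": "2025-12-12T17:04:45"}
note = Note.from_row(row)     # the ISO timestamp string is parsed back into a datetime
note.update_timestamp()
payload = note.to_dict()      # plain dict via model_dump(), ready for the service layer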

View File

@@ -0,0 +1,340 @@
"""
Category Model for GT 2.0 Agent Discovery
Implements a simple hierarchical category system for organizing agents.
Follows GT 2.0's principle of "Clarity Over Complexity"
- Simple parent-child relationships
- System categories that cannot be deleted
- Tenant-specific and global categories
"""
from datetime import datetime
from typing import Optional, Dict, Any, List
import uuid
from sqlalchemy import Column, Integer, String, Text, DateTime, Boolean, ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from app.core.database import Base
class Category(Base):
"""Category model for organizing agents and resources
GT 2.0 Design: Simple hierarchical categories without complex taxonomies
"""
__tablename__ = "categories"
# Primary Key
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
slug = Column(String(100), unique=True, nullable=False, index=True) # URL-safe identifier
# Category Details
name = Column(String(100), nullable=False, index=True)
display_name = Column(String(100), nullable=False)
description = Column(Text, nullable=True)
icon = Column(String(10), nullable=True) # Emoji or icon code
color = Column(String(20), nullable=True) # Hex color code for UI
# Hierarchy (simple parent-child)
parent_id = Column(String(36), ForeignKey("categories.id"), nullable=True, index=True)
# Scope
is_system = Column(Boolean, nullable=False, default=False) # Protected from deletion
is_global = Column(Boolean, nullable=False, default=True) # Available to all tenants
# Display Order
sort_order = Column(Integer, nullable=False, default=0)
# Usage Statistics (cached)
assistant_count = Column(Integer, nullable=False, default=0)
dataset_count = Column(Integer, nullable=False, default=0)
# Timestamps
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
# Relationships
parent = relationship("Category", remote_side=[id], backref="children")
def __repr__(self) -> str:
return f"<Category(id={self.id}, name='{self.name}', slug='{self.slug}')>"
def to_dict(self, include_children: bool = False) -> Dict[str, Any]:
"""Convert to dictionary for API responses"""
data = {
"id": self.id,
"slug": self.slug,
"name": self.name,
"display_name": self.display_name,
"description": self.description,
"icon": self.icon,
"color": self.color,
"parent_id": self.parent_id,
"is_system": self.is_system,
"is_global": self.is_global,
"sort_order": self.sort_order,
"assistant_count": self.assistant_count,
"dataset_count": self.dataset_count,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
}
if include_children and self.children:
data["children"] = [child.to_dict() for child in self.children]
return data
def get_full_path(self) -> str:
"""Get full category path (e.g., 'AI Tools > Research > Academic')"""
if not self.parent_id:
return self.display_name
# Simple recursion to build path
parent_path = self.parent.get_full_path() if self.parent else ""
return f"{parent_path} > {self.display_name}" if parent_path else self.display_name
def is_descendant_of(self, ancestor_id: str) -> bool:
"""Check if this category is a descendant of another"""
if not self.parent_id:
return False
if self.parent_id == ancestor_id:
return True
return self.parent.is_descendant_of(ancestor_id) if self.parent else False
def get_all_descendants(self) -> List["Category"]:
"""Get all descendant categories"""
descendants = []
if self.children:
for child in self.children:
descendants.append(child)
descendants.extend(child.get_all_descendants())
return descendants
def update_counts(self, assistant_delta: int = 0, dataset_delta: int = 0) -> None:
"""Update resource counts for this category"""
self.assistant_count = max(0, self.assistant_count + assistant_delta)
self.dataset_count = max(0, self.dataset_count + dataset_delta)
self.updated_at = datetime.utcnow()
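# Example (hedged sketch): a tiny hierarchy and its display path; the ids and slugs
# below are illustrative only.
#   root = Category(id="cat-research", slug="research", name="Research & Analysis",
#                   display_name="Research & Analysis")
#   child = Category(id="cat-academic", slug="research-academic", name="Academic Research",
#                    display_name="Academic Research", parent_id=root.id, parent=root)
#   child.get_full_path()             # "Research & Analysis > Academic Research"
#   child.is_descendant_of(root.id)   # True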
# GT 2.0 Default System Categories
DEFAULT_CATEGORIES = [
# Top-level categories
{
"slug": "research",
"name": "Research & Analysis",
"display_name": "Research & Analysis",
"description": "Agents for research, analysis, and information synthesis",
"icon": "🔍",
"color": "#3B82F6", # Blue
"is_system": True,
"is_global": True,
"sort_order": 10,
},
{
"slug": "development",
"name": "Software Development",
"display_name": "Software Development",
"description": "Coding, debugging, and development tools",
"icon": "💻",
"color": "#10B981", # Green
"is_system": True,
"is_global": True,
"sort_order": 20,
},
{
"slug": "cybersecurity",
"name": "Cybersecurity",
"display_name": "Cybersecurity",
"description": "Security analysis, threat detection, and incident response",
"icon": "🛡️",
"color": "#EF4444", # Red
"is_system": True,
"is_global": True,
"sort_order": 30,
},
{
"slug": "education",
"name": "Education & Training",
"display_name": "Education & Training",
"description": "Educational agents and AI literacy tools",
"icon": "🎓",
"color": "#8B5CF6", # Purple
"is_system": True,
"is_global": True,
"sort_order": 40,
},
{
"slug": "creative",
"name": "Creative & Content",
"display_name": "Creative & Content",
"description": "Writing, design, and creative content generation",
"icon": "",
"color": "#F59E0B", # Amber
"is_system": True,
"is_global": True,
"sort_order": 50,
},
{
"slug": "analytics",
"name": "Data & Analytics",
"display_name": "Data & Analytics",
"description": "Data analysis, visualization, and insights",
"icon": "📊",
"color": "#06B6D4", # Cyan
"is_system": True,
"is_global": True,
"sort_order": 60,
},
{
"slug": "business",
"name": "Business & Operations",
"display_name": "Business & Operations",
"description": "Business analysis, planning, and operations",
"icon": "💼",
"color": "#64748B", # Slate
"is_system": True,
"is_global": True,
"sort_order": 70,
},
{
"slug": "personal",
"name": "Personal Productivity",
"display_name": "Personal Productivity",
"description": "Personal agents and productivity tools",
"icon": "🚀",
"color": "#14B8A6", # Teal
"is_system": True,
"is_global": True,
"sort_order": 80,
},
{
"slug": "custom",
"name": "Custom & Specialized",
"display_name": "Custom & Specialized",
"description": "Custom-built and specialized agents",
"icon": "⚙️",
"color": "#71717A", # Zinc
"is_system": True,
"is_global": True,
"sort_order": 90,
},
]
# Sub-categories (examples)
DEFAULT_SUBCATEGORIES = [
# Research subcategories
{
"slug": "research-academic",
"name": "Academic Research",
"display_name": "Academic Research",
"description": "Academic papers, citations, and literature review",
"icon": "📚",
"parent_slug": "research", # Will be resolved to parent_id
"is_system": True,
"is_global": True,
"sort_order": 11,
},
{
"slug": "research-market",
"name": "Market Research",
"display_name": "Market Research",
"description": "Market analysis, competitor research, and trends",
"icon": "📈",
"parent_slug": "research",
"is_system": True,
"is_global": True,
"sort_order": 12,
},
# Development subcategories
{
"slug": "dev-web",
"name": "Web Development",
"display_name": "Web Development",
"description": "Frontend, backend, and full-stack development",
"icon": "🌐",
"parent_slug": "development",
"is_system": True,
"is_global": True,
"sort_order": 21,
},
{
"slug": "dev-mobile",
"name": "Mobile Development",
"display_name": "Mobile Development",
"description": "iOS, Android, and cross-platform development",
"icon": "📱",
"parent_slug": "development",
"is_system": True,
"is_global": True,
"sort_order": 22,
},
{
"slug": "dev-devops",
"name": "DevOps & Infrastructure",
"display_name": "DevOps & Infrastructure",
"description": "CI/CD, containerization, and infrastructure",
"icon": "🔧",
"parent_slug": "development",
"is_system": True,
"is_global": True,
"sort_order": 23,
},
# Cybersecurity subcategories
{
"slug": "cyber-analysis",
"name": "Threat Analysis",
"display_name": "Threat Analysis",
"description": "Threat detection, analysis, and intelligence",
"icon": "🔍",
"parent_slug": "cybersecurity",
"is_system": True,
"is_global": True,
"sort_order": 31,
},
{
"slug": "cyber-incident",
"name": "Incident Response",
"display_name": "Incident Response",
"description": "Incident handling and forensics",
"icon": "🚨",
"parent_slug": "cybersecurity",
"is_system": True,
"is_global": True,
"sort_order": 32,
},
# Education subcategories
{
"slug": "edu-ai-literacy",
"name": "AI Literacy",
"display_name": "AI Literacy",
"description": "Understanding and working with AI systems",
"icon": "🤖",
"parent_slug": "education",
"is_system": True,
"is_global": True,
"sort_order": 41,
},
{
"slug": "edu-critical-thinking",
"name": "Critical Thinking",
"display_name": "Critical Thinking",
"description": "Logic, reasoning, and problem-solving",
"icon": "🧠",
"parent_slug": "education",
"is_system": True,
"is_global": True,
"sort_order": 42,
},
]
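# Illustrative seeding sketch (assumed helper, not part of the shipped API):
# shows how "parent_slug" entries in DEFAULT_SUBCATEGORIES could be resolved
# into parent_id values when inserting the defaults with a SQLAlchemy session.
def _seed_default_categories(session) -> None:  # session: sqlalchemy.orm.Session
    by_slug = {}
    for spec in DEFAULT_CATEGORIES:
        category = Category(**spec)
        session.add(category)
        by_slug[spec["slug"]] = category
    session.flush()  # populates the UUID primary keys of the parent rows
    for spec in DEFAULT_SUBCATEGORIES:
        spec = dict(spec)  # copy so the module-level constant is left untouched
        parent = by_slug[spec.pop("parent_slug")]
        session.add(Category(parent_id=parent.id, **spec))
    session.commit()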

View File

@@ -0,0 +1,263 @@
"""
Collaboration Team Models for GT 2.0 Tenant Backend
Pydantic models for user collaboration teams (team sharing system).
This is separate from the tenant isolation 'tenants' table (formerly 'teams').
Database Schema:
- teams: User collaboration groups within a tenant
- team_memberships: Team members with two-tier permissions
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from pydantic import BaseModel, Field, ConfigDict, field_validator
class TeamBase(BaseModel):
"""Base team model with common fields"""
name: str = Field(..., min_length=1, max_length=255, description="Team name")
description: Optional[str] = Field(None, description="Team description")
class TeamCreate(TeamBase):
"""Model for creating a new team"""
pass
class TeamUpdate(BaseModel):
"""Model for updating a team"""
name: Optional[str] = Field(None, min_length=1, max_length=255)
description: Optional[str] = None
class TeamMember(BaseModel):
"""Team member with permissions"""
id: str = Field(..., description="Membership UUID")
team_id: str = Field(..., description="Team UUID")
user_id: str = Field(..., description="User UUID")
user_email: str = Field(..., description="User email")
user_name: str = Field(..., description="User display name")
team_permission: str = Field(..., description="Team-level permission: 'read', 'share', or 'manager'")
resource_permissions: Dict[str, str] = Field(default_factory=dict, description="Resource-level permissions JSONB")
is_owner: bool = Field(default=False, description="Whether this member is the team owner")
is_observable: bool = Field(default=False, description="Member consents to activity observation")
observable_consent_status: str = Field(default="none", description="Consent status: 'none', 'pending', 'approved', 'revoked'")
observable_consent_at: Optional[str] = Field(None, description="When Observable status was approved")
status: str = Field(default="accepted", description="Membership status: 'pending', 'accepted', or 'declined'")
invited_at: Optional[str] = None
responded_at: Optional[str] = None
joined_at: Optional[str] = None
created_at: Optional[str] = None
updated_at: Optional[str] = None
model_config = ConfigDict(from_attributes=True)
class Team(TeamBase):
"""Complete team model with metadata"""
id: str = Field(..., description="Team UUID")
tenant_id: str = Field(..., description="Tenant UUID")
owner_id: str = Field(..., description="Owner user UUID")
owner_name: Optional[str] = Field(None, description="Owner display name")
owner_email: Optional[str] = Field(None, description="Owner email")
is_owner: bool = Field(..., description="Whether current user is the owner")
can_manage: bool = Field(..., description="Whether current user can manage the team")
user_permission: Optional[str] = Field(None, description="Current user's team permission: 'read', 'share', or 'manager' (None if owner)")
member_count: int = Field(0, description="Number of team members")
shared_resource_count: int = Field(0, description="Number of shared resources (agents and datasets)")
created_at: Optional[str] = None
updated_at: Optional[str] = None
model_config = ConfigDict(from_attributes=True)
class TeamWithMembers(Team):
"""Team with full member list"""
members: List[TeamMember] = Field(default_factory=list, description="List of team members")
class TeamListResponse(BaseModel):
"""Response model for listing teams"""
data: List[Team]
total: int
model_config = ConfigDict(from_attributes=True)
class TeamResponse(BaseModel):
"""Response model for single team operation"""
data: Team
model_config = ConfigDict(from_attributes=True)
class TeamWithMembersResponse(BaseModel):
"""Response model for team with members"""
data: TeamWithMembers
model_config = ConfigDict(from_attributes=True)
# Team Membership Models
class AddMemberRequest(BaseModel):
"""Request model for adding a member to a team"""
user_email: str = Field(..., description="Email of user to add")
team_permission: str = Field("read", description="Team permission: 'read', 'share', or 'manager'")
class UpdateMemberPermissionRequest(BaseModel):
"""Request model for updating member permission"""
team_permission: str = Field(..., description="New permission: 'read', 'share', or 'manager'")
@field_validator('team_permission')
@classmethod
def validate_permission(cls, v: str) -> str:
if v not in ["read", "share", "manager"]:
raise ValueError(f"Invalid permission: {v}. Must be 'read', 'share', or 'manager'")
return v
class MemberListResponse(BaseModel):
"""Response model for listing team members"""
data: List[TeamMember]
total: int
model_config = ConfigDict(from_attributes=True)
class MemberResponse(BaseModel):
"""Response model for single member operation"""
data: TeamMember
model_config = ConfigDict(from_attributes=True)
# Team Invitation Models
class TeamInvitation(BaseModel):
"""Pending team invitation"""
id: str = Field(..., description="Invitation (membership) UUID")
team_id: str = Field(..., description="Team UUID")
team_name: str = Field(..., description="Team name")
team_description: Optional[str] = Field(None, description="Team description")
owner_name: str = Field(..., description="Team owner display name")
owner_email: str = Field(..., description="Team owner email")
team_permission: str = Field(..., description="Invited permission: 'read', 'share', or 'manager'")
observable_requested: bool = Field(default=False, description="Whether Observable access was requested on invite")
invited_at: str = Field(..., description="Invitation timestamp")
model_config = ConfigDict(from_attributes=True)
class InvitationActionRequest(BaseModel):
"""Request to accept or decline invitation"""
action: str = Field(..., description="Action: 'accept' or 'decline'")
class InvitationListResponse(BaseModel):
"""Response model for listing invitations"""
data: List[TeamInvitation]
total: int
model_config = ConfigDict(from_attributes=True)
# Resource Sharing Models
class ShareResourceRequest(BaseModel):
"""Request model for sharing a resource to team"""
resource_type: str = Field(..., description="Resource type: 'agent' or 'dataset'")
resource_id: str = Field(..., description="Resource UUID")
user_permissions: Dict[str, str] = Field(
...,
description="User permissions: {user_id: 'read'|'edit'}"
)
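# Illustrative example (placeholder UUIDs): the user_permissions map is keyed by
# member user UUIDs, with a per-user resource permission of 'read' or 'edit'.
def _example_share_request() -> ShareResourceRequest:
    return ShareResourceRequest(
        resource_type="agent",
        resource_id="00000000-0000-0000-0000-000000000001",
        user_permissions={
            "00000000-0000-0000-0000-0000000000aa": "read",
            "00000000-0000-0000-0000-0000000000bb": "edit",
        },
    )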
class SharedResource(BaseModel):
"""Model for a shared resource"""
resource_type: str = Field(..., description="Resource type: 'agent' or 'dataset'")
resource_id: str = Field(..., description="Resource UUID")
resource_name: str = Field(..., description="Resource name")
resource_owner: str = Field(..., description="Resource owner name or email")
user_permissions: Dict[str, str] = Field(..., description="User permissions map")
class SharedResourcesResponse(BaseModel):
"""Response model for listing shared resources"""
data: List[SharedResource]
total: int
model_config = ConfigDict(from_attributes=True)
# Observable Request Models
class ObservableRequest(BaseModel):
"""Observable access request for a team member"""
team_id: str = Field(..., description="Team UUID")
team_name: str = Field(..., description="Team name")
requested_by_name: str = Field(..., description="Name of manager/owner who requested")
requested_by_email: str = Field(..., description="Email of manager/owner who requested")
requested_at: str = Field(..., description="When request was made")
model_config = ConfigDict(from_attributes=True)
class ObservableRequestListResponse(BaseModel):
"""Response model for listing Observable requests"""
data: List[ObservableRequest]
total: int
model_config = ConfigDict(from_attributes=True)
# Team Activity Models
class TeamActivityMetrics(BaseModel):
"""Team activity metrics for Observable members"""
team_id: str
team_name: str
date_range_days: int
observable_member_count: int
total_member_count: int
team_totals: Dict[str, Any] = Field(
default_factory=dict,
description="Aggregated metrics: conversations, messages, tokens"
)
member_breakdown: List[Dict[str, Any]] = Field(
default_factory=list,
description="Per-member activity stats"
)
time_series: List[Dict[str, Any]] = Field(
default_factory=list,
description="Activity over time"
)
model_config = ConfigDict(from_attributes=True)
class TeamActivityResponse(BaseModel):
"""Response model for team activity"""
data: TeamActivityMetrics
model_config = ConfigDict(from_attributes=True)
# Error Response Models
class ErrorDetail(BaseModel):
"""Error detail model"""
message: str
field: Optional[str] = None
code: Optional[str] = None
class ErrorResponse(BaseModel):
"""Error response model"""
error: str
details: Optional[List[ErrorDetail]] = None
model_config = ConfigDict(from_attributes=True)

View File

@@ -0,0 +1,148 @@
"""
Conversation Model for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for conversation entities using the PostgreSQL + PGVector backend.
Stores conversation metadata and settings for AI chat sessions.
Perfect tenant isolation - each tenant has separate conversation data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
class ConversationStatus(str, Enum):
"""Conversation status enumeration"""
ACTIVE = "active"
ARCHIVED = "archived"
DELETED = "deleted"
class Conversation(BaseServiceModel):
"""
Conversation model for GT 2.0 service-based architecture.
Represents a chat session with an AI agent including metadata,
configuration, and usage statistics.
"""
# Core conversation properties
title: str = Field(..., min_length=1, max_length=200, description="Conversation title")
agent_id: Optional[str] = Field(None, description="Associated agent ID")
# User information
created_by: str = Field(..., description="User email or ID who created this")
user_name: Optional[str] = Field(None, max_length=100, description="User display name")
# Configuration
system_prompt: Optional[str] = Field(None, description="Custom system prompt override")
model_id: str = Field(default="groq:llama3-70b-8192", description="AI model identifier")
configuration: Dict[str, Any] = Field(default_factory=dict, description="Model parameters and settings")
# Status and metadata
status: ConversationStatus = Field(default=ConversationStatus.ACTIVE, description="Conversation status")
tags: List[str] = Field(default_factory=list, description="Conversation tags")
# Statistics
message_count: int = Field(default=0, description="Number of messages in conversation")
total_tokens_used: int = Field(default=0, description="Total tokens used")
total_cost_cents: int = Field(default=0, description="Total cost in cents")
# Timestamps
last_activity_at: Optional[datetime] = Field(None, description="Last activity timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "conversations"
@classmethod
def from_dict(cls, data: Dict[str, Any]) -> "Conversation":
"""Create from dictionary"""
return cls(
agent_id=data.get("agent_id"),
title=data.get("title", ""),
system_prompt=data.get("system_prompt"),
model_id=data.get("model_id", "groq:llama3-70b-8192"),
created_by=data.get("created_by", ""),
user_name=data.get("user_name"),
configuration=data.get("configuration", {}),
tags=data.get("tags", []),
)
def update_statistics(self, message_count: int, tokens_used: int, cost_cents: int) -> None:
"""Update conversation statistics"""
self.message_count = message_count
self.total_tokens_used = tokens_used
self.total_cost_cents = cost_cents
self.last_activity_at = datetime.utcnow()
self.update_timestamp()
def archive(self) -> None:
"""Archive this conversation"""
self.status = ConversationStatus.ARCHIVED
self.update_timestamp()
def delete(self) -> None:
"""Mark conversation as deleted"""
self.status = ConversationStatus.DELETED
self.update_timestamp()
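# Illustrative lifecycle sketch (assumes BaseServiceModel supplies id and
# timestamp defaults): create a conversation from an API payload, record usage,
# then archive it.
def _example_conversation_lifecycle() -> None:
    conversation = Conversation.from_dict({
        "title": "Quarterly report drafting",
        "created_by": "user@customer1.com",
        "model_id": "groq:llama3-70b-8192",
    })
    conversation.update_statistics(message_count=12, tokens_used=4250, cost_cents=3)
    conversation.archive()
    assert conversation.status is ConversationStatus.ARCHIVED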
class ConversationCreate(BaseCreateModel):
"""Model for creating new conversations"""
title: str = Field(..., min_length=1, max_length=200)
agent_id: Optional[str] = None
created_by: str
user_name: Optional[str] = Field(None, max_length=100)
system_prompt: Optional[str] = None
model_id: str = Field(default="groq:llama3-70b-8192")
configuration: Dict[str, Any] = Field(default_factory=dict)
tags: List[str] = Field(default_factory=list)
model_config = ConfigDict(protected_namespaces=())
class ConversationUpdate(BaseUpdateModel):
"""Model for updating conversations"""
title: Optional[str] = Field(None, min_length=1, max_length=200)
system_prompt: Optional[str] = None
model_id: Optional[str] = None
configuration: Optional[Dict[str, Any]] = None
status: Optional[ConversationStatus] = None
tags: Optional[List[str]] = None
model_config = ConfigDict(protected_namespaces=())
class ConversationResponse(BaseResponseModel):
"""Model for conversation API responses"""
id: str
title: str
agent_id: Optional[str]
created_by: str
user_name: Optional[str]
system_prompt: Optional[str]
model_id: str
configuration: Dict[str, Any]
status: ConversationStatus
tags: List[str]
message_count: int
total_tokens_used: int
total_cost_cents: int
last_activity_at: Optional[datetime]
created_at: datetime
updated_at: datetime
model_config = ConfigDict(protected_namespaces=())

View File

@@ -0,0 +1,435 @@
"""
Document and RAG Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for document entities using the PostgreSQL + PGVector backend.
Stores document metadata, RAG datasets, and processing status.
Perfect tenant isolation - each tenant has separate document data.
All vectors stored encrypted in tenant-isolated PGVector tables.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
# SQLAlchemy imports for database models
from sqlalchemy import Column, String, Integer, BigInteger, Text, DateTime, Boolean, JSON, ForeignKey
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from app.core.database import Base
# PGVector import for embeddings
try:
from pgvector.sqlalchemy import Vector
except ImportError:
# Fallback if pgvector not available
from sqlalchemy import Text as Vector
class DocumentStatus(str, Enum):
"""Document processing status enumeration"""
UPLOADING = "uploading"
PROCESSING = "processing"
COMPLETED = "completed"
FAILED = "failed"
ARCHIVED = "archived"
class DocumentType(str, Enum):
"""Document type enumeration"""
PDF = "pdf"
DOCX = "docx"
TXT = "txt"
MD = "md"
HTML = "html"
JSON = "json"
CSV = "csv"
OTHER = "other"
class Document(BaseServiceModel):
"""
Document model for GT 2.0 service-based architecture.
Represents a document with metadata, processing status,
and RAG integration for knowledge retrieval.
"""
# Core document properties
filename: str = Field(..., min_length=1, max_length=255, description="Original filename")
original_name: str = Field(..., min_length=1, max_length=255, description="User-provided name")
file_size: int = Field(..., ge=0, description="File size in bytes")
mime_type: str = Field(..., max_length=100, description="MIME type of the file")
doc_type: DocumentType = Field(..., description="Document type classification")
# Storage and processing
file_path: str = Field(..., description="Storage path for the file")
content_hash: Optional[str] = Field(None, max_length=64, description="SHA-256 hash of content")
status: DocumentStatus = Field(default=DocumentStatus.UPLOADING, description="Processing status")
# Owner and access
owner_id: str = Field(..., description="User ID of the document owner")
dataset_id: Optional[str] = Field(None, description="Associated dataset ID")
# RAG and processing metadata
content_preview: Optional[str] = Field(None, max_length=500, description="Content preview")
extracted_text: Optional[str] = Field(None, description="Extracted text content")
metadata: Dict[str, Any] = Field(default_factory=dict, description="Document metadata")
# Processing statistics
chunk_count: int = Field(default=0, description="Number of chunks created")
vector_count: int = Field(default=0, description="Number of vectors stored")
processing_time_ms: Optional[float] = Field(None, description="Processing time in milliseconds")
# Errors and logs
error_message: Optional[str] = Field(None, description="Error message if processing failed")
processing_log: List[str] = Field(default_factory=list, description="Processing log entries")
# Timestamps
processed_at: Optional[datetime] = Field(None, description="When processing completed")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "documents"
def mark_processing(self) -> None:
"""Mark document as processing"""
self.status = DocumentStatus.PROCESSING
self.update_timestamp()
def mark_completed(self, chunk_count: int, vector_count: int, processing_time_ms: float) -> None:
"""Mark document processing as completed"""
self.status = DocumentStatus.COMPLETED
self.chunk_count = chunk_count
self.vector_count = vector_count
self.processing_time_ms = processing_time_ms
self.processed_at = datetime.utcnow()
self.update_timestamp()
def mark_failed(self, error_message: str) -> None:
"""Mark document processing as failed"""
self.status = DocumentStatus.FAILED
self.error_message = error_message
self.update_timestamp()
def add_log_entry(self, message: str) -> None:
"""Add a processing log entry"""
timestamp = datetime.utcnow().isoformat()
self.processing_log.append(f"[{timestamp}] {message}")
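# Illustrative processing sketch (assumes BaseServiceModel supplies id and
# timestamp defaults): the upload -> processing -> completed transitions used by
# the document pipeline. ``model`` is bound at definition time so it refers to
# the Pydantic Document schema, not the SQLAlchemy table model defined further
# below under the same name.
def _example_document_lifecycle(model=Document) -> None:
    document = model(
        filename="handbook.pdf",
        original_name="Employee Handbook",
        file_size=1_048_576,
        mime_type="application/pdf",
        doc_type=DocumentType.PDF,
        file_path="/data/docs/handbook.pdf",
        owner_id="user-uuid",
    )
    document.mark_processing()
    document.add_log_entry("chunking with size=1000, overlap=200")
    document.mark_completed(chunk_count=42, vector_count=42, processing_time_ms=1830.5)
    assert document.status is DocumentStatus.COMPLETED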
class RAGDataset(BaseServiceModel):
"""
RAG Dataset model for organizing documents into collections.
Groups related documents together for focused retrieval and
provides dataset-level configuration and statistics.
"""
# Core dataset properties
name: str = Field(..., min_length=1, max_length=255, description="Dataset name")
description: Optional[str] = Field(None, max_length=1000, description="Dataset description")
# Owner and access
owner_id: str = Field(..., description="User ID of the dataset owner")
# Configuration
chunk_size: int = Field(default=1000, ge=100, le=5000, description="Default chunk size")
chunk_overlap: int = Field(default=200, ge=0, le=1000, description="Default chunk overlap")
embedding_model: str = Field(default="all-MiniLM-L6-v2", description="Embedding model to use")
# Statistics
document_count: int = Field(default=0, description="Number of documents")
total_chunks: int = Field(default=0, description="Total chunks across all documents")
total_vectors: int = Field(default=0, description="Total vectors stored")
total_size_bytes: int = Field(default=0, description="Total size of all documents")
# Status
is_public: bool = Field(default=False, description="Whether dataset is publicly accessible")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "rag_datasets"
def update_statistics(self, doc_count: int, chunk_count: int, vector_count: int, size_bytes: int) -> None:
"""Update dataset statistics"""
self.document_count = doc_count
self.total_chunks = chunk_count
self.total_vectors = vector_count
self.total_size_bytes = size_bytes
self.update_timestamp()
class DatasetDocument(BaseServiceModel):
"""
Dataset-Document relationship model for GT 2.0 service-based architecture.
Junction table model that links documents to RAG datasets,
tracking the relationship and statistics.
"""
# Core relationship properties
dataset_id: str = Field(..., description="RAG dataset ID")
document_id: str = Field(..., description="Document ID")
user_id: str = Field(..., description="User who added document to dataset")
# Statistics
chunk_count: int = Field(default=0, description="Number of chunks for this document")
vector_count: int = Field(default=0, description="Number of vectors stored for this document")
# Status
processing_status: str = Field(default="pending", max_length=50, description="Processing status")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "dataset_documents"
class DocumentChunk(BaseServiceModel):
"""
Document chunk model for processed document pieces.
Represents individual chunks of processed documents with
embeddings and metadata for RAG retrieval.
"""
# Core chunk properties
document_id: str = Field(..., description="Parent document ID")
chunk_index: int = Field(..., ge=0, description="Chunk index within document")
chunk_text: str = Field(..., min_length=1, description="Chunk text content")
# Chunk metadata
chunk_size: int = Field(..., ge=1, description="Character count of chunk")
token_count: Optional[int] = Field(None, description="Token count for chunk")
chunk_metadata: Dict[str, Any] = Field(default_factory=dict, description="Chunk-specific metadata")
# Embedding information
embedding_id: Optional[str] = Field(None, description="Vector store embedding ID")
embedding_model: Optional[str] = Field(None, max_length=100, description="Model used for embedding")
# Position and context
start_char: Optional[int] = Field(None, description="Starting character position in document")
end_char: Optional[int] = Field(None, description="Ending character position in document")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "document_chunks"
class DocumentCreate(BaseCreateModel):
"""Model for creating new documents"""
filename: str = Field(..., min_length=1, max_length=255)
original_name: str = Field(..., min_length=1, max_length=255)
file_size: int = Field(..., ge=0)
mime_type: str = Field(..., max_length=100)
doc_type: DocumentType
file_path: str
content_hash: Optional[str] = Field(None, max_length=64)
owner_id: str
dataset_id: Optional[str] = None
content_preview: Optional[str] = Field(None, max_length=500)
metadata: Dict[str, Any] = Field(default_factory=dict)
class DocumentUpdate(BaseUpdateModel):
"""Model for updating documents"""
original_name: Optional[str] = Field(None, min_length=1, max_length=255)
status: Optional[DocumentStatus] = None
dataset_id: Optional[str] = None
content_preview: Optional[str] = Field(None, max_length=500)
extracted_text: Optional[str] = None
metadata: Optional[Dict[str, Any]] = None
chunk_count: Optional[int] = Field(None, ge=0)
vector_count: Optional[int] = Field(None, ge=0)
processing_time_ms: Optional[float] = None
error_message: Optional[str] = None
processed_at: Optional[datetime] = None
class DocumentResponse(BaseResponseModel):
"""Model for document API responses"""
id: str
filename: str
original_name: str
file_size: int
mime_type: str
doc_type: DocumentType
file_path: str
content_hash: Optional[str]
status: DocumentStatus
owner_id: str
dataset_id: Optional[str]
content_preview: Optional[str]
metadata: Dict[str, Any]
chunk_count: int
vector_count: int
processing_time_ms: Optional[float]
error_message: Optional[str]
processing_log: List[str]
processed_at: Optional[datetime]
created_at: datetime
updated_at: datetime
class RAGDatasetCreate(BaseCreateModel):
"""Model for creating new RAG datasets"""
name: str = Field(..., min_length=1, max_length=255)
description: Optional[str] = Field(None, max_length=1000)
owner_id: str
chunk_size: int = Field(default=1000, ge=100, le=5000)
chunk_overlap: int = Field(default=200, ge=0, le=1000)
embedding_model: str = Field(default="all-MiniLM-L6-v2")
is_public: bool = Field(default=False)
class RAGDatasetUpdate(BaseUpdateModel):
"""Model for updating RAG datasets"""
name: Optional[str] = Field(None, min_length=1, max_length=255)
description: Optional[str] = Field(None, max_length=1000)
chunk_size: Optional[int] = Field(None, ge=100, le=5000)
chunk_overlap: Optional[int] = Field(None, ge=0, le=1000)
embedding_model: Optional[str] = None
is_public: Optional[bool] = None
class RAGDatasetResponse(BaseResponseModel):
"""Model for RAG dataset API responses"""
id: str
name: str
description: Optional[str]
owner_id: str
chunk_size: int
chunk_overlap: int
embedding_model: str
document_count: int
total_chunks: int
total_vectors: int
total_size_bytes: int
is_public: bool
created_at: datetime
updated_at: datetime
# SQLAlchemy Database Models for PostgreSQL + PGVector
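# NOTE: the SQLAlchemy classes below reuse the names Document and DocumentChunk,
# so once this module finishes importing, those module-level names refer to the
# table models rather than the Pydantic schemas defined above.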
class Document(Base):
"""SQLAlchemy model for documents table"""
__tablename__ = "documents"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
dataset_id = Column(UUID(as_uuid=True), nullable=True, index=True)
filename = Column(String(255), nullable=False)
original_filename = Column(String(255), nullable=False)
file_type = Column(String(100), nullable=False)
file_size_bytes = Column(BigInteger, nullable=False)
file_hash = Column(String(64), nullable=True)
content_text = Column(Text, nullable=True)
chunk_count = Column(Integer, default=0)
processing_status = Column(String(50), default="pending")
error_message = Column(Text, nullable=True)
doc_metadata = Column(JSONB, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
# Relationships
chunks = relationship("DocumentChunk", back_populates="document", cascade="all, delete-orphan")
class DocumentChunk(Base):
"""SQLAlchemy model for document_chunks table"""
__tablename__ = "document_chunks"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
document_id = Column(UUID(as_uuid=True), ForeignKey("documents.id", ondelete="CASCADE"), nullable=False, index=True)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
dataset_id = Column(UUID(as_uuid=True), nullable=True, index=True)
chunk_index = Column(Integer, nullable=False)
content = Column(Text, nullable=False)
content_hash = Column(String(32), nullable=True)
token_count = Column(Integer, nullable=True)
# PGVector embedding column (1024 dimensions for BGE-M3)
embedding = Column(Vector(1024), nullable=True)
chunk_metadata = Column(JSONB, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
# Relationships
document = relationship("Document", back_populates="chunks")
class Dataset(Base):
"""SQLAlchemy model for datasets table"""
__tablename__ = "datasets"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True) # created_by in schema
name = Column(String(255), nullable=False)
description = Column(Text, nullable=True)
chunk_size = Column(Integer, default=512)
chunk_overlap = Column(Integer, default=128)
embedding_model = Column(String(100), default='BAAI/bge-m3')
search_method = Column(String(20), default='hybrid')
specialized_language = Column(Boolean, default=False)
is_active = Column(Boolean, default=True)
visibility = Column(String(20), default='individual')
access_group = Column(String(50), default='individual')
dataset_metadata = Column(JSONB, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
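# Illustrative retrieval sketch (assumes the real pgvector extension is
# installed, i.e. the Text fallback above was not used): nearest-neighbour
# lookup over a tenant's dataset using cosine distance on the embedding column.
def _example_chunk_search(session, tenant_id, dataset_id, query_embedding, limit: int = 5):
    return (
        session.query(DocumentChunk)
        .filter(
            DocumentChunk.tenant_id == tenant_id,
            DocumentChunk.dataset_id == dataset_id,
        )
        .order_by(DocumentChunk.embedding.cosine_distance(query_embedding))
        .limit(limit)
        .all()
    )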

View File

@@ -0,0 +1,283 @@
"""
Event Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for event entities using the PostgreSQL + PGVector backend.
Handles event automation, triggers, and action definitions.
Perfect tenant isolation with encrypted storage.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class EventStatus(str, Enum):
"""Event status enumeration"""
PENDING = "pending"
PROCESSING = "processing"
COMPLETED = "completed"
FAILED = "failed"
RETRYING = "retrying"
class Event(BaseServiceModel):
"""
Event model for GT 2.0 service-based architecture.
Represents an automation event with processing status,
payload data, and retry logic.
"""
# Core event properties
event_id: str = Field(default_factory=generate_uuid, description="Unique event identifier")
event_type: str = Field(..., min_length=1, max_length=100, description="Event type identifier")
user_id: str = Field(..., description="User who triggered the event")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Event data
payload: Dict[str, Any] = Field(default_factory=dict, description="Encrypted event data")
event_metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
# Processing status
status: EventStatus = Field(default=EventStatus.PENDING, description="Processing status")
error_message: Optional[str] = Field(None, description="Error message if failed")
retry_count: int = Field(default=0, ge=0, description="Number of retry attempts")
# Timestamps
started_at: Optional[datetime] = Field(None, description="Processing start time")
completed_at: Optional[datetime] = Field(None, description="Processing completion time")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "events"
def is_completed(self) -> bool:
"""Check if event processing is completed"""
return self.status == EventStatus.COMPLETED
def is_failed(self) -> bool:
"""Check if event processing failed"""
return self.status == EventStatus.FAILED
def mark_processing(self) -> None:
"""Mark event as processing"""
self.status = EventStatus.PROCESSING
self.started_at = datetime.utcnow()
self.update_timestamp()
def mark_completed(self) -> None:
"""Mark event as completed"""
self.status = EventStatus.COMPLETED
self.completed_at = datetime.utcnow()
self.update_timestamp()
def mark_failed(self, error_message: str) -> None:
"""Mark event as failed"""
self.status = EventStatus.FAILED
self.error_message = error_message
self.completed_at = datetime.utcnow()
self.update_timestamp()
def increment_retry(self) -> None:
"""Increment retry count"""
self.retry_count += 1
self.status = EventStatus.RETRYING
self.update_timestamp()
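# Illustrative processing sketch (assumes BaseServiceModel supplies id and
# timestamp defaults; the event type is a placeholder): a typical event path
# with one failed attempt and a retry.
def _example_event_retry_flow() -> None:
    event = Event(
        event_type="document.uploaded",
        user_id="user-uuid",
        tenant_id="customer1.com",
    )
    event.mark_processing()
    event.mark_failed("embedding service timeout")
    event.increment_retry()  # status becomes RETRYING, retry_count == 1
    event.mark_processing()
    event.mark_completed()
    assert event.is_completed() and event.retry_count == 1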
class EventTrigger(BaseServiceModel):
"""
Event trigger model for automation conditions.
Defines conditions that will trigger event processing.
"""
# Core trigger properties
trigger_name: str = Field(..., min_length=1, max_length=100, description="Trigger name")
event_type: str = Field(..., min_length=1, max_length=100, description="Event type to trigger")
user_id: str = Field(..., description="User who owns this trigger")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Trigger configuration
conditions: Dict[str, Any] = Field(default_factory=dict, description="Trigger conditions")
trigger_config: Dict[str, Any] = Field(default_factory=dict, description="Trigger configuration")
# Status
is_active: bool = Field(default=True, description="Whether trigger is active")
trigger_count: int = Field(default=0, description="Number of times triggered")
last_triggered: Optional[datetime] = Field(None, description="Last trigger timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "event_triggers"
class EventAction(BaseServiceModel):
"""
Event action model for automation responses.
Defines actions to take when events are processed.
"""
# Core action properties
action_name: str = Field(..., min_length=1, max_length=100, description="Action name")
event_type: str = Field(..., min_length=1, max_length=100, description="Event type this action handles")
user_id: str = Field(..., description="User who owns this action")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Action configuration
action_type: str = Field(..., min_length=1, max_length=50, description="Type of action")
action_config: Dict[str, Any] = Field(default_factory=dict, description="Action configuration")
# Execution settings
priority: int = Field(default=10, ge=1, le=100, description="Execution priority")
timeout_seconds: int = Field(default=300, ge=1, le=3600, description="Action timeout")
max_retries: int = Field(default=3, ge=0, le=10, description="Maximum retry attempts")
# Status
is_active: bool = Field(default=True, description="Whether action is active")
execution_count: int = Field(default=0, description="Number of times executed")
last_executed: Optional[datetime] = Field(None, description="Last execution timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "event_actions"
class EventSubscription(BaseServiceModel):
"""
Event subscription model for user notifications.
Manages user subscriptions to specific event types.
"""
# Core subscription properties
user_id: str = Field(..., description="Subscribing user ID")
tenant_id: str = Field(..., description="Tenant domain identifier")
event_type: str = Field(..., min_length=1, max_length=100, description="Subscribed event type")
# Subscription configuration
notification_method: str = Field(default="websocket", max_length=50, description="Notification delivery method")
subscription_config: Dict[str, Any] = Field(default_factory=dict, description="Subscription settings")
# Filtering
event_filters: Dict[str, Any] = Field(default_factory=dict, description="Event filtering criteria")
# Status
is_active: bool = Field(default=True, description="Whether subscription is active")
notification_count: int = Field(default=0, description="Number of notifications sent")
last_notified: Optional[datetime] = Field(None, description="Last notification timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "event_subscriptions"
# Create/Update/Response models
class EventCreate(BaseCreateModel):
"""Model for creating new events"""
event_type: str = Field(..., min_length=1, max_length=100)
user_id: str
tenant_id: str
payload: Dict[str, Any] = Field(default_factory=dict)
event_metadata: Dict[str, Any] = Field(default_factory=dict)
class EventUpdate(BaseUpdateModel):
"""Model for updating events"""
status: Optional[EventStatus] = None
error_message: Optional[str] = None
retry_count: Optional[int] = Field(None, ge=0)
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None
class EventResponse(BaseResponseModel):
"""Model for event API responses"""
id: str
event_id: str
event_type: str
user_id: str
tenant_id: str
payload: Dict[str, Any]
event_metadata: Dict[str, Any]
status: EventStatus
error_message: Optional[str]
retry_count: int
started_at: Optional[datetime]
completed_at: Optional[datetime]
created_at: datetime
updated_at: datetime
# Legacy compatibility - simplified versions of missing models
class EventLog(BaseServiceModel):
"""Minimal EventLog model for compatibility"""
event_id: str = Field(..., description="Related event ID")
log_message: str = Field(..., description="Log message")
log_level: str = Field(default="info", description="Log level")
model_config = ConfigDict(protected_namespaces=())
@classmethod
def get_table_name(cls) -> str:
return "event_logs"
class ScheduledTask(BaseServiceModel):
"""Minimal ScheduledTask model for compatibility"""
task_name: str = Field(..., description="Task name")
schedule: str = Field(..., description="Cron schedule")
is_active: bool = Field(default=True, description="Whether task is active")
model_config = ConfigDict(protected_namespaces=())
@classmethod
def get_table_name(cls) -> str:
return "scheduled_tasks"

View File

@@ -0,0 +1,254 @@
"""
External Service Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for external service entities using the PostgreSQL + PGVector backend.
Manages external web services integration with SSO and iframe embedding.
Perfect tenant isolation - each tenant has separate external service data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class ServiceStatus(str, Enum):
"""Service status enumeration"""
ACTIVE = "active"
INACTIVE = "inactive"
MAINTENANCE = "maintenance"
DEPRECATED = "deprecated"
class AccessLevel(str, Enum):
"""Access level enumeration"""
PUBLIC = "public"
AUTHENTICATED = "authenticated"
ADMIN_ONLY = "admin_only"
RESTRICTED = "restricted"
class ExternalServiceInstance(BaseServiceModel):
"""
External service instance model for GT 2.0 service-based architecture.
Represents external web services like Canvas LMS, Jupyter Hub, CTFd
with SSO integration and iframe embedding.
"""
# Core service properties
service_name: str = Field(..., min_length=1, max_length=100, description="Service name")
service_type: str = Field(..., min_length=1, max_length=50, description="Service type")
service_url: str = Field(..., description="Service URL")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Service configuration
config: Dict[str, Any] = Field(default_factory=dict, description="Service configuration")
auth_config: Dict[str, Any] = Field(default_factory=dict, description="Authentication configuration")
iframe_config: Dict[str, Any] = Field(default_factory=dict, description="Iframe embedding configuration")
# Service details
description: Optional[str] = Field(None, max_length=500, description="Service description")
version: str = Field(default="1.0.0", max_length=50, description="Service version")
provider: str = Field(..., max_length=100, description="Service provider")
# Access control
access_level: AccessLevel = Field(default=AccessLevel.AUTHENTICATED, description="Access level required")
allowed_users: List[str] = Field(default_factory=list, description="Allowed user IDs")
allowed_roles: List[str] = Field(default_factory=list, description="Allowed user roles")
# Status and monitoring
status: ServiceStatus = Field(default=ServiceStatus.ACTIVE, description="Service status")
health_check_url: Optional[str] = Field(None, description="Health check endpoint")
last_health_check: Optional[datetime] = Field(None, description="Last health check timestamp")
is_healthy: bool = Field(default=True, description="Health status")
# Usage statistics
total_access_count: int = Field(default=0, description="Total access count")
active_user_count: int = Field(default=0, description="Current active users")
last_accessed: Optional[datetime] = Field(None, description="Last access timestamp")
# Metadata
tags: List[str] = Field(default_factory=list, description="Service tags")
category: str = Field(default="general", max_length=50, description="Service category")
priority: int = Field(default=10, ge=1, le=100, description="Display priority")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "external_service_instances"
def activate(self) -> None:
"""Activate the service"""
self.status = ServiceStatus.ACTIVE
self.update_timestamp()
def deactivate(self) -> None:
"""Deactivate the service"""
self.status = ServiceStatus.INACTIVE
self.update_timestamp()
def record_access(self, user_id: str) -> None:
"""Record service access"""
self.total_access_count += 1
self.last_accessed = datetime.utcnow()
self.update_timestamp()
def update_health_status(self, is_healthy: bool) -> None:
"""Update health status"""
self.is_healthy = is_healthy
self.last_health_check = datetime.utcnow()
self.update_timestamp()
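# Illustrative registration sketch (assumes BaseServiceModel supplies id and
# timestamp defaults; URL and names are placeholders): register a JupyterHub
# instance for a tenant, then record an access and a health check.
def _example_service_registration() -> ExternalServiceInstance:
    service = ExternalServiceInstance(
        service_name="Class JupyterHub",
        service_type="jupyterhub",
        service_url="https://jupyter.customer1.com",
        tenant_id="customer1.com",
        provider="Project Jupyter",
        access_level=AccessLevel.AUTHENTICATED,
    )
    service.record_access(user_id="user-uuid")
    service.update_health_status(is_healthy=True)
    return service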
class ServiceAccessLog(BaseServiceModel):
"""
Service access log model for tracking usage and security.
Logs all access attempts to external services for auditing.
"""
# Core access properties
service_id: str = Field(..., description="External service instance ID")
user_id: str = Field(..., description="User who accessed the service")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Access details
access_type: str = Field(..., max_length=50, description="Type of access")
ip_address: Optional[str] = Field(None, max_length=45, description="User IP address")
user_agent: Optional[str] = Field(None, max_length=500, description="User agent string")
# Session information
session_id: Optional[str] = Field(None, description="User session ID")
session_duration_seconds: Optional[int] = Field(None, description="Session duration")
# Access result
access_granted: bool = Field(default=True, description="Whether access was granted")
denial_reason: Optional[str] = Field(None, description="Reason for access denial")
# Additional metadata
referrer_url: Optional[str] = Field(None, description="Referrer URL")
access_metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional access data")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "service_access_logs"
class ServiceTemplate(BaseServiceModel):
"""
Service template model for reusable service configurations.
Defines templates for common external service integrations.
"""
# Core template properties
template_name: str = Field(..., min_length=1, max_length=100, description="Template name")
service_type: str = Field(..., min_length=1, max_length=50, description="Service type")
template_description: str = Field(..., max_length=500, description="Template description")
# Template configuration
default_config: Dict[str, Any] = Field(default_factory=dict, description="Default service configuration")
default_auth_config: Dict[str, Any] = Field(default_factory=dict, description="Default auth configuration")
default_iframe_config: Dict[str, Any] = Field(default_factory=dict, description="Default iframe configuration")
# Template metadata
version: str = Field(default="1.0.0", max_length=50, description="Template version")
provider: str = Field(..., max_length=100, description="Service provider")
supported_versions: List[str] = Field(default_factory=list, description="Supported service versions")
# Documentation
setup_instructions: Optional[str] = Field(None, description="Setup instructions")
configuration_schema: Dict[str, Any] = Field(default_factory=dict, description="Configuration schema")
example_config: Dict[str, Any] = Field(default_factory=dict, description="Example configuration")
# Template status
is_active: bool = Field(default=True, description="Whether template is active")
is_verified: bool = Field(default=False, description="Whether template is verified")
usage_count: int = Field(default=0, description="Number of times used")
# Access control
is_public: bool = Field(default=True, description="Whether template is publicly available")
created_by: str = Field(..., description="Creator of the template")
tenant_id: Optional[str] = Field(None, description="Tenant ID if tenant-specific")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "service_templates"
def increment_usage(self) -> None:
"""Increment usage count"""
self.usage_count += 1
self.update_timestamp()
def verify_template(self) -> None:
"""Mark template as verified"""
self.is_verified = True
self.update_timestamp()
# Create/Update/Response models - minimal for now
class ExternalServiceInstanceCreate(BaseCreateModel):
"""Model for creating external service instances"""
service_name: str = Field(..., min_length=1, max_length=100)
service_type: str = Field(..., min_length=1, max_length=50)
service_url: str
tenant_id: str
provider: str = Field(..., max_length=100)
class ExternalServiceInstanceUpdate(BaseUpdateModel):
"""Model for updating external service instances"""
service_name: Optional[str] = Field(None, min_length=1, max_length=100)
service_url: Optional[str] = None
status: Optional[ServiceStatus] = None
is_healthy: Optional[bool] = None
class ExternalServiceInstanceResponse(BaseResponseModel):
"""Model for external service instance API responses"""
id: str
service_name: str
service_type: str
service_url: str
tenant_id: str
provider: str
status: ServiceStatus
is_healthy: bool
created_at: datetime
updated_at: datetime

View File

@@ -0,0 +1,383 @@
"""
Game Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for game entities using the PostgreSQL + PGVector backend.
Game sessions for AI literacy and strategic thinking development.
Perfect tenant isolation - each tenant has separate game data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class GameType(str, Enum):
"""Game type enumeration"""
CHESS = "chess"
GO = "go"
LOGIC_PUZZLE = "logic_puzzle"
PHILOSOPHICAL_DILEMMA = "philosophical_dilemma"
TRIVIA = "trivia"
DEBATE = "debate"
class DifficultyLevel(str, Enum):
"""Difficulty level enumeration"""
BEGINNER = "beginner"
INTERMEDIATE = "intermediate"
ADVANCED = "advanced"
EXPERT = "expert"
class GameStatus(str, Enum):
"""Game status enumeration"""
ACTIVE = "active"
COMPLETED = "completed"
PAUSED = "paused"
ABANDONED = "abandoned"
class GameSession(BaseServiceModel):
"""
Game session model for GT 2.0 service-based architecture.
Represents AI literacy and strategic thinking game sessions
with progress tracking and skill development.
"""
# Core game properties
user_id: str = Field(..., description="User playing the game")
tenant_id: str = Field(..., description="Tenant domain identifier")
game_type: GameType = Field(..., description="Type of game")
game_name: str = Field(..., min_length=1, max_length=100, description="Game name")
# Game configuration
difficulty_level: DifficultyLevel = Field(default=DifficultyLevel.INTERMEDIATE, description="Difficulty level")
ai_opponent_config: Dict[str, Any] = Field(default_factory=dict, description="AI opponent settings")
game_rules: Dict[str, Any] = Field(default_factory=dict, description="Game-specific rules")
# Game state
current_state: Dict[str, Any] = Field(default_factory=dict, description="Current game state")
move_history: List[Dict[str, Any]] = Field(default_factory=list, description="History of moves")
game_status: GameStatus = Field(default=GameStatus.ACTIVE, description="Game status")
# Progress tracking
moves_count: int = Field(default=0, description="Number of moves made")
hints_used: int = Field(default=0, description="Number of hints used")
time_spent_seconds: int = Field(default=0, description="Time spent in seconds")
current_rating: int = Field(default=1200, description="ELO-style rating")
# Results
winner: Optional[str] = Field(None, description="Winner of the game")
final_score: Optional[Dict[str, Any]] = Field(None, description="Final score details")
learning_insights: List[str] = Field(default_factory=list, description="Learning insights")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "game_sessions"
def add_move(self, move_data: Dict[str, Any]) -> None:
"""Add a move to the game history"""
self.move_history.append(move_data)
self.moves_count += 1
self.update_timestamp()
def use_hint(self) -> None:
"""Record hint usage"""
self.hints_used += 1
self.update_timestamp()
def complete_game(self, winner: str, final_score: Dict[str, Any]) -> None:
"""Mark game as completed"""
self.game_status = GameStatus.COMPLETED
self.winner = winner
self.final_score = final_score
self.update_timestamp()
def pause_game(self) -> None:
"""Pause the game"""
self.game_status = GameStatus.PAUSED
self.update_timestamp()
def resume_game(self) -> None:
"""Resume the game"""
self.game_status = GameStatus.ACTIVE
self.update_timestamp()
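# Illustrative session sketch (assumes BaseServiceModel supplies id and
# timestamp defaults): a short chess game against the AI opponent.
def _example_chess_session() -> None:
    game = GameSession(
        user_id="user-uuid",
        tenant_id="customer1.com",
        game_type=GameType.CHESS,
        game_name="Chess vs. AI tutor",
        difficulty_level=DifficultyLevel.BEGINNER,
    )
    game.add_move({"move": "e4", "player": "human"})
    game.add_move({"move": "e5", "player": "ai"})
    game.use_hint()
    game.complete_game(winner="human", final_score={"result": "1-0", "moves": 2})
    assert game.game_status is GameStatus.COMPLETED and game.moves_count == 2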
class PuzzleSession(BaseServiceModel):
"""
Puzzle session model for logic and problem-solving games.
Tracks puzzle-specific metrics and progress.
"""
# Core puzzle properties
user_id: str = Field(..., description="User solving the puzzle")
tenant_id: str = Field(..., description="Tenant domain identifier")
puzzle_type: str = Field(..., max_length=50, description="Type of puzzle")
puzzle_name: str = Field(..., min_length=1, max_length=100, description="Puzzle name")
# Puzzle configuration
difficulty_level: DifficultyLevel = Field(default=DifficultyLevel.INTERMEDIATE, description="Difficulty level")
puzzle_data: Dict[str, Any] = Field(default_factory=dict, description="Puzzle configuration")
solution_data: Dict[str, Any] = Field(default_factory=dict, description="Solution information")
# Progress tracking
attempts_made: int = Field(default=0, description="Number of attempts")
hints_requested: int = Field(default=0, description="Hints requested")
is_solved: bool = Field(default=False, description="Whether puzzle is solved")
solve_time_seconds: Optional[int] = Field(None, description="Time to solve")
# Learning metrics
skill_points_earned: int = Field(default=0, description="Skill points earned")
concepts_learned: List[str] = Field(default_factory=list, description="Concepts learned")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "puzzle_sessions"
def add_attempt(self) -> None:
"""Record a puzzle attempt"""
self.attempts_made += 1
self.update_timestamp()
def solve_puzzle(self, solve_time: int, skill_points: int) -> None:
"""Mark puzzle as solved"""
self.is_solved = True
self.solve_time_seconds = solve_time
self.skill_points_earned = skill_points
self.update_timestamp()
class PhilosophicalDialogue(BaseServiceModel):
"""
Philosophical dialogue model for ethical and critical thinking development.
Tracks philosophical discussions and thinking development.
"""
# Core dialogue properties
user_id: str = Field(..., description="User participating in dialogue")
tenant_id: str = Field(..., description="Tenant domain identifier")
dialogue_topic: str = Field(..., min_length=1, max_length=200, description="Dialogue topic")
dialogue_type: str = Field(..., max_length=50, description="Type of philosophical dialogue")
# Dialogue configuration
ai_persona: str = Field(default="socratic", max_length=50, description="AI dialogue persona")
dialogue_style: str = Field(default="questioning", max_length=50, description="Dialogue style")
target_concepts: List[str] = Field(default_factory=list, description="Target concepts to explore")
# Dialogue content
messages: List[Dict[str, Any]] = Field(default_factory=list, description="Dialogue messages")
key_insights: List[str] = Field(default_factory=list, description="Key insights generated")
# Progress metrics
turns_count: int = Field(default=0, description="Number of dialogue turns")
depth_score: float = Field(default=0.0, description="Depth of philosophical exploration")
critical_thinking_score: float = Field(default=0.0, description="Critical thinking score")
# Status
is_completed: bool = Field(default=False, description="Whether dialogue is completed")
completion_reason: Optional[str] = Field(None, description="Reason for completion")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "philosophical_dialogues"
def add_message(self, message_data: Dict[str, Any]) -> None:
"""Add a message to the dialogue"""
self.messages.append(message_data)
self.turns_count += 1
self.update_timestamp()
def complete_dialogue(self, reason: str) -> None:
"""Mark dialogue as completed"""
self.is_completed = True
self.completion_reason = reason
self.update_timestamp()
class LearningAnalytics(BaseServiceModel):
"""
Learning analytics model for tracking educational progress.
Aggregates learning data across all game types.
"""
# Core analytics properties
user_id: str = Field(..., description="User being analyzed")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Skill tracking
chess_rating: int = Field(default=1200, description="Chess skill rating")
go_rating: int = Field(default=1200, description="Go skill rating")
puzzle_solving_level: int = Field(default=1, description="Puzzle solving level")
critical_thinking_level: int = Field(default=1, description="Critical thinking level")
# Activity metrics
total_games_played: int = Field(default=0, description="Total games played")
total_puzzles_solved: int = Field(default=0, description="Total puzzles solved")
total_dialogues_completed: int = Field(default=0, description="Total dialogues completed")
total_time_spent_hours: float = Field(default=0.0, description="Total time spent in hours")
# Learning metrics
concepts_mastered: List[str] = Field(default_factory=list, description="Mastered concepts")
learning_streaks: Dict[str, int] = Field(default_factory=dict, description="Learning streaks")
achievement_badges: List[str] = Field(default_factory=list, description="Achievement badges")
# Progress tracking
last_activity_date: Optional[datetime] = Field(None, description="Last activity date")
learning_goals: List[Dict[str, Any]] = Field(default_factory=list, description="Learning goals")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "learning_analytics"
def update_activity(self) -> None:
"""Update last activity timestamp"""
self.last_activity_date = datetime.utcnow()
self.update_timestamp()
def earn_badge(self, badge_name: str) -> None:
"""Earn an achievement badge"""
if badge_name not in self.achievement_badges:
self.achievement_badges.append(badge_name)
self.update_timestamp()
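# Illustrative usage sketch (not part of the original model file): bumping
# activity counters and awarding a badge; the badge name is a hypothetical example.
def _example_record_progress(analytics: LearningAnalytics) -> None:
    analytics.update_activity()
    analytics.total_games_played += 1
    analytics.earn_badge("first_checkmate")  # idempotent - duplicates are ignored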
class GameTemplate(BaseServiceModel):
"""
Game template model for configuring game types and rules.
Defines reusable game configurations and templates.
"""
# Core template properties
template_name: str = Field(..., min_length=1, max_length=100, description="Template name")
game_type: GameType = Field(..., description="Game type")
template_description: str = Field(..., max_length=500, description="Template description")
# Template configuration
default_rules: Dict[str, Any] = Field(default_factory=dict, description="Default game rules")
ai_configurations: List[Dict[str, Any]] = Field(default_factory=list, description="AI opponent configs")
difficulty_settings: Dict[str, Any] = Field(default_factory=dict, description="Difficulty settings")
# Educational content
learning_objectives: List[str] = Field(default_factory=list, description="Learning objectives")
skill_categories: List[str] = Field(default_factory=list, description="Skill categories")
educational_notes: Optional[str] = Field(None, description="Educational notes")
# Template metadata
created_by: str = Field(..., description="Creator of the template")
tenant_id: str = Field(..., description="Tenant domain identifier")
is_public: bool = Field(default=False, description="Whether template is publicly available")
usage_count: int = Field(default=0, description="Number of times used")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "game_templates"
def increment_usage(self) -> None:
"""Increment usage count"""
self.usage_count += 1
self.update_timestamp()
# Create/Update/Response models
class GameSessionCreate(BaseCreateModel):
"""Model for creating new game sessions"""
user_id: str
tenant_id: str
game_type: GameType
game_name: str = Field(..., min_length=1, max_length=100)
difficulty_level: DifficultyLevel = Field(default=DifficultyLevel.INTERMEDIATE)
ai_opponent_config: Dict[str, Any] = Field(default_factory=dict)
game_rules: Dict[str, Any] = Field(default_factory=dict)
class GameSessionUpdate(BaseUpdateModel):
"""Model for updating game sessions"""
current_state: Optional[Dict[str, Any]] = None
game_status: Optional[GameStatus] = None
time_spent_seconds: Optional[int] = Field(None, ge=0)
current_rating: Optional[int] = Field(None, ge=0, le=3000)
winner: Optional[str] = None
final_score: Optional[Dict[str, Any]] = None
class GameSessionResponse(BaseResponseModel):
"""Model for game session API responses"""
id: str
user_id: str
tenant_id: str
game_type: GameType
game_name: str
difficulty_level: DifficultyLevel
current_state: Dict[str, Any]
move_history: List[Dict[str, Any]]
game_status: GameStatus
moves_count: int
hints_used: int
time_spent_seconds: int
current_rating: int
winner: Optional[str]
final_score: Optional[Dict[str, Any]]
learning_insights: List[str]
created_at: datetime
updated_at: datetime

View File

@@ -0,0 +1,123 @@
"""
Message Model for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for message entities using the PostgreSQL + PGVector backend.
Stores individual messages within conversations with full context tracking.
Perfect tenant isolation - each tenant has separate message data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
class MessageRole(str, Enum):
"""Message role enumeration"""
SYSTEM = "system"
USER = "user"
AGENT = "agent"
TOOL = "tool"
class Message(BaseServiceModel):
"""
Message model for GT 2.0 service-based architecture.
Represents a single message within a conversation including content,
role, metadata, and usage statistics.
"""
# Core message properties
conversation_id: str = Field(..., description="ID of the parent conversation")
role: MessageRole = Field(..., description="Message role (system, user, agent, tool)")
content: str = Field(..., description="Message content")
# Optional metadata
model_used: Optional[str] = Field(None, description="AI model used for generation")
tool_calls: Optional[List[Dict[str, Any]]] = Field(default_factory=list, description="Tool calls made")
tool_call_id: Optional[str] = Field(None, description="Tool call ID if this is a tool response")
# Usage statistics
tokens_used: int = Field(default=0, description="Tokens consumed by this message")
cost_cents: int = Field(default=0, description="Cost in cents for this message")
# Processing metadata
processing_time_ms: Optional[float] = Field(None, description="Time taken to process this message")
temperature: Optional[float] = Field(None, description="Temperature used for generation")
max_tokens: Optional[int] = Field(None, description="Max tokens setting used")
# Status
is_edited: bool = Field(default=False, description="Whether message was edited")
is_deleted: bool = Field(default=False, description="Whether message was deleted")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "messages"
def mark_edited(self) -> None:
"""Mark message as edited"""
self.is_edited = True
self.update_timestamp()
def mark_deleted(self) -> None:
"""Mark message as deleted"""
self.is_deleted = True
self.update_timestamp()
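# Illustrative usage sketch (not part of the original model file): a soft edit
# that keeps the row and flags it as edited; persistence is assumed to happen elsewhere.
def _example_edit_message(message: Message, new_content: str) -> Message:
    message.content = new_content
    message.mark_edited()
    return message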
class MessageCreate(BaseCreateModel):
"""Model for creating new messages"""
conversation_id: str
role: MessageRole
content: str
model_used: Optional[str] = None
tool_calls: Optional[List[Dict[str, Any]]] = Field(default_factory=list)
tool_call_id: Optional[str] = None
tokens_used: int = Field(default=0)
cost_cents: int = Field(default=0)
processing_time_ms: Optional[float] = None
temperature: Optional[float] = None
max_tokens: Optional[int] = None
model_config = ConfigDict(protected_namespaces=())
class MessageUpdate(BaseUpdateModel):
"""Model for updating messages"""
content: Optional[str] = None
is_edited: Optional[bool] = None
is_deleted: Optional[bool] = None
class MessageResponse(BaseResponseModel):
"""Model for message API responses"""
id: str
conversation_id: str
role: MessageRole
content: str
model_used: Optional[str]
tool_calls: List[Dict[str, Any]]
tool_call_id: Optional[str]
tokens_used: int
cost_cents: int
processing_time_ms: Optional[float]
temperature: Optional[float]
max_tokens: Optional[int]
is_edited: bool
is_deleted: bool
created_at: datetime
updated_at: datetime
model_config = ConfigDict(protected_namespaces=())

View File

@@ -0,0 +1,309 @@
"""
Team and Organization Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for team entities using the PostgreSQL + PGVector backend.
Implements team-based collaboration with file-based isolation.
Follows GT 2.0's principle of "Elegant Simplicity Through Intelligent Architecture"
- File-based team configurations with PostgreSQL reference tracking
- Perfect tenant isolation - each tenant has separate team data
- Zero complexity addition through simple file structures
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
import uuid
import os
import json
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
def generate_uuid():
"""Generate a unique identifier"""
return str(uuid.uuid4())
class TeamType(str, Enum):
"""Team type enumeration"""
DEPARTMENT = "department"
PROJECT = "project"
CROSS_FUNCTIONAL = "cross_functional"
class RoleType(str, Enum):
"""Role type enumeration"""
OWNER = "owner"
ADMIN = "admin"
MEMBER = "member"
VIEWER = "viewer"
class Team(BaseServiceModel):
"""
Team model for GT 2.0 service-based architecture.
GT 2.0 Design: Teams are lightweight PostgreSQL references to file-based configurations.
Team data is stored in encrypted files, not complex database relationships.
"""
# Team identifier
team_uuid: str = Field(default_factory=generate_uuid, description="Unique team identifier")
# Team details
name: str = Field(..., min_length=1, max_length=200, description="Team name")
description: Optional[str] = Field(None, max_length=1000, description="Team description")
team_type: TeamType = Field(default=TeamType.PROJECT, description="Team type")
# File-based configuration reference
config_file_path: str = Field(..., description="Path to team config.json")
members_file_path: str = Field(..., description="Path to members.json")
# Owner and access
created_by: str = Field(..., description="User who created this team")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Team settings
is_active: bool = Field(default=True, description="Whether team is active")
is_public: bool = Field(default=False, description="Whether team is publicly visible")
max_members: int = Field(default=50, ge=1, le=1000, description="Maximum team members")
# Statistics
member_count: int = Field(default=0, description="Current member count")
resource_count: int = Field(default=0, description="Number of shared resources")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "teams"
def get_config_path(self) -> str:
"""Get the full path to team configuration file"""
return self.config_file_path
def get_members_path(self) -> str:
"""Get the full path to team members file"""
return self.members_file_path
def activate(self) -> None:
"""Activate the team"""
self.is_active = True
self.update_timestamp()
def deactivate(self) -> None:
"""Deactivate the team"""
self.is_active = False
self.update_timestamp()
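# Illustrative usage sketch (not part of the original model file): loading the
# file-based member list referenced by members_file_path. The {"members": [...]}
# layout is an assumption; the real schema is owned by the team service.
def _example_load_members(team: Team) -> List[Dict[str, Any]]:
    with open(team.get_members_path(), "r", encoding="utf-8") as handle:
        payload = json.load(handle)
    return payload.get("members", [])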
class TeamRole(BaseServiceModel):
"""
Team role model for user permissions within teams.
Manages role-based access control for team resources.
"""
# Core role properties
team_id: str = Field(..., description="Team ID")
user_id: str = Field(..., description="User ID")
role_type: RoleType = Field(..., description="Role type")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Role configuration
permissions: Dict[str, bool] = Field(default_factory=dict, description="Role permissions")
custom_permissions: Dict[str, Any] = Field(default_factory=dict, description="Custom permissions")
# Role details
assigned_by: str = Field(..., description="User who assigned this role")
role_description: Optional[str] = Field(None, max_length=500, description="Role description")
# Status
is_active: bool = Field(default=True, description="Whether role is active")
expires_at: Optional[datetime] = Field(None, description="Role expiration time")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "team_roles"
def is_expired(self) -> bool:
"""Check if role is expired"""
if self.expires_at is None:
return False
return datetime.utcnow() > self.expires_at
def has_permission(self, permission: str) -> bool:
"""Check if role has specific permission"""
return self.permissions.get(permission, False)
def grant_permission(self, permission: str) -> None:
"""Grant a permission to this role"""
self.permissions[permission] = True
self.update_timestamp()
def revoke_permission(self, permission: str) -> None:
"""Revoke a permission from this role"""
self.permissions[permission] = False
self.update_timestamp()
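# Illustrative usage sketch (not part of the original model file): gating an
# action on an active, unexpired role; "resources.share" is a hypothetical
# permission key.
def _example_can_share(role: TeamRole) -> bool:
    if not role.is_active or role.is_expired():
        return False
    return role.has_permission("resources.share")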
class OrganizationSettings(BaseServiceModel):
"""
Organization settings model for tenant-wide configuration.
Manages organization-level settings and policies.
"""
# Organization details
tenant_id: str = Field(..., description="Tenant domain identifier")
organization_name: str = Field(..., min_length=1, max_length=200, description="Organization name")
organization_domain: str = Field(..., description="Organization domain")
# Organization settings
settings: Dict[str, Any] = Field(default_factory=dict, description="Organization settings")
branding: Dict[str, Any] = Field(default_factory=dict, description="Branding configuration")
# Team policies
default_team_settings: Dict[str, Any] = Field(default_factory=dict, description="Default team settings")
team_creation_policy: str = Field(default="admin_only", description="Who can create teams")
max_teams_per_user: int = Field(default=10, ge=1, le=100, description="Max teams per user")
# Security policies
security_settings: Dict[str, Any] = Field(default_factory=dict, description="Security settings")
data_retention_days: int = Field(default=365, ge=30, le=2555, description="Data retention period")
# Feature flags
features_enabled: Dict[str, bool] = Field(default_factory=dict, description="Enabled features")
# Contact and billing
admin_email: Optional[str] = Field(None, description="Primary admin email")
billing_contact: Optional[str] = Field(None, description="Billing contact email")
# Status
is_active: bool = Field(default=True, description="Whether organization is active")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "organization_settings"
def is_feature_enabled(self, feature: str) -> bool:
"""Check if a feature is enabled"""
return self.features_enabled.get(feature, False)
def enable_feature(self, feature: str) -> None:
"""Enable a feature"""
self.features_enabled[feature] = True
self.update_timestamp()
def disable_feature(self, feature: str) -> None:
"""Disable a feature"""
self.features_enabled[feature] = False
self.update_timestamp()
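# Illustrative usage sketch (not part of the original model file): enabling a
# feature flag only when it is not already on; "workflows" is a hypothetical flag name.
def _example_enable_workflows(settings: OrganizationSettings) -> bool:
    if not settings.is_feature_enabled("workflows"):
        settings.enable_feature("workflows")
    return settings.is_feature_enabled("workflows")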
# Create/Update/Response models
class TeamCreate(BaseCreateModel):
"""Model for creating new teams"""
name: str = Field(..., min_length=1, max_length=200)
description: Optional[str] = Field(None, max_length=1000)
team_type: TeamType = Field(default=TeamType.PROJECT)
created_by: str
tenant_id: str
is_public: bool = Field(default=False)
max_members: int = Field(default=50, ge=1, le=1000)
class TeamUpdate(BaseUpdateModel):
"""Model for updating teams"""
name: Optional[str] = Field(None, min_length=1, max_length=200)
description: Optional[str] = Field(None, max_length=1000)
team_type: Optional[TeamType] = None
is_active: Optional[bool] = None
is_public: Optional[bool] = None
max_members: Optional[int] = Field(None, ge=1, le=1000)
class TeamResponse(BaseResponseModel):
"""Model for team API responses"""
id: str
team_uuid: str
name: str
description: Optional[str]
team_type: TeamType
config_file_path: str
members_file_path: str
created_by: str
tenant_id: str
is_active: bool
is_public: bool
max_members: int
member_count: int
resource_count: int
created_at: datetime
updated_at: datetime
class TeamRoleCreate(BaseCreateModel):
"""Model for creating team roles"""
team_id: str
user_id: str
role_type: RoleType
tenant_id: str
assigned_by: str
permissions: Dict[str, bool] = Field(default_factory=dict)
role_description: Optional[str] = Field(None, max_length=500)
expires_at: Optional[datetime] = None
class TeamRoleUpdate(BaseUpdateModel):
"""Model for updating team roles"""
role_type: Optional[RoleType] = None
permissions: Optional[Dict[str, bool]] = None
custom_permissions: Optional[Dict[str, Any]] = None
role_description: Optional[str] = Field(None, max_length=500)
is_active: Optional[bool] = None
expires_at: Optional[datetime] = None
class TeamRoleResponse(BaseResponseModel):
"""Model for team role API responses"""
id: str
team_id: str
user_id: str
role_type: RoleType
tenant_id: str
permissions: Dict[str, bool]
custom_permissions: Dict[str, Any]
assigned_by: str
role_description: Optional[str]
is_active: bool
expires_at: Optional[datetime]
created_at: datetime
updated_at: datetime

View File

@@ -0,0 +1,146 @@
"""
User Session Model for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for user session entities using the PostgreSQL + PGVector backend.
Stores user session data and authentication state.
Perfect tenant isolation - each tenant has separate session data.
"""
from datetime import datetime, timedelta
from typing import List, Optional, Dict, Any
from enum import Enum
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
class SessionStatus(str, Enum):
"""Session status enumeration"""
ACTIVE = "active"
EXPIRED = "expired"
REVOKED = "revoked"
class UserSession(BaseServiceModel):
"""
User session model for GT 2.0 service-based architecture.
Represents a user authentication session with state management,
preferences, and activity tracking.
"""
# Core session properties
session_id: str = Field(..., description="Unique session identifier")
user_id: str = Field(..., description="User ID (email or unique identifier)")
user_email: Optional[str] = Field(None, max_length=255, description="User email address")
user_name: Optional[str] = Field(None, max_length=100, description="User display name")
# Authentication details
auth_provider: str = Field(default="jwt", max_length=50, description="Authentication provider")
auth_method: str = Field(default="bearer", max_length=50, description="Authentication method")
# Session lifecycle
status: SessionStatus = Field(default=SessionStatus.ACTIVE, description="Session status")
expires_at: datetime = Field(..., description="Session expiration time")
last_activity_at: datetime = Field(default_factory=datetime.utcnow, description="Last activity timestamp")
# User preferences and state
preferences: Dict[str, Any] = Field(default_factory=dict, description="User preferences")
session_data: Dict[str, Any] = Field(default_factory=dict, description="Session-specific data")
# Activity tracking
login_ip: Optional[str] = Field(None, max_length=45, description="Login IP address")
user_agent: Optional[str] = Field(None, max_length=500, description="User agent string")
activity_count: int = Field(default=1, description="Number of activities in this session")
# Security
csrf_token: Optional[str] = Field(None, max_length=64, description="CSRF protection token")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "user_sessions"
def is_expired(self) -> bool:
"""Check if session is expired"""
return datetime.utcnow() > self.expires_at or self.status != SessionStatus.ACTIVE
def extend_session(self, minutes: int = 30) -> None:
"""Extend session expiration time"""
if self.status == SessionStatus.ACTIVE:
self.expires_at = datetime.utcnow() + timedelta(minutes=minutes)
self.update_timestamp()
def update_activity(self) -> None:
"""Update last activity timestamp"""
self.last_activity_at = datetime.utcnow()
self.activity_count += 1
self.update_timestamp()
def revoke(self) -> None:
"""Revoke the session"""
self.status = SessionStatus.REVOKED
self.update_timestamp()
def expire(self) -> None:
"""Mark session as expired"""
self.status = SessionStatus.EXPIRED
self.update_timestamp()
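# Illustrative usage sketch (not part of the original model file): the
# touch-and-extend pattern a request handler might apply on each authenticated call.
def _example_touch_session(session: UserSession) -> bool:
    if session.is_expired():
        session.expire()
        return False
    session.update_activity()
    session.extend_session(minutes=30)
    return True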
class UserSessionCreate(BaseCreateModel):
"""Model for creating new user sessions"""
session_id: str
user_id: str
user_email: Optional[str] = Field(None, max_length=255)
user_name: Optional[str] = Field(None, max_length=100)
auth_provider: str = Field(default="jwt", max_length=50)
auth_method: str = Field(default="bearer", max_length=50)
expires_at: datetime
preferences: Dict[str, Any] = Field(default_factory=dict)
session_data: Dict[str, Any] = Field(default_factory=dict)
login_ip: Optional[str] = Field(None, max_length=45)
user_agent: Optional[str] = Field(None, max_length=500)
csrf_token: Optional[str] = Field(None, max_length=64)
class UserSessionUpdate(BaseUpdateModel):
"""Model for updating user sessions"""
user_email: Optional[str] = Field(None, max_length=255)
user_name: Optional[str] = Field(None, max_length=100)
status: Optional[SessionStatus] = None
expires_at: Optional[datetime] = None
preferences: Optional[Dict[str, Any]] = None
session_data: Optional[Dict[str, Any]] = None
activity_count: Optional[int] = Field(None, ge=0)
csrf_token: Optional[str] = Field(None, max_length=64)
class UserSessionResponse(BaseResponseModel):
"""Model for user session API responses"""
id: str
session_id: str
user_id: str
user_email: Optional[str]
user_name: Optional[str]
auth_provider: str
auth_method: str
status: SessionStatus
expires_at: datetime
last_activity_at: datetime
preferences: Dict[str, Any]
session_data: Dict[str, Any]
login_ip: Optional[str]
user_agent: Optional[str]
activity_count: int
csrf_token: Optional[str]
created_at: datetime
updated_at: datetime

View File

@@ -0,0 +1,603 @@
"""
Workflow Models for GT 2.0 Tenant Backend - Service-Based Architecture
Pydantic models for workflow entities using the PostgreSQL + PGVector backend.
Stores workflow definitions, executions, triggers, and chat sessions.
Perfect tenant isolation - each tenant has separate workflow data.
"""
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum
from pydantic import Field, ConfigDict
from app.models.base import BaseServiceModel, BaseCreateModel, BaseUpdateModel, BaseResponseModel
class WorkflowStatus(str, Enum):
"""Workflow status enumeration"""
DRAFT = "draft"
ACTIVE = "active"
PAUSED = "paused"
ARCHIVED = "archived"
class TriggerType(str, Enum):
"""Trigger type enumeration"""
MANUAL = "manual"
WEBHOOK = "webhook"
CRON = "cron"
EVENT = "event"
API = "api"
class InteractionMode(str, Enum):
"""Interaction mode enumeration"""
CHAT = "chat"
BUTTON = "button"
FORM = "form"
DASHBOARD = "dashboard"
API = "api"
class ExecutionStatus(str, Enum):
"""Execution status enumeration"""
PENDING = "pending"
RUNNING = "running"
COMPLETED = "completed"
FAILED = "failed"
CANCELLED = "cancelled"
class Workflow(BaseServiceModel):
"""
Workflow model for GT 2.0 service-based architecture.
Represents an agentic workflow with nodes, triggers, and execution logic.
Supports chat interfaces, form inputs, API endpoints, and dashboard views.
"""
# Basic workflow properties
tenant_id: str = Field(..., description="Tenant domain identifier")
user_id: str = Field(..., description="User who owns this workflow")
name: str = Field(..., min_length=1, max_length=200, description="Workflow name")
description: Optional[str] = Field(None, max_length=1000, description="Workflow description")
# Workflow definition as JSON structure
definition: Dict[str, Any] = Field(..., description="Nodes, edges, and configuration")
# Triggers and interaction modes
triggers: List[Dict[str, Any]] = Field(default_factory=list, description="Webhook, cron, event triggers")
interaction_modes: List[InteractionMode] = Field(default_factory=list, description="UI interaction modes")
# Resource references - ensuring user owns all resources
agent_ids: List[str] = Field(default_factory=list, description="Referenced agents")
api_key_ids: List[str] = Field(default_factory=list, description="Referenced API keys")
webhook_ids: List[str] = Field(default_factory=list, description="Referenced webhooks")
dataset_ids: List[str] = Field(default_factory=list, description="Referenced datasets")
integration_ids: List[str] = Field(default_factory=list, description="Referenced integrations")
# Workflow configuration
config: Dict[str, Any] = Field(default_factory=dict, description="Runtime configuration")
timeout_seconds: int = Field(default=300, ge=1, le=3600, description="Execution timeout (5 min default)")
max_retries: int = Field(default=3, ge=0, le=10, description="Maximum retry attempts")
# Status and metadata
status: WorkflowStatus = Field(default=WorkflowStatus.DRAFT, description="Workflow status")
execution_count: int = Field(default=0, description="Total execution count")
last_executed: Optional[datetime] = Field(None, description="Last execution timestamp")
# Analytics
total_tokens_used: int = Field(default=0, description="Total tokens consumed")
total_cost_cents: int = Field(default=0, description="Total cost in cents")
average_execution_time_ms: Optional[int] = Field(None, description="Average execution time")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "workflows"
def activate(self) -> None:
"""Activate the workflow"""
self.status = WorkflowStatus.ACTIVE
self.update_timestamp()
def pause(self) -> None:
"""Pause the workflow"""
self.status = WorkflowStatus.PAUSED
self.update_timestamp()
def archive(self) -> None:
"""Archive the workflow"""
self.status = WorkflowStatus.ARCHIVED
self.update_timestamp()
def update_execution_stats(self, tokens_used: int, cost_cents: int, execution_time_ms: int) -> None:
"""Update execution statistics"""
self.execution_count += 1
self.total_tokens_used += tokens_used
self.total_cost_cents += cost_cents
self.last_executed = datetime.utcnow()
# Update the cumulative average execution time
if self.average_execution_time_ms is None:
self.average_execution_time_ms = execution_time_ms
else:
# Cumulative running average over all recorded executions
self.average_execution_time_ms = int(
(self.average_execution_time_ms * (self.execution_count - 1) + execution_time_ms) / self.execution_count
)
self.update_timestamp()
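# Illustrative usage sketch (not part of the original model file): three recorded
# runs of 100, 200, and 600 ms leave a cumulative average of 300 ms on a fresh
# workflow; the token and cost figures are hypothetical.
def _example_execution_stats(workflow: Workflow) -> Optional[int]:
    for duration_ms in (100, 200, 600):
        workflow.update_execution_stats(tokens_used=500, cost_cents=2, execution_time_ms=duration_ms)
    return workflow.average_execution_time_ms  # 300 for a freshly created workflow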
class WorkflowExecution(BaseServiceModel):
"""
Workflow execution model for tracking individual workflow runs.
Stores execution state, progress, timing, and resource usage.
"""
# Core execution properties
workflow_id: str = Field(..., description="Parent workflow ID")
user_id: str = Field(..., description="User who triggered execution")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Execution state
status: ExecutionStatus = Field(default=ExecutionStatus.PENDING, description="Execution status")
current_node_id: Optional[str] = Field(None, description="Currently executing node")
progress_percentage: int = Field(default=0, ge=0, le=100, description="Execution progress")
# Data and context
input_data: Dict[str, Any] = Field(default_factory=dict, description="Execution input data")
output_data: Dict[str, Any] = Field(default_factory=dict, description="Execution output data")
execution_trace: List[Dict[str, Any]] = Field(default_factory=list, description="Step-by-step log")
error_details: Optional[str] = Field(None, description="Error details if failed")
# Timing and performance
started_at: datetime = Field(default_factory=datetime.utcnow, description="Execution start time")
completed_at: Optional[datetime] = Field(None, description="Execution completion time")
duration_ms: Optional[int] = Field(None, description="Execution duration in milliseconds")
# Resource usage
tokens_used: int = Field(default=0, description="Tokens consumed")
cost_cents: int = Field(default=0, description="Cost in cents")
tool_calls_count: int = Field(default=0, description="Number of tool calls made")
# Trigger information
trigger_type: Optional[TriggerType] = Field(None, description="How execution was triggered")
trigger_data: Dict[str, Any] = Field(default_factory=dict, description="Trigger-specific data")
trigger_source: Optional[str] = Field(None, description="Source identifier for trigger")
# Session information for chat mode
session_id: Optional[str] = Field(None, description="Chat session ID if applicable")
interaction_mode: Optional[InteractionMode] = Field(None, description="User interaction mode")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "workflow_executions"
def mark_running(self, current_node_id: str) -> None:
"""Mark execution as running"""
self.status = ExecutionStatus.RUNNING
self.current_node_id = current_node_id
self.update_timestamp()
def mark_completed(self, output_data: Dict[str, Any]) -> None:
"""Mark execution as completed"""
self.status = ExecutionStatus.COMPLETED
self.completed_at = datetime.utcnow()
self.output_data = output_data
self.progress_percentage = 100
if self.started_at:
self.duration_ms = int((self.completed_at - self.started_at).total_seconds() * 1000)
self.update_timestamp()
def mark_failed(self, error_details: str) -> None:
"""Mark execution as failed"""
self.status = ExecutionStatus.FAILED
self.completed_at = datetime.utcnow()
self.error_details = error_details
if self.started_at:
self.duration_ms = int((self.completed_at - self.started_at).total_seconds() * 1000)
self.update_timestamp()
def add_trace_entry(self, node_id: str, action: str, data: Dict[str, Any]) -> None:
"""Add entry to execution trace"""
trace_entry = {
"timestamp": datetime.utcnow().isoformat(),
"node_id": node_id,
"action": action,
"data": data
}
self.execution_trace.append(trace_entry)
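# Illustrative usage sketch (not part of the original model file): the minimal
# state transitions an executor would apply; the node id and payloads are hypothetical.
def _example_run_execution(execution: WorkflowExecution) -> None:
    execution.mark_running(current_node_id="agent_1")
    execution.add_trace_entry("agent_1", "invoke_agent", {"prompt_tokens": 120})
    execution.mark_completed(output_data={"reply": "done"})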
class WorkflowTrigger(BaseServiceModel):
"""
Workflow trigger model for automated workflow execution.
Supports webhook, cron, event, and API triggers.
"""
# Core trigger properties
workflow_id: str = Field(..., description="Parent workflow ID")
user_id: str = Field(..., description="User who owns this trigger")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Trigger configuration
trigger_type: TriggerType = Field(..., description="Type of trigger")
trigger_config: Dict[str, Any] = Field(..., description="Trigger-specific configuration")
# Webhook-specific fields
webhook_url: Optional[str] = Field(None, description="Generated webhook URL")
webhook_secret: Optional[str] = Field(None, max_length=128, description="Webhook signature secret")
# Cron-specific fields
cron_schedule: Optional[str] = Field(None, max_length=100, description="Cron expression")
timezone: str = Field(default="UTC", max_length=50, description="Timezone for cron schedule")
# Event-specific fields
event_source: Optional[str] = Field(None, max_length=100, description="Event source system")
event_filters: Dict[str, Any] = Field(default_factory=dict, description="Event filtering criteria")
# Status and metadata
is_active: bool = Field(default=True, description="Whether trigger is active")
trigger_count: int = Field(default=0, description="Number of times triggered")
last_triggered: Optional[datetime] = Field(None, description="Last trigger timestamp")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "workflow_triggers"
def activate(self) -> None:
"""Activate the trigger"""
self.is_active = True
self.update_timestamp()
def deactivate(self) -> None:
"""Deactivate the trigger"""
self.is_active = False
self.update_timestamp()
def record_trigger(self) -> None:
"""Record a trigger event"""
self.trigger_count += 1
self.last_triggered = datetime.utcnow()
self.update_timestamp()
class WorkflowSession(BaseServiceModel):
"""
Workflow session model for chat-based workflow interactions.
Manages conversational state for workflow chat interfaces.
"""
# Core session properties
workflow_id: str = Field(..., description="Parent workflow ID")
user_id: str = Field(..., description="User participating in session")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Session configuration
session_type: str = Field(default="chat", max_length=50, description="Session type")
session_state: Dict[str, Any] = Field(default_factory=dict, description="Current conversation state")
# Chat history
message_count: int = Field(default=0, description="Number of messages in session")
last_message_at: Optional[datetime] = Field(None, description="Last message timestamp")
# Status
is_active: bool = Field(default=True, description="Whether session is active")
expires_at: Optional[datetime] = Field(None, description="Session expiration time")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "workflow_sessions"
def add_message(self) -> None:
"""Record a new message in the session"""
self.message_count += 1
self.last_message_at = datetime.utcnow()
self.update_timestamp()
def close_session(self) -> None:
"""Close the session"""
self.is_active = False
self.update_timestamp()
class WorkflowMessage(BaseServiceModel):
"""
Workflow message model for chat session messages.
Stores individual messages within workflow chat sessions.
"""
# Core message properties
session_id: str = Field(..., description="Parent session ID")
workflow_id: str = Field(..., description="Parent workflow ID")
execution_id: Optional[str] = Field(None, description="Associated execution ID")
user_id: str = Field(..., description="User who sent/received message")
tenant_id: str = Field(..., description="Tenant domain identifier")
# Message content
role: str = Field(..., max_length=20, description="Message role (user, agent, system)")
content: str = Field(..., description="Message content")
message_type: str = Field(default="text", max_length=50, description="Message type")
# Agent information for agent messages
agent_id: Optional[str] = Field(None, description="Agent that generated this message")
confidence_score: Optional[int] = Field(None, ge=0, le=100, description="Agent confidence (0-100)")
# Additional data
message_metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional message data")
tokens_used: int = Field(default=0, description="Tokens consumed for this message")
# Model configuration
model_config = ConfigDict(
protected_namespaces=(),
json_encoders={
datetime: lambda v: v.isoformat() if v else None
}
)
@classmethod
def get_table_name(cls) -> str:
"""Get the database table name"""
return "workflow_messages"
# Create/Update/Response models for each entity
class WorkflowCreate(BaseCreateModel):
"""Model for creating new workflows"""
tenant_id: str
user_id: str
name: str = Field(..., min_length=1, max_length=200)
description: Optional[str] = Field(None, max_length=1000)
definition: Dict[str, Any]
triggers: List[Dict[str, Any]] = Field(default_factory=list)
interaction_modes: List[InteractionMode] = Field(default_factory=list)
agent_ids: List[str] = Field(default_factory=list)
api_key_ids: List[str] = Field(default_factory=list)
webhook_ids: List[str] = Field(default_factory=list)
dataset_ids: List[str] = Field(default_factory=list)
integration_ids: List[str] = Field(default_factory=list)
config: Dict[str, Any] = Field(default_factory=dict)
timeout_seconds: int = Field(default=300, ge=1, le=3600)
max_retries: int = Field(default=3, ge=0, le=10)
class WorkflowUpdate(BaseUpdateModel):
"""Model for updating workflows"""
name: Optional[str] = Field(None, min_length=1, max_length=200)
description: Optional[str] = Field(None, max_length=1000)
definition: Optional[Dict[str, Any]] = None
triggers: Optional[List[Dict[str, Any]]] = None
interaction_modes: Optional[List[InteractionMode]] = None
config: Optional[Dict[str, Any]] = None
timeout_seconds: Optional[int] = Field(None, ge=1, le=3600)
max_retries: Optional[int] = Field(None, ge=0, le=10)
status: Optional[WorkflowStatus] = None
class WorkflowResponse(BaseResponseModel):
"""Model for workflow API responses"""
id: str
tenant_id: str
user_id: str
name: str
description: Optional[str]
definition: Dict[str, Any]
triggers: List[Dict[str, Any]]
interaction_modes: List[InteractionMode]
agent_ids: List[str]
api_key_ids: List[str]
webhook_ids: List[str]
dataset_ids: List[str]
integration_ids: List[str]
config: Dict[str, Any]
timeout_seconds: int
max_retries: int
status: WorkflowStatus
execution_count: int
last_executed: Optional[datetime]
total_tokens_used: int
total_cost_cents: int
average_execution_time_ms: Optional[int]
created_at: datetime
updated_at: datetime
class WorkflowExecutionCreate(BaseCreateModel):
"""Model for creating new workflow executions"""
workflow_id: str
user_id: str
tenant_id: str
input_data: Dict[str, Any] = Field(default_factory=dict)
trigger_type: Optional[TriggerType] = None
trigger_data: Dict[str, Any] = Field(default_factory=dict)
trigger_source: Optional[str] = None
session_id: Optional[str] = None
interaction_mode: Optional[InteractionMode] = None
class WorkflowExecutionUpdate(BaseUpdateModel):
"""Model for updating workflow executions"""
status: Optional[ExecutionStatus] = None
current_node_id: Optional[str] = None
progress_percentage: Optional[int] = Field(None, ge=0, le=100)
output_data: Optional[Dict[str, Any]] = None
error_details: Optional[str] = None
completed_at: Optional[datetime] = None
tokens_used: Optional[int] = Field(None, ge=0)
cost_cents: Optional[int] = Field(None, ge=0)
tool_calls_count: Optional[int] = Field(None, ge=0)
class WorkflowExecutionResponse(BaseResponseModel):
"""Model for workflow execution API responses"""
id: str
workflow_id: str
user_id: str
tenant_id: str
status: ExecutionStatus
current_node_id: Optional[str]
progress_percentage: int
input_data: Dict[str, Any]
output_data: Dict[str, Any]
execution_trace: List[Dict[str, Any]]
error_details: Optional[str]
started_at: datetime
completed_at: Optional[datetime]
duration_ms: Optional[int]
tokens_used: int
cost_cents: int
tool_calls_count: int
trigger_type: Optional[TriggerType]
trigger_data: Dict[str, Any]
trigger_source: Optional[str]
session_id: Optional[str]
interaction_mode: Optional[InteractionMode]
created_at: datetime
updated_at: datetime
# Node type definitions for workflow canvas
WORKFLOW_NODE_TYPES = {
"agent": {
"name": "Agent",
"description": "Execute an AI Agent with personality",
"inputs": ["text", "context"],
"outputs": ["response", "confidence"],
"config_schema": {
"agent_id": {"type": "string", "required": True},
"confidence_threshold": {"type": "integer", "default": 70},
"max_tokens": {"type": "integer", "default": 2000},
"temperature": {"type": "number", "default": 0.7}
}
},
"trigger": {
"name": "Trigger",
"description": "Start workflow execution",
"inputs": [],
"outputs": ["trigger_data"],
"subtypes": ["webhook", "cron", "event", "manual", "api"],
"config_schema": {
"trigger_type": {"type": "string", "required": True}
}
},
"integration": {
"name": "Integration",
"description": "Connect to external services",
"inputs": ["data"],
"outputs": ["response"],
"subtypes": ["api", "database", "storage", "webhook"],
"config_schema": {
"integration_type": {"type": "string", "required": True},
"api_key_id": {"type": "string"},
"endpoint_url": {"type": "string"},
"method": {"type": "string", "default": "GET"}
}
},
"logic": {
"name": "Logic",
"description": "Control flow and data transformation",
"inputs": ["data"],
"outputs": ["result"],
"subtypes": ["decision", "loop", "transform", "aggregate", "filter"],
"config_schema": {
"logic_type": {"type": "string", "required": True}
}
},
"output": {
"name": "Output",
"description": "Send results to external systems",
"inputs": ["data"],
"outputs": [],
"subtypes": ["webhook", "api", "email", "storage", "notification"],
"config_schema": {
"output_type": {"type": "string", "required": True}
}
}
}
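# Illustrative helper sketch (not part of the original file): a minimal
# required-field check against the config_schema entries above; full validation
# is assumed to live in the workflow service.
def _example_missing_required_fields(node_type: str, config: Dict[str, Any]) -> List[str]:
    schema = WORKFLOW_NODE_TYPES.get(node_type, {}).get("config_schema", {})
    return [field for field, spec in schema.items() if spec.get("required") and field not in config]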
# Interaction mode configurations
INTERACTION_MODE_CONFIGS = {
"chat": {
"name": "Chat Interface",
"description": "Conversational interaction with workflow",
"supports_streaming": True,
"supports_history": True,
"ui_components": ["chat_input", "message_history", "agent_avatars"]
},
"button": {
"name": "Button Trigger",
"description": "Simple one-click workflow execution",
"supports_streaming": False,
"supports_history": False,
"ui_components": ["trigger_button", "progress_indicator", "result_display"]
},
"form": {
"name": "Form Input",
"description": "Structured input with validation",
"supports_streaming": False,
"supports_history": True,
"ui_components": ["dynamic_form", "validation", "submit_button"]
},
"dashboard": {
"name": "Dashboard View",
"description": "Overview of workflow status and metrics",
"supports_streaming": True,
"supports_history": True,
"ui_components": ["metrics_cards", "execution_history", "status_indicators"]
},
"api": {
"name": "API Endpoint",
"description": "Programmatic access to workflow",
"supports_streaming": True,
"supports_history": False,
"ui_components": []
}
}
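# Illustrative helper sketch (not part of the original file): resolving the UI
# components a frontend would render for a workflow's configured interaction modes.
def _example_ui_components(modes: List[InteractionMode]) -> List[str]:
    components: List[str] = []
    for mode in modes:
        components.extend(INTERACTION_MODE_CONFIGS.get(mode.value, {}).get("ui_components", []))
    return components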