GT AI OS Community v2.0.33 - Add NVIDIA NIM and Nemotron agents

- Updated python_coding_microproject.csv to use NVIDIA NIM Kimi K2
- Updated kali_linux_shell_simulator.csv to use NVIDIA NIM Kimi K2
  - Made more general-purpose (flexible targets, expanded tools)
- Added nemotron-mini-agent.csv for fast local inference via Ollama
- Added nemotron-agent.csv for advanced reasoning via Ollama
- Added wiki page: Projects for NVIDIA NIMs and Nemotron
HackWeasel
2025-12-12 17:47:14 -05:00
commit 310491a557
750 changed files with 232701 additions and 0 deletions
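For reference, a minimal sketch of calling a locally served Nemotron model through Ollama's HTTP generate endpoint. The model name, prompt, and default localhost:11434 address are illustrative assumptions (it presumes `ollama pull nemotron-mini` has been run); the actual wiring used by the agent CSVs is not shown in this diff.

import json
import urllib.request

def ask_nemotron(prompt: str, model: str = "nemotron-mini") -> str:
    # POST to Ollama's /api/generate endpoint; stream=False returns a single
    # JSON object whose "response" field holds the full completion.
    payload = json.dumps({"model": model, "prompt": prompt, "stream": False}).encode()
    req = urllib.request.Request(
        "http://localhost:11434/api/generate",
        data=payload,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())["response"]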


@@ -0,0 +1,197 @@
"""Add user-tenant assignments for multi-tenant user management
Revision ID: 005_add_user_tenant_assignments
Revises: 004_add_license_billing_tables
Create Date: 2025-09-10 12:00:00.000000
"""
import json
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '005_add_user_tenant_assignments'
down_revision: Union[str, None] = '004_add_license_billing_tables'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade to add user-tenant assignments table and update user table"""
# Create user_tenant_assignments table
op.create_table(
'user_tenant_assignments',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('tenant_id', sa.Integer(), nullable=False),
# Tenant-specific user profile
        # Use server_default (not Python-side default=) so the DDL itself
        # carries the default and raw SQL inserts can omit these columns.
        sa.Column('tenant_user_role', sa.String(20), nullable=False, server_default='tenant_user'),
sa.Column('tenant_display_name', sa.String(100), nullable=True),
sa.Column('tenant_email', sa.String(255), nullable=True),
sa.Column('tenant_department', sa.String(100), nullable=True),
sa.Column('tenant_title', sa.String(100), nullable=True),
# Tenant-specific authentication (optional)
sa.Column('tenant_password_hash', sa.String(255), nullable=True),
        sa.Column('requires_2fa', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('last_password_change', sa.DateTime(timezone=True), nullable=True),
# Tenant-specific permissions and limits
        sa.Column('tenant_capabilities', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('resource_limits', sa.JSON(), nullable=False, server_default='{}'),
# Status and activity tracking
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('is_primary_tenant', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('joined_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('last_accessed', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_login_at', sa.DateTime(timezone=True), nullable=True),
# Invitation tracking
sa.Column('invited_by', sa.Integer(), nullable=True),
sa.Column('invitation_accepted_at', sa.DateTime(timezone=True), nullable=True),
# Timestamps
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
# Primary key
sa.PrimaryKeyConstraint('id'),
# Foreign key constraints
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['invited_by'], ['users.id']),
# Indexes (created separately with CONCURRENTLY for zero downtime)
# sa.Index('ix_user_tenant_assignments_user_id', 'user_id'),
# sa.Index('ix_user_tenant_assignments_tenant_id', 'tenant_id'),
# sa.Index('ix_user_tenant_assignments_tenant_email', 'tenant_email'),
# Unique constraint
sa.UniqueConstraint('user_id', 'tenant_id', name='unique_user_tenant_assignment')
)
    # Add current_tenant_id to users (the old tenant_id column is dropped at the end of this migration)
op.add_column('users', sa.Column('current_tenant_id', sa.Integer(), nullable=True))
    # Create indexes with CONCURRENTLY for zero-downtime builds. CONCURRENTLY
    # cannot run inside a transaction block, so commit the migration's
    # transaction and issue these statements in autocommit mode.
    with op.get_context().autocommit_block():
        op.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_users_current_tenant_id ON users(current_tenant_id)")
        op.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_user_tenant_assignments_user_id ON user_tenant_assignments(user_id)")
        op.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_user_tenant_assignments_tenant_id ON user_tenant_assignments(tenant_id)")
        op.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_user_tenant_assignments_tenant_email ON user_tenant_assignments(tenant_email)")
# Data migration: Convert existing users.tenant_id to user_tenant_assignments
# This is a raw SQL operation to handle the data migration
connection = op.get_bind()
# Step 1: Get all existing users with tenant_id
result = connection.execute(sa.text("""
SELECT id, tenant_id, user_type, email, full_name, capabilities
FROM users
WHERE tenant_id IS NOT NULL
"""))
users_to_migrate = result.fetchall()
# Step 2: Create user_tenant_assignments for each user
for user in users_to_migrate:
user_id, tenant_id, user_type, email, full_name, capabilities = user
# Set default resource limits based on user type
resource_limits = {
"max_conversations": 1000 if user_type == "super_admin" else 100,
"max_datasets": 100 if user_type == "super_admin" else 10,
"max_agents": 200 if user_type == "super_admin" else 20,
"daily_api_calls": 10000 if user_type == "super_admin" else 1000
}
# Convert old capabilities to tenant_capabilities
tenant_capabilities = capabilities if capabilities else []
# Insert user_tenant_assignment
connection.execute(sa.text("""
INSERT INTO user_tenant_assignments (
user_id, tenant_id, tenant_user_role, tenant_display_name,
tenant_email, tenant_capabilities, resource_limits,
is_active, is_primary_tenant, joined_at, created_at, updated_at
) VALUES (
:user_id, :tenant_id, :user_type, :full_name,
:email, :tenant_capabilities, :resource_limits,
true, true, now(), now(), now()
)
"""), {
'user_id': user_id,
'tenant_id': tenant_id,
'user_type': user_type,
'full_name': full_name,
'email': email,
            # Bind JSON as serialized text; PostgreSQL casts it to json on
            # insert. (literal_processor would produce a quoted SQL literal,
            # which is wrong for a bound parameter.)
            'tenant_capabilities': json.dumps(tenant_capabilities),
            'resource_limits': json.dumps(resource_limits)
})
# Update user's current_tenant_id to their primary tenant
connection.execute(sa.text("""
UPDATE users
SET current_tenant_id = :tenant_id
WHERE id = :user_id
"""), {'tenant_id': tenant_id, 'user_id': user_id})
    # Step 3: Drop the old tenant_id column from users. downgrade() rebuilds it
    # from each user's primary assignment, so this is reversible in practice.
    # First remove the foreign key constraint
op.drop_constraint('users_tenant_id_fkey', 'users', type_='foreignkey')
# Then drop the column
op.drop_column('users', 'tenant_id')
def downgrade() -> None:
"""Downgrade: Remove user-tenant assignments and restore single tenant_id"""
# Re-add tenant_id column to users
op.add_column('users', sa.Column('tenant_id', sa.Integer(), nullable=True))
# Re-create foreign key constraint
op.create_foreign_key('users_tenant_id_fkey', 'users', 'tenants', ['tenant_id'], ['id'], ondelete='CASCADE')
# Data migration back: Convert user_tenant_assignments to users.tenant_id
connection = op.get_bind()
# Get primary tenant assignments for each user
result = connection.execute(sa.text("""
SELECT user_id, tenant_id, tenant_capabilities
FROM user_tenant_assignments
WHERE is_primary_tenant = true AND is_active = true
"""))
assignments_to_migrate = result.fetchall()
# Update users table with their primary tenant
for assignment in assignments_to_migrate:
user_id, tenant_id, tenant_capabilities = assignment
connection.execute(sa.text("""
UPDATE users
SET tenant_id = :tenant_id,
capabilities = :capabilities
WHERE id = :user_id
"""), {
'tenant_id': tenant_id,
'user_id': user_id,
            'capabilities': json.dumps(tenant_capabilities or [])
})
# Drop current_tenant_id column and index
op.drop_index('ix_users_current_tenant_id', 'users')
op.drop_column('users', 'current_tenant_id')
# Drop user_tenant_assignments table
op.drop_table('user_tenant_assignments')
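Since a user can now belong to several tenants, request handling has to resolve which assignment applies. A minimal sketch of that lookup against the tables above, assuming SQLAlchemy Core; the connection plumbing and fallback policy are illustrative, not part of this commit.

import sqlalchemy as sa

def resolve_tenant_assignment(connection, user_id: int):
    # Prefer the tenant pointed to by users.current_tenant_id; fall back to
    # the primary assignment when no current tenant is set.
    return connection.execute(sa.text("""
        SELECT uta.tenant_id, uta.tenant_user_role, uta.tenant_capabilities
        FROM user_tenant_assignments uta
        JOIN users u ON u.id = uta.user_id
        WHERE uta.user_id = :user_id
          AND uta.is_active = true
          AND uta.deleted_at IS NULL
          AND (u.current_tenant_id IS NULL OR uta.tenant_id = u.current_tenant_id)
        ORDER BY uta.is_primary_tenant DESC
        LIMIT 1
    """), {'user_id': user_id}).first()  # None if no active assignment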


@@ -0,0 +1,38 @@
"""add tenant templates table
Revision ID: 006_add_tenant_templates
Revises: 005_add_user_tenant_assignments
Create Date: 2025-09-24
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
# revision identifiers, used by Alembic.
revision = '006_add_tenant_templates'
down_revision = '005_add_user_tenant_assignments'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'tenant_templates',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('template_data', JSONB, nullable=False),
sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        # onupdate is ORM-side only and has no effect in DDL, so it is omitted;
        # keep updated_at current in the application or with a trigger.
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tenant_templates_id'), 'tenant_templates', ['id'], unique=False)
op.create_index(op.f('ix_tenant_templates_name'), 'tenant_templates', ['name'], unique=False)
def downgrade():
op.drop_index(op.f('ix_tenant_templates_name'), table_name='tenant_templates')
op.drop_index(op.f('ix_tenant_templates_id'), table_name='tenant_templates')
op.drop_table('tenant_templates')
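tenant_templates keeps provisioning defaults as JSONB. A minimal sketch of seeding a default row; the shape of template_data is a hypothetical example, since this migration does not define it.

import json
import sqlalchemy as sa

def seed_default_template(connection):
    # Hypothetical payload: the real template_data schema is up to the app.
    template = {"features": ["chat", "datasets"], "limits": {"max_agents": 20}}
    connection.execute(sa.text("""
        INSERT INTO tenant_templates (name, description, template_data, is_default)
        VALUES (:name, :description, CAST(:data AS jsonb), true)
    """), {'name': 'default', 'description': 'Baseline tenant setup',
           'data': json.dumps(template)})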


@@ -0,0 +1,37 @@
"""add password reset rate limits table
Revision ID: 007_add_password_reset_rate_limits
Revises: 006_add_tenant_templates
Create Date: 2025-10-06
Email-based rate limiting only (no IP tracking)
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '007_add_password_reset_rate_limits'
down_revision = '006_add_tenant_templates'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'password_reset_rate_limits',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('request_count', sa.Integer(), nullable=False, server_default='1'),
sa.Column('window_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('window_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_password_reset_rate_limits_email'), 'password_reset_rate_limits', ['email'], unique=False)
op.create_index(op.f('ix_password_reset_rate_limits_window_end'), 'password_reset_rate_limits', ['window_end'], unique=False)
def downgrade():
op.drop_index(op.f('ix_password_reset_rate_limits_window_end'), table_name='password_reset_rate_limits')
op.drop_index(op.f('ix_password_reset_rate_limits_email'), table_name='password_reset_rate_limits')
op.drop_table('password_reset_rate_limits')
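A minimal sketch of the fixed-window check this table supports. The one-hour window and three-request cap are illustrative policy choices, and the read-then-write is not concurrency-safe; a production version would use an upsert.

from datetime import datetime, timedelta, timezone
import sqlalchemy as sa

def allow_password_reset(connection, email: str, max_requests: int = 3,
                         window: timedelta = timedelta(hours=1)) -> bool:
    now = datetime.now(timezone.utc)
    row = connection.execute(sa.text("""
        SELECT id, request_count FROM password_reset_rate_limits
        WHERE email = :email AND window_end > :now
        ORDER BY window_end DESC LIMIT 1
    """), {'email': email, 'now': now}).first()
    if row is None:
        # No open window: start one; request_count's server default is 1.
        connection.execute(sa.text("""
            INSERT INTO password_reset_rate_limits (email, window_start, window_end)
            VALUES (:email, :start, :end)
        """), {'email': email, 'start': now, 'end': now + window})
        return True
    if row.request_count >= max_requests:
        return False
    connection.execute(
        sa.text("UPDATE password_reset_rate_limits SET request_count = request_count + 1 WHERE id = :id"),
        {'id': row.id})
    return True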


@@ -0,0 +1,76 @@
"""add totp 2fa fields
Revision ID: 008_add_totp_2fa
Revises: 007_add_password_reset_rate_limits
Create Date: 2025-10-07
Adds TOTP Two-Factor Authentication support with optional and mandatory enforcement.
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '008_add_totp_2fa'
down_revision = '007_add_password_reset_rate_limits'
branch_labels = None
depends_on = None
def upgrade():
# Add TFA fields to users table
op.add_column('users', sa.Column('tfa_enabled', sa.Boolean(), nullable=False, server_default='false'))
op.add_column('users', sa.Column('tfa_secret', sa.Text(), nullable=True))
op.add_column('users', sa.Column('tfa_required', sa.Boolean(), nullable=False, server_default='false'))
# Add indexes for query optimization
op.create_index(op.f('ix_users_tfa_enabled'), 'users', ['tfa_enabled'], unique=False)
op.create_index(op.f('ix_users_tfa_required'), 'users', ['tfa_required'], unique=False)
# Create TFA verification rate limits table
op.create_table(
'tfa_verification_rate_limits',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('request_count', sa.Integer(), nullable=False, server_default='1'),
sa.Column('window_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('window_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tfa_verification_rate_limits_user_id'), 'tfa_verification_rate_limits', ['user_id'], unique=False)
op.create_index(op.f('ix_tfa_verification_rate_limits_window_end'), 'tfa_verification_rate_limits', ['window_end'], unique=False)
# Create used temp tokens table for replay prevention
op.create_table(
'used_temp_tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token_id', sa.String(length=255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('used_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token_id')
)
op.create_index(op.f('ix_used_temp_tokens_token_id'), 'used_temp_tokens', ['token_id'], unique=True)
op.create_index(op.f('ix_used_temp_tokens_expires_at'), 'used_temp_tokens', ['expires_at'], unique=False)
def downgrade():
# Drop used temp tokens table
op.drop_index(op.f('ix_used_temp_tokens_expires_at'), table_name='used_temp_tokens')
op.drop_index(op.f('ix_used_temp_tokens_token_id'), table_name='used_temp_tokens')
op.drop_table('used_temp_tokens')
# Drop TFA verification rate limits table
op.drop_index(op.f('ix_tfa_verification_rate_limits_window_end'), table_name='tfa_verification_rate_limits')
op.drop_index(op.f('ix_tfa_verification_rate_limits_user_id'), table_name='tfa_verification_rate_limits')
op.drop_table('tfa_verification_rate_limits')
# Drop TFA fields from users table
op.drop_index(op.f('ix_users_tfa_required'), table_name='users')
op.drop_index(op.f('ix_users_tfa_enabled'), table_name='users')
op.drop_column('users', 'tfa_required')
op.drop_column('users', 'tfa_secret')
op.drop_column('users', 'tfa_enabled')
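A minimal sketch of the enrollment and verification flow these columns support, assuming an RFC 6238 library such as pyotp (this commit only adds the schema). In a real deployment tfa_secret would likely be stored encrypted.

import pyotp  # assumed library; any TOTP implementation works

def enroll_tfa(user_email: str):
    secret = pyotp.random_base32()  # persist in users.tfa_secret
    uri = pyotp.TOTP(secret).provisioning_uri(
        name=user_email, issuer_name="GT AI OS")  # render as a QR code
    return secret, uri

def verify_tfa(secret: str, code: str) -> bool:
    # valid_window=1 tolerates one 30-second step of clock drift
    return pyotp.TOTP(secret).verify(code, valid_window=1)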


@@ -0,0 +1,51 @@
"""Add TFA session fields to used_temp_tokens
Revision ID: 009_add_tfa_session_fields
Revises: 008_add_totp_2fa
Create Date: 2025-10-07
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '009_add_tfa_session_fields'
down_revision = '008_add_totp_2fa'
branch_labels = None
depends_on = None
def upgrade():
# Add TFA session fields to used_temp_tokens table
op.add_column('used_temp_tokens', sa.Column('user_email', sa.String(255), nullable=True))
op.add_column('used_temp_tokens', sa.Column('tfa_configured', sa.Boolean(), nullable=True))
op.add_column('used_temp_tokens', sa.Column('qr_code_uri', sa.Text(), nullable=True))
op.add_column('used_temp_tokens', sa.Column('manual_entry_key', sa.String(255), nullable=True))
op.add_column('used_temp_tokens', sa.Column('temp_token', sa.Text(), nullable=True))
op.add_column('used_temp_tokens', sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False))
    # Make used_at nullable and drop its server default: it stays NULL until
    # the token is consumed, at which point the application sets it.
    op.alter_column('used_temp_tokens', 'used_at',
        existing_type=sa.DateTime(timezone=True),
        nullable=True,
        server_default=None,
        existing_server_default=sa.func.now())
def downgrade():
# Remove TFA session fields
op.drop_column('used_temp_tokens', 'created_at')
op.drop_column('used_temp_tokens', 'temp_token')
op.drop_column('used_temp_tokens', 'manual_entry_key')
op.drop_column('used_temp_tokens', 'qr_code_uri')
op.drop_column('used_temp_tokens', 'tfa_configured')
op.drop_column('used_temp_tokens', 'user_email')
    # Restore used_at to non-nullable with a server default. Rows still NULL
    # (tokens created but never consumed) must be backfilled first, or the
    # SET NOT NULL step fails.
    op.execute("UPDATE used_temp_tokens SET used_at = now() WHERE used_at IS NULL")
    op.alter_column('used_temp_tokens', 'used_at',
        existing_type=sa.DateTime(timezone=True),
        nullable=False,
        server_default=sa.func.now())
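With used_at nullable, a token row can be written during 2FA setup and consumed exactly once. A minimal sketch of the consume step (illustrative; this migration only changes the schema):

import sqlalchemy as sa

def consume_temp_token(connection, token_id: str) -> bool:
    # The WHERE clause makes the check-and-mark atomic: a replayed token
    # matches zero rows because used_at is no longer NULL.
    result = connection.execute(sa.text("""
        UPDATE used_temp_tokens
        SET used_at = now()
        WHERE token_id = :token_id
          AND used_at IS NULL
          AND expires_at > now()
    """), {'token_id': token_id})
    return result.rowcount == 1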


@@ -0,0 +1,103 @@
"""Add system management tables (versions, updates, backups)
Revision ID: 010_add_system_management_tables
Revises: 009_add_tfa_session_fields
Create Date: 2025-11-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision = '010_add_system_management_tables'
down_revision = '009_add_tfa_session_fields'
branch_labels = None
depends_on = None
def upgrade():
# Create system_versions table
op.create_table(
'system_versions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(36), nullable=False),
sa.Column('version', sa.String(50), nullable=False),
sa.Column('installed_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('installed_by', sa.String(255), nullable=True),
        # server_default so the default lives in the DDL (a Python-side
        # default= never reaches the database in a migration)
        sa.Column('is_current', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('release_notes', sa.Text(), nullable=True),
sa.Column('git_commit', sa.String(40), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid')
)
op.create_index('ix_system_versions_id', 'system_versions', ['id'])
op.create_index('ix_system_versions_version', 'system_versions', ['version'])
# Create update_jobs table
op.create_table(
'update_jobs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(36), nullable=False),
sa.Column('target_version', sa.String(50), nullable=False),
sa.Column('status', sa.Enum('pending', 'in_progress', 'completed', 'failed', 'rolled_back', name='updatestatus'), nullable=False),
sa.Column('started_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('current_stage', sa.String(100), nullable=True),
        sa.Column('logs', JSON, nullable=False, server_default='[]'),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('backup_id', sa.Integer(), nullable=True),
sa.Column('started_by', sa.String(255), nullable=True),
sa.Column('rollback_reason', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid')
)
op.create_index('ix_update_jobs_id', 'update_jobs', ['id'])
op.create_index('ix_update_jobs_uuid', 'update_jobs', ['uuid'])
op.create_index('ix_update_jobs_status', 'update_jobs', ['status'])
# Create backup_records table
op.create_table(
'backup_records',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(36), nullable=False),
sa.Column('backup_type', sa.Enum('manual', 'pre_update', 'scheduled', name='backuptype'), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('size_bytes', sa.BigInteger(), nullable=True),
sa.Column('location', sa.String(500), nullable=False),
sa.Column('version', sa.String(50), nullable=True),
        sa.Column('components', JSON, nullable=False, server_default='{}'),
sa.Column('checksum', sa.String(64), nullable=True),
sa.Column('created_by', sa.String(255), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
        sa.Column('is_valid', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid')
)
op.create_index('ix_backup_records_id', 'backup_records', ['id'])
op.create_index('ix_backup_records_uuid', 'backup_records', ['uuid'])
# Insert initial system version (v2.0.31 as per current deployment)
op.execute("""
INSERT INTO system_versions (uuid, version, installed_by, is_current, installed_at)
VALUES (
'initial-version-uuid',
'v2.0.31',
'system',
true,
NOW()
)
""")
def downgrade():
# Drop tables
op.drop_table('backup_records')
op.drop_table('update_jobs')
op.drop_table('system_versions')
# Drop enum types
op.execute('DROP TYPE IF EXISTS updatestatus')
op.execute('DROP TYPE IF EXISTS backuptype')
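A minimal sketch of recording a new version while keeping a single is_current row, as the bootstrap insert above implies. The uuid generation and transaction handling are illustrative.

import uuid
import sqlalchemy as sa

def record_version(connection, version: str, installed_by: str = "system"):
    # Demote the previous current version, then insert the new one; run both
    # statements inside one transaction in the real application.
    connection.execute(sa.text(
        "UPDATE system_versions SET is_current = false WHERE is_current = true"))
    connection.execute(sa.text("""
        INSERT INTO system_versions (uuid, version, installed_by, is_current)
        VALUES (:uuid, :version, :installed_by, true)
    """), {'uuid': str(uuid.uuid4()), 'version': version,
           'installed_by': installed_by})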