mirror of https://github.com/omnara-ai/omnara.git

Initial commit
77  shared/README.md  Normal file
@@ -0,0 +1,77 @@
# Shared

This directory contains shared infrastructure for database operations and configurations used across the Omnara platform.

## Purpose

The shared directory serves as the single source of truth for:

- Database schema definitions and models
- Database connection management
- Configuration settings
- Schema migration infrastructure

## Architecture

### Database Layer

- **ORM**: SQLAlchemy 2.0+ with modern declarative mapping
- **Database**: PostgreSQL for reliable, scalable data persistence
- **Models**: Centralized schema definitions for all platform entities
- **Session Management**: Shared database connection handling

### Configuration Management

- Environment-aware settings (development, production)
- Centralized configuration using Pydantic settings
- Support for multiple deployment scenarios

### Schema Migrations

- Alembic for version-controlled database schema changes
- Automatic migration application during startup (see the sketch below)
- Safe rollback capabilities
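Startup application of migrations reduces to calling Alembic's command API against this directory's `alembic.ini`. A minimal sketch, assuming it is invoked from `shared/`; the actual startup hook is not part of this commit:

```python
# Sketch only: apply pending migrations at service startup.
# Assumes the process runs from shared/ so that alembic.ini and the
# alembic/ script directory resolve; the real hook is not shown here.
from alembic import command
from alembic.config import Config


def apply_migrations() -> None:
    cfg = Config("alembic.ini")  # reads script_location = alembic
    command.upgrade(cfg, "head")  # equivalent to `alembic upgrade head`


if __name__ == "__main__":
    apply_migrations()
```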
## Database Migrations

### Essential Commands

```bash
# Apply pending migrations
cd shared/
alembic upgrade head

# Create a new migration after model changes
alembic revision --autogenerate -m "Description of changes"

# Check migration status
alembic current

# View migration history
alembic history

# Rollback one migration
alembic downgrade -1
```

### Migration Workflow

1. Modify the database models
2. Generate a migration: `alembic revision --autogenerate -m "Description"`
3. Review the generated migration file (see the example below)
4. Apply the migration (automatic on restart, or manual with `alembic upgrade head`)
5. Commit both the model changes and the migration files

**Important**: Always create migrations when changing the database schema. A pre-commit hook enforces this requirement.
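For a sense of what step 3 reviews, here is roughly what autogenerate would emit for a hypothetical change that adds a nullable `description` column to `user_agents`. The column and revision ID are invented for this example; the layout follows `script.py.mako` below:

```python
"""Add description to user_agents (hypothetical example)

Revision ID: 0f00dcafe000
Revises: 9925a2f3117d
Create Date: ...
"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic (this revision ID is made up).
revision: str = "0f00dcafe000"
down_revision: Union[str, None] = "9925a2f3117d"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column(
        "user_agents", sa.Column("description", sa.Text(), nullable=True)
    )


def downgrade() -> None:
    op.drop_column("user_agents", "description")
```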
## Key Benefits

- **Consistency**: Single schema definition prevents drift between services
- **Type Safety**: Shared type definitions and enumerations
- **Maintainability**: Centralized database operations reduce duplication
- **Version Control**: Migration history tracks all schema changes
- **Multi-Service**: Both the API backend and the MCP servers use the same database layer

## Dependencies

Core dependencies are managed in `requirements.txt` and include:

- SQLAlchemy for ORM functionality
- PostgreSQL driver for database connectivity
- Pydantic for configuration and validation
- Alembic for migration management
1  shared/__init__.py  Normal file
@@ -0,0 +1 @@
# Shared package initialization
118  shared/alembic.ini  Normal file
@@ -0,0 +1,118 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = driver://user:pass@localhost/dbname
# Database URL is now configured in env.py using the shared settings


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
81  shared/alembic/env.py  Normal file
@@ -0,0 +1,81 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Import our models and settings
from database.models import Base
from config.settings import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Set the database URL from our settings
config.set_main_option("sqlalchemy.url", settings.database_url)

# Interpret the config file for Python logging.
# This sets up the loggers.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
26  shared/alembic/script.py.mako  Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
349  shared/alembic/versions/5bb1cbc0f6e8_remote_agent_start.py  Normal file
@@ -0,0 +1,349 @@
"""remote_agent_start

Restructure agent system to support remote agent starting via webhooks.
Major changes:
- Create user_agents table to store user-specific agent configurations
- Drop agent_types table (replaced by user_agents)
- Update agent_instances to reference user_agents instead of agent_types
- All agents can now optionally have webhooks for remote triggering

IMPORTANT MIGRATION NOTES:
1. This migration should be run with the application stopped to avoid concurrency issues
2. Ensure you have a database backup before running
3. The downgrade will lose webhook configuration data (webhook_url and webhook_api_key)
4. All agent_instances must have valid agent_type_id references before migration

Revision ID: 5bb1cbc0f6e8
Revises: e80f941f1bf8
Create Date: 2025-07-04 19:46:15.011478

"""

from typing import Sequence, Union
from uuid import uuid4
import logging

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = "5bb1cbc0f6e8"
down_revision: Union[str, None] = "e80f941f1bf8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

logger = logging.getLogger(__name__)


def upgrade() -> None:
    """
    Migrate from shared agent_types to user-specific user_agents.
    This is a high-stakes migration that preserves all existing data relationships.
    """

    # Create user_agents table with all required columns and defaults
    op.create_table(
        "user_agents",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("user_id", sa.UUID(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("webhook_url", sa.Text(), nullable=True),
        sa.Column("webhook_api_key", sa.Text(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Create index on user_id for faster queries filtering by user
    op.create_index("ix_user_agents_user_id", "user_agents", ["user_id"], unique=False)

    # Add user_agent_id column to agent_instances as nullable initially
    op.add_column(
        "agent_instances", sa.Column("user_agent_id", sa.UUID(), nullable=True)
    )

    # Data migration: Create user_agents for each unique user/agent_type combination
    connection = op.get_bind()

    # First, check for orphaned agent_instances with invalid agent_type_id
    orphan_check = connection.execute(
        sa.text("""
            SELECT COUNT(*) FROM agent_instances
            WHERE agent_type_id IS NULL
               OR agent_type_id NOT IN (SELECT id FROM agent_types)
        """)
    )
    orphan_count = orphan_check.scalar() or 0

    if orphan_count > 0:
        raise RuntimeError(
            f"Cannot proceed: Found {orphan_count} agent_instances with invalid agent_type_id. "
            "These must be cleaned up before migration."
        )

    # Check for agent_instances with invalid user_id references
    invalid_user_check = connection.execute(
        sa.text("""
            SELECT COUNT(*) FROM agent_instances
            WHERE user_id NOT IN (SELECT id FROM users)
        """)
    )
    invalid_user_count = invalid_user_check.scalar() or 0

    if invalid_user_count > 0:
        raise RuntimeError(
            f"Cannot proceed: Found {invalid_user_count} agent_instances with invalid user_id. "
            "These must be cleaned up before migration."
        )

    # Check if there are any agent_instances to migrate
    count_result = connection.execute(sa.text("SELECT COUNT(*) FROM agent_instances"))
    instance_count = count_result.scalar() or 0

    if instance_count > 0:
        logger.info(f"Migrating {instance_count} agent instances...")

        # Get all unique user_id, agent_type_id combinations with agent names
        # Use DISTINCT ON to handle potential duplicates at the database level
        result = connection.execute(
            sa.text("""
                SELECT DISTINCT ON (ai.user_id, at.name)
                    ai.user_id, ai.agent_type_id, at.name
                FROM agent_instances ai
                JOIN agent_types at ON ai.agent_type_id = at.id
                ORDER BY ai.user_id, at.name, ai.started_at DESC
            """)
        )

        # Create user_agents for each combination and build mapping
        user_agent_mapping = {}
        rows_processed = 0

        for row in result:
            user_id, agent_type_id, agent_name = row
            new_id = str(uuid4())

            # Keep the original case of the agent name to maintain compatibility
            # Check if this user_agent already exists (for idempotency)
            existing_check = connection.execute(
                sa.text("""
                    SELECT id FROM user_agents
                    WHERE user_id = :user_id AND name = :name
                """),
                {"user_id": str(user_id), "name": agent_name},
            )
            existing_row = existing_check.fetchone()

            if existing_row:
                # Use existing user_agent
                user_agent_mapping[(str(user_id), str(agent_type_id))] = str(
                    existing_row[0]
                )
                logger.info(
                    f"Using existing user_agent for user {user_id}, agent {agent_name}"
                )
            else:
                # Create new user_agent
                try:
                    connection.execute(
                        sa.text("""
                            INSERT INTO user_agents (id, user_id, name, is_active, created_at, updated_at)
                            VALUES (:id, :user_id, :name, true, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                        """),
                        {"id": new_id, "user_id": str(user_id), "name": agent_name},
                    )

                    # Store mapping for both the specific agent_type_id and the user/name combination
                    user_agent_mapping[(str(user_id), str(agent_type_id))] = new_id
                    rows_processed += 1

                except Exception as e:
                    logger.error(
                        f"Failed to insert user_agent for user {user_id}, agent {agent_name}: {e}"
                    )
                    raise

        logger.info(f"Created {rows_processed} user_agent entries")

        # Update agent_instances with new user_agent_id
        # Use a single UPDATE with a JOIN for better performance
        update_result = connection.execute(
            sa.text("""
                UPDATE agent_instances ai
                SET user_agent_id = ua.id
                FROM user_agents ua, agent_types at
                WHERE ai.user_id = ua.user_id
                  AND ai.agent_type_id = at.id
                  AND ua.name = at.name
            """)
        )

        logger.info(f"Updated {update_result.rowcount} agent_instance records")

        # Verify all agent_instances have been updated
        orphan_check = connection.execute(
            sa.text("SELECT COUNT(*) FROM agent_instances WHERE user_agent_id IS NULL")
        )
        orphan_count = orphan_check.scalar() or 0

        if orphan_count > 0:
            raise RuntimeError(
                f"Migration failed: {orphan_count} agent_instances have no user_agent_id. "
                "This indicates a data integrity issue that must be resolved manually."
            )
    else:
        logger.info("No agent instances to migrate")

    # Add unique constraint AFTER data migration to avoid conflicts
    op.create_unique_constraint(
        "uq_user_agents_user_id_name", "user_agents", ["user_id", "name"]
    )

    # Create foreign key for user_agent_id
    op.create_foreign_key(
        "agent_instances_user_agent_id_fkey",
        "agent_instances",
        "user_agents",
        ["user_agent_id"],
        ["id"],
    )

    # Make user_agent_id NOT NULL now that all data is migrated
    op.alter_column("agent_instances", "user_agent_id", nullable=False)

    # Drop the old foreign key constraint
    op.drop_constraint(
        "agent_instances_agent_type_id_fkey", "agent_instances", type_="foreignkey"
    )

    # Drop agent_type_id column
    op.drop_column("agent_instances", "agent_type_id")

    # Finally, drop agent_types table
    op.drop_table("agent_types")

    logger.info("Migration completed successfully")


def downgrade() -> None:
    """
    Reverse the migration, restoring the shared agent_types structure.
    This preserves all data by recreating agent_types from unique user_agent names.
    """

    # Recreate agent_types table
    op.create_table(
        "agent_types",
        sa.Column("id", sa.UUID(), autoincrement=False, nullable=False),
        sa.Column("name", sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column(
            "created_at",
            postgresql.TIMESTAMP(),
            autoincrement=False,
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.PrimaryKeyConstraint("id", name="agent_types_pkey"),
        sa.UniqueConstraint(
            "name",
            name="agent_types_name_key",
        ),
    )
    op.create_index("ix_agent_types_id", "agent_types", ["id"], unique=False)

    # Add agent_type_id column back to agent_instances
    op.add_column(
        "agent_instances",
        sa.Column("agent_type_id", sa.UUID(), autoincrement=False, nullable=True),
    )

    # Migrate data back
    connection = op.get_bind()

    # Check if there are any user_agents to migrate back
    count_result = connection.execute(sa.text("SELECT COUNT(*) FROM user_agents"))
    agent_count = count_result.scalar() or 0

    if agent_count > 0:
        logger.info(f"Migrating {agent_count} user agents back to agent types...")

        # Get all unique agent names from user_agents
        result = connection.execute(
            sa.text("SELECT DISTINCT name FROM user_agents ORDER BY name")
        )

        # Create agent_types for each unique name
        for row in result:
            agent_name = row[0]
            new_id = str(uuid4())

            try:
                connection.execute(
                    sa.text("""
                        INSERT INTO agent_types (id, name, created_at)
                        VALUES (:id, :name, CURRENT_TIMESTAMP)
                    """),
                    {"id": new_id, "name": agent_name},
                )
            except Exception as e:
                logger.error(f"Failed to insert agent_type {agent_name}: {e}")
                raise

        # Update agent_instances with agent_type_id based on user_agent name
        update_result = connection.execute(
            sa.text("""
                UPDATE agent_instances ai
                SET agent_type_id = at.id
                FROM user_agents ua, agent_types at
                WHERE ai.user_agent_id = ua.id
                  AND ua.name = at.name
            """)
        )

        logger.info(f"Updated {update_result.rowcount} agent_instance records")
    else:
        logger.info("No user agents to migrate back")

    # Make agent_type_id NOT NULL now that data is migrated
    op.alter_column("agent_instances", "agent_type_id", nullable=False)

    # Create foreign key for agent_type_id
    op.create_foreign_key(
        "agent_instances_agent_type_id_fkey",
        "agent_instances",
        "agent_types",
        ["agent_type_id"],
        ["id"],
    )

    # Drop the foreign key constraint for user_agent_id
    op.drop_constraint(
        "agent_instances_user_agent_id_fkey", "agent_instances", type_="foreignkey"
    )

    # Drop user_agent_id column
    op.drop_column("agent_instances", "user_agent_id")

    # Drop indexes and constraints
    op.drop_constraint("uq_user_agents_user_id_name", "user_agents", type_="unique")
    op.drop_index("ix_user_agents_user_id", table_name="user_agents")

    # Drop user_agents table
    op.drop_table("user_agents")

    logger.info("Downgrade completed successfully")
@@ -0,0 +1,47 @@
"""Add push_tokens table for mobile notifications

Revision ID: 9925a2f3117d
Revises: 5bb1cbc0f6e8
Create Date: 2025-07-08 18:40:52.005545

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "9925a2f3117d"
down_revision: Union[str, None] = "5bb1cbc0f6e8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "push_tokens",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("user_id", sa.UUID(), nullable=False),
        sa.Column("token", sa.String(length=255), nullable=False),
        sa.Column("platform", sa.String(length=50), nullable=False),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("last_used_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("token"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("push_tokens")
    # ### end Alembic commands ###
154  shared/alembic/versions/e80f941f1bf8_initial_migration.py  Normal file
@@ -0,0 +1,154 @@
"""Initial migration

Revision ID: e80f941f1bf8
Revises:
Create Date: 2025-06-27 17:17:38.807339

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "e80f941f1bf8"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "agent_types",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "users",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
    )
    op.create_table(
        "agent_instances",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("agent_type_id", sa.UUID(), nullable=False),
        sa.Column("user_id", sa.UUID(), nullable=False),
        sa.Column(
            "status",
            sa.Enum(
                "ACTIVE",
                "AWAITING_INPUT",
                "PAUSED",
                "STALE",
                "COMPLETED",
                "FAILED",
                "KILLED",
                "DISCONNECTED",
                name="agentstatus",
            ),
            nullable=False,
        ),
        sa.Column("started_at", sa.DateTime(), nullable=False),
        sa.Column("ended_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["agent_type_id"],
            ["agent_types.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "api_keys",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("user_id", sa.UUID(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("api_key_hash", sa.String(length=128), nullable=False),
        sa.Column("api_key", sa.Text(), nullable=False),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("expires_at", sa.DateTime(), nullable=True),
        sa.Column("last_used_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "agent_questions",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("agent_instance_id", sa.UUID(), nullable=False),
        sa.Column("question_text", sa.Text(), nullable=False),
        sa.Column("answer_text", sa.Text(), nullable=True),
        sa.Column("answered_by_user_id", sa.UUID(), nullable=True),
        sa.Column("asked_at", sa.DateTime(), nullable=False),
        sa.Column("answered_at", sa.DateTime(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ["agent_instance_id"],
            ["agent_instances.id"],
        ),
        sa.ForeignKeyConstraint(
            ["answered_by_user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "agent_steps",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("agent_instance_id", sa.UUID(), nullable=False),
        sa.Column("step_number", sa.Integer(), nullable=False),
        sa.Column("description", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["agent_instance_id"],
            ["agent_instances.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "agent_user_feedback",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("agent_instance_id", sa.UUID(), nullable=False),
        sa.Column("created_by_user_id", sa.UUID(), nullable=False),
        sa.Column("feedback_text", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("retrieved_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["agent_instance_id"],
            ["agent_instances.id"],
        ),
        sa.ForeignKeyConstraint(
            ["created_by_user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("agent_user_feedback")
    op.drop_table("agent_steps")
    op.drop_table("agent_questions")
    op.drop_table("api_keys")
    op.drop_table("agent_instances")
    op.drop_table("users")
    op.drop_table("agent_types")
    # ### end Alembic commands ###
3  shared/config/__init__.py  Normal file
@@ -0,0 +1,3 @@
from .settings import settings

__all__ = ["settings"]
95  shared/config/settings.py  Normal file
@@ -0,0 +1,95 @@
import os
import json

from pydantic_settings import BaseSettings, SettingsConfigDict


def get_port_from_env() -> int:
    """Get port from environment variables, handling potential string literals"""
    port_env = os.getenv("PORT")
    mcp_port_env = os.getenv("MCP_SERVER_PORT")

    # Handle case where PORT might be '$PORT' literal string
    if port_env and port_env != "$PORT":
        try:
            return int(port_env)
        except ValueError:
            pass

    if mcp_port_env and mcp_port_env != "$MCP_SERVER_PORT":
        try:
            return int(mcp_port_env)
        except ValueError:
            pass

    return 8080


def get_database_url() -> str:
    """Get database URL based on environment"""
    # Note: This is evaluated when the Settings class is defined,
    # so we read directly from environment variables
    environment = os.getenv("ENVIRONMENT", "development").lower()

    if environment == "production":
        production_url = os.getenv("PRODUCTION_DB_URL")
        if production_url:
            return production_url

    # Default to development URL or fallback
    development_url = os.getenv("DEVELOPMENT_DB_URL")
    if development_url:
        return development_url

    # Final fallback to local PostgreSQL
    return "postgresql://user:password@localhost:5432/agent_dashboard"


class Settings(BaseSettings):
    # Environment Configuration
    environment: str = "development"
    development_db_url: str = (
        "postgresql://user:password@localhost:5432/agent_dashboard"
    )
    production_db_url: str = ""

    # Database - automatically chooses based on ENVIRONMENT variable
    database_url: str = get_database_url()

    # MCP Server - use PORT env var if available (for Render), otherwise default
    mcp_server_port: int = get_port_from_env()

    # Backend API - use PORT env var if available (for Render), otherwise default
    api_port: int = int(os.getenv("PORT") or os.getenv("API_PORT") or "8000")

    # Frontend URLs - can be set as JSON array string in env var
    @property
    def frontend_urls(self) -> list[str]:
        frontend_urls_env = os.getenv("FRONTEND_URLS")
        if frontend_urls_env:
            try:
                return json.loads(frontend_urls_env)
            except json.JSONDecodeError:
                # If it's a single URL string, wrap it in a list
                return [frontend_urls_env]
        return ["http://localhost:3000"]

    # API Versioning
    api_v1_prefix: str = "/api/v1"

    # Supabase Configuration
    supabase_url: str = ""
    supabase_anon_key: str = ""
    supabase_service_role_key: str = ""

    # JWT Signing Keys for API Keys
    jwt_private_key: str = ""
    jwt_public_key: str = ""

    # Sentry Configuration
    sentry_dsn: str = ""

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")


settings = Settings()
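A quick sketch of how the `ENVIRONMENT` switch plays out (the URLs are made up; `shared/` is assumed to be on `sys.path`, as in `alembic/env.py`):

```python
# Sketch only: illustrating ENVIRONMENT-driven database_url selection.
# URLs are invented; assumes shared/ is on sys.path, as in alembic/env.py.
import os

# get_database_url() runs while the Settings class body is evaluated,
# so these must be set before config.settings is first imported.
os.environ["ENVIRONMENT"] = "production"
os.environ["PRODUCTION_DB_URL"] = "postgresql://app:secret@db.internal:5432/omnara"

from config.settings import settings

print(settings.database_url)  # postgresql://app:secret@db.internal:5432/omnara
```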
25  shared/database/__init__.py  Normal file
@@ -0,0 +1,25 @@
from .enums import AgentStatus
from .models import (
    AgentInstance,
    AgentQuestion,
    AgentStep,
    AgentUserFeedback,
    APIKey,
    Base,
    PushToken,
    User,
    UserAgent,
)

__all__ = [
    "Base",
    "User",
    "UserAgent",
    "AgentInstance",
    "AgentStep",
    "AgentQuestion",
    "AgentStatus",
    "AgentUserFeedback",
    "APIKey",
    "PushToken",
]
12  shared/database/enums.py  Normal file
@@ -0,0 +1,12 @@
from enum import Enum


class AgentStatus(str, Enum):
    ACTIVE = "active"
    AWAITING_INPUT = "awaiting_input"
    PAUSED = "paused"
    STALE = "stale"
    COMPLETED = "completed"
    FAILED = "failed"
    KILLED = "killed"
    DISCONNECTED = "disconnected"
226  shared/database/models.py  Normal file
@@ -0,0 +1,226 @@
from datetime import UTC, datetime
from uuid import UUID, uuid4

from sqlalchemy import ForeignKey, Index, String, Text, UniqueConstraint
from sqlalchemy.dialects.postgresql import UUID as PostgresUUID
from sqlalchemy.orm import (
    DeclarativeBase,  # type: ignore[attr-defined]
    Mapped,  # type: ignore[attr-defined]
    mapped_column,  # type: ignore[attr-defined]
    relationship,
)

from .enums import AgentStatus


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "users"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True
    )  # Matches Supabase auth.users.id
    email: Mapped[str] = mapped_column(String(255), unique=True)
    display_name: Mapped[str | None] = mapped_column(String(255), default=None)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    updated_at: Mapped[datetime] = mapped_column(
        default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
    )

    # Relationships
    agent_instances: Mapped[list["AgentInstance"]] = relationship(
        "AgentInstance", back_populates="user"
    )
    answered_questions: Mapped[list["AgentQuestion"]] = relationship(
        "AgentQuestion", back_populates="answered_by_user"
    )
    feedback: Mapped[list["AgentUserFeedback"]] = relationship(
        "AgentUserFeedback", back_populates="created_by_user"
    )
    api_keys: Mapped[list["APIKey"]] = relationship("APIKey", back_populates="user")
    user_agents: Mapped[list["UserAgent"]] = relationship(
        "UserAgent", back_populates="user"
    )
    push_tokens: Mapped[list["PushToken"]] = relationship(
        "PushToken", back_populates="user"
    )


class UserAgent(Base):
    __tablename__ = "user_agents"
    __table_args__ = (
        UniqueConstraint("user_id", "name", name="uq_user_agents_user_id_name"),
        Index("ix_user_agents_user_id", "user_id"),
    )

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    user_id: Mapped[UUID] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True)
    )
    name: Mapped[str] = mapped_column(String(255))
    webhook_url: Mapped[str | None] = mapped_column(Text, default=None)
    webhook_api_key: Mapped[str | None] = mapped_column(Text, default=None)  # Encrypted
    is_active: Mapped[bool] = mapped_column(default=True)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    updated_at: Mapped[datetime] = mapped_column(
        default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
    )

    # Relationships
    user: Mapped["User"] = relationship("User", back_populates="user_agents")
    instances: Mapped[list["AgentInstance"]] = relationship(
        "AgentInstance", back_populates="user_agent"
    )


class AgentInstance(Base):
    __tablename__ = "agent_instances"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    user_agent_id: Mapped[UUID] = mapped_column(
        ForeignKey("user_agents.id"), type_=PostgresUUID(as_uuid=True)
    )
    user_id: Mapped[UUID] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True)
    )
    status: Mapped[AgentStatus] = mapped_column(default=AgentStatus.ACTIVE)
    started_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    ended_at: Mapped[datetime | None] = mapped_column(default=None)

    # Relationships
    user_agent: Mapped["UserAgent"] = relationship(
        "UserAgent", back_populates="instances"
    )
    user: Mapped["User"] = relationship("User", back_populates="agent_instances")
    steps: Mapped[list["AgentStep"]] = relationship(
        "AgentStep", back_populates="instance", order_by="AgentStep.created_at"
    )
    questions: Mapped[list["AgentQuestion"]] = relationship(
        "AgentQuestion", back_populates="instance", order_by="AgentQuestion.asked_at"
    )
    user_feedback: Mapped[list["AgentUserFeedback"]] = relationship(
        "AgentUserFeedback",
        back_populates="instance",
        order_by="AgentUserFeedback.created_at",
    )


class AgentStep(Base):
    __tablename__ = "agent_steps"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    agent_instance_id: Mapped[UUID] = mapped_column(
        ForeignKey("agent_instances.id"), type_=PostgresUUID(as_uuid=True)
    )
    step_number: Mapped[int] = mapped_column()
    description: Mapped[str] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))

    # Relationships
    instance: Mapped["AgentInstance"] = relationship(
        "AgentInstance", back_populates="steps"
    )


class AgentQuestion(Base):
    __tablename__ = "agent_questions"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    agent_instance_id: Mapped[UUID] = mapped_column(
        ForeignKey("agent_instances.id"), type_=PostgresUUID(as_uuid=True)
    )
    question_text: Mapped[str] = mapped_column(Text)
    answer_text: Mapped[str | None] = mapped_column(Text, default=None)
    answered_by_user_id: Mapped[UUID | None] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True), default=None
    )
    asked_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    answered_at: Mapped[datetime | None] = mapped_column(default=None)
    is_active: Mapped[bool] = mapped_column(default=True)

    # Relationships
    instance: Mapped["AgentInstance"] = relationship(
        "AgentInstance", back_populates="questions"
    )
    answered_by_user: Mapped["User | None"] = relationship(
        "User", back_populates="answered_questions"
    )


class AgentUserFeedback(Base):
    __tablename__ = "agent_user_feedback"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    agent_instance_id: Mapped[UUID] = mapped_column(
        ForeignKey("agent_instances.id"), type_=PostgresUUID(as_uuid=True)
    )
    created_by_user_id: Mapped[UUID] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True)
    )
    feedback_text: Mapped[str] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    retrieved_at: Mapped[datetime | None] = mapped_column(default=None)

    # Relationships
    instance: Mapped["AgentInstance"] = relationship(
        "AgentInstance", back_populates="user_feedback"
    )
    created_by_user: Mapped["User"] = relationship("User", back_populates="feedback")


class APIKey(Base):
    __tablename__ = "api_keys"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    user_id: Mapped[UUID] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True)
    )
    name: Mapped[str] = mapped_column(String(255))
    api_key_hash: Mapped[str] = mapped_column(String(128))
    api_key: Mapped[str] = mapped_column(
        Text
    )  # Store the actual JWT for user viewing, not good for security
    is_active: Mapped[bool] = mapped_column(default=True)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    expires_at: Mapped[datetime | None] = mapped_column(default=None)
    last_used_at: Mapped[datetime | None] = mapped_column(default=None)

    # Relationships
    user: Mapped["User"] = relationship("User", back_populates="api_keys")


class PushToken(Base):
    __tablename__ = "push_tokens"

    id: Mapped[UUID] = mapped_column(
        PostgresUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    user_id: Mapped[UUID] = mapped_column(
        ForeignKey("users.id"), type_=PostgresUUID(as_uuid=True)
    )
    token: Mapped[str] = mapped_column(String(255), unique=True)
    platform: Mapped[str] = mapped_column(String(50))  # 'ios' or 'android'
    is_active: Mapped[bool] = mapped_column(default=True)
    created_at: Mapped[datetime] = mapped_column(default=lambda: datetime.now(UTC))
    updated_at: Mapped[datetime] = mapped_column(
        default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
    )
    last_used_at: Mapped[datetime | None] = mapped_column(default=None)

    # Relationships
    user: Mapped["User"] = relationship("User", back_populates="push_tokens")
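A short sketch of these models in use against a scratch database (names and values are illustrative; assumes the repository root is on `sys.path` so `shared` imports as a package):

```python
# Sketch only: exercising the shared models against a scratch database.
# Names are illustrative; assumes the repo root is on sys.path.
from uuid import uuid4

from shared.database import AgentInstance, AgentStatus, User, UserAgent
from shared.database.session import SessionLocal

with SessionLocal() as session:
    user = User(id=uuid4(), email="dev@example.com")  # id mirrors auth.users.id
    agent = UserAgent(user=user, name="demo-agent")
    instance = AgentInstance(user=user, user_agent=agent)

    session.add_all([user, agent, instance])
    session.commit()

    # Column defaults (status=ACTIVE, timestamps) are applied at flush time.
    print(instance.status, agent.instances == [instance])
```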
18  shared/database/session.py  Normal file
@@ -0,0 +1,18 @@
from collections.abc import Generator

from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker

from ..config.settings import settings

engine = create_engine(settings.database_url)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db() -> Generator[Session, None, None]:
    """Get database session"""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
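`get_db` is shaped for framework dependency injection. A minimal consumption sketch (FastAPI is assumed here and is not part of this commit, though `requirements.txt` hints at it via `sentry-sdk[fastapi]`; the route is illustrative):

```python
# Sketch only: consuming get_db as a FastAPI dependency.
# FastAPI is assumed; the route and lookup are illustrative.
from fastapi import Depends, FastAPI
from sqlalchemy.orm import Session

from shared.database import User
from shared.database.session import get_db

app = FastAPI()


@app.get("/users/{email}")
def read_user(email: str, db: Session = Depends(get_db)) -> dict:
    user = db.query(User).filter(User.email == email).first()
    return {"email": email, "found": user is not None}
```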
7  shared/requirements.txt  Normal file
@@ -0,0 +1,7 @@
sqlalchemy==2.0.41
psycopg2-binary==2.9.10
pydantic==2.11.5
pydantic-settings==2.9.1
python-dotenv==1.1.0
alembic==1.14.0
sentry-sdk[fastapi]==2.32.0