Initial commit - upload of all files in the folder
migrations/env.py (new file, 116 lines)
@@ -0,0 +1,116 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.ext.asyncio import AsyncEngine

from alembic import context

from src.models.models import Base
from src.config.settings import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Override the database URL with the one defined in the application settings
config.set_main_option("sqlalchemy.url", settings.POSTGRES_CONNECTION_STRING)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = [Base.metadata]

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Tables to be ignored by autogenerate
exclude_tables = ["sessions", "events", "app_states", "user_states"]


def include_object(object, name, type_, reflected, compare_to):
    """
    Filter function that excludes certain tables from autogenerated migrations.
    """
    if type_ == "table" and name in exclude_tables:
        return False
    return True


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        include_object=include_object,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = context.config.attributes.get("connection", None)
    if connectable is None:
        connectable = create_async_engine(
            context.config.get_main_option("sqlalchemy.url").replace(
                "postgresql://", "postgresql+asyncpg://"
            ),
            poolclass=pool.NullPool,
            future=True,
        )

    if isinstance(connectable, AsyncEngine):
        asyncio.run(run_async_migrations(connectable))
    else:
        do_run_migrations(connectable)


async def run_async_migrations(connectable):
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def do_run_migrations(connection):
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        include_object=include_object,
    )
    with context.begin_transaction():
        context.run_migrations()


run_migrations_online()
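As a usage note, the following is a minimal sketch (assuming an alembic.ini at the project root that points at this migrations/ directory, with src.config.settings importable) of driving this env.py programmatically. It exercises both branches of run_migrations_online(): with nothing set in config.attributes, env.py builds its own asyncpg engine; alternatively a caller such as a test fixture can hand it an existing synchronous connection.

# Sketch only: equivalent to "alembic upgrade head" from the shell.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed to exist at the project root

# Let env.py create its own async engine from settings.POSTGRES_CONNECTION_STRING.
command.upgrade(cfg, "head")

# Or reuse an existing synchronous connection, which run_migrations_online()
# picks up via context.config.attributes.get("connection"):
# cfg.attributes["connection"] = my_sync_connection
# command.upgrade(cfg, "head")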
migrations/script.py.mako (new file, 28 lines)
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
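This template is what alembic renders into the version files that follow. A minimal sketch, under the same alembic.ini assumption as above, of generating a new revision; with autogenerate=True Alembic diffs Base.metadata against the database and fills in the upgrade()/downgrade() bodies, skipping the tables filtered out by include_object in env.py.

# Renders script.py.mako into migrations/versions/<rev>_<slug>.py (sketch only).
from alembic import command
from alembic.config import Config

command.revision(Config("alembic.ini"), message="describe the change", autogenerate=True)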
migrations/versions/2df073c7b564_add_task_agent_type_agents_table.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""add_task_agent_type_agents_table

Revision ID: 2df073c7b564
Revises: 611d84e70bb2
Create Date: 2025-05-14 11:46:39.573247

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "2df073c7b564"
down_revision: Union[str, None] = "611d84e70bb2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("check_agent_type", "agents", type_="check")
    op.create_check_constraint(
        "check_agent_type",
        "agents",
        "type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a', 'workflow', 'crew_ai', 'task')",
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("check_agent_type", "agents", type_="check")
    op.create_check_constraint(
        "check_agent_type",
        "agents",
        "type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a', 'workflow', 'crew_ai')",
    )
    # ### end Alembic commands ###
migrations/versions/611d84e70bb2_add_crew_ai_coluns_agents_table.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""add_crew_ai_coluns_agents_table

Revision ID: 611d84e70bb2
Revises: bdc5d363e2e1
Create Date: 2025-05-14 07:31:08.741620

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '611d84e70bb2'
down_revision: Union[str, None] = 'bdc5d363e2e1'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('agents', sa.Column('role', sa.String(), nullable=True))
    op.add_column('agents', sa.Column('goal', sa.Text(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('agents', 'goal')
    op.drop_column('agents', 'role')
    # ### end Alembic commands ###
migrations/versions/6db4a526335b_init_migrations.py (new file, 147 lines)
@@ -0,0 +1,147 @@
"""init migrations

Revision ID: 6db4a526335b
Revises:
Create Date: 2025-05-07 13:30:04.971294

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '6db4a526335b'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('clients',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_clients_email'), 'clients', ['email'], unique=True)
    op.create_table('mcp_servers',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('config_type', sa.String(), nullable=False),
    sa.Column('config_json', sa.JSON(), nullable=False),
    sa.Column('environments', sa.JSON(), nullable=False),
    sa.Column('tools', sa.JSON(), nullable=False),
    sa.Column('type', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.CheckConstraint("config_type IN ('studio', 'sse')", name='check_mcp_server_config_type'),
    sa.CheckConstraint("type IN ('official', 'community')", name='check_mcp_server_type'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tools',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('config_json', sa.JSON(), nullable=False),
    sa.Column('environments', sa.JSON(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('agent_folders',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('client_id', sa.UUID(), nullable=True),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('api_keys',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('client_id', sa.UUID(), nullable=True),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('provider', sa.String(), nullable=False),
    sa.Column('encrypted_key', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('password_hash', sa.String(), nullable=False),
    sa.Column('client_id', sa.UUID(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_admin', sa.Boolean(), nullable=True),
    sa.Column('email_verified', sa.Boolean(), nullable=True),
    sa.Column('verification_token', sa.String(), nullable=True),
    sa.Column('verification_token_expiry', sa.DateTime(timezone=True), nullable=True),
    sa.Column('password_reset_token', sa.String(), nullable=True),
    sa.Column('password_reset_expiry', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_table('agents',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('client_id', sa.UUID(), nullable=True),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('type', sa.String(), nullable=False),
    sa.Column('model', sa.String(), nullable=True),
    sa.Column('api_key_id', sa.UUID(), nullable=True),
    sa.Column('instruction', sa.Text(), nullable=True),
    sa.Column('agent_card_url', sa.String(), nullable=True),
    sa.Column('folder_id', sa.UUID(), nullable=True),
    sa.Column('config', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.CheckConstraint("type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a', 'workflow')", name='check_agent_type'),
    sa.ForeignKeyConstraint(['api_key_id'], ['api_keys.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['folder_id'], ['agent_folders.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=True),
    sa.Column('action', sa.String(), nullable=False),
    sa.Column('resource_type', sa.String(), nullable=False),
    sa.Column('resource_id', sa.String(), nullable=True),
    sa.Column('details', sa.JSON(), nullable=True),
    sa.Column('ip_address', sa.String(), nullable=True),
    sa.Column('user_agent', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('audit_logs')
    op.drop_table('agents')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    op.drop_table('api_keys')
    op.drop_table('agent_folders')
    op.drop_table('tools')
    op.drop_table('mcp_servers')
    op.drop_index(op.f('ix_clients_email'), table_name='clients')
    op.drop_table('clients')
    # ### end Alembic commands ###
migrations/versions/bdc5d363e2e1_add_crew_ai_agent_type_agents_table.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""add_crew_ai_agent_type_agents_table

Revision ID: bdc5d363e2e1
Revises: 6db4a526335b
Create Date: 2025-05-14 06:23:14.701878

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "bdc5d363e2e1"
down_revision: Union[str, None] = "6db4a526335b"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("check_agent_type", "agents", type_="check")
    op.create_check_constraint(
        "check_agent_type",
        "agents",
        "type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a', 'workflow', 'crew_ai')",
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("check_agent_type", "agents", type_="check")
    op.create_check_constraint(
        "check_agent_type",
        "agents",
        "type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a', 'workflow')",
    )
    # ### end Alembic commands ###
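Taken together, the version files form a single linear chain: 6db4a526335b (initial schema) → bdc5d363e2e1 (adds 'crew_ai' to check_agent_type) → 611d84e70bb2 (adds the role and goal columns) → 2df073c7b564 (adds 'task' to check_agent_type). A minimal sketch, again assuming alembic.ini, of printing that history:

# Lists the revision chain newest-first, equivalent to "alembic history --verbose".
from alembic import command
from alembic.config import Config

command.history(Config("alembic.ini"), verbose=True)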