first sync

2025-03-04 07:59:21 +01:00
parent 9cdcf486b6
commit 506716e703
1450 changed files with 577316 additions and 62 deletions


@ -0,0 +1 @@
Generic single-database configuration.


@ -0,0 +1,27 @@
from alembic import op
from sqlalchemy import engine_from_config
from sqlalchemy.engine import reflection
def _table_has_column(table, column):
config = op.get_context().config
engine = engine_from_config(
config.get_section(config.config_ini_section), prefix='sqlalchemy.')
insp = reflection.Inspector.from_engine(engine)
has_column = False
for col in insp.get_columns(table):
if column != col['name']:
continue
has_column = True
return has_column
def _has_table(table_name):
config = op.get_context().config
engine = engine_from_config(
config.get_section(config.config_ini_section), prefix="sqlalchemy."
)
inspector = reflection.Inspector.from_engine(engine)
tables = inspector.get_table_names()
return table_name in tables
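The two helpers above build a throw-away Engine from the ini section on every call. For comparison only, a minimal sketch of the same checks written against the SQLAlchemy 1.4+ inspection API and the connection Alembic is already running on (the _v2 names are hypothetical and not part of this commit):

import sqlalchemy as sa
from alembic import op

def _table_has_column_v2(table, column):
    # op.get_bind() returns the connection the migration is running on
    inspector = sa.inspect(op.get_bind())
    return any(col["name"] == column for col in inspector.get_columns(table))

def _has_table_v2(table_name):
    inspector = sa.inspect(op.get_bind())
    return inspector.has_table(table_name)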


@ -0,0 +1,82 @@
from alembic import context
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
import os
os.environ["ALEMBIC"] = "1"
from app.configuration import SQLALCHEMY_BASE_ADMIN_URI, PG_DB_
config.set_main_option('sqlalchemy.url', SQLALCHEMY_BASE_ADMIN_URI + PG_DB_)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
# with context.begin_transaction():  # disabled: works around a stuck transaction; root cause still to be investigated
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
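For reference, a hedged sketch of how this env.py is typically exercised programmatically; the ini file path is an assumption and not part of this commit:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")      # assumed location of the Alembic ini file
command.upgrade(cfg, "head")     # applies all revisions via run_migrations_online()
command.downgrade(cfg, "-1")     # steps back a single revision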


@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}


@ -0,0 +1,31 @@
"""Add prevent post-init to register case objects again during boot
Revision ID: 00b43bc4e8ac
Revises: 2604f6962838
Create Date: 2023-05-05 18:43:07.236041
"""
from alembic import op
import sqlalchemy as sa
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '00b43bc4e8ac'
down_revision = '2604f6962838'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('server_settings', 'prevent_post_objects_repush'):
op.add_column('server_settings',
sa.Column('prevent_post_objects_repush', sa.Boolean(), default=False)
)
pass
def downgrade():
if _table_has_column('server_settings', 'prevent_post_objects_repush'):
op.drop_column('server_settings', 'prevent_post_objects_repush')
pass


@ -0,0 +1,63 @@
"""Add tags to assets
Revision ID: 0db700644a4f
Revises: 6a3b3b627d45
Create Date: 2022-01-06 13:47:12.648707
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = '0db700644a4f'
down_revision = '6a3b3b627d45'
branch_labels = None
depends_on = None
def upgrade():
# Now issue changes on existing tables and migrate Asset tags
# Add column asset_tags to CaseAssets if not existing
if not _table_has_column('case_assets', 'asset_tags'):
op.add_column('case_assets',
sa.Column('asset_tags', sa.Text)
)
if _table_has_column('case_assets', 'asset_tags'):
# Set schema and make migration of data
t_case_assets = sa.Table(
'case_assets',
sa.MetaData(),
sa.Column('asset_id', sa.Integer, primary_key=True),
sa.Column('asset_name', sa.Text),
sa.Column('asset_description', sa.Text),
sa.Column('asset_domain', sa.Text),
sa.Column('asset_ip', sa.Text),
sa.Column('asset_info', sa.Text),
sa.Column('asset_compromised', sa.Boolean),
sa.Column('asset_type_id', sa.ForeignKey('asset_type.asset_id')),
sa.Column('asset_tags', sa.Text),
sa.Column('case_id', sa.ForeignKey('cases.case_id')),
sa.Column('date_added', sa.DateTime),
sa.Column('date_update', sa.DateTime),
sa.Column('user_id', sa.ForeignKey('user.id')),
sa.Column('analysis_status_id', sa.ForeignKey('analysis_status.id'))
)
# Migrate existing Assets
conn = op.get_bind()
res = conn.execute("SELECT asset_id from case_assets WHERE asset_tags IS NULL;")
results = res.fetchall()
if results:
for res in results:
conn.execute(t_case_assets.update().where(t_case_assets.c.asset_id == res[0]).values(
asset_tags=''
))
def downgrade():
pass


@ -0,0 +1,45 @@
"""Add module types
Revision ID: 10a7616f3cc7
Revises: 874ba5e5da44
Create Date: 2022-02-04 07:46:32.382640
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '10a7616f3cc7'
down_revision = '874ba5e5da44'
branch_labels = None
depends_on = None
def upgrade():
# Issue changes on the existing iris_module table and migrate existing rows
# Add column module_type to iris_module if not existing and set existing modules to 'pipeline'
if not _table_has_column('iris_module', 'module_type'):
op.add_column('iris_module',
sa.Column('module_type', sa.Text)
)
t_ua = sa.Table(
'iris_module',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('module_type', sa.Text)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
module_type='pipeline'
))
pass
def downgrade():
pass


@ -0,0 +1,32 @@
"""Add deletion confirmation option
Revision ID: 11e066542a88
Revises: 20447ecb2245
Create Date: 2022-09-25 08:51:13.383431
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from sqlalchemy import Boolean
from app.alembic.alembic_utils import _table_has_column
revision = '11e066542a88'
down_revision = '20447ecb2245'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('user', 'has_deletion_confirmation'):
op.add_column('user',
sa.Column('has_deletion_confirmation', Boolean(), nullable=False, server_default='false')
)
pass
def downgrade():
pass


@ -0,0 +1,51 @@
"""Add customer extended fields
Revision ID: 1df4adfa3160
Revises: a3eb60654ec4
Create Date: 2022-11-11 19:23:30.355618
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = '1df4adfa3160'
down_revision = 'a3eb60654ec4'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('client', 'description'):
op.add_column('client',
sa.Column('description', sa.Text())
)
if not _table_has_column('client', 'sla'):
op.add_column('client',
sa.Column('sla', sa.Text())
)
if not _table_has_column('client', 'creation_date'):
op.add_column('client',
sa.Column('creation_date', sa.DateTime())
)
if not _table_has_column('client', 'last_update_date'):
op.add_column('client',
sa.Column('last_update_date', sa.DateTime())
)
if not _table_has_column('client', 'created_by'):
op.add_column('client',
sa.Column('created_by', sa.BigInteger(), sa.ForeignKey('user.id'), nullable=True)
)
pass
def downgrade():
pass


@ -0,0 +1,183 @@
"""Objects UUID field
Revision ID: 20447ecb2245
Revises: ad4e0cd17597
Create Date: 2022-09-23 21:07:20.007874
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from sqlalchemy import text
from sqlalchemy.dialects.postgresql import UUID
from app.alembic.alembic_utils import _table_has_column
revision = '20447ecb2245'
down_revision = 'ad4e0cd17597'
branch_labels = None
depends_on = None
def upgrade():
# ---- Cases ----
op.alter_column('cases', 'case_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('cases', 'case_uuid'):
op.add_column('cases',
sa.Column('case_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Events ----
op.alter_column('cases_events', 'event_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('cases_events', 'event_uuid'):
op.add_column('cases_events',
sa.Column('event_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Clients ----
op.alter_column('client', 'client_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('client', 'client_uuid'):
op.add_column('client',
sa.Column('client_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Case assets ----
op.alter_column('case_assets', 'asset_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('case_assets', 'asset_uuid'):
op.add_column('case_assets',
sa.Column('asset_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Case objects states ----
op.alter_column('object_state', 'object_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ---- Case event IOC ----
op.alter_column('case_events_ioc', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ---- Case event assets ----
op.alter_column('case_events_assets', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ---- IOC ----
op.alter_column('ioc', 'ioc_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('ioc', 'ioc_uuid'):
op.add_column('ioc',
sa.Column('ioc_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Notes ----
op.alter_column('notes', 'note_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('notes', 'note_uuid'):
op.add_column('notes',
sa.Column('note_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Notes group ----
op.alter_column('notes_group', 'group_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('notes_group', 'group_uuid'):
op.add_column('notes_group',
sa.Column('group_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- Notes group link ----
op.alter_column('notes_group_link', 'link_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ---- case received files ----
op.alter_column('case_received_file', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('case_received_file', 'file_uuid'):
op.add_column('case_received_file',
sa.Column('file_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- case tasks ----
op.alter_column('case_tasks', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('case_tasks', 'task_uuid'):
op.add_column('case_tasks',
sa.Column('task_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- global tasks ----
op.alter_column('global_tasks', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
if not _table_has_column('global_tasks', 'task_uuid'):
op.add_column('global_tasks',
sa.Column('task_uuid', UUID(as_uuid=True), server_default=text("gen_random_uuid()"),
nullable=False)
)
# ---- user activity ----
op.alter_column('user_activity', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ---- Iris Hooks ----
op.alter_column('iris_module_hooks', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
pass
def downgrade():
pass
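One operational note on the UUID defaults added above: gen_random_uuid() is built into PostgreSQL 13 and later; on older servers it is provided by the pgcrypto extension. A purely illustrative guard, not part of this commit:

# inside upgrade(), before the add_column calls (illustrative only):
op.execute('CREATE EXTENSION IF NOT EXISTS "pgcrypto";')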


@ -0,0 +1,54 @@
"""Add case state
Revision ID: 2604f6962838
Revises: db93d5c4c0aa
Create Date: 2023-05-05 11:16:19.997383
"""
from alembic import op
import sqlalchemy as sa
from app.models.cases import CaseState
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '2604f6962838'
down_revision = 'db93d5c4c0aa'
branch_labels = None
depends_on = None
def upgrade():
# Add the state_id column to the cases table
if not _table_has_column('cases', 'state_id'):
state_id = 1
state = CaseState.query.filter_by(state_id=state_id).first()
if state is None:
state = CaseState()
state.id=state_id
state.state_name='Unspecified'
state.state_description='Unspecified'
state.protected=True
op.bulk_insert(CaseState.__table__, [state.__dict__])
op.add_column(
'cases',
sa.Column('state_id', sa.Integer, sa.ForeignKey('case_state.state_id'), nullable=True,
server_default=sa.text("1"))
)
# Set the default value for the state_id column
op.execute("UPDATE cases SET state_id = 1")
# Create a foreign key constraint between cases.state_id and case_state.state_id
op.create_foreign_key(
None, 'cases', 'case_state', ['state_id'], ['state_id']
)
def downgrade():
pass


@ -0,0 +1,38 @@
"""Adding IOC and assets enrichments
Revision ID: 2a4a8330b908
Revises: f727badcc4e1
Create Date: 2023-04-26 08:42:19.397146
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '2a4a8330b908'
down_revision = 'f727badcc4e1'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('case_assets', 'asset_enrichment'):
# Add asset_enrichment column to case_assets
op.add_column('case_assets', sa.Column('asset_enrichment', JSONB, nullable=True))
if not _table_has_column('ioc', 'ioc_enrichment'):
# Add ioc_enrichment column to ioc
op.add_column('ioc', sa.Column('ioc_enrichment', JSONB, nullable=True))
def downgrade():
if _table_has_column('case_assets', 'asset_enrichment'):
# Remove asset_enrichment column from case_assets
op.drop_column('case_assets', 'asset_enrichment')
if _table_has_column('ioc', 'ioc_enrichment'):
# Remove ioc_enrichment column from ioc
op.drop_column('ioc', 'ioc_enrichment')


@ -0,0 +1,43 @@
"""Add objects attributes
Revision ID: 2df770a4989c
Revises: 10a7616f3cc7
Create Date: 2022-02-11 20:13:14.365469
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '2df770a4989c'
down_revision = '10a7616f3cc7'
branch_labels = None
depends_on = None
def upgrade():
tables = ['ioc', 'case_assets', 'case_received_file', 'case_tasks', 'notes', 'cases_events', 'cases', 'client']
for table in tables:
if not _table_has_column(table, 'custom_attributes'):
op.add_column(table,
sa.Column('custom_attributes', sa.JSON)
)
t_ua = sa.Table(
table,
sa.MetaData(),
sa.Column('custom_attributes', sa.JSON)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
custom_attributes={}
))
pass
def downgrade():
pass


@ -0,0 +1,31 @@
"""Add event flag
Revision ID: 3204e9116233
Revises: 11e066542a88
Create Date: 2022-10-02 13:44:36.996070
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = '3204e9116233'
down_revision = '11e066542a88'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('cases_events', 'event_is_flagged'):
op.add_column('cases_events',
sa.Column('event_is_flagged', sa.Boolean, default=False)
)
pass
def downgrade():
pass


@ -0,0 +1,28 @@
"""Rename opened to open
Revision ID: 3a4d4f15bd69
Revises: 65168cb6cc90
Create Date: 2023-10-05 11:36:45.246779
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3a4d4f15bd69'
down_revision = '65168cb6cc90'
branch_labels = None
depends_on = None
def upgrade():
op.execute(
"UPDATE case_state SET state_name='Open' WHERE state_name='Opened'"
)
def downgrade():
op.execute(
"UPDATE case_state SET state_name='Opened' WHERE state_name='Open'"
)


@ -0,0 +1,51 @@
"""Add compromise status to assets
Revision ID: 4ecdfcb34f7c
Revises: a929ef458490
Create Date: 2022-11-26 17:06:33.061363
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
from app.models import CompromiseStatus
revision = '4ecdfcb34f7c'
down_revision = 'a929ef458490'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('case_assets', 'asset_compromise_status_id'):
op.add_column('case_assets',
sa.Column('asset_compromise_status_id',
sa.Integer(),
nullable=True))
# Set schema and make migration of data
t_assets = sa.Table(
'case_assets',
sa.MetaData(),
sa.Column('asset_id', sa.BigInteger, primary_key=True),
sa.Column('asset_compromise_status_id', sa.Integer, nullable=True),
sa.Column('asset_compromised', sa.Boolean, nullable=True)
)
conn = op.get_bind()
conn.execute(t_assets.update().values(
asset_compromise_status_id=CompromiseStatus.compromised.value
).where(t_assets.c.asset_compromised == True))
conn.execute(t_assets.update().values(
asset_compromise_status_id=CompromiseStatus.not_compromised.value
).where(t_assets.c.asset_compromised == False))
op.drop_column('case_assets', 'asset_compromised')
pass
def downgrade():
pass


@ -0,0 +1,75 @@
"""Add uniqueness to Tags table
Revision ID: 50f28953a485
Revises: c959c298ca00
Create Date: 2023-04-06 16:17:40.043545
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = '50f28953a485'
down_revision = 'c959c298ca00'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
# Update the CaseTags table to point to the first tag with the same title
conn.execute(text("""
WITH duplicates AS (
SELECT
MIN(id) as min_id,
tag_title
FROM
tags
GROUP BY
tag_title
HAVING
COUNT(*) > 1
),
duplicate_tags AS (
SELECT
id,
tag_title
FROM
tags
WHERE
tag_title IN (SELECT tag_title FROM duplicates)
)
UPDATE
case_tags
SET
tag_id = duplicates.min_id
FROM
duplicates,
duplicate_tags
WHERE
case_tags.tag_id = duplicate_tags.id
AND duplicate_tags.tag_title = duplicates.tag_title
AND duplicate_tags.id <> duplicates.min_id;
"""))
# Remove duplicates in the tags table
conn.execute(text("""
DELETE FROM tags
WHERE id IN (
SELECT id FROM (
SELECT id, ROW_NUMBER()
OVER (PARTITION BY tag_title ORDER BY id) AS rnum
FROM tags) t
WHERE t.rnum > 1);
"""))
# Add the unique constraint to the tag_title column
op.create_unique_constraint(None, 'tags', ['tag_title'])
pass
def downgrade():
pass
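To make the effect of the two statements above concrete, a toy re-enactment in plain Python with hypothetical data (not part of the migration):

# tags table: id -> title; 'malware' exists twice
tags = {1: "malware", 2: "malware", 3: "phishing"}
case_tags = [{"case_id": 10, "tag_id": 2}]

# Statement 1: repoint links from duplicate tags to the lowest id with the same title
first_id_by_title = {}
for tag_id in sorted(tags):
    first_id_by_title.setdefault(tags[tag_id], tag_id)
for link in case_tags:
    link["tag_id"] = first_id_by_title[tags[link["tag_id"]]]

# Statement 2: delete the duplicate tag rows themselves
tags = {tid: title for tid, title in tags.items() if first_id_by_title[title] == tid}

assert case_tags == [{"case_id": 10, "tag_id": 1}]
assert tags == {1: "malware", 3: "phishing"}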


@ -0,0 +1,39 @@
"""Reviewer in case
Revision ID: 65168cb6cc90
Revises: e33dd011fb87
Create Date: 2023-07-09 09:01:39.243870
"""
from alembic import op
import sqlalchemy as sa
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '65168cb6cc90'
down_revision = 'e33dd011fb87'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('cases', 'reviewer_id'):
op.add_column('cases',
sa.Column('reviewer_id', sa.Integer(), nullable=True)
)
op.create_foreign_key('fkey_cases_reviewer_id', 'cases', 'user', ['reviewer_id'], ['id'])
if not _table_has_column('cases', 'review_status_id'):
op.add_column('cases',
sa.Column('review_status_id', sa.Integer(), nullable=True)
)
op.create_foreign_key('fkey_cases_review_status_id', 'cases', 'review_status', ['review_status_id'], ['id'])
pass
def downgrade():
pass


@ -0,0 +1,79 @@
"""Add IOC type
Revision ID: 6a3b3b627d45
Revises:
Create Date: 2022-01-01 23:40:35.283005
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = '6a3b3b627d45'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# IOC types is created by post init if not existing
# Now issue changes on existing tables and migrate IOC types
# Add column ioc_type_id to IOC if not existing
if not _table_has_column('ioc', 'ioc_type_id'):
op.add_column('ioc',
sa.Column('ioc_type_id', sa.Integer, sa.ForeignKey('ioc_type.type_id'))
)
# Add the foreign key of ioc_type to ioc
op.create_foreign_key(
constraint_name='ioc_ioc_type_id',
source_table="ioc",
referent_table="ioc_type",
local_cols=["ioc_type_id"],
remote_cols=["type_id"])
if _table_has_column('ioc', 'ioc_type'):
# Set schema and make migration of data
t_ioc = sa.Table(
'ioc',
sa.MetaData(),
sa.Column('ioc_id', sa.Integer, primary_key=True),
sa.Column('ioc_value', sa.Text),
sa.Column('ioc_type', sa.Unicode(length=50)),
sa.Column('ioc_type_id', sa.ForeignKey('ioc_type.type_id')),
sa.Column('ioc_tags', sa.Text),
sa.Column('user_id', sa.ForeignKey('user.id')),
sa.Column('ioc_misp', sa.Text),
sa.Column('ioc_tlp_id', sa.ForeignKey('tlp.tlp_id'))
)
to_update = [('Domain', 'domain'), ('IP', 'ip-any'), ('Hash', 'other'), ('File', 'filename'),
('Path', 'file-path'), ('Account', 'account'), ("Other", 'other')]
# Migrate existing IOCs
for src_up, dst_up in to_update:
conn = op.get_bind()
res = conn.execute(f"select ioc_id from ioc where ioc_type = '{src_up}';")
results = res.fetchall()
res = conn.execute(f"select type_id from ioc_type where type_name = '{dst_up}';")
e_info = res.fetchall()
if e_info:
domain_id = e_info[0][0]
for res in results:
conn.execute(t_ioc.update().where(t_ioc.c.ioc_id == res[0]).values(
ioc_type_id=domain_id
))
op.drop_column(
table_name='ioc',
column_name='ioc_type'
)
pass
def downgrade():
pass


@ -0,0 +1,58 @@
"""Add server settings updates info
Revision ID: 79a9a54e8f9d
Revises: ff917e2ab02e
Create Date: 2022-05-05 18:39:19.027828
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '79a9a54e8f9d'
down_revision = 'ff917e2ab02e'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('server_settings', 'has_updates_available'):
op.add_column('server_settings',
sa.Column('has_updates_available', sa.Boolean)
)
t_ua = sa.Table(
'server_settings',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('has_updates_available', sa.Boolean)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
has_updates_available=False
))
if not _table_has_column('server_settings', 'enable_updates_check'):
op.add_column('server_settings',
sa.Column('enable_updates_check', sa.Boolean)
)
t_ua = sa.Table(
'server_settings',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('enable_updates_check', sa.Boolean)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
enable_updates_check=True
))
pass
def downgrade():
pass


@ -0,0 +1,28 @@
"""Migrate user int to big int
Revision ID: 7cc588444b79
Revises: 92ecbf0f6d10
Create Date: 2022-06-14 08:28:59.027411
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7cc588444b79'
down_revision = '92ecbf0f6d10'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('user', 'id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
pass
def downgrade():
pass


@ -0,0 +1,44 @@
"""Add task log api field
Revision ID: 874ba5e5da44
Revises: c773a35c280f
Create Date: 2022-02-03 16:22:37.506019
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '874ba5e5da44'
down_revision = 'c773a35c280f'
branch_labels = None
depends_on = None
def upgrade():
# Issue changes on existing user activities table and migrate existing rows
# Add column is_from_api to user_activities if not existing and set existing ones to false
if not _table_has_column('user_activity', 'is_from_api'):
op.add_column('user_activity',
sa.Column('is_from_api', sa.Boolean)
)
t_ua = sa.Table(
'user_activity',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('is_from_api', sa.Boolean)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
is_from_api=False
))
pass
def downgrade():
pass


@ -0,0 +1,46 @@
"""Modifying case tasks to remove assignee id for instead, adding a table named task_assignee
Revision ID: 875edc4adb40
Revises: fcc375ed37d1
Create Date: 2022-07-17 14:57:22.809977
"""
from alembic import op
from app.alembic.alembic_utils import _has_table
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '875edc4adb40'
down_revision = 'fcc375ed37d1'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
# Get all users with their roles
if _has_table("case_tasks"):
if _table_has_column("case_tasks", "task_assignee_id"):
res = conn.execute(f"select id, task_assignee_id from case_tasks")
results_tasks = res.fetchall()
for task in results_tasks:
task_id = task[0]
user_id = task[1]
if not user_id:
user_id = 1
# Migrate assignees to task_assignee
conn.execute(f"insert into task_assignee (user_id, task_id) values ({user_id}, {task_id}) "
f"on conflict do nothing;")
op.drop_column(
table_name='case_tasks',
column_name='task_assignee_id'
)
def downgrade():
pass


@ -0,0 +1,42 @@
"""Add user external ID
Revision ID: 92ecbf0f6d10
Revises: cd519d2d24df
Create Date: 2022-06-13 08:59:04.860887
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = '92ecbf0f6d10'
down_revision = 'cd519d2d24df'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('user', 'external_id'):
op.add_column('user',
sa.Column('external_id', sa.Text)
)
t_ua = sa.Table(
'user',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('external_id', sa.Text)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
external_id=None
))
pass
def downgrade():
pass


@ -0,0 +1,34 @@
"""Add cases status
Revision ID: a3eb60654ec4
Revises: 3204e9116233
Create Date: 2022-11-10 07:52:22.502834
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import Integer
from sqlalchemy import text
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'a3eb60654ec4'
down_revision = '3204e9116233'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('cases', 'status_id'):
op.add_column('cases',
sa.Column('status_id', Integer, server_default=text("0"),
nullable=False)
)
pass
def downgrade():
pass


@ -0,0 +1,42 @@
"""Add activity no display field
Revision ID: a929ef458490
Revises: 1df4adfa3160
Create Date: 2022-11-21 15:26:49.088050
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = 'a929ef458490'
down_revision = '1df4adfa3160'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('user_activity', 'display_in_ui'):
op.add_column('user_activity',
sa.Column('display_in_ui', sa.Boolean, default=True)
)
t_ua = sa.Table(
'user_activity',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('display_in_ui', sa.Boolean)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
display_in_ui=True
))
pass
def downgrade():
pass


@ -0,0 +1,96 @@
"""Add IocType validation
Revision ID: ad4e0cd17597
Revises: 875edc4adb40
Create Date: 2022-08-04 15:37:44.484997
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'ad4e0cd17597'
down_revision = '875edc4adb40'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('ioc_type', 'type_validation_regex'):
op.add_column('ioc_type',
sa.Column('type_validation_regex', sa.Text)
)
if not _table_has_column('ioc_type', 'type_validation_expect'):
op.add_column('ioc_type',
sa.Column('type_validation_expect', sa.Text)
)
# Migrate known existing rows if any
migration_map = {
"authentihash": "[a-f0-9]{64}",
"filename|authentihash": ".+\|[a-f0-9]{64}",
"filename|imphash": ".+\|[a-f0-9]{32}",
"filename|md5": ".+\|[a-f0-9]{32}",
"filename|pehash": ".+\|[a-f0-9]{40}",
"filename|sha1": ".+\|[a-f0-9]{40}",
"filename|sha224": ".+\|[a-f0-9]{56}",
"filename|sha256": ".+\|[a-f0-9]{64}",
"filename|sha3-224": ".+\|[a-f0-9]{56}",
"filename|sha3-256": ".+\|[a-f0-9]{64}",
"filename|sha3-384": ".+\|[a-f0-9]{96}",
"filename|sha3-512": ".+\|[a-f0-9]{128}",
"filename|sha384": ".+\|[a-f0-9]{96}",
"filename|sha512": ".+\|[a-f0-9]{128}",
"filename|sha512/224": ".+\|[a-f0-9]{56}",
"filename|sha512/256": ".+\|[a-f0-9]{64}",
"filename|tlsh": ".+\|t?[a-f0-9]{35,}",
"git-commit-id": "[a-f0-9]{40}",
"hassh-md5": "[a-f0-9]{32}",
"hasshserver-md5": "[a-f0-9]{32}",
"imphash": "[a-f0-9]{32}",
"ja3-fingerprint-md5": "[a-f0-9]{32}",
"jarm-fingerprint": "[a-f0-9]{62}",
"md5": "[a-f0-9]{32}",
"pehash": "[a-f0-9]{40}",
"sha1": "[a-f0-9]{40}",
"sha224": "[a-f0-9]{56}",
"sha256": "[a-f0-9]{64}",
"sha3-224": "[a-f0-9]{56}",
"sha3-256": "[a-f0-9]{64}",
"sha3-384": "[a-f0-9]{96}",
"sha3-512": "[a-f0-9]{128}",
"sha384": "[a-f0-9]{96}",
"sha512": "[a-f0-9]{128}",
"sha512/224": "[a-f0-9]{56}",
"sha512/256": "[a-f0-9]{64}",
"telfhash": "[a-f0-9]{70}",
"tlsh": "^t?[a-f0-9]{35,}",
"x509-fingerprint-md5": "[a-f0-9]{32}",
"x509-fingerprint-sha1": "[a-f0-9]{40}",
"x509-fingerprint-sha256": "[a-f0-9]{64}"
}
t_tasks = sa.Table(
'ioc_type',
sa.MetaData(),
sa.Column('type_id', sa.Integer, primary_key=True),
sa.Column('type_name', sa.Text),
sa.Column('type_validation_regex', sa.Text),
sa.Column('type_validation_expect', sa.Text),
)
conn = op.get_bind()
for type_name in migration_map:
conn.execute(t_tasks.update().where(t_tasks.c.type_name == type_name).values(
type_validation_regex=migration_map[type_name]
))
def downgrade():
op.drop_column('ioc_type', 'type_validation_regex')
op.drop_column('ioc_type', 'type_validation_expect')
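For context, a small sketch of how a type_validation_regex populated above might be applied to a candidate IOC value; the helper name and the full-match, lowercase-input convention are assumptions, not taken from this commit:

import re

def ioc_value_matches(value, validation_regex):
    # Hypothetical check: the whole value must satisfy the type's regex
    return re.fullmatch(validation_regex, value.lower()) is not None

# e.g. ioc_value_matches("D41D8CD98F00B204E9800998ECF8427E", "[a-f0-9]{32}") -> True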


@ -0,0 +1,42 @@
"""Add dark mode
Revision ID: b664ca1203a4
Revises: 2df770a4989c
Create Date: 2022-03-06 18:00:46.251407
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'b664ca1203a4'
down_revision = '2df770a4989c'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('user', 'in_dark_mode'):
op.add_column('user',
sa.Column('in_dark_mode', sa.Boolean)
)
t_ua = sa.Table(
'user',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('in_dark_mode', sa.Boolean)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
in_dark_mode=False
))
pass
def downgrade():
pass


@ -0,0 +1,118 @@
"""Update tasks status
Revision ID: c773a35c280f
Revises: 0db700644a4f
Create Date: 2022-01-18 07:51:43.714021
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'c773a35c280f'
down_revision = '0db700644a4f'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('case_tasks', 'task_status_id'):
op.add_column('case_tasks',
sa.Column('task_status_id', sa.Integer, sa.ForeignKey('task_status.id'))
)
# Add the foreign key of task_status to case_tasks
op.create_foreign_key(
constraint_name='task_task_status_id',
source_table="case_tasks",
referent_table="task_status",
local_cols=["task_status_id"],
remote_cols=["id"])
if _table_has_column('case_tasks', 'task_status'):
# Set schema and make migration of data
t_tasks = sa.Table(
'case_tasks',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('task_title', sa.Text),
sa.Column('task_status', sa.Text),
sa.Column('task_status_id', sa.ForeignKey('task_status.id')),
)
to_update = ['To do', 'In progress', 'On hold', 'Done', 'Canceled']
# Migrate existing case tasks
for update in to_update:
conn = op.get_bind()
res = conn.execute(f"select id from case_tasks where task_status = '{update}';")
results = res.fetchall()
res = conn.execute(f"select id from task_status where status_name = '{update}';")
e_info = res.fetchall()
if e_info:
status_id = e_info[0][0]
for res in results:
conn.execute(t_tasks.update().where(t_tasks.c.id == res[0]).values(
task_status_id=status_id
))
op.drop_column(
table_name='case_tasks',
column_name='task_status'
)
if not _table_has_column('global_tasks', 'task_status_id'):
op.add_column('global_tasks',
sa.Column('task_status_id', sa.Integer, sa.ForeignKey('task_status.id'))
)
# Add the foreign key of task_status to global_tasks
op.create_foreign_key(
constraint_name='global_task_status_id',
source_table="global_tasks",
referent_table="task_status",
local_cols=["task_status_id"],
remote_cols=["id"])
if _table_has_column('global_tasks', 'task_status'):
# Set schema and make migration of data
tg_tasks = sa.Table(
'global_tasks',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('task_title', sa.Text),
sa.Column('task_status', sa.Text),
sa.Column('task_status_id', sa.ForeignKey('task_status.id')),
)
to_update = ['To do', 'In progress', 'On hold', 'Done', 'Canceled']
# Migrate existing global tasks
for update in to_update:
conn = op.get_bind()
res = conn.execute(f"select id from global_tasks where task_status = '{update}';")
results = res.fetchall()
res = conn.execute(f"select id from task_status where status_name = '{update}';")
e_info = res.fetchall()
if e_info:
status_id = e_info[0][0]
for res in results:
conn.execute(tg_tasks.update().where(tg_tasks.c.id == res[0]).values(
task_status_id=status_id
))
op.drop_column(
table_name='global_tasks',
column_name='task_status'
)
pass
def downgrade():
pass


@ -0,0 +1,27 @@
"""Evidence file_size int to bigint
Revision ID: c832bd69f827
Revises: b664ca1203a4
Create Date: 2022-04-11 21:49:30.739817
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'c832bd69f827'
down_revision = 'b664ca1203a4'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('case_received_file', 'file_size',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
pass
def downgrade():
pass


@ -0,0 +1,127 @@
"""Add classification, history, closing note and initial date
Revision ID: c959c298ca00
Revises: 4ecdfcb34f7c
Create Date: 2023-03-03 23:49:16.360494
"""
import logging
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
from app.alembic.alembic_utils import _table_has_column, _has_table
# revision identifiers, used by Alembic.
revision = 'c959c298ca00'
down_revision = '4ecdfcb34f7c'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
cases_table = sa.Table(
'cases',
sa.MetaData(),
sa.Column('case_id', sa.Integer, primary_key=True),
sa.Column('open_date', sa.DateTime, nullable=False),
sa.Column('initial_date', sa.DateTime, nullable=False),
sa.Column('user_id', sa.Integer, sa.ForeignKey('user.id'), nullable=False),
sa.Column('owner_id', sa.Integer, sa.ForeignKey('user.id'), nullable=False),
sa.Column('classification_id', sa.Integer, sa.ForeignKey('case_classification.id'), nullable=False)
)
res = conn.execute(f"select case_id, open_date, user_id from \"cases\";")
results = res.fetchall()
ras = conn.execute(f"select id from \"user\" ORDER BY id ASC LIMIT 1;")
user = ras.fetchone()
if not _table_has_column('cases', 'modification_history'):
op.add_column('cases',
sa.Column('modification_history', sa.JSON)
)
if not _table_has_column('cases', 'initial_date'):
op.add_column('cases',
sa.Column('initial_date', sa.DateTime, nullable=False, server_default=sa.text("now()"))
)
for case in results:
conn.execute(cases_table.update().where(cases_table.c.case_id == case[0]).values(
initial_date=case[1]
))
if not _table_has_column('cases', 'closing_note'):
op.add_column('cases',
sa.Column('closing_note', sa.Text)
)
if not _has_table('case_classification'):
op.create_table('case_classification',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text),
sa.Column('name_expanded', sa.Text),
sa.Column('creation_date', sa.DateTime, server_default=text("now()"), nullable=True),
sa.Column('created_by', sa.ForeignKey('user.id'), nullable=True)
)
op.create_foreign_key('fk_case_classification_user_id', 'case_classification', 'user', ['created_by'], ['id'])
if not _table_has_column('cases', 'classification_id'):
classification_table = sa.Table(
'case_classification',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text),
sa.Column('name_expanded', sa.Text),
sa.Column('creation_date', sa.DateTime, server_default=text("now()"), nullable=True),
sa.Column('created_by', sa.ForeignKey('user.id'), nullable=True),
keep_existing=True
)
other_classification = sa.select([classification_table.c.id]).where(
classification_table.c.name == 'other:other')
if conn.execute(other_classification).fetchone() is None:
# Create other:other for migration - the rest of the data will be handled by post init
op.execute(f"insert into case_classification (name, name_expanded, description, created_by_id) "
f"values ('other:other', 'Other: Other', 'All incidents that do not fit in one of the given "
f"categories should be put into this class. If the number of incidents in this category "
f"increases, it is an indicator that the classification scheme must be revised.', {user[0]});")
other_classification = sa.select([classification_table.c.id]).where(
classification_table.c.name == 'other:other')
other_classification_id = conn.execute(other_classification).fetchone()[0]
else:
other_classification_id = conn.execute(other_classification).fetchone()[0]
op.add_column('cases',
sa.Column('classification_id', sa.Integer, sa.ForeignKey('case_classification.id'),
server_default=text(str(other_classification_id))),
)
cid_list = [c[0] for c in results]
op.execute(cases_table.update().where(cases_table.c.case_id.in_(cid_list)).values(
classification_id=other_classification_id
))
if not _table_has_column('cases', 'owner_id'):
op.add_column('cases',
sa.Column('owner_id', sa.Integer, sa.ForeignKey('user.id'),
server_default=text("1")),
)
for case in results:
conn.execute(cases_table.update().where(cases_table.c.case_id == case[0]).values(
owner_id=case[2]
))
def downgrade():
pass


@ -0,0 +1,30 @@
"""Add created by in events
Revision ID: ca93d4b54571
Revises: 79a9a54e8f9d
Create Date: 2022-05-08 14:58:38.839651
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'ca93d4b54571'
down_revision = '79a9a54e8f9d'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('cases_events', 'modification_history'):
op.add_column('cases_events',
sa.Column('modification_history', sa.JSON)
)
pass
def downgrade():
pass


@ -0,0 +1,60 @@
"""Password policy edition
Revision ID: cd519d2d24df
Revises: ca93d4b54571
Create Date: 2022-05-25 18:09:08.741619
"""
import sqlalchemy as sa
from alembic import op
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'cd519d2d24df'
down_revision = 'ca93d4b54571'
branch_labels = None
depends_on = None
def upgrade():
columns = {
"password_policy_min_length": sa.Integer,
"password_policy_upper_case": sa.Boolean,
"password_policy_lower_case": sa.Boolean,
"password_policy_digit": sa.Boolean,
"password_policy_special_chars": sa.Text,
}
for col in columns:
if not _table_has_column('server_settings', col):
op.add_column('server_settings',
sa.Column(col, columns[col])
)
t_ua = sa.Table(
'server_settings',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('password_policy_min_length', sa.Integer),
sa.Column('password_policy_upper_case', sa.Boolean),
sa.Column('password_policy_lower_case', sa.Boolean),
sa.Column('password_policy_digit', sa.Boolean),
sa.Column('password_policy_special_chars', sa.Text)
)
conn = op.get_bind()
conn.execute(t_ua.update().values(
password_policy_min_length=12,
password_policy_upper_case=True,
password_policy_lower_case=True,
password_policy_digit=True,
password_policy_special_chars=''
))
pass
def downgrade():
pass


@ -0,0 +1,39 @@
"""Add service account and minibar in users
Revision ID: db93d5c4c0aa
Revises: 2a4a8330b908
Create Date: 2023-04-26 14:14:47.990230
"""
from alembic import op
import sqlalchemy as sa
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'db93d5c4c0aa'
down_revision = '2a4a8330b908'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('user', 'is_service_account'):
op.add_column('user',
sa.Column('is_service_account', sa.Boolean, default=False))
if not _table_has_column('user', 'has_mini_sidebar'):
op.add_column('user',
sa.Column('has_mini_sidebar', sa.Boolean, default=False))
pass
def downgrade():
if _table_has_column('user', 'is_service_account'):
op.drop_column('user', 'is_service_account')
if _table_has_column('user', 'has_mini_sidebar'):
op.drop_column('user', 'has_mini_sidebar')
pass


@ -0,0 +1,30 @@
"""resolution status in alerts
Revision ID: e33dd011fb87
Revises: 00b43bc4e8ac
Create Date: 2023-07-03 13:28:08.882759
"""
from alembic import op
import sqlalchemy as sa
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'e33dd011fb87'
down_revision = '00b43bc4e8ac'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('alerts', 'alert_resolution_status_id'):
op.add_column('alerts', sa.Column('alert_resolution_status_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'alerts', 'alert_resolution_status',
['alert_resolution_status_id'], ['resolution_status_id'])
pass
def downgrade():
pass


@ -0,0 +1,35 @@
"""Add alert in comments
Revision ID: f727badcc4e1
Revises: 50f28953a485
Create Date: 2023-04-12 09:28:58.993723
"""
from alembic import op
import sqlalchemy as sa
from app.alembic.alembic_utils import _table_has_column
# revision identifiers, used by Alembic.
revision = 'f727badcc4e1'
down_revision = '50f28953a485'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column("comments", "comment_alert_id"):
op.add_column('comments',
sa.Column('comment_alert_id',
sa.BigInteger(), nullable=True)
)
op.create_foreign_key(None,
'comments', 'alerts',
['comment_alert_id'], ['alert_id'])
def downgrade():
op.drop_constraint(None, 'comments', type_='foreignkey')
op.drop_column('comments', 'comment_alert_id')
pass


@ -0,0 +1,232 @@
"""Access control migration
Revision ID: fcc375ed37d1
Revises: 7cc588444b79
Create Date: 2022-06-14 17:01:29.205520
"""
import uuid
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import UUID
from app.alembic.alembic_utils import _has_table
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
from app.iris_engine.access_control.utils import ac_get_mask_analyst
from app.iris_engine.access_control.utils import ac_get_mask_case_access_level_full
from app.iris_engine.access_control.utils import ac_get_mask_full_permissions
revision = 'fcc375ed37d1'
down_revision = '7cc588444b79'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
# Add UUID to users
if not _table_has_column('user', 'uuid'):
op.add_column('user',
sa.Column('uuid', UUID(as_uuid=True), default=uuid.uuid4, nullable=False,
server_default=sa.text('gen_random_uuid()'))
)
# Add UUID to existing users
t_users = sa.Table(
'user',
sa.MetaData(),
sa.Column('id', sa.BigInteger(), primary_key=True),
sa.Column('uuid', UUID(as_uuid=True), default=uuid.uuid4, nullable=False)
)
res = conn.execute(f"select id from \"user\";")
results = res.fetchall()
for user in results:
conn.execute(t_users.update().where(t_users.c.id == user[0]).values(
uuid=uuid.uuid4()
))
# Add all the new access control tables if they don't exist
if not _has_table('user_case_access'):
op.create_table('user_case_access',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('user_id', sa.BigInteger(), sa.ForeignKey('user.id'), nullable=False),
sa.Column('case_id', sa.BigInteger(), sa.ForeignKey('cases.case_id'), nullable=False),
sa.Column('access_level', sa.BigInteger()),
keep_existing=True
)
op.create_foreign_key('fk_user_case_access_user_id', 'user_case_access', 'user', ['user_id'], ['id'])
op.create_foreign_key('fk_user_case_access_case_id', 'user_case_access', 'cases', ['case_id'], ['case_id'])
op.create_unique_constraint('uq_user_case_access_user_id_case_id', 'user_case_access', ['user_id', 'case_id'])
if not _has_table('user_case_effective_access'):
op.create_table('user_case_effective_access',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('user_id', sa.BigInteger(), sa.ForeignKey('user.id'), nullable=False),
sa.Column('case_id', sa.BigInteger(), sa.ForeignKey('cases.case_id'), nullable=False),
sa.Column('access_level', sa.BigInteger()),
keep_existing=True
)
op.create_foreign_key('fk_user_case_effective_access_user_id', 'user_case_effective_access',
'user', ['user_id'], ['id'])
op.create_foreign_key('fk_user_case_effective_access_case_id', 'user_case_effective_access',
'cases', ['case_id'], ['case_id'])
op.create_unique_constraint('uq_user_case_effective_access_user_id_case_id',
'user_case_effective_access', ['user_id', 'case_id'])
if not _has_table('group_case_access'):
op.create_table('group_case_access',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('group_id', sa.BigInteger(), sa.ForeignKey('groups.group_id'), nullable=False),
sa.Column('case_id', sa.BigInteger(), sa.ForeignKey('cases.case_id'), nullable=False),
sa.Column('access_level', sa.BigInteger(), nullable=False),
keep_existing=True
)
op.create_foreign_key('group_case_access_group_id_fkey', 'group_case_access', 'groups',
['group_id'], ['group_id'])
op.create_foreign_key('group_case_access_case_id_fkey', 'group_case_access', 'cases',
['case_id'], ['case_id'])
op.create_unique_constraint('group_case_access_unique', 'group_case_access', ['group_id', 'case_id'])
if not _has_table('groups'):
op.create_table('groups',
sa.Column('group_id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('group_uuid', UUID(as_uuid=True), default=uuid.uuid4, nullable=False,
server_default=sa.text('gen_random_uuid()'), unique=True),
sa.Column('group_name', sa.Text(), nullable=False),
sa.Column('group_description', sa.Text(), nullable=False),
sa.Column('group_permissions', sa.BigInteger(), nullable=False),
sa.Column('group_auto_follow', sa.Boolean(), nullable=False, default=False),
sa.Column('group_auto_follow_access_level', sa.BigInteger(), nullable=True),
keep_existing=True
)
op.create_unique_constraint('groups_group_name_unique', 'groups', ['group_name'])
if not _has_table('organisations'):
op.create_table('organisations',
sa.Column('org_id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('org_uuid', UUID(as_uuid=True), default=uuid.uuid4, nullable=False,
server_default=sa.text('gen_random_uuid()'), unique=True),
sa.Column('org_name', sa.Text(), nullable=False),
sa.Column('org_description', sa.Text(), nullable=False),
sa.Column('org_url', sa.Text(), nullable=False),
sa.Column('org_email', sa.Text(), nullable=False),
sa.Column('org_logo', sa.Text(), nullable=False),
sa.Column('org_type', sa.Text(), nullable=False),
sa.Column('org_sector', sa.Text(), nullable=False),
sa.Column('org_nationality', sa.Text(), nullable=False),
keep_existing=True
)
op.create_unique_constraint('organisation_name_unique', 'organisations', ['org_name'])
if not _has_table('organisation_case_access'):
op.create_table('organisation_case_access',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('org_id', sa.BigInteger(), sa.ForeignKey('organisations.org_id'), nullable=False),
sa.Column('case_id', sa.BigInteger(), sa.ForeignKey('cases.case_id'), nullable=False),
sa.Column('access_level', sa.BigInteger(), nullable=False),
keep_existing=True
)
op.create_foreign_key('organisation_case_access_org_id_fkey', 'organisation_case_access',
'organisations', ['org_id'], ['org_id'])
op.create_foreign_key('organisation_case_access_case_id_fkey', 'organisation_case_access', 'cases',
['case_id'], ['case_id'])
op.create_unique_constraint('organisation_case_access_unique', 'organisation_case_access',
['org_id', 'case_id'])
if not _has_table('user_organisation'):
op.create_table('user_organisation',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('user_id', sa.BigInteger(), sa.ForeignKey('user.id'), nullable=False),
sa.Column('org_id', sa.BigInteger(), sa.ForeignKey('organisations.org_id'), nullable=False),
sa.Column('is_primary_org', sa.Boolean(), nullable=False),
keep_existing=True
)
op.create_foreign_key('user_organisation_user_id_fkey', 'user_organisation', 'user', ['user_id'], ['id'])
op.create_foreign_key('user_organisation_org_id_fkey', 'user_organisation', 'organisations',
['org_id'], ['org_id'])
op.create_unique_constraint('user_organisation_unique', 'user_organisation', ['user_id', 'org_id'])
if not _has_table('user_group'):
op.create_table('user_group',
sa.Column('id', sa.BigInteger(), primary_key=True, nullable=False),
sa.Column('user_id', sa.BigInteger(), sa.ForeignKey('user.id'), nullable=False),
sa.Column('group_id', sa.BigInteger(), sa.ForeignKey('groups.group_id'), nullable=False),
keep_existing=True
)
op.create_foreign_key('user_group_user_id_fkey', 'user_group', 'user', ['user_id'], ['id'])
op.create_foreign_key('user_group_group_id_fkey', 'user_group', 'groups', ['group_id'], ['group_id'])
op.create_unique_constraint('user_group_unique', 'user_group', ['user_id', 'group_id'])
# Create the groups if they don't exist
res = conn.execute(f"select group_id from groups where group_name = 'Administrators';")
if res.rowcount == 0:
conn.execute(f"insert into groups (group_name, group_description, group_permissions, group_uuid, "
f"group_auto_follow, group_auto_follow_access_level) "
f"values ('Administrators', 'Administrators', '{ac_get_mask_full_permissions()}', '{uuid.uuid4()}',"
f" true, 4);")
res = conn.execute(f"select group_id from groups where group_name = 'Administrators';")
admin_group_id = res.fetchone()[0]
res = conn.execute(f"select group_id from groups where group_name = 'Analysts';")
if res.rowcount == 0:
conn.execute(f"insert into groups (group_name, group_description, group_permissions, group_uuid, "
f"group_auto_follow, group_auto_follow_access_level) "
f"values ('Analysts', 'Standard Analysts', '{ac_get_mask_analyst()}', '{uuid.uuid4()}', true, 4);")
res = conn.execute(f"select group_id from groups where group_name = 'Analysts';")
analyst_group_id = res.fetchone()[0]
# Create the organisations if they don't exist
res = conn.execute(f"select org_id from organisations where org_name = 'Default Org';")
if res.rowcount == 0:
conn.execute(f"insert into organisations (org_name, org_description, org_url, org_email, org_logo, "
f"org_type, org_sector, org_nationality, org_uuid) values ('Default Org', 'Default Organisation', "
f"'', '', "
f"'','', '', '', '{uuid.uuid4()}');")
res = conn.execute(f"select org_id from organisations where org_name = 'Default Org';")
default_org_id = res.fetchone()[0]
# Give the organisation access to all the cases
res = conn.execute(f"select case_id from cases;")
result_cases = [case[0] for case in res.fetchall()]
access_level = ac_get_mask_case_access_level_full()
# Migrate the users to the new access control system
conn = op.get_bind()
# Get all users with their roles
if _has_table("user_roles"):
res = conn.execute(f"select distinct roles.name, \"user\".id from user_roles INNER JOIN \"roles\" ON "
f"\"roles\".id = user_roles.role_id INNER JOIN \"user\" ON \"user\".id = user_roles.user_id;")
results_users = res.fetchall()
for user_id in results_users:
role_name = user_id[0]
user_id = user_id[1]
# Migrate user to groups
if role_name == 'administrator':
conn.execute(f"insert into user_group (user_id, group_id) values ({user_id}, {admin_group_id}) "
f"on conflict do nothing;")
elif role_name == 'investigator':
conn.execute(f"insert into user_group (user_id, group_id) values ({user_id}, {analyst_group_id}) "
f"on conflict do nothing;")
# Add user to default organisation
conn.execute(f"insert into user_organisation (user_id, org_id, is_primary_org) values ({user_id}, "
f"{default_org_id}, true) on conflict do nothing;")
# Add default cases effective permissions
for case_id in result_cases:
conn.execute(f"insert into user_case_effective_access (case_id, user_id, access_level) values "
f"({case_id}, {user_id}, {access_level}) on conflict do nothing;")
op.drop_table('user_roles')
pass
def downgrade():
pass


@ -0,0 +1,85 @@
"""changed the assets_type table for custom icons
Revision ID: ff917e2ab02e
Revises: c832bd69f827
Create Date: 2022-04-21 22:14:55.815983
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
from app.alembic.alembic_utils import _table_has_column
revision = 'ff917e2ab02e'
down_revision = 'c832bd69f827'
branch_labels = None
depends_on = None
def upgrade():
if not _table_has_column('assets_type', 'asset_icon_not_compromised'):
op.add_column('assets_type',
sa.Column('asset_icon_not_compromised', sa.String(255))
)
if not _table_has_column('assets_type', 'asset_icon_compromised'):
op.add_column('assets_type',
sa.Column('asset_icon_compromised', sa.String(255))
)
t_assets_type = sa.Table(
'assets_type',
sa.MetaData(),
sa.Column('asset_id', sa.Integer, primary_key=True),
sa.Column('asset_name', sa.String(155)),
sa.Column('asset_icon_not_compromised', sa.String(255)),
sa.Column('asset_icon_compromised', sa.String(255))
)
# Migrate existing Asset_types
conn = op.get_bind()
res = conn.execute("SELECT asset_id, asset_name FROM public.assets_type;")
results = res.fetchall()
if results:
for res in results:
icon_not_compromised, icon_compromised = _get_icons(res[1])
conn.execute(t_assets_type.update().where(t_assets_type.c.asset_id == res[0]).values(
asset_icon_not_compromised=icon_not_compromised,
asset_icon_compromised=icon_compromised
))
def downgrade():
pass
def _get_icons(asset_name):
assets = {
"Account": ("Generic Account", "user.png", "ioc_user.png"),
"Firewall": ("Firewall", "firewall.png", "ioc_firewall.png"),
"Linux - Server": ("Linux server", "server.png", "ioc_server.png"),
"Linux - Computer": ("Linux computer", "desktop.png", "ioc_desktop.png"),
"Linux Account": ("Linux Account", "user.png", "ioc_user.png"),
"Mac - Computer": ("Mac computer", "desktop.png", "ioc_desktop.png"),
"Phone - Android": ("Android Phone", "phone.png", "ioc_phone.png"),
"Phone - IOS": ("Apple Phone", "phone.png", "ioc_phone.png"),
"Windows - Computer": ("Standard Windows Computer", "windows_desktop.png", "ioc_windows_desktop.png"),
"Windows - Server": ("Standard Windows Server", "windows_server.png", "ioc_windows_server.png"),
"Windows - DC": ("Domain Controller", "windows_server.png", "ioc_windows_server.png"),
"Router": ("Router", "router.png", "ioc_router.png"),
"Switch": ("Switch", "switch.png", "ioc_switch.png"),
"VPN": ("VPN", "vpn.png", "ioc_vpn.png"),
"WAF": ("WAF", "firewall.png", "ioc_firewall.png"),
"Windows Account - Local": ("Windows Account - Local", "user.png", "ioc_user.png"),
"Windows Account - Local - Admin": ("Windows Account - Local - Admin", "user.png", "ioc_user.png"),
"Windows Account - AD": ("Windows Account - AD", "user.png", "ioc_user.png"),
"Windows Account - AD - Admin": ("Windows Account - AD - Admin", "user.png", "ioc_user.png"),
"Windows Account - AD - krbtgt": ("Windows Account - AD - krbtgt", "user.png", "ioc_user.png"),
"Windows Account - AD - Service": ("Windows Account - AD - krbtgt", "user.png", "ioc_user.png")
}
if assets.get(asset_name):
return assets.get(asset_name)[1], assets.get(asset_name)[2]
else:
return "question-mark.png","ioc_question-mark.png"