first sync
2025-03-04 07:59:21 +01:00
parent 9cdcf486b6
commit 506716e703
1450 changed files with 577316 additions and 62 deletions


@@ -0,0 +1,133 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# contact@dfir-iris.org
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import random
import string
import ldap3.core.exceptions
import ssl
from ldap3 import Connection
from ldap3 import Server
from ldap3 import Tls
from ldap3.utils import conv
from app import app
from app.datamgmt.manage.manage_users_db import get_active_user_by_login
from app.datamgmt.manage.manage_users_db import create_user
from app.datamgmt.manage.manage_users_db import add_user_to_group
from app.datamgmt.manage.manage_groups_db import get_group_by_name
log = app.logger
def _get_unique_identifier(user_login):
if app.config.get('LDAP_AUTHENTICATION_TYPE').lower() == 'ntlm':
return user_login[user_login.find('\\')+1:]
return user_login
def _provision_user(connection, user_login):
if get_active_user_by_login(user_login):
return
search_base = app.config.get('LDAP_SEARCH_DN')
attribute_unique_identifier = app.config.get('LDAP_ATTRIBUTE_IDENTIFIER')
unique_identifier = conv.escape_filter_chars(_get_unique_identifier(user_login))
attribute_display_name = app.config.get('LDAP_ATTRIBUTE_DISPLAY_NAME')
attribute_mail = app.config.get('LDAP_ATTRIBUTE_MAIL')
attributes = []
if attribute_display_name:
attributes.append(attribute_display_name)
if attribute_mail:
attributes.append(attribute_mail)
connection.search(search_base, f'({attribute_unique_identifier}={unique_identifier})', attributes=attributes)
entry = connection.entries[0]
if attribute_display_name:
user_name = entry[attribute_display_name].value
else:
user_name = user_login
if attribute_mail:
user_email = entry[attribute_mail].value
else:
user_email = f'{user_login}@ldap'
log.info(f'Provisioning user "{user_login}" which is present in LDAP but not yet in database.')
# TODO the user password is chosen randomly
# ideally it should be possible to create a user without providing any password
# TODO to create the user password, we use the same code as the one to generate the administrator password in post_init.py
# => should factor and reuse this code bit as a function
# => also, it should probably be more secure to use the secrets module (instead of random)
password = ''.join(random.choices(string.printable[:-6], k=16))
# TODO It seems email unicity is required (a fixed email causes a problem at the second account creation)
# The email either comes from the ldap or is forged from the login to ensure unicity
user = create_user(user_name, user_login, password, user_email, True)
initial_group = get_group_by_name(app.config.get('IRIS_NEW_USERS_DEFAULT_GROUP'))
add_user_to_group(user.id, initial_group.group_id)
def ldap_authenticate(ldap_user_name, ldap_user_pwd):
"""
Authenticate to the LDAP server
"""
if app.config.get('LDAP_AUTHENTICATION_TYPE').lower() != 'ntlm':
ldap_user_name = conv.escape_filter_chars(ldap_user_name)
ldap_user = f"{app.config.get('LDAP_USER_PREFIX')}{ldap_user_name.strip()}{ ','+app.config.get('LDAP_USER_SUFFIX') if app.config.get('LDAP_USER_SUFFIX') else ''}"
else:
ldap_user = f"{ldap_user_name.strip()}"
if app.config.get('LDAP_CUSTOM_TLS_CONFIG') is True:
tls_configuration = Tls(validate=ssl.CERT_REQUIRED,
version=app.config.get('LDAP_TLS_VERSION'),
local_certificate_file=app.config.get('LDAP_SERVER_CERTIFICATE'),
local_private_key_file=app.config.get('LDAP_PRIVATE_KEY'),
local_private_key_password=app.config.get('LDAP_PRIVATE_KEY_PASSWORD'),
ca_certs_file=app.config.get('LDAP_CA_CERTIFICATE')
)
server = Server(f'{app.config.get("LDAP_CONNECT_STRING")}',
use_ssl=app.config.get('LDAP_USE_SSL'),
tls=tls_configuration)
else:
server = Server(f'{app.config.get("LDAP_CONNECT_STRING")}',
use_ssl=app.config.get('LDAP_USE_SSL'))
conn = Connection(server,
user=ldap_user,
password=ldap_user_pwd,
auto_referrals=False,
authentication=app.config.get('LDAP_AUTHENTICATION_TYPE'))
try:
if not conn.bind():
log.error(f"Cannot bind to ldap server: {conn.last_error} ")
return False
if app.config.get('AUTHENTICATION_CREATE_USER_IF_NOT_EXIST'):
_provision_user(conn, ldap_user_name)
except ldap3.core.exceptions.LDAPInvalidCredentialsResult as e:
        log.error(f'Wrong credentials. Error: {e.__str__()}')
return False
except Exception as e:
raise Exception(e.__str__())
log.info(f"Successful authenticated user")
return True
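
For reference, a minimal call sketch; the import path is an assumption (file names are not shown in this diff) and the credentials are illustrative:

# Hypothetical usage sketch: the module path and credentials are assumptions.
from app import app
from app.iris_engine.access_control.ldap_handler import ldap_authenticate

with app.app_context():
    if ldap_authenticate('jdoe', 's3cret'):
        print('Bind OK - user provisioned on first login if enabled')
    else:
        print('Bind failed')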

File diff suppressed because it is too large


@@ -0,0 +1,74 @@
#!/usr/bin/env python3
#
# IRIS Core Code
# contact@dfir-iris.org
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import subprocess
from datetime import datetime
from pathlib import Path
from app import app
log = app.logger
def backup_iris_db():
logs = []
backup_dir = Path(app.config.get('BACKUP_PATH')) / "database"
try:
if not backup_dir.is_dir():
backup_dir.mkdir()
except Exception as e:
logs.append('Unable to create backup directory')
logs.append(str(e))
return True, logs
backup_file = Path(app.config.get('BACKUP_PATH')) / "database" / "backup-{}.sql.gz".format(
datetime.now().strftime("%Y-%m-%d_%H%M%S"))
try:
        logs.append('Saving database')
with open(backup_file, 'w') as backup:
completed_process = subprocess.run(
[f'{app.config.get("PG_CLIENT_PATH")}/pg_dump', '-h',
app.config.get('PG_SERVER'), '-p',
app.config.get('PG_PORT'),
'-U', app.config.get('PGA_ACCOUNT'),
'--compress=9', '-c', '-O', '--if-exists', app.config.get('PG_DB')],
stdout=backup,
env={'PGPASSWORD': app.config.get('PGA_PASSWD')},
check=True
)
except Exception as e:
logs.append('Something went wrong backing up DB')
logs.append(str(e))
return True, logs
try:
completed_process.check_returncode()
except subprocess.CalledProcessError as e:
logs.append('Something went wrong backing up DB')
logs.append(str(e))
return True, logs
    if backup_file.is_file() and backup_file.stat().st_size != 0:
        logs.append(f'Backup completed: {backup_file}')
        return False, logs

    logs.append('Backup file was not created or is empty')
    return True, logs
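
Note the inverted boolean convention: the function returns (has_error, logs), so True signals failure. A short usage sketch, assuming the module's log handle as defined above:

# Usage sketch for backup_iris_db(); True means the backup failed.
has_error, logs = backup_iris_db()
for line in logs:
    log.info(line)
if has_error:
    log.error('Database backup failed')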


@@ -0,0 +1,232 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# DFIR IRIS
# contact@dfir-iris.org
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import random
import string
from flask_login import current_user
from app import app
from app import bc
from app import db
from app.datamgmt.manage.manage_groups_db import add_case_access_to_group
from app.datamgmt.manage.manage_users_db import add_user_to_group
from app.datamgmt.manage.manage_users_db import add_user_to_organisation
from app.datamgmt.manage.manage_users_db import user_exists
from app.iris_engine.access_control.utils import ac_add_users_multi_effective_access
from app.models import Cases
from app.models import Client
from app.models import get_or_create
from app.models.authorization import CaseAccessLevel
from app.models.authorization import User
log = app.logger
def protect_demo_mode_user(user):
if app.config.get('DEMO_MODE_ENABLED') != 'True':
return False
users_p = [f'user_std_{i}' for i in range(1, int(app.config.get('DEMO_USERS_COUNT', 10)))]
users_p += [f'adm_{i}' for i in range(1, int(app.config.get('DEMO_ADM_COUNT', 4)))]
if current_user.id != 1 and user.id == 1:
return True
if user.user in users_p:
return True
return False
def protect_demo_mode_group(group):
if app.config.get('DEMO_MODE_ENABLED') != 'True':
return False
if current_user.id != 1 and group.group_id in [1, 2]:
return True
return False
def gen_demo_admins(count, seed_adm):
random.seed(seed_adm, version=2)
for i in range(1, count):
yield f'Adm {i}',\
f'adm_{i}', \
''.join(random.choices(string.printable[:-6], k=16)), \
''.join(random.choices(string.ascii_letters, k=64))
def gen_demo_users(count, seed_user):
random.seed(seed_user, version=2)
for i in range(1, count):
yield f'User Std {i}',\
f'user_std_{i}', \
''.join(random.choices(string.printable[:-6], k=16)), \
''.join(random.choices(string.ascii_letters, k=64))
def create_demo_users(def_org, gadm, ganalystes, users_count, seed_user, adm_count, seed_adm):
users = {
'admins': [],
'users': [],
'gadm': gadm,
'ganalystes': ganalystes
}
for name, username, pwd, api_key in gen_demo_users(users_count, seed_user):
# Create default users
user = user_exists(username, f'{username}@iris.local')
if not user:
password = bc.generate_password_hash(pwd.encode('utf-8')).decode('utf-8')
user = User(
user=username,
password=password,
email=f'{username}@iris.local',
name=name,
active=True)
user.api_key = api_key
db.session.add(user)
db.session.commit()
add_user_to_group(user_id=user.id, group_id=ganalystes.group_id)
add_user_to_organisation(user_id=user.id, org_id=def_org.org_id)
db.session.commit()
log.info(f'Created demo user: {user.user} - {pwd}')
users['users'].append(user)
for name, username, pwd, api_key in gen_demo_admins(adm_count, seed_adm):
user = user_exists(username, f'{username}@iris.local')
if not user:
password = bc.generate_password_hash(pwd.encode('utf-8')).decode('utf-8')
user = User(
user=username,
password=password,
email=f'{username}@iris.local',
name=name,
active=True)
user.api_key = api_key
db.session.add(user)
db.session.commit()
add_user_to_group(user_id=user.id, group_id=gadm.group_id)
add_user_to_organisation(user_id=user.id, org_id=def_org.org_id)
db.session.commit()
log.info(f'Created demo admin: {user.user} - {pwd}')
users['admins'].append(user)
return users
def create_demo_cases(users_data: dict = None, cases_count: int = 0, clients_count: int = 0):
clients = []
for client_index in range(0, clients_count):
client = get_or_create(db.session,
Client,
name=f'Client {client_index}',
description=f'Description for client {client_index}')
clients.append(client.client_id)
cases_list = []
for case_index in range(0, cases_count):
if demo_case_exists(f"Unrestricted Case {case_index}", f"SOC-{case_index}") is not None:
            log.info(f'Unrestricted case {case_index} already exists')
continue
case = Cases(
name=f"Unrestricted Case {case_index}",
description="This is a demonstration of an unrestricted case",
soc_id=f"SOC-{case_index}",
gen_report=False,
user=random.choice(users_data['users']),
client_id=random.choice(clients)
)
case.validate_on_build()
case.save()
db.session.commit()
cases_list.append(case.case_id)
log.info('Added unrestricted case {}'.format(case.name))
log.info('Setting permissions for unrestricted cases')
add_case_access_to_group(group=users_data['ganalystes'],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
add_case_access_to_group(group=users_data['gadm'],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
ac_add_users_multi_effective_access(users_list=[u.id for u in users_data['users']],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
ac_add_users_multi_effective_access(users_list=[u.id for u in users_data['admins']],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
cases_list = []
for case_index in range(0, int(cases_count/2)):
if demo_case_exists(f"Restricted Case {case_index}", f"SOC-RSTRCT-{case_index}") is not None:
log.info(f'Restricted case {case_index} already exists')
continue
case = Cases(
name=f"Restricted Case {case_index}",
description="This is a demonstration of a restricted case that shouldn't be visible to analyst",
soc_id=f"SOC-RSTRCT-{case_index}",
user=random.choice(users_data['admins']),
client_id=random.choice(clients)
)
case.validate_on_build()
case.save()
db.session.commit()
cases_list.append(case.case_id)
log.info('Added restricted case {}'.format(case.name))
add_case_access_to_group(group=users_data['ganalystes'],
cases_list=cases_list,
access_level=CaseAccessLevel.deny_all.value)
ac_add_users_multi_effective_access(users_list=[u.id for u in users_data['users']],
cases_list=cases_list,
access_level=CaseAccessLevel.deny_all.value)
add_case_access_to_group(group=users_data['gadm'],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
ac_add_users_multi_effective_access(users_list=[u.id for u in users_data['admins']],
cases_list=cases_list,
access_level=CaseAccessLevel.full_access.value)
log.info('Demo data created successfully')
def demo_case_exists(name, soc_id):
return db.session.query(Cases).filter(Cases.name.like(f'%{name}'),
Cases.soc_id == soc_id).first()
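
Wiring the helpers together might look like this (a sketch: def_org, gadm and ganalystes are assumed to be pre-existing organisation and group rows; counts and seeds are illustrative):

# Hypothetical bootstrap sketch for demo data; all arguments are illustrative.
users_data = create_demo_users(def_org, gadm, ganalystes,
                               users_count=10, seed_user='demo_seed_users',
                               adm_count=4, seed_adm='demo_seed_admins')
create_demo_cases(users_data, cases_count=10, clients_count=3)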


@@ -0,0 +1,618 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2022 - DFIR IRIS Team
# contact@dfir-iris.org
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import traceback
import base64
import importlib
from flask_login import current_user
from packaging import version
from pickle import dumps
from pickle import loads
from sqlalchemy import and_
from app import app
from app import celery
from app import db
from app.datamgmt.iris_engine.modules_db import get_module_config_from_hname
from app.datamgmt.iris_engine.modules_db import iris_module_add
from app.datamgmt.iris_engine.modules_db import iris_module_exists
from app.datamgmt.iris_engine.modules_db import modules_list_pipelines
from app.models import IrisHook
from app.models import IrisModule
from app.models import IrisModuleHook
from app.util import hmac_sign
from app.util import hmac_verify
from iris_interface import IrisInterfaceStatus as IStatus
log = app.logger
def check_module_compatibility(module_version):
return True
def check_pipeline_args(pipelines_args):
"""
Verify that the pipeline arguments are correct and can be used later on
:param pipelines_args: JSON pipelines
:return: Status
"""
logs = []
has_error = False
if type(pipelines_args) != dict:
return True, ["Error - Pipeline args are not json"]
if not pipelines_args.get("pipeline_internal_name"):
has_error = True
logs.append("Error - pipeline_internal_name missing from pipeline config")
if not pipelines_args.get("pipeline_human_name"):
has_error = True
logs.append("Error - pipeline_human_name missing from pipeline config")
if not pipelines_args.get("pipeline_args"):
has_error = True
logs.append("Error - pipeline_args missing from pipeline config")
if not pipelines_args.get("pipeline_update_support"):
has_error = True
logs.append("Error - pipeline_update_support missing from pipeline config")
if not pipelines_args.get("pipeline_import_support"):
has_error = True
logs.append("Error - pipeline_import_support missing from pipeline config")
return has_error, logs
def check_module_health(module_instance):
"""
Returns a status on the health of the module.
A non healthy module will not be imported
:param module_instance: Instance of the module to check
:return: Status
"""
logs = []
def dup_logs(message):
logs.append(message)
log.info(message)
if not module_instance:
return False, ['Error - cannot instantiate the module. Check server logs']
try:
dup_logs("Testing module")
dup_logs("Module name : {}".format(module_instance.get_module_name()))
if type(module_instance.get_interface_version()) != str:
mod_interface_version = str(module_instance.get_interface_version())
else:
mod_interface_version = module_instance.get_interface_version()
if not (version.parse(app.config.get('MODULES_INTERFACE_MIN_VERSION'))
<= version.parse(mod_interface_version)
<= version.parse(app.config.get('MODULES_INTERFACE_MAX_VERSION'))):
log.critical("Module interface no compatible with server. Expected "
f"{app.config.get('MODULES_INTERFACE_MIN_VERSION')} <= module "
f"<= {app.config.get('MODULES_INTERFACE_MAX_VERSION')}")
logs.append("Module interface no compatible with server. Expected "
f"{app.config.get('MODULES_INTERFACE_MIN_VERSION')} <= module "
f"<= {app.config.get('MODULES_INTERFACE_MAX_VERSION')}")
return False, logs
dup_logs("Module interface version : {}".format(module_instance.get_interface_version()))
module_type = module_instance.get_module_type()
if module_type not in ["module_pipeline", "module_processor"]:
log.critical(f"Unrecognised module type. Expected module_pipeline or module_processor, got {module_type}")
logs.append(f"Unrecognised module type. Expected module_pipeline or module_processor, got {module_type}")
return False, logs
dup_logs("Module type : {}".format(module_instance.get_module_type()))
        if not module_instance.is_providing_pipeline() and module_type == 'module_pipeline':
            log.critical("Module of type module_pipeline has no pipelines")
            logs.append("Error - Module of type module_pipeline has no pipelines")
return False, logs
if module_instance.is_providing_pipeline():
dup_logs("Module has pipeline : {}".format(module_instance.is_providing_pipeline()))
# Check the pipelines config health
has_error, llogs = check_pipeline_args(module_instance.pipeline_get_info())
logs.extend(llogs)
if has_error:
return False, logs
dup_logs("Module health validated")
return module_instance.is_ready(), logs
except Exception as e:
log.exception("Error while checking module health")
log.error(e.__str__())
logs.append(e.__str__())
return False, logs
def instantiate_module_from_name(module_name):
"""
    Instantiate a module from a name. This method is not exception-protected;
    callers must handle failures themselves.
:param module_name: Name of the module to register
:return: Class instance or None
"""
try:
mod_root_interface = importlib.import_module(module_name)
        if not mod_root_interface:
            return None, f"Could not import root module {module_name}"
except Exception as e:
msg = f"Could not import root module {module_name}: {e}"
log.error(msg)
return None, msg
# The whole concept is based on the fact that the root module provides an __iris_module_interface
# variable pointing to the interface class with which Iris can talk to
try:
mod_interface = importlib.import_module("{}.{}".format(module_name,
mod_root_interface.__iris_module_interface))
except Exception as e:
msg = f"Could not import module {module_name}: {e}"
log.error(msg)
return None, msg
    if not mod_interface:
        return None, f"Could not import module {module_name}"
# Now get a handle on the interface class
try:
cl_interface = getattr(mod_interface, mod_root_interface.__iris_module_interface)
except Exception as e:
msg = f"Could not get handle on the interface class of module {module_name}: {e}"
log.error(msg)
return None, msg
if not cl_interface:
return None, ''
# Try to instantiate the class
try:
mod_inst = cl_interface()
except Exception as e:
msg = f"Could not instantiate the class for module {module_name}: {e}"
log.error(msg)
return None, msg
return mod_inst, 'Success'
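
The __iris_module_interface convention used above implies a module package shaped roughly like this (a sketch, not part of this diff; names are illustrative):

# my_module/__init__.py (hypothetical module root)
# instantiate_module_from_name() reads this variable, imports the submodule of
# that name, then fetches the class of the same name from it.
__iris_module_interface = 'my_module_interface'

# my_module/my_module_interface.py (hypothetical interface submodule)
class my_module_interface:  # class name must match the variable's value
    def get_module_name(self):
        return 'My module'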
def configure_module_on_init(module_instance):
"""
Configure a module after instantiation, with the current configuration
:param module_instance: Instance of the module
:return: IrisInterfaceStatus
"""
if not module_instance:
return IStatus.I2InterfaceNotImplemented('Module not found')
return IStatus.I2ConfigureSuccess
def preset_init_mod_config(mod_config):
"""
Prefill the configuration with default one
:param mod_config: Configuration
:return: Tuple
"""
index = 0
for config in mod_config:
if config.get('default') is not None:
mod_config[index]["value"] = config.get('default')
index += 1
return mod_config
def get_mod_config_by_name(module_name):
"""
    Returns a module configuration based on its name
:param: module_name: Name of the module
:return: IrisInterfaceStatus
"""
data = get_module_config_from_hname(module_name)
if not data:
return IStatus.I2InterfaceNotReady(message="Module not registered")
return IStatus.I2Success(data=data)
def register_module(module_name):
"""
Register a module into IRIS
:param module_name: Name of the module to register
"""
    if not module_name:
        log.error("Provided module has no name")
        return None, "Module has no name"
try:
mod_inst, _ = instantiate_module_from_name(module_name=module_name)
if not mod_inst:
log.error("Module could not be instantiated")
return None, "Module could not be instantiated"
if iris_module_exists(module_name=module_name):
log.warning("Module already exists in Iris")
return None, "Module already exists in Iris"
# Auto parse the configuration and fill with default
log.info('Parsing configuration')
mod_config = preset_init_mod_config(mod_inst.get_init_configuration())
log.info('Adding module')
module = iris_module_add(module_name=module_name,
module_human_name=mod_inst.get_module_name(),
module_description=mod_inst.get_module_description(),
module_config=mod_config,
module_version=mod_inst.get_module_version(),
interface_version=mod_inst.get_interface_version(),
has_pipeline=mod_inst.is_providing_pipeline(),
pipeline_args=mod_inst.pipeline_get_info(),
module_type=mod_inst.get_module_type()
)
if module is None:
return None, "Unable to register module"
if mod_inst.get_module_type() == 'module_processor':
mod_inst.register_hooks(module_id=module.id)
except Exception as e:
return None, "Fatal - {}".format(e.__str__())
return module, "Module registered"
def iris_update_hooks(module_name, module_id):
"""
Update hooks upon settings update
:param module_name: Name of the module to update
:param module_id: ID of the module to update
"""
    if not module_name:
        log.error("Provided module has no name")
        return False, ["Module has no name"]
try:
        mod_inst, _ = instantiate_module_from_name(module_name=module_name)
if not mod_inst:
log.error("Module could not be instantiated")
return False, ["Module could not be instantiated"]
if mod_inst.get_module_type() == 'module_processor':
mod_inst.register_hooks(module_id=module_id)
except Exception as e:
return False, ["Fatal - {}".format(e.__str__())]
return True, ["Module updated"]
def register_hook(module_id: int, iris_hook_name: str, manual_hook_name: str = None,
run_asynchronously: bool = True):
"""
    Register a new hook into IRIS. The hook_name should be a hook well-known to IRIS. The iris_hooks table can be
    queried, or by default the hooks are declared in iris source code > source > app > post_init.
    Hooks whose names begin with on_manual_trigger_ are manual hooks: they are triggered by a user action rather
    than automatically, and manual_hook_name is the label shown in the UI for them.
    If run_asynchronously is set (the default), the action is sent to RabbitMQ and processed asynchronously.
    If set to False, the action is performed immediately, so it needs to be quick, otherwise the request stays
    pending and the user experience degrades.
:param module_id: Module ID to register
:param iris_hook_name: Well-known hook name to register to
    :param manual_hook_name: Name of the hook displayed in the UI for manual hooks
:param run_asynchronously: Set to true to queue the module action in rabbitmq
:return: Tuple
"""
module = IrisModule.query.filter(IrisModule.id == module_id).first()
if not module:
return False, [f'Module ID {module_id} not found']
is_manual_hook = False
if "on_manual_trigger_" in iris_hook_name:
is_manual_hook = True
if not manual_hook_name:
# Set default hook name
manual_hook_name = f"{module.module_name}::{iris_hook_name}"
hook = IrisHook.query.filter(IrisHook.hook_name == iris_hook_name).first()
if not hook:
return False, [f"Hook {iris_hook_name} is unknown"]
if not isinstance(is_manual_hook, bool):
return False, [f"Expected bool for is_manual_hook but got {type(is_manual_hook)}"]
if not isinstance(run_asynchronously, bool):
return False, [f"Expected bool for run_asynchronously but got {type(run_asynchronously)}"]
mod = IrisModuleHook.query.filter(
IrisModuleHook.hook_id == hook.id,
IrisModuleHook.module_id == module_id,
IrisModuleHook.manual_hook_ui_name == manual_hook_name
).first()
if not mod:
imh = IrisModuleHook()
imh.is_manual_hook = is_manual_hook
imh.wait_till_return = False
imh.run_asynchronously = run_asynchronously
imh.max_retry = 0
imh.manual_hook_ui_name = manual_hook_name
imh.hook_id = hook.id
imh.module_id = module_id
try:
db.session.add(imh)
db.session.commit()
except Exception as e:
return False, [str(e)]
return True, [f"Hook {iris_hook_name} registered"]
else:
return True, [f"Hook {iris_hook_name} already registered"]
def deregister_from_hook(module_id: int, iris_hook_name: str):
"""
    Deregister from an existing hook. The hook_name should be a hook well-known to IRIS. No error is raised if the
    hook wasn't registered in the first place.
:param module_id: Module ID to deregister
:param iris_hook_name: hook_name to deregister from
:return: IrisInterfaceStatus object
"""
log.info(f'Deregistering module #{module_id} from {iris_hook_name}')
hooks = IrisModuleHook.query.filter(
IrisModuleHook.module_id == module_id,
IrisHook.hook_name == iris_hook_name,
IrisModuleHook.hook_id == IrisHook.id
).all()
if hooks:
for hook in hooks:
log.info(f'Deregistered module #{module_id} from {iris_hook_name}')
db.session.delete(hook)
return True, ['Hook deregistered']
@celery.task(bind=True)
def task_hook_wrapper(self, module_name, hook_name, hook_ui_name, data, init_user, caseid):
"""
Wrap a hook call into a Celery task to run asynchronously
:param self: Task instance
    :param module_name: Module name to instantiate and call
:param hook_name: Name of the hook which was triggered
:param hook_ui_name: Name of the UI hook so module knows which hook was called
:param data: Data associated to the hook to process
:param init_user: User initiating the task
:param caseid: Case associated
:return: A task status JSON task_success or task_failure
"""
try:
        # Data arrives serialized, so verify the signature and deserialize it
signature, pdata = data.encode("utf-8").split(b" ")
is_verified = hmac_verify(signature, pdata)
if is_verified is False:
log.warning("data argument has not been correctly serialised")
raise Exception('Unable to instantiate target module. Data has not been correctly serialised')
deser_data = loads(base64.b64decode(pdata))
except Exception as e:
log.exception(e)
raise Exception(e)
try:
        # The received objects are detached from any session, so we re-attach
        # them to the task's session before handing them to the module
        _obj = []
if isinstance(deser_data, list):
_obj = []
for dse_data in deser_data:
obj = db.session.merge(dse_data)
db.session.commit()
_obj.append(obj)
elif isinstance(deser_data, str) or isinstance(deser_data, int):
_obj = [deser_data]
elif isinstance(deser_data, dict):
_obj = [deser_data]
else:
_obj_a = db.session.merge(deser_data)
db.session.commit()
_obj.append(_obj_a)
except Exception as e:
log.exception(e)
raise Exception(e)
log.info(f'Calling module {module_name} for hook {hook_name}')
try:
mod_inst, _ = instantiate_module_from_name(module_name=module_name)
if mod_inst:
task_status = mod_inst.hooks_handler(hook_name, hook_ui_name, data=_obj)
# Recommit the changes made by the module
db.session.commit()
else:
raise Exception('Unable to instantiate target module')
except Exception as e:
msg = f"Failed to run hook {hook_name} with module {module_name}. Error {str(e)}"
log.critical(msg)
log.exception(e)
task_status = IStatus.I2Error(message=msg, logs=[traceback.format_exc()], user=init_user, caseid=caseid)
return task_status
def call_modules_hook(hook_name: str, data: any, caseid: int, hook_ui_name: str = None, module_name: str = None) -> any:
"""
Calls modules which have registered the specified hook
:raises: Exception if hook name doesn't exist. This shouldn't happen
:param hook_name: Name of the hook to call
:param hook_ui_name: UI name of the hook
:param data: Data associated with the hook
:param module_name: Name of the module to call. If None, all modules matching the hook will be called
:param caseid: Case ID
:return: Any
"""
hook = IrisHook.query.filter(IrisHook.hook_name == hook_name).first()
if not hook:
log.critical(f'Hook name {hook_name} not found')
raise Exception(f'Hook name {hook_name} not found')
if hook_ui_name:
condition = and_(
IrisModule.is_active == True,
IrisModuleHook.hook_id == hook.id,
IrisModuleHook.manual_hook_ui_name == hook_ui_name
)
else:
condition = and_(
IrisModule.is_active == True,
IrisModuleHook.hook_id == hook.id
)
if module_name:
condition = and_(
condition,
IrisModule.module_name == module_name
)
modules = IrisModuleHook.query.with_entities(
IrisModuleHook.run_asynchronously,
IrisModule.module_name,
IrisModuleHook.manual_hook_ui_name
).filter(condition).join(
IrisModuleHook.module,
IrisModuleHook.hook
).all()
for module in modules:
if module.run_asynchronously and "on_preload_" not in hook_name:
log.info(f'Calling module {module.module_name} asynchronously for hook {hook_name} :: {hook_ui_name}')
# We cannot directly pass the sqlalchemy in data, as it needs to be serializable
# So pass a dumped instance and then rebuild on the task side
ser_data = base64.b64encode(dumps(data))
ser_data_auth = hmac_sign(ser_data) + b" " + ser_data
task_hook_wrapper.delay(module_name=module.module_name, hook_name=hook_name,
hook_ui_name=module.manual_hook_ui_name, data=ser_data_auth.decode("utf8"),
init_user=current_user.name, caseid=caseid)
else:
# Direct call. Should be fast
log.info(f'Calling module {module.module_name} for hook {hook_name}')
try:
was_list = True
# The data passed on to the module hook is expected to be a list
# So we make sure it's the case or adapt otherwise
if not isinstance(data, list):
data_list = [data]
was_list = False
else:
data_list = data
mod_inst, _ = instantiate_module_from_name(module_name=module.module_name)
status = mod_inst.hooks_handler(hook_name, module.manual_hook_ui_name, data=data_list)
except Exception as e:
log.critical(f"Failed to run hook {hook_name} with module {module.module_name}. Error {str(e)}")
continue
if status.is_success():
data_result = status.get_data()
if not was_list:
if not isinstance(data_result, list):
log.critical(f"Error getting data result from hook {hook_name}: "
f"A list is expected, instead got a {type(data_result)}")
continue
else:
# We fetch the first elt here because we want to get back to the old type
data = data_result[0]
else:
data = data_result
return data
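
The signed-pickle hand-off between call_modules_hook and task_hook_wrapper reduces to the following round trip (hmac_sign and hmac_verify from app.util, used exactly as above; the payload is illustrative):

import base64
from pickle import dumps, loads
from app.util import hmac_sign, hmac_verify

payload_in = ['example data']                     # illustrative hook data
ser_data = base64.b64encode(dumps(payload_in))    # producer: pickle + base64
signed = (hmac_sign(ser_data) + b" " + ser_data).decode("utf8")

signature, pdata = signed.encode("utf-8").split(b" ")  # consumer side
if hmac_verify(signature, pdata):
    payload_out = loads(base64.b64decode(pdata))  # == payload_in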
def list_available_pipelines():
"""
Return a list of available pipelines by requesting the DB
"""
data = modules_list_pipelines()
return data
@celery.task(bind=True)
def pipeline_dispatcher(self, module_name, hook_name, pipeline_type, pipeline_data, init_user, caseid):
"""
Dispatch the pipelines according to their types
:param pipeline_type: Type of pipeline
:return: IrisInterfaceStatus
"""
# Retrieve the handler
mod, _ = instantiate_module_from_name(module_name=module_name)
if mod:
status = configure_module_on_init(mod)
if status.is_failure():
return status
# This will run the task in the Celery context
return mod.pipeline_handler(pipeline_type=pipeline_type,
pipeline_data=pipeline_data)
return IStatus.I2InterfaceNotImplemented("Couldn't instantiate module {}".format(module_name))


@@ -0,0 +1,64 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# contact@dfir-iris.org
# Created by Lukas Zurschmiede @LukyLuke
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
import os
import shutil
import uuid
import re
from pathlib import Path
from docxtpl import DocxTemplate
from docx_generator.globals.picture_globals import PictureGlobals
# RenderingError is raised below; import path assumed from its sibling usage
# in the reporter module (docx_generator.exceptions.rendering_error)
from docx_generator.exceptions.rendering_error import RenderingError
from app.datamgmt.datastore.datastore_db import datastore_get_local_file_path
class ImageHandler(PictureGlobals):
def __init__(self, template: DocxTemplate, base_path: str):
self._logger = logging.getLogger(__name__)
PictureGlobals.__init__(self, template, base_path)
def _process_remote(self, image_path: str) -> str:
"""
Checks if the given Link is a datastore-link and if so, save the image locally for further processing.
:
A Datastore Links looks like this: https://localhost:4433/datastore/file/view/2?cid=1
"""
res = re.search(r'datastore\/file\/view\/(\d+)\?cid=(\d+)', image_path)
if not res:
return super()._process_remote(image_path)
        if image_path[:4] == 'http' and len(res.groups()) == 2:
            file_id = res.group(1)
            case_id = res.group(2)
has_error, dsf = datastore_get_local_file_path(file_id, case_id)
if has_error:
raise RenderingError(self._logger, f'File-ID {file_id} does not exist in Case {case_id}')
if not Path(dsf.file_local_name).is_file():
                raise RenderingError(self._logger, f'File {dsf.file_local_name} does not exist on the server. Update or delete the virtual entry')
file_ext = os.path.splitext(dsf.file_original_name)[1]
file_name = os.path.join(self._output_path, str(uuid.uuid4())) + file_ext
return_value = shutil.copy(dsf.file_local_name, file_name)
return return_value
return super()._process_remote(image_path)
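
The link detection above reduces to the following (illustrative URL taken from the docstring):

import re

image_path = 'https://localhost:4433/datastore/file/view/2?cid=1'
res = re.search(r'datastore\/file\/view\/(\d+)\?cid=(\d+)', image_path)
if image_path[:4] == 'http' and res and len(res.groups()) == 2:
    file_id, case_id = res.group(1), res.group(2)  # '2' and '1'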


@@ -0,0 +1,25 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# IMPORTS ------------------------------------------------
# VARS ---------------------------------------------------
# CONTENT ------------------------------------------------


@@ -0,0 +1,610 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2022 - DFIR IRIS Team
# contact@dfir-iris.org
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# IMPORTS ------------------------------------------------
# VARS ---------------------------------------------------
# CONTENT ------------------------------------------------
import logging as log
import os
from datetime import datetime
import jinja2
from app.datamgmt.reporter.report_db import export_case_json_for_report
from docx_generator.docx_generator import DocxGenerator
from docx_generator.exceptions import rendering_error
from flask_login import current_user
from sqlalchemy import desc
from app import app
from app.datamgmt.activities.activities_db import get_auto_activities
from app.datamgmt.activities.activities_db import get_manual_activities
from app.datamgmt.case.case_db import case_get_desc_crc
from app.datamgmt.reporter.report_db import export_case_json
from app.models import AssetsType
from app.models import CaseAssets
from app.models import CaseEventsAssets
from app.models import CaseReceivedFile
from app.models import CaseTemplateReport
from app.models import CasesEvent
from app.models import Ioc
from app.models import IocAssetLink
from app.models import IocLink
from app.iris_engine.reporter.ImageHandler import ImageHandler
LOG_FORMAT = '%(asctime)s :: %(levelname)s :: %(module)s :: %(funcName)s :: %(message)s'
log.basicConfig(level=log.INFO, format=LOG_FORMAT)
class IrisReportMaker(object):
"""
    IRIS generic report maker
"""
def __init__(self, tmp_dir, report_id, caseid, safe_mode=False):
self._tmp = tmp_dir
self._report_id = report_id
self._case_info = {}
self._caseid = caseid
self.safe_mode = safe_mode
def get_case_info(self, doc_type):
"""Returns case information
Args:
doc_type (_type_): Investigation or Activities report
Returns:
_type_: case info
"""
if doc_type == 'Investigation':
case_info = self._get_case_info()
elif doc_type == 'Activities':
case_info = self._get_activity_info()
else:
log.error("Unknown report type")
return None
return case_info
def _get_activity_info(self):
auto_activities = get_auto_activities(self._caseid)
manual_activities = get_manual_activities(self._caseid)
case_info_in = self._get_case_info()
# Format information and generate the activity report #
doc_id = "{}".format(datetime.utcnow().strftime("%y%m%d_%H%M"))
case_info = {
'auto_activities': auto_activities,
'manual_activities': manual_activities,
'date': datetime.utcnow(),
'gen_user': current_user.name,
'case': {'name': case_info_in['case'].get('name'),
'open_date': case_info_in['case'].get('open_date'),
'for_customer': case_info_in['case'].get('for_customer')
},
'doc_id': doc_id
}
return case_info
def _get_case_info(self):
"""
Retrieve information of the case
:return:
"""
case_info = export_case_json(self._caseid)
# Get customer, user and case title
case_info['doc_id'] = IrisReportMaker.get_docid()
case_info['user'] = current_user.name
# Set date
case_info['date'] = datetime.utcnow().strftime("%Y-%m-%d")
return case_info
@staticmethod
def get_case_summary(caseid):
"""
        Retrieve the case summary
:return:
"""
_crc32, descr = case_get_desc_crc(caseid)
# return IrisMakeDocReport.markdown_to_text(descr)
return descr
@staticmethod
def get_case_files(caseid):
"""
Retrieve the list of files with their hashes
:return:
"""
files = CaseReceivedFile.query.filter(
CaseReceivedFile.case_id == caseid
).with_entities(
CaseReceivedFile.filename,
CaseReceivedFile.date_added,
CaseReceivedFile.file_hash,
CaseReceivedFile.custom_attributes
).order_by(
CaseReceivedFile.date_added
).all()
if files:
return [row._asdict() for row in files]
else:
return []
@staticmethod
def get_case_timeline(caseid):
"""
Retrieve the case timeline
:return:
"""
timeline = CasesEvent.query.filter(
CasesEvent.case_id == caseid
).order_by(
CasesEvent.event_date
).all()
cache_id = {}
ras = {}
tim = []
for row in timeline:
ras = row
setattr(ras, 'asset', None)
as_list = CaseEventsAssets.query.with_entities(
CaseAssets.asset_id,
CaseAssets.asset_name,
AssetsType.asset_name.label('type')
).filter(
CaseEventsAssets.event_id == row.event_id
).join(CaseEventsAssets.asset, CaseAssets.asset_type).all()
alki = []
for asset in as_list:
alki.append("{} ({})".format(asset.asset_name, asset.type))
setattr(ras, 'asset', "\r\n".join(alki))
tim.append(ras)
return tim
@staticmethod
def get_case_ioc(caseid):
"""
Retrieve the list of IOC linked to the case
:return:
"""
res = IocLink.query.distinct().with_entities(
Ioc.ioc_value,
Ioc.ioc_type,
Ioc.ioc_description,
Ioc.ioc_tags,
Ioc.custom_attributes
).filter(
IocLink.case_id == caseid
).join(IocLink.ioc).order_by(Ioc.ioc_type).all()
if res:
return [row._asdict() for row in res]
else:
return []
@staticmethod
def get_case_assets(caseid):
"""
        Retrieve the assets linked to the case
:return:
"""
ret = []
res = CaseAssets.query.distinct().with_entities(
CaseAssets.asset_id,
CaseAssets.asset_name,
CaseAssets.asset_description,
CaseAssets.asset_compromised.label('compromised'),
AssetsType.asset_name.label("type"),
CaseAssets.custom_attributes,
CaseAssets.asset_tags
).filter(
CaseAssets.case_id == caseid
).join(
CaseAssets.asset_type
).order_by(desc(CaseAssets.asset_compromised)).all()
for row in res:
row = row._asdict()
row['light_asset_description'] = row['asset_description']
ial = IocAssetLink.query.with_entities(
Ioc.ioc_value,
Ioc.ioc_type,
Ioc.ioc_description
).filter(
IocAssetLink.asset_id == row['asset_id']
).join(
IocAssetLink.ioc
).all()
if ial:
row['asset_ioc'] = [row._asdict() for row in ial]
else:
row['asset_ioc'] = []
ret.append(row)
return ret
@staticmethod
def get_docid():
return "{}".format(
datetime.utcnow().strftime("%y%m%d_%H%M"))
@staticmethod
def markdown_to_text(markdown_string):
"""
Converts a markdown string to plaintext
"""
return markdown_string.replace('\n', '</w:t></w:r><w:r/></w:p><w:p><w:r><w:t xml:space="preserve">').replace(
'#', '')
class IrisMakeDocReport(IrisReportMaker):
"""
Generates a DOCX report for the case
"""
def __init__(self, tmp_dir, report_id, caseid, safe_mode=False):
self._tmp = tmp_dir
self._report_id = report_id
self._case_info = {}
self._caseid = caseid
self._safe_mode = safe_mode
def generate_doc_report(self, doc_type):
"""
Actually generates the report
:return:
"""
if doc_type == 'Investigation':
case_info = self._get_case_info()
elif doc_type == 'Activities':
case_info = self._get_activity_info()
else:
log.error("Unknown report type")
return None
report = CaseTemplateReport.query.filter(CaseTemplateReport.id == self._report_id).first()
name = "{}".format("{}.docx".format(report.naming_format))
name = name.replace("%code_name%", case_info['doc_id'])
name = name.replace('%customer%', case_info['case'].get('for_customer'))
name = name.replace('%case_name%', case_info['case'].get('name'))
name = name.replace('%date%', datetime.utcnow().strftime("%Y-%m-%d"))
output_file_path = os.path.join(self._tmp, name)
try:
if not self._safe_mode:
image_handler = ImageHandler(template=None, base_path='/')
else:
image_handler = None
generator = DocxGenerator(image_handler=image_handler)
generator.generate_docx("/",
os.path.join(app.config['TEMPLATES_PATH'], report.internal_reference),
case_info,
output_file_path
)
return output_file_path, ""
except rendering_error.RenderingError as e:
return None, e.__str__()
def _get_activity_info(self):
auto_activities = get_auto_activities(self._caseid)
manual_activities = get_manual_activities(self._caseid)
case_info_in = self._get_case_info()
# Format information and generate the activity report #
doc_id = "{}".format(datetime.utcnow().strftime("%y%m%d_%H%M"))
case_info = {
'auto_activities': auto_activities,
'manual_activities': manual_activities,
'date': datetime.utcnow(),
'gen_user': current_user.name,
'case': {'name': case_info_in['case'].get('name'),
'open_date': case_info_in['case'].get('open_date'),
'for_customer': case_info_in['case'].get('for_customer')
},
'doc_id': doc_id
}
return case_info
def _get_case_info(self):
"""
Retrieve information of the case
:return:
"""
case_info = export_case_json_for_report(self._caseid)
# Get customer, user and case title
case_info['doc_id'] = IrisMakeDocReport.get_docid()
case_info['user'] = current_user.name
# Set date
case_info['date'] = datetime.utcnow().strftime("%Y-%m-%d")
return case_info
@staticmethod
def get_case_summary(caseid):
"""
        Retrieve the case summary
:return:
"""
_crc32, descr = case_get_desc_crc(caseid)
# return IrisMakeDocReport.markdown_to_text(descr)
return descr
@staticmethod
def get_case_files(caseid):
"""
Retrieve the list of files with their hashes
:return:
"""
files = CaseReceivedFile.query.filter(
CaseReceivedFile.case_id == caseid
).with_entities(
CaseReceivedFile.filename,
CaseReceivedFile.date_added,
CaseReceivedFile.file_hash,
CaseReceivedFile.custom_attributes
).order_by(
CaseReceivedFile.date_added
).all()
if files:
return [row._asdict() for row in files]
else:
return []
@staticmethod
def get_case_timeline(caseid):
"""
Retrieve the case timeline
:return:
"""
timeline = CasesEvent.query.filter(
CasesEvent.case_id == caseid
).order_by(
CasesEvent.event_date
).all()
cache_id = {}
ras = {}
tim = []
for row in timeline:
ras = row
setattr(ras, 'asset', None)
as_list = CaseEventsAssets.query.with_entities(
CaseAssets.asset_id,
CaseAssets.asset_name,
AssetsType.asset_name.label('type')
).filter(
CaseEventsAssets.event_id == row.event_id
).join(CaseEventsAssets.asset, CaseAssets.asset_type).all()
alki = []
for asset in as_list:
alki.append("{} ({})".format(asset.asset_name, asset.type))
setattr(ras, 'asset', "\r\n".join(alki))
tim.append(ras)
return tim
@staticmethod
def get_case_ioc(caseid):
"""
Retrieve the list of IOC linked to the case
:return:
"""
res = IocLink.query.distinct().with_entities(
Ioc.ioc_value,
Ioc.ioc_type,
Ioc.ioc_description,
Ioc.ioc_tags,
Ioc.custom_attributes
).filter(
IocLink.case_id == caseid
).join(IocLink.ioc).order_by(Ioc.ioc_type).all()
if res:
return [row._asdict() for row in res]
else:
return []
@staticmethod
def get_case_assets(caseid):
"""
        Retrieve the assets linked to the case
:return:
"""
ret = []
res = CaseAssets.query.distinct().with_entities(
CaseAssets.asset_id,
CaseAssets.asset_name,
CaseAssets.asset_description,
CaseAssets.asset_compromise_status_id.label('compromise_status'),
AssetsType.asset_name.label("type"),
CaseAssets.custom_attributes,
CaseAssets.asset_tags
).filter(
CaseAssets.case_id == caseid
).join(
CaseAssets.asset_type
).order_by(desc(CaseAssets.asset_compromise_status_id)).all()
for row in res:
row = row._asdict()
row['light_asset_description'] = row['asset_description']
ial = IocAssetLink.query.with_entities(
Ioc.ioc_value,
Ioc.ioc_type,
Ioc.ioc_description
).filter(
IocAssetLink.asset_id == row['asset_id']
).join(
IocAssetLink.ioc
).all()
if ial:
row['asset_ioc'] = [row._asdict() for row in ial]
else:
row['asset_ioc'] = []
ret.append(row)
return ret
@staticmethod
def get_docid():
return "{}".format(
datetime.utcnow().strftime("%y%m%d_%H%M"))
@staticmethod
def markdown_to_text(markdown_string):
"""
Converts a markdown string to plaintext
"""
return markdown_string.replace('\n', '</w:t></w:r><w:r/></w:p><w:p><w:r><w:t xml:space="preserve">').replace(
'#', '')
class IrisMakeMdReport(IrisReportMaker):
"""
Generates a MD report for the case
"""
def __init__(self, tmp_dir, report_id, caseid, safe_mode=False):
self._tmp = tmp_dir
self._report_id = report_id
self._case_info = {}
self._caseid = caseid
self.safe_mode = safe_mode
def generate_md_report(self, doc_type):
"""
Generate report file
"""
case_info = self.get_case_info(doc_type)
if case_info is None:
return None
# Get file extension
report = CaseTemplateReport.query.filter(
CaseTemplateReport.id == self._report_id).first()
_, report_format = os.path.splitext(report.internal_reference)
# Prepare report name
name = "{}".format(("{}" + str(report_format)).format(report.naming_format))
name = name.replace("%code_name%", case_info['doc_id'])
name = name.replace(
'%customer%', case_info['case'].get('for_customer'))
name = name.replace('%case_name%', case_info['case'].get('name'))
name = name.replace('%date%', datetime.utcnow().strftime("%Y-%m-%d"))
# Build output file
output_file_path = os.path.join(self._tmp, name)
try:
# Load the template
template_loader = jinja2.FileSystemLoader(searchpath="/")
template_env = jinja2.Environment(loader=template_loader, autoescape=True)
template_env.filters = app.jinja_env.filters
template = template_env.get_template(os.path.join(
app.config['TEMPLATES_PATH'], report.internal_reference))
# Render with a mapping between JSON (from db) and template tags
output_text = template.render(case_info)
# Write the result in the output file
with open(output_file_path, 'w', encoding="utf-8") as html_file:
html_file.write(output_text)
except Exception as e:
log.exception("Error while generating report: {}".format(e))
return None, e.__str__()
return output_file_path, 'Report generated'
class QueuingHandler(log.Handler):
"""A thread safe logging.Handler that writes messages into a queue object.
Designed to work with LoggingWidget so log messages from multiple
threads can be shown together in a single ttk.Frame.
The standard logging.QueueHandler/logging.QueueListener can not be used
for this because the QueueListener runs in a private thread, not the
main thread.
Warning: If multiple threads are writing into this Handler, all threads
must be joined before calling logging.shutdown() or any other log
destinations will be corrupted.
"""
def __init__(self, *args, task_self, message_queue, **kwargs):
"""Initialize by copying the queue and sending everything else to superclass."""
log.Handler.__init__(self, *args, **kwargs)
self.message_queue = message_queue
self.task_self = task_self
def emit(self, record):
"""Add the formatted log message (sans newlines) to the queue."""
self.message_queue.append(self.format(record).rstrip('\n'))
self.task_self.update_state(state='PROGRESS',
meta=list(self.message_queue))


@@ -0,0 +1,39 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from celery import Celery
def make_celery(app):
celery = Celery(
app.import_name,
config_source=app.config.get('CELERY')
)
class ContextTask(celery.Task):
def __call__(self, *args, **kwargs):
with app.app_context():
return self.run(*args, **kwargs)
celery.Task = ContextTask
return celery
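
A hedged usage sketch, assuming app is the Flask application object and app.config['CELERY'] holds the Celery settings mapping:

# Sketch: bind Celery to the Flask app so tasks run in an app context.
from app import app  # assumed Flask application object

celery = make_celery(app)

@celery.task
def ping():
    return 'pong'  # runs inside app.app_context()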


@@ -0,0 +1,102 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# IMPORTS ------------------------------------------------
import os
import urllib.parse
from celery.signals import task_prerun
from flask_login import current_user
from app import app
from app import db
from app.datamgmt.case.case_db import get_case
from app.iris_engine.module_handler.module_handler import pipeline_dispatcher
from app.iris_engine.utils.common import build_upload_path
from app.iris_engine.utils.tracker import track_activity
from iris_interface import IrisInterfaceStatus as IStatus
from iris_interface.IrisModuleInterface import IrisPipelineTypes
app.config['timezone'] = 'Europe/Paris'
# CONTENT ------------------------------------------------
@task_prerun.connect
def on_task_init(*args, **kwargs):
db.engine.dispose()
def task_case_update(module, pipeline, pipeline_args, caseid):
"""
Update the current case of the current user with fresh data.
The files should have already been uploaded.
:return: Tuple (success, errors)
"""
errors = []
case = get_case(caseid=caseid)
if case:
# We have a case so we can update the current case
# Build the upload path where the files should be
fpath = build_upload_path(case_customer=case.client.name,
case_name=urllib.parse.unquote(case.name),
module=module)
# Check the path is valid and exists
if fpath:
if os.path.isdir(fpath):
# Build task args
task_args = {
"pipeline_args": pipeline_args,
"db_name": '',
"user": current_user.name,
"user_id": current_user.id,
"case_name": case.name,
"case_id": case.case_id,
"path": fpath,
"is_update": True
}
track_activity("started a new analysis import with pipeline {}".format(pipeline))
pipeline_dispatcher.delay(module_name=module,
hook_name=IrisPipelineTypes.pipeline_type_update,
pipeline_type=IrisPipelineTypes.pipeline_type_update,
pipeline_data=task_args,
init_user=current_user.name,
caseid=caseid)
return IStatus.I2Success('Pipeline task queued')
            return IStatus.I2FileNotFound("Built path was not found")
return IStatus.I2UnexpectedResult("Unable to build path")
    else:
        # The user does not have any context, so we cannot update.
        # Return an error
        errors.append('Current user does not have a valid case in context')
        return IStatus.I2UnexpectedResult("Invalid context")
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i:i + n]
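
For example:

# chunks() yields successive fixed-size slices; the last one may be shorter.
list(chunks([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]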


@@ -0,0 +1,2 @@


@@ -0,0 +1,617 @@
#!/usr/bin/env python3
#
# IRIS Core Code
# contact@dfir-iris.org
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import gnupg
import hashlib
import json
import os
import requests
import shutil
import subprocess
import tempfile
import time
from celery.schedules import crontab
from datetime import datetime
from flask_login import current_user
from flask_socketio import emit
from flask_socketio import join_room
from packaging import version
from pathlib import Path
from app import app
from app import cache
from app import celery
from app import db
from app import socket_io
from app.datamgmt.manage.manage_srv_settings_db import get_server_settings_as_dict
from app.iris_engine.backup.backup import backup_iris_db
from app.models import ServerSettings
from iris_interface import IrisInterfaceStatus as IStatus
log = app.logger
def update_log_to_socket(status, is_error=False):
    log.info(status)
    data = {
        "message": status,
        "is_error": is_error
    }
    socket_io.emit('update_status', data, to='iris_update_status', namespace='/server-updates')


def notify_server_off():
    socket_io.emit('server_has_turned_off', {}, to='iris_update_status', namespace='/server-updates')


def notify_update_failed():
    socket_io.emit('update_has_fail', {}, to='iris_update_status', namespace='/server-updates')


def update_log(status):
    update_log_to_socket(status)


def update_log_error(status):
    update_log_to_socket(status, is_error=True)
@socket_io.on('join-update', namespace='/server-updates')
def get_message(data):
    room = data['channel']
    join_room(room=room)
    emit('join', {'message': f"{current_user.user} just joined", 'is_error': False}, room=room,
         namespace='/server-updates')


@socket_io.on('update_ping', namespace='/server-updates')
def socket_on_update_ping(msg):
    emit('update_ping', {'message': "Server connected", 'is_error': False},
         namespace='/server-updates')


@socket_io.on('update_get_current_version', namespace='/server-updates')
def socket_on_update_do_reboot(msg):
    socket_io.emit('update_current_version', {"version": app.config.get('IRIS_VERSION')}, to='iris_update_status',
                   namespace='/server-updates')


def notify_server_ready_to_reboot():
    socket_io.emit('server_ready_to_reboot', {}, to='iris_update_status', namespace='/server-updates')


def notify_server_has_updated():
    socket_io.emit('server_has_updated', {}, to='iris_update_status', namespace='/server-updates')
def inner_init_server_update():
    has_updates, updates_content, release_config = is_updates_available()
    init_server_update(release_config)


def get_external_url(url):
    server_settings = get_server_settings_as_dict()
    proxies = server_settings.get('proxies')
    try:
        request = requests.get(url, proxies=proxies)
    except Exception as e:
        app.logger.error(e)
        return None

    return request
def get_latest_release():
    try:
        releases = get_external_url(app.config.get('RELEASE_URL'))
    except Exception as e:
        app.logger.error(e)
        return True, {'message': f"Unexpected error. {str(e)}"}

    if not releases:
        return True, {'message': "Unexpected error"}

    if releases.status_code == 200:
        releases_j = releases.json()
        return False, releases_j[0]

    if releases.status_code == 403:
        return True, releases.json()

    # Use the same 'message' key as the other error paths, so callers
    # reading release.get('message') always find the error text
    return True, {'message': "Unexpected error"}


def get_release_assets(assets_url):
    try:
        release_assets = get_external_url(assets_url)
    except Exception as e:
        app.logger.error(e)
        return None

    if release_assets:
        return release_assets.json()

    return None
def is_updates_available():
    has_error, release = get_latest_release()
    current_version = app.config.get('IRIS_VERSION')

    if has_error:
        return False, release.get('message'), None

    release_version = release.get('name')

    cache.delete('iris_has_updates')

    srv_settings = ServerSettings.query.first()
    if not srv_settings:
        raise Exception('Unable to fetch server settings. Please reach out for help')

    if version.parse(current_version) < version.parse(release_version):
        srv_settings.has_updates_available = True
        db.session.commit()
        return True, f'# New version {release_version} available\n\n{release.get("body")}', release

    srv_settings.has_updates_available = False
    db.session.commit()
    return False, f'**Current server is up-to-date with {release_version}**', None
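# --- Illustrative sketch (not part of the original file) ---
# is_updates_available() relies on packaging.version for a semantic
# comparison rather than a string comparison, e.g.:
#
#   >>> from packaging import version
#   >>> version.parse('v2.2.1') < version.parse('v2.10.0')
#   True
#
# A plain string comparison would get this wrong ('2.2.1' > '2.10.0').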
def init_server_update(release_config):
    if not release_config:
        update_log_error('Release config is empty. Please contact IRIS team')
        notify_update_failed()
        return False

    update_log('Fetching release assets info')
    has_error, temp_dir = download_release_assets(release_config.get('assets'))
    if has_error:
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    has_error = verify_assets_signatures(temp_dir, release_config.get('assets'))
    if has_error:
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    updates_config = verify_compatibility(temp_dir, release_config.get('assets'))
    if updates_config is None:
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    update_archive = Path(temp_dir) / updates_config.get('app_archive')

    # verify_archive_fingerprint returns True when the SHA-256 matches
    fingerprint_valid = verify_archive_fingerprint(update_archive, updates_config.get('app_fingerprint'))
    if not fingerprint_valid:
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    update_log('Backing up current version')
    has_error = update_backup_current_version()
    if has_error:
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    update_log('Backing up database')
    has_error, logs = backup_iris_db()
    if has_error:
        for log_entry in logs:
            update_log_error(log_entry)
        update_log_error('Aborting upgrades - see previous errors')
        notify_update_failed()
        shutil.rmtree(temp_dir)
        return False

    if 'worker' in updates_config.get('scope'):
        update_log('Worker needs to be updated. Scheduling updates task')
        async_update = task_update_worker.delay(update_archive.as_posix(), updates_config)
        update_log('Scheduled. Waiting for worker to finish updating')
        result_output = async_update.get(interval=1)
        if result_output.is_failure():
            update_log_error('Worker failed to update')
            update_log_error(result_output.logs)
            update_log_error('Aborting upgrades - see previous errors')
            notify_update_failed()
            shutil.rmtree(temp_dir)
            return False

        time.sleep(5)
        async_update_version = task_update_get_version.delay()
        result_output = async_update_version.get(interval=1)
        if result_output.is_failure():
            update_log_error('Worker failed to update')
            update_log_error(result_output.data)
            update_log_error('Aborting upgrades - see previous errors')
            notify_update_failed()
            shutil.rmtree(temp_dir)
            return False

        worker_update_version = result_output.get_data()
        if worker_update_version != updates_config.get('target_version'):
            update_log_error('Worker failed to update')
            update_log_error(f'Expected version {updates_config.get("target_version")} but worker '
                             f'is in {worker_update_version}')
            update_log_error('Aborting upgrades - see previous errors')
            notify_update_failed()
            shutil.rmtree(temp_dir)
            return False

        update_log(f'Worker updated to {updates_config.get("target_version")}')

    if updates_config.get('need_app_reboot') is True:
        update_log('Closing all database connections. Unsaved work will be lost.')
        from sqlalchemy.orm import close_all_sessions
        close_all_sessions()

        update_log('All checks passed. IRIS will turn off shortly and update')
        update_log('Please don\'t leave the page - logging will resume here')
        update_log('Handing off to updater')

        notify_server_off()
        time.sleep(0.5)

    if 'iriswebapp' in updates_config.get('scope'):
        if call_ext_updater(update_archive=update_archive, scope="iriswebapp",
                            need_reboot=updates_config.get('need_app_reboot')):
            socket_io.stop()

    return True
def verify_archive_fingerprint(update_archive, archive_sha256):
    update_log('Verifying updates archive')
    if update_archive.is_file():
        sha256_hash = hashlib.sha256()

        with open(update_archive, "rb") as f:
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)

        current_sha256 = sha256_hash.hexdigest().upper()
        if current_sha256 == archive_sha256:
            return True

        update_log_error(f'Fingerprint mismatch. Expected {archive_sha256} but got {current_sha256}')
    else:
        update_log_error(f'Archive {update_archive} not found')

    return False
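# --- Illustrative sketch (not part of the original file) ---
# The 4096-byte read loop above hashes the archive without loading it
# entirely in memory. The same pattern works for any file:
#
#   import hashlib
#
#   def sha256_of(path):
#       h = hashlib.sha256()
#       with open(path, 'rb') as f:
#           for block in iter(lambda: f.read(4096), b''):
#               h.update(block)
#       return h.hexdigest().upper()
#
# Note the comparison above is case-sensitive, so the fingerprint in the
# release config must be upper-case.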
def call_ext_updater(update_archive, scope, need_reboot):
    if not isinstance(update_archive, Path):
        update_archive = Path(update_archive)

    archive_name = update_archive.stem

    if os.getenv("DOCKERIZED"):
        source_dir = Path.cwd() / 'scripts'
        target_dir = Path.cwd()
        docker = 1
    else:
        source_dir = Path.cwd().absolute() / 'scripts'
        if app.config["DEVELOPMENT"]:
            target_dir = Path('../../update_server/test_update')
        else:
            target_dir = Path.cwd()
        docker = 0

    try:
        # Popen is already non-blocking; the trailing '&' is passed to the
        # script as a literal argument, not interpreted by a shell
        subprocess.Popen(["/bin/bash", f"{source_dir}/iris_updater.sh",
                          update_archive.as_posix(),    # Update archive to unpack
                          target_dir.as_posix(),        # Target directory of the update
                          archive_name,                 # Root directory of the archive
                          scope[0],                     # Scope of the update
                          '1' if docker else '0',       # Are we in Docker?
                          '1' if need_reboot else '0',  # Do we need to restart the app?
                          '&'])
    except Exception as e:
        log.error(str(e))
        return False

    return True
def update_backup_current_version():
    date_time = datetime.now()

    root_backup = Path(app.config.get("BACKUP_PATH"))
    root_backup.mkdir(exist_ok=True)

    backup_dir = root_backup / f"server_backup_{date_time.timestamp()}"
    backup_dir.mkdir(exist_ok=True)

    if not backup_dir.is_dir():
        update_log_error(f"Unable to create directory {backup_dir} for backup. Aborting")
        return True

    if os.getenv("DOCKERIZED"):
        source_dir = Path.cwd()
    else:
        source_dir = Path.cwd().parent.absolute()

    try:
        update_log(f'Copying {source_dir} to {backup_dir}')
        shutil.copytree(source_dir, backup_dir, dirs_exist_ok=True)
    except Exception as e:
        update_log_error('Unable to backup current version')
        update_log_error(str(e))
        # Signal the error with a plain boolean, consistently with the
        # other return paths of this function
        return True

    update_log('Current version backed up')

    has_error = generate_backup_config_file(backup_dir)
    if has_error:
        return True

    return False
def generate_backup_config_file(backup_dir):
    backup_config = {
        "backup_date": datetime.now().timestamp(),
        "backup_version": app.config.get('IRIS_VERSION')
    }

    hashes_map = {}

    for entry in backup_dir.rglob('*'):
        if entry.is_file():
            sha256_hash = hashlib.sha256()

            with open(entry, "rb") as f:
                for byte_block in iter(lambda: f.read(4096), b""):
                    sha256_hash.update(byte_block)

            hashes_map[entry.as_posix()] = sha256_hash.hexdigest()

    backup_config["hashes_map"] = hashes_map

    try:
        with open(backup_dir / "backup_config.json", 'w') as fconfig:
            json.dump(backup_config, fconfig, indent=4)
    except Exception as e:
        update_log_error('Unable to save configuration file')
        update_log_error(str(e))
        return True

    update_log('Backup configuration file generated')

    return False
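# --- Illustrative sketch (not part of the original file) ---
# The generated backup_config.json looks roughly like the following
# (paths and hashes are made up for illustration):
#
#   {
#       "backup_date": 1700000000.0,
#       "backup_version": "v2.0.0",
#       "hashes_map": {
#           "/backups/server_backup_1700000000.0/app/__init__.py": "3f2a...",
#           "/backups/server_backup_1700000000.0/app/models.py": "9c1b..."
#       }
#   }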
def verify_compatibility(target_directory, release_assets_info):
    release_updates = None
    update_log('Verifying updates compatibilities')

    for release_asset in release_assets_info:
        asset_name = release_asset.get('name')
        if asset_name != 'release_updates.json':
            continue

        if (Path(target_directory) / asset_name).is_file():
            release_updates = Path(target_directory) / asset_name
            break

    if not release_updates:
        update_log_error('Unable to find release updates configuration file')
        return None

    try:
        with open(file=release_updates) as fin:
            updates_info = json.load(fin)
    except Exception as e:
        update_log_error('Unable to read release updates configuration file')
        update_log_error(str(e))
        update_log_error('Please contact DFIR-IRIS team')
        return None

    can_update = False
    accepted_versions = updates_info.get('accepted_versions')
    for av in accepted_versions:
        if version.parse(app.config.get('IRIS_VERSION')) == version.parse(av):
            can_update = True
            break

    if not can_update:
        update_log_error(f'Current version {app.config.get("IRIS_VERSION")} cannot '
                         f'be updated to {updates_info.get("target_version")} automatically')
        update_log_error(f'Supported versions are {updates_info.get("accepted_versions")}')
        return None

    if not updates_info.get('support_auto'):
        update_log_error('This update does not support automatic handling. Please read the upgrade instructions.')
        return None

    if 'worker' not in updates_info.get('scope') and 'iriswebapp' not in updates_info.get('scope'):
        update_log_error('Something is wrong, the updates configuration does not have any valid scope')
        update_log_error('Please contact DFIR-IRIS team')
        return None

    update_log('Compatibility checks done. Good to go')
    return updates_info
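# --- Illustrative sketch (not part of the original file) ---
# verify_compatibility() expects a release_updates.json shaped roughly
# like this; the keys come from the accesses above, the values are
# made up for illustration:
#
#   {
#       "target_version": "v2.0.1",
#       "accepted_versions": ["v2.0.0"],
#       "support_auto": true,
#       "scope": ["iriswebapp", "worker"],
#       "need_app_reboot": true,
#       "need_worker_reboot": true,
#       "app_archive": "iriswebapp_v2.0.1.tar.gz",
#       "app_fingerprint": "3F2A..."
#   }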
def verify_assets_signatures(target_directory, release_assets_info):
    # Expects a signature file for every asset
    has_error = False

    assets_check = {}

    for release_asset in release_assets_info:
        asset_name = release_asset.get('name')
        if not asset_name.endswith('.sig'):

            if (Path(target_directory) / asset_name).is_file():
                if (Path(target_directory) / f"{asset_name}.sig").is_file():
                    assets_check[Path(target_directory) / asset_name] = Path(target_directory) / f"{asset_name}.sig"
                else:
                    update_log_error(f"{asset_name} does not have a signature file")
                    has_error = True
            else:
                update_log_error(f"Could not find {Path(target_directory) / asset_name}")
                has_error = True

    if has_error:
        return has_error

    update_log("Importing DFIR-IRIS GPG key")
    gpg = gnupg.GPG()

    with open(app.config.get("RELEASE_SIGNATURE_KEY"), 'rb') as pkey:
        import_result = gpg.import_keys(pkey.read())

    if import_result.count < 1:
        update_log_error(f'Unable to fetch {app.config.get("RELEASE_SIGNATURE_KEY")}')
        has_error = True

    for asset in assets_check:
        with open(assets_check[asset], 'rb') as fin:
            verified = gpg.verify_file(fin, data_filename=asset)

            if not verified.valid:
                update_log_error(f'{asset.name} does not have a valid signature (checked '
                                 f'against {assets_check[asset].name}). '
                                 f'Contact DFIR-IRIS team')
                update_log_error(f"Signature status: {verified.status}")
                has_error = True
                continue

            update_log(f"{asset.name}: signature validated")

    return has_error
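# --- Illustrative sketch (not part of the original file) ---
# The loop above performs detached-signature verification: each asset
# "foo.tar.gz" is checked against its companion "foo.tar.gz.sig" using the
# imported DFIR-IRIS public key. A standalone equivalent with the same
# python-gnupg package (file names here are hypothetical):
#
#   import gnupg
#
#   gpg = gnupg.GPG()
#   with open('dfir_iris_release_key.asc', 'rb') as pkey:
#       gpg.import_keys(pkey.read())
#   with open('asset.tar.gz.sig', 'rb') as sig:
#       verified = gpg.verify_file(sig, data_filename='asset.tar.gz')
#   print(verified.valid, verified.status)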
def download_release_assets(release_assets_info):
    has_error = False
    if not Path(app.config.get("UPDATES_PATH")).is_dir():
        Path(app.config.get("UPDATES_PATH")).mkdir(exist_ok=True)

    temp_dir = tempfile.mkdtemp(dir=app.config.get("UPDATES_PATH"))

    for release_asset in release_assets_info:
        asset_name = release_asset.get('name')
        asset_url = release_asset.get('browser_download_url')

        # TODO: Check for available FS free space before downloading
        update_log(f'Downloading from {asset_url} to {temp_dir}')

        if not download_from_url(asset_url, Path(temp_dir) / asset_name):
            update_log_error('ERROR - Unable to save asset file to FS')
            has_error = True

    if has_error:
        update_log_error('Aborting upgrades - see previous errors')

    return has_error, temp_dir


def download_from_url(asset_url, target_file):
    response = get_external_url(asset_url)
    # get_external_url returns None on failure: bail out before writing
    if response is None:
        return False

    with open(target_file, "wb") as file:
        file.write(response.content)

    return Path(target_file).is_file()
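# --- Illustrative sketch (not part of the original file) ---
# download_from_url() buffers the whole asset in memory. For large
# archives, a streamed variant keeps memory usage flat; a possible sketch
# with the same requests package:
#
#   import requests
#
#   def download_streamed(url, target_file, proxies=None):
#       with requests.get(url, stream=True, proxies=proxies) as response:
#           response.raise_for_status()
#           with open(target_file, 'wb') as f:
#               for chunk in response.iter_content(chunk_size=8192):
#                   f.write(chunk)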
@celery.task(bind=True)
def task_update_worker(self, update_archive, updates_config):
    if not call_ext_updater(update_archive=update_archive, scope="worker",
                            need_reboot=updates_config.get('need_worker_reboot')):
        # Spawning the updater failed: report an error, not a success
        return IStatus.I2Error(message="Unable to spawn updater")

    return IStatus.I2Success(message="Worker updater called")


@celery.task(bind=True)
def task_update_get_version(self):
    return IStatus.I2Success(data=app.config.get('IRIS_VERSION'))
@celery.on_after_finalize.connect
def setup_periodic_update_checks(self, **kwargs):
    self.add_periodic_task(
        crontab(hour=0, minute=0),
        task_check_available_updates.s(),
        name='iris_auto_check_updates'
    )


def remove_periodic_update_checks():
    if 'iris_auto_check_updates' in celery.conf['beat_schedule']:
        del celery.conf['beat_schedule']['iris_auto_check_updates']


@celery.task
def task_check_available_updates():
    log.info('Cron - Checking if updates are available')
    has_updates, _, _ = is_updates_available()

    srv_settings = ServerSettings.query.first()
    if not srv_settings:
        return IStatus.I2Error('Unable to fetch server settings. Please reach out for help')

    srv_settings.has_updates_available = has_updates
    db.session.commit()

    if srv_settings.has_updates_available:
        log.info('Updates are available for this server')

    return IStatus.I2Success(f'Successfully checked updates. Available : {srv_settings.has_updates_available}')

View File

@ -0,0 +1,25 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# IMPORTS ------------------------------------------------
# VARS ---------------------------------------------------
# CONTENT ------------------------------------------------

View File

@ -0,0 +1,113 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os
from datetime import datetime
from app import app
def build_upload_path(case_customer, case_name, module, create=False):
    """
    Create a path for the upload of the files, according to the specifications of the case
    :param case_customer: Customer name linked to the case
    :param case_name: Name of the case
    :param module: Name of the module which will handle the data
    :param create: True if the path needs to be created, else False
    :return: The built full path, None on error
    """
    try:
        if case_name and case_customer and module:
            path = "{customer}/{case}/{module}/".format(
                customer=case_customer.strip().replace('.', '').replace(' ', '').replace('/', ''),
                case=case_name.strip().replace('.', '').replace(' ', '_').replace('/', '').lower(),
                module=module.replace('.', '').replace(' ', '_').replace('/', '')
            )

            fpath = os.path.join(app.config['UPLOADED_PATH'], path)

            if create:
                os.makedirs(fpath, exist_ok=True)

            return fpath

        return None

    except Exception as e:
        app.logger.error(e)
        return None
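# --- Illustrative sketch (not part of the original file) ---
# The replace() chains above strip path-hostile characters before the
# segments are joined; e.g., with UPLOADED_PATH assumed to be
# '/iris/uploads':
#
#   >>> build_upload_path('ACME Corp.', 'Case 2021/Intrusion', 'iris_evtx')
#   '/iris/uploads/ACMECorp/case_2021intrusion/iris_evtx/'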
def parse_bf_date_format(input_str):
    date_value = input_str.strip()

    if len(date_value) == 10 and '-' not in date_value and '.' not in date_value and '/' not in date_value:
        # Assume a 10-digit Unix timestamp in seconds
        date = datetime.fromtimestamp(int(date_value))
        return date

    elif len(date_value) == 13 and '-' not in date_value and '.' not in date_value and '/' not in date_value:
        # Assume a 13-digit timestamp in milliseconds
        date = datetime.fromtimestamp(int(date_value) / 1000)
        return date

    else:
        # Brute-force the known formats
        for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M', '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f',
                    '%Y-%m-%d %H:%M%z', '%Y-%m-%d %H:%M:%S%z', '%Y-%m-%d %H:%M:%S.%f%z',
                    '%Y-%m-%d %H:%M %Z', '%Y-%m-%d %H:%M:%S %Z', '%Y-%m-%d %H:%M:%S.%f %Z',
                    '%Y-%m-%d - %H:%M:%S.%f%z',
                    '%b %d %H:%M:%S', '%Y %b %d %H:%M:%S', '%b %d %H:%M:%S %Y', '%b %d %Y %H:%M:%S',
                    '%y %b %d %H:%M:%S', '%b %d %H:%M:%S %y', '%b %d %y %H:%M:%S',
                    '%Y-%m-%d', '%Y-%m-%dT%H:%M', '%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f',
                    '%Y-%m-%dT%H:%M%z', '%Y-%m-%dT%H:%M:%S%z', '%Y-%m-%dT%H:%M:%S.%f%z',
                    '%Y-%m-%dT%H:%M %Z', '%Y-%m-%dT%H:%M:%S %Z', '%Y-%m-%dT%H:%M:%S.%f %Z',
                    '%Y-%d-%m', '%Y-%d-%m %H:%M', '%Y-%d-%m %H:%M:%S', '%Y-%d-%m %H:%M:%S.%f',
                    '%Y-%d-%m %H:%M%z', '%Y-%d-%m %H:%M:%S%z', '%Y-%d-%m %H:%M:%S.%f%z',
                    '%Y-%d-%m %H:%M %Z', '%Y-%d-%m %H:%M:%S %Z', '%Y-%d-%m %H:%M:%S.%f %Z',
                    '%d/%m/%Y %H:%M', '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M:%S.%f',
                    '%d.%m.%Y %H:%M', '%d.%m.%Y %H:%M:%S', '%d.%m.%Y %H:%M:%S.%f',
                    '%d-%m-%Y %H:%M', '%d-%m-%Y %H:%M:%S', '%d-%m-%Y %H:%M:%S.%f',
                    '%b %d %Y %H:%M', '%b %d %Y %H:%M:%S', '%b %d %Y %H:%M:%S',
                    '%a, %d %b %Y %H:%M:%S', '%a, %d %b %Y %H:%M:%S %Z', '%a, %d %b %Y %H:%M:%S.%f',
                    '%a, %d %b %y %H:%M:%S', '%a, %d %b %y %H:%M:%S %Z', '%a, %d %b %y %H:%M:%S.%f',
                    '%d %b %Y %H:%M', '%d %b %Y %H:%M:%S', '%d %b %Y %H:%M:%S.%f',
                    '%d %b %y %H:%M', '%d %b %y %H:%M:%S', '%d %b %y %H:%M:%S.%f',
                    '%Y-%m-%d', '%d.%m.%Y', '%d/%m/%Y', "%A, %B %d, %Y", "%A %B %d, %Y", "%A %B %d %Y",
                    '%d %B %Y'):
            try:
                date = datetime.strptime(date_value, fmt)
                return date
            except ValueError:
                pass

    return None
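# --- Usage sketch (illustrative, not part of the original file) ---
# The three branches above cover second timestamps, millisecond
# timestamps, and a brute-force pass over the format list:
#
#   >>> parse_bf_date_format('1609459200')           # seconds (shown for a UTC host)
#   datetime.datetime(2021, 1, 1, 0, 0)
#   >>> parse_bf_date_format('2021-01-01T00:00:00')  # matches '%Y-%m-%dT%H:%M:%S'
#   datetime.datetime(2021, 1, 1, 0, 0)
#   >>> parse_bf_date_format('not a date') is None
#   True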

View File

@ -0,0 +1,70 @@
#!/usr/bin/env python3
#
# IRIS Source Code
# Copyright (C) 2021 - Airbus CyberSecurity (SAS)
# ir@cyberactionlab.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# IMPORTS ------------------------------------------------
from datetime import datetime
from flask import request
from flask_login import current_user
import app
from app import db
from app.models import UserActivity
log = app.app.logger
# CONTENT ------------------------------------------------
def track_activity(message, caseid=None, ctx_less=False, user_input=False, display_in_ui=True):
    """
    Register a user activity in the DB.
    :param message: Message to save as an activity
    :param caseid: ID of the case the activity is linked to
    :param ctx_less: True to record the activity without a case context
    :param user_input: True if the activity results from a user input
    :param display_in_ui: True to display the activity in the UI
    :return: The created UserActivity object
    """
    ua = UserActivity()
    try:
        ua.user_id = current_user.id
    except Exception:
        pass

    try:
        ua.case_id = caseid if ctx_less is False else None
    except Exception:
        pass

    ua.activity_date = datetime.utcnow()
    ua.activity_desc = message.capitalize()

    if current_user.is_authenticated:
        log.info(f"{current_user.user} [#{current_user.id}] :: Case {caseid} :: {ua.activity_desc}")
    else:
        log.info(f"Anonymous :: Case {caseid} :: {ua.activity_desc}")

    ua.user_input = user_input
    ua.display_in_ui = display_in_ui
    ua.is_from_api = (request.cookies.get('session') is None if request else False)

    db.session.add(ua)
    db.session.commit()

    return ua
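# --- Usage sketch (illustrative, not part of the original file) ---
# Called from request handlers or Celery tasks, e.g.:
#
#   track_activity("updated the case summary", caseid=42)
#   track_activity("logged in", ctx_less=True, display_in_ui=False)
#
# The first form ties the activity to case 42; the second records a
# context-less activity (case_id is left empty) that stays out of the UI.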