# By: Riasat Ullah

# This file contains all functions relevant to task instances.

from dbqueries import db_policies
from objects.alert_log import AlertLog
from objects.events import Event
from objects.instance_state import InstanceState
from objects.task import Task
from psycopg2 import errorcodes
from utils import constants, errors, helpers, key_manager, times, var_names
from validations import string_validator
import configuration as configs
import datetime
import json
import psycopg2
import uuid


def create_task_instances(conn, timestamp, tasks, policies=None):
    '''
    Creates instances of task(s). This function should primarily be used by the task monitor
    to create instances of tasks. The check ensuring that the same manual task is not triggered
    more than once a day should be handled when the tasks are retrieved.
    :param conn: db connection
    :param timestamp: timestamp when the instance should be created for
    :param tasks: (list) of Task objects
    :param policies: (dict) of Policy objects
    :return: (int) number of instances that were created
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(tasks, list)

    if policies is None:
        policy_ids = []
        for task in tasks:
            policy_ids += task.get_assignee_policy_ids()
        if len(policy_ids) > 0:
            policies = db_policies.get_policies(conn, timestamp, with_policyid=policy_ids)

    inst_query_params = []
    for inst_task in tasks:
        # Create the instance assignments here; otherwise, when a response is required from all
        # assignees, we would not be able to determine afterwards who the instance was assigned to.
        # In that case a separate instance is created for each assignee.
        all_assignees = inst_task.get_level_assignees(check_level=1)
        avl_level = 1
        all_structs = []
        for assignee in all_assignees:
            assignee_policy = policies[assignee.for_policy_id]
            avl_level, avl_assignments = helpers.get_new_instance_assignments_struct(timestamp, assignee_policy)
            all_structs += avl_assignments

        # The structure was made to be like this due to an old feature of "requiring all responses".
        # That feature is now deprecated, but the db functions have not been updated to avoid complications.
        all_structs = [all_structs]

        assignee_policies = [policies[id_] for id_ in inst_task.get_assignee_policy_ids()]
        next_alert_timestamp = InstanceState.calculate_next_alert_timestamp(1, assignee_policies, timestamp)

        for assignment in all_structs:
            inst_query_params.append((inst_task.details[var_names.organization_id], inst_task.taskid, timestamp.date(),
                                      timestamp, avl_level, constants.internal, next_alert_timestamp,
                                      constants.end_timestamp, constants.open_state, json.dumps(assignment),))

    instance_query = " select create_new_instance(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, null); "
    try:
        conn.execute_batch(instance_query, inst_query_params)
        return len(inst_query_params)
    except psycopg2.DatabaseError:
        raise
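
# Illustrative usage of create_task_instances (a sketch, not part of the module's execution
# path). It assumes a live `conn` wrapper exposing execute_batch() and a list of Task objects
# (here called `due_tasks`, a hypothetical name) already loaded by the task monitor:
#
#     now = datetime.datetime.utcnow()
#     created = create_task_instances(conn, now, due_tasks)
#     print('created {0} instance(s)'.format(created))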


def list_instances(conn, timestamp, org_id, user_id=None, open_instances=None, assignee_type=None, instance_ids=None,
                   row_limit=None, row_offset=None, service_ref_id=None, min_timestamp=None, max_timestamp=None,
                   urgency=None, org_instance_ids=None, service_name=None, team_ref_id=None, search_words=None,
                   instance_status=None, check_adv_perm=False):
    '''
    Get the basic details of task instances.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param org_id: organization ID of the user
    :param user_id: user_id of the user making the request
    :param open_instances: (optional) if True gets only open; if False gets only non-open; default gets all
    :param assignee_type: (optional) get only instances assigned to the user directly ('USER') or to the user's team(s) ('GROUP')
    :param instance_ids: (optional) list of instance ids to filter by
    :param row_limit: (optional) number of instances to fetch
    :param row_offset: (optional) number of instances to skip ahead
    :param service_ref_id: reference ID of the service to filter by
    :param min_timestamp: only get instances that have occurred after this time
    :param max_timestamp: only get instances that have occurred until this time
    :param urgency: (int) urgency of the incident
    :param org_instance_ids: (int or list) organization instance ID(s)
    :param service_name: (str) name of the service
    :param team_ref_id: reference ID of the team to filter by
    :param search_words: (str) keywords to search with
    :param instance_status: (str) status of the instance (OPEN, ACKNOWLEDGED, RESOLVED)
    :param check_adv_perm: (boolean) True if advanced team permissions should be checked
    :return: (list of dict) of basic info
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(org_id, int)

    query_params = {'timestamp': timestamp, 'org_id': org_id,
                    'pol_comp_type_id': configs.policy_component_type_id,
                    'serv_comp_type_id': configs.service_component_type_id}

    cond = []
    time_cond = ''
    if user_id is not None:
        assert isinstance(user_id, int)
        query_params['usr_id'] = user_id

    if open_instances is not None:
        assert isinstance(open_instances, bool)
        if open_instances:
            time_cond = " and ia.start_timestamp <= %(timestamp)s and ia.end_timestamp > %(timestamp)s "
            cond.append(" inst.is_open = true ")
            cond.append(" tasks.start_timestamp <= %(timestamp)s and tasks.end_timestamp > %(timestamp)s ")
        else:
            cond.append(" inst.is_open = false ")

    if instance_status is not None:
        assert instance_status in [constants.open_state, constants.acknowledged_state, constants.resolved_state]
        cond.append(" inst.status = %(inst_sts)s ")
        query_params['inst_sts'] = instance_status

    if assignee_type is None:
        if check_adv_perm:
            cond.append('''
                (
                    users.user_id = %(usr_id)s
                    or
                    ia.for_policyid not in (
                        select component_id from components_user_cannot_view(
                            %(timestamp)s, %(org_id)s, %(usr_id)s, %(pol_comp_type_id)s::smallint
                        )
                    )
                )
                and (
                    tasks.serviceid is null
                    or (
                        tasks.serviceid is not null
                        and tasks.serviceid not in (
                            select component_id from components_user_cannot_view(
                                %(timestamp)s, %(org_id)s, %(usr_id)s, %(serv_comp_type_id)s::smallint
                            )
                        )
                    )
                )
            ''')
    else:
        assert assignee_type in [constants.user_assignee, constants.team_assignee]
        if assignee_type == constants.user_assignee:
            cond.append(" users.user_id = %(usr_id)s ")

        elif assignee_type == constants.team_assignee:
            cond.append('''
            (
                ia.for_policyid in (
                    select component_id from team_components
                    where start_timestamp <= %(timestamp)s
                        and end_timestamp > %(timestamp)s
                        and component_type_id = %(pol_comp_type_id)s::smallint
                        and team_id in (select team_id from get_user_team_ids(%(timestamp)s, %(usr_id)s))
                )
                or (
                    tasks.serviceid is not null
                    and tasks.serviceid in (
                        select component_id from team_components
                        where start_timestamp <= %(timestamp)s
                            and end_timestamp > %(timestamp)s
                            and component_type_id = %(serv_comp_type_id)s::smallint
                            and team_id in (select team_id from get_user_team_ids(%(timestamp)s, %(usr_id)s))
                    )
                )
            )
            ''')

    if instance_ids is not None:
        cond.append(" inst.instanceid = any(%(inst_id)s) ")
        query_params['inst_id'] = helpers.get_int_list(instance_ids)

    if org_instance_ids is not None:
        cond.append(" inst.organization_instanceid = any(%(org_inst_id)s) ")
        query_params['org_inst_id'] = helpers.get_int_list(org_instance_ids)

    if service_ref_id is not None:
        cond.append('''
            tasks.serviceid in (
                select serviceid from services
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                    and organization_id = %(org_id)s
                    and service_ref_id = %(srv_ref_id)s
            )
        ''')
        query_params['srv_ref_id'] = key_manager.unmask_reference_key(service_ref_id)

    if min_timestamp is not None:
        assert isinstance(min_timestamp, datetime.datetime)
        cond.append(" inst.instance_timestamp >= %(min_time)s ")
        query_params['min_time'] = min_timestamp

    if max_timestamp is not None:
        assert isinstance(max_timestamp, datetime.datetime)
        cond.append(" inst.instance_timestamp <= %(max_time)s ")
        query_params['max_time'] = max_timestamp

    if urgency is not None:
        assert urgency in configs.allowed_urgency_levels
        cond.append(" inst.taskid in (select taskid from tasks where urgency_level = %(urg)s) ")
        query_params['urg'] = urgency

    if service_name is not None:
        cond.append('''
            tasks.serviceid in (
                select serviceid from services
                where organization_id = %(org_id)s
                    and (LOWER(service_name) like '%%' || LOWER(%(serv_name)s) || '%%')
            )
        ''')
        query_params['serv_name'] = service_name

    if team_ref_id is not None:
        cond.append('''
        (
            ia.for_policyid in (
                select component_id from team_components
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                    and component_type_id = %(pol_comp_type_id)s::smallint
                    and team_id in (
                        select team_id from teams
                        where start_timestamp <= %(timestamp)s
                            and end_timestamp > %(timestamp)s
                            and team_ref_id = %(tm_ref_id)s
                    )
            )
            or (
                tasks.serviceid is not null
                and tasks.serviceid in (
                    select component_id from team_components
                    where start_timestamp <= %(timestamp)s
                        and end_timestamp > %(timestamp)s
                        and component_type_id = %(serv_comp_type_id)s::smallint
                        and team_id in (
                            select team_id from teams
                            where start_timestamp <= %(timestamp)s
                                and end_timestamp > %(timestamp)s
                                and team_ref_id = %(tm_ref_id)s
                        )
                )
            )
        )
        ''')
        query_params['tm_ref_id'] = key_manager.unmask_reference_key(team_ref_id)

    if search_words is not None:
        assert isinstance(search_words, str)
        cond.append('''
            (
            inst.taskid in (
                select taskid from tasks
                where
                    LOWER(title) like '%%' || %(search_words)s || '%%'
                    or
                    LOWER(text_msg) like '%%' || %(search_words)s || '%%'
                    or
                    integration_id in (
                        select integration_id from service_integrations
                        where organization_id = %(org_id)s
                            and (
                                LOWER(integration_name) like '%%' || %(search_words)s || '%%'
                                or
                                integration_type_id in (
                                    select integration_type_id
                                    from integration_types
                                    where LOWER(integration_type) like '%%' || %(search_words)s || '%%'
                                )
                            )
                    )
            )
            or
            inst.taskid in (
                select taskid from task_labels where label_type = 'tag' and label like '%%' || %(search_words)s || '%%'
            )
            or
            tasks.serviceid in (
                select serviceid from services
                where organization_id = %(org_id)s
                    and (LOWER(service_name) like '%%' || LOWER(%(search_words)s) || '%%')
            )
        )
        ''')
        query_params['search_words'] = search_words.lower()

    limit_cond = ''
    if row_limit is not None:
        assert isinstance(row_limit, int)
        limit_cond += ' limit {0} '.format(str(row_limit))
    if row_offset is not None:
        assert isinstance(row_offset, int)
        limit_cond += ' offset {0} '.format(str(row_offset))

    query = '''
            with t1 as (
                select inst.instanceid, inst.taskid, inst.organization_instanceid, inst.instance_timestamp,
                        inst.next_alert_timestamp, inst.status, inst.resolved_on, tasks.serviceid,
                        jsonb_agg(distinct jsonb_build_object(
                            'display_name', first_name || ' ' || last_name,
                            'preferred_username', preferred_username
                        )) as assignees
                from task_instances as inst
                join tasks using(taskid)
                left join instance_assignments as ia
                    on ia.instanceid = inst.instanceid
                        {0}
                left join users
                    on users.policyid = ia.user_policyid
                        and users.start_timestamp <= ia.start_timestamp
                        and users.end_timestamp > ia.start_timestamp
                where inst.organization_id = %(org_id)s
                    and inst.instance_timestamp <= %(timestamp)s
                    {1}
                group by inst.instanceid, inst.taskid, inst.organization_instanceid, inst.instance_timestamp,
                    inst.next_alert_timestamp, inst.status, inst.resolved_on, tasks.serviceid
                order by instanceid desc
                {2}
            )
            select t1.instanceid, t1.organization_instanceid, t1.instance_timestamp, t1.next_alert_timestamp, t1.status,
                t1.resolved_on, td.title, td.urgency_level, srv.service_ref_id, srv.service_name, t1.assignees
            from t1
            join tasks as td
                on td.taskid = t1.taskid
                    and td.current_version
            left join services as srv
                on td.serviceid is not null
                    and srv.serviceid = td.serviceid
                    and srv.start_timestamp <= t1.instance_timestamp
                    and srv.end_timestamp > t1.instance_timestamp
            where td.task_status is null
            order by t1.instanceid desc;
            '''.format(time_cond, ' and ' + ' and '.join(cond) if len(cond) > 0 else '', limit_cond)
    try:
        result = conn.fetch(query, query_params)
        data = []
        for inst_id, org_inst_id, inst_timestamp, next_alert, status, \
                resolved_on, title, urgency, serv_ref, serv_name, assignees in result:
            data.append({
                var_names.instance_id: inst_id,
                var_names.organization_instance_id: org_inst_id,
                var_names.instance_timestamp: inst_timestamp,
                var_names.next_alert_timestamp: next_alert,
                var_names.resolved_on: resolved_on,
                var_names.status: status,
                var_names.task_title: title,
                var_names.urgency_level: urgency,
                var_names.services: None if serv_ref is None
                else [serv_name, key_manager.conceal_reference_key(serv_ref)],
                var_names.assignees: [[item[var_names.display_name], item[var_names.preferred_username]]
                                      for item in assignees] if assignees is not None else None
            })
        return data
    except psycopg2.DatabaseError:
        raise
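
# Illustrative call to list_instances (a sketch; the filter values below are hypothetical).
# It fetches the open instances of an organization matching a service name, newest first,
# one page at a time:
#
#     page = list_instances(conn, datetime.datetime.utcnow(), org_id=42,
#                           user_id=7, open_instances=True,
#                           service_name='payments', row_limit=25, row_offset=0)
#     for row in page:
#         print(row[var_names.instance_id], row[var_names.task_title])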


def get_instance_details(conn, current_timestamp, org_id, instance_id=None, org_instance_id=None,
                         user_id=None, check_adv_perm=False):
    '''
    Gets the full details of task instance(s).
    :param conn: db connection
    :param current_timestamp: timestamp when this request is being made
    :param org_id: ID of the organization the instance(s) belong to
    :param instance_id: (optional) (int or int list) ID of the instance
    :param org_instance_id: (optional) (int or list of int) instance id respective to the organization
    :param user_id: (optional) ID of the user requesting to see the details
    :param check_adv_perm: (boolean) True if advanced team permissions should be checked
    :return: dict of InstanceState objects
    :errors: AssertionError, DatabaseError, ValueError
    '''
    assert isinstance(current_timestamp, datetime.datetime)
    assert isinstance(org_id, int)
    assert instance_id is not None or org_instance_id is not None
    if check_adv_perm:
        assert user_id is not None and isinstance(user_id, int)

    query, query_params = None, None

    if instance_id is not None:
        query = 'select * from instance_details_from_inst_ids(%s, %s, %s, %s, %s)'
        query_params = (check_adv_perm, current_timestamp, org_id, helpers.get_int_list(instance_id), user_id)

    if org_instance_id is not None:
        query = 'select * from instance_details_from_org_inst_id(%s, %s, %s, %s, %s)'
        query_params = (check_adv_perm, current_timestamp, org_id, org_instance_id, user_id)

    try:
        result = conn.fetch(query, query_params)
        instance_states = dict()

        for inst_id, org_inst_id, inst_time, task_id, level, last_alert_timestamp, next_alert_timestamp, status, \
            res_on, task_ref_id, start_date, title, task_timezone, task_time, repeat, text_msg, urgency_level, \
            rel_task_ref_id, trig_method, snapshots, voice_messages, vendor_url, created_by, serv_ref, serv_name, \
            integ_key, integ_name, task_labels, inst_assignees, inst_events, inst_notes, inst_bus, \
                inst_sub, inst_upd, inst_conf, synced_vendors in result:

            # correct the data format of impacted business services and instance subscribers
            if inst_bus is not None:
                inst_bus = [[item[var_names.service_name],
                             key_manager.conceal_reference_key(uuid.UUID(item[var_names.business_service_ref_id]))]
                            for item in inst_bus]
            if inst_sub is not None:
                inst_sub = [[item[var_names.display_name], item[var_names.preferred_username],
                             item[var_names.profile_picture]] for item in inst_sub]

            # only 1 conference is allowed at a time
            if inst_conf is not None:
                inst_conf = [inst_conf[0][var_names.conference_phone], inst_conf[0][var_names.conference_url]]

            task_ref_id = key_manager.conceal_reference_key(task_ref_id)
            if rel_task_ref_id is not None:
                rel_task_ref_id = key_manager.conceal_reference_key(rel_task_ref_id)

            task_details = {var_names.task_ref_id: task_ref_id,
                            var_names.start_date: start_date,
                            var_names.created_by: created_by,
                            var_names.task_title: title,
                            var_names.task_timezone: task_timezone,
                            var_names.task_time: task_time,
                            var_names.repeat: repeat,
                            var_names.text_msg: text_msg,
                            var_names.urgency_level: urgency_level,
                            var_names.services: None if serv_ref is None
                            else [serv_name, key_manager.conceal_reference_key(serv_ref)],
                            var_names.integration_key: None if integ_key is None
                            else key_manager.conceal_reference_key(integ_key),
                            var_names.integration_name: integ_name,
                            var_names.related_task_id: rel_task_ref_id,
                            var_names.trigger_method: trig_method,
                            var_names.snapshots: snapshots,
                            var_names.voice_messages: voice_messages,
                            var_names.vendor_url: vendor_url}

            labels = Task.standardize_labels(task_labels)
            instance_states[inst_id] = InstanceState(inst_id, org_inst_id, org_id, inst_time, current_timestamp, level,
                                                     last_alert_timestamp, next_alert_timestamp, status,
                                                     [[item[var_names.display_name], item[var_names.preferred_username],
                                                       item[var_names.profile_picture]] for item in inst_assignees]
                                                     if inst_assignees is not None else None,
                                                     Task(task_ref_id, task_details, [], labels),
                                                     Event.create_events(inst_id, inst_events),
                                                     inst_notes, impacted_business_services=inst_bus,
                                                     subscribers=inst_sub, updates=inst_upd,
                                                     conference_bridges=inst_conf, synced_vendors=synced_vendors,
                                                     resolved_on=res_on)

        return instance_states
    except psycopg2.DatabaseError:
        raise
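
# Illustrative usage of get_instance_details (a sketch; the ids are hypothetical).
# Either instance_id or org_instance_id must be supplied; the result maps instance ids
# to InstanceState objects:
#
#     states = get_instance_details(conn, datetime.datetime.utcnow(), org_id=42,
#                                   org_instance_id=1001, user_id=7,
#                                   check_adv_perm=True)
#     for inst_id, state in states.items():
#         ...  # inspect each InstanceState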


def get_instances(conn, current_timestamp, instance_id=None, org_instance_id=None, user_id=None, org_id=None,
                  active=None, created_by=None, integration_id=None, min_timestamp=None, service_id=None,
                  skip_instance_id=None, inst_task_ids=None):
    '''
    Gets task instances. No displayable information will be provided here.
    :param conn: db connection
    :param current_timestamp: timestamp when this request is being made
    :param instance_id: (optional) (int or list of int) instance id to look for
    :param org_instance_id: (optional) (int or list of int) instance id respective to the organization
    :param user_id: (optional) if user_id is provided the organization id of the user will be used for filtering
    :param org_id: (optional) organization id to filter by
    :param active: (optional) if True only active instances are retrieved; if False then non-active; default gets all
    :param created_by: (optional) user_id of the user who created the instance
    :param integration_id: (optional) integration ID to search with
    :param min_timestamp: (optional) minimum datetime.datetime the instance can have
    :param service_id: (optional) service ID to search with
    :param skip_instance_id: (optional) ID of the instance to avoid;
        this will be needed when getting past instances of a resolved instance
    :param inst_task_ids: (list) of task IDs to filter the instances by
    :return: dict of InstanceState objects
    :errors: AssertionError, DatabaseError, ValueError
    '''
    assert isinstance(current_timestamp, datetime.datetime)
    query_params = {'timestamp': current_timestamp, 'chat_types': configs.supported_chat_integrations}

    conditions = []

    if instance_id is not None:
        conditions.append(' inst.instanceid = any(%(inst)s) ')
        query_params['inst'] = helpers.get_int_list(instance_id)

    if org_instance_id is not None:
        # only allow org instance id if the instance id, organization id or user_id is provided
        if instance_id is None and user_id is None and org_id is None:
            raise SystemError(errors.err_internal_incident_request_incomplete)
        conditions.append(' inst.organization_instanceid = any(%(org_inst)s) ')
        query_params['org_inst'] = helpers.get_int_list(org_instance_id)

    if user_id is not None:
        assert isinstance(user_id, int)
        conditions.append(''' inst.organization_id in (
                                select organization_id from users
                                where user_id = %(user_name)s
                                    and start_timestamp <= %(timestamp)s
                                    and end_timestamp > %(timestamp)s
                          )''')
        query_params['user_name'] = user_id

    if org_id is not None:
        assert isinstance(org_id, int)
        conditions.append(" inst.organization_id = %(org_id)s ")
        query_params['org_id'] = org_id

    if active is not None:
        assert isinstance(active, bool)
        conditions.append(" inst.is_open = true " if active else " inst.is_open = false ")

    if created_by is not None:
        assert isinstance(created_by, int)
        conditions.append(" td.created_by = %(sender)s ")
        query_params['sender'] = created_by

    if integration_id is not None:
        assert isinstance(integration_id, int)
        conditions.append(" td.integration_id = %(integ_id)s ")
        query_params['integ_id'] = integration_id

    if min_timestamp is not None:
        assert isinstance(min_timestamp, datetime.datetime)
        conditions.append(" inst.instance_timestamp > %(min_time)s ")
        query_params['min_time'] = min_timestamp

    if service_id is not None:
        assert isinstance(service_id, int)
        conditions.append(" td.serviceid = %(serv_id)s ")
        query_params['serv_id'] = service_id

    if skip_instance_id is not None:
        assert isinstance(skip_instance_id, int)
        conditions.append(' inst.instanceid != %(skip_inst)s ')
        query_params['skip_inst'] = skip_instance_id

    if inst_task_ids is not None:
        conditions.append(' inst.taskid = any(%(ins_tsk_ids)s) ')
        query_params['ins_tsk_ids'] = helpers.get_int_list(inst_task_ids)

    query = '''
            with t1 as (
                select inst.instanceid, inst.organization_instanceid, inst.organization_id, inst.instance_timestamp,
                    inst.taskid, inst.last_level, inst.last_alert_timestamp, inst.next_alert_timestamp, inst.status,
                    inst.resolved_on, td.task_start_date, td.title, td.task_timezone, td.task_time, td.repeat,
                    td.text_msg, td.urgency_level, td.alert, td.related_taskid,
                    td.created_by, td.serviceid, srv.service_name, td.integration_id, td.trigger_method,
                    td.trigger_info->'snapshots' as snapshots, td.trigger_info->'voice_messages' as voice_messages,
                    td.trigger_info->>'vendor_url' as vendor_url
                from task_instances as inst
                join tasks as td
                    on td.taskid = inst.taskid
                        and td.current_version
                left join services as srv
                    on srv.serviceid = td.serviceid
                        and srv.start_timestamp <= %(timestamp)s
                        and srv.end_timestamp > %(timestamp)s
                where inst.instance_timestamp <= %(timestamp)s
                {0}
            )
            , t2 as(
                select tal.taskid, json_agg(json_build_object(
                    'field_name', label_type,
                    'field_value', label
                )) as labels
                from task_labels as tal
                join t1
                on tal.taskid = t1.taskid
                    and start_timestamp <= t1.instance_timestamp
                    and end_timestamp > t1.instance_timestamp
                group by tal.taskid
            )
            , t3 as (
                select instanceid, json_agg(json_build_object(
                    'user_policyid', user_policyid,
                    'for_policyid', for_policyid,
                    'assignee_level', level,
                    'policy_name', pol.policy_name,
                    'display_name', users.first_name || ' ' || users.last_name,
                    'acknowledgement_code', ia.acknowledgement_code,
                    'resolution_code', ia.resolution_code,
                    'escalation_code', ia.escalation_code,
                    'last_notification_timestamp', last_notification_timestamp
                )) as inst_assignees
                from instance_assignments as ia
                join policies as pol
                    on pol.policyid = ia.for_policyid
                        and pol.start_timestamp <= ia.start_timestamp
                        and pol.end_timestamp >= ia.end_timestamp
                join users
                    on users.policyid = ia.user_policyid
                        and users.start_timestamp <= ia.start_timestamp
                        and users.end_timestamp >= ia.end_timestamp
                where instanceid in (select instanceid from t1)
                    and ia.start_timestamp <= %(timestamp)s
                    and ia.end_timestamp  >= %(timestamp)s
                group by instanceid
            )
            , t4 as (
                select instanceid, json_agg(json_build_object(
                    'event_timestamp', event_timestamp,
                    'event_type', event_type,
                    'event_method', event_method,
                    'event_by', event_log::json->>'event_by',
                    'event_log', event_log
                ) order by event_timestamp) as inst_events
                from instance_events
                left join users
                    on (event_log::json ->> 'event_by')::int = users.user_id
                        and users.start_timestamp <= event_timestamp
                        and users.end_timestamp > event_timestamp
                where instanceid in (select instanceid from t1)
                group by instanceid
            )
            , t5 as (
                select instanceid, json_agg(json_build_object(
                    'conference_phone', conference_phone,
                    'conference_url', conference_url
                )) as conf_bridges
                from instance_conferences
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                group by instanceid
            )
            , t6 as (
                select instanceid, json_agg(json_build_object(
                    'integration_id', ica.integration_id,
                    'synced_with', intt.integration_type,
                    'vendor_id', ica.vendor_id,
                    'vendor_url', ica.vendor_url
                )) as synced_vendors
                from instance_custom_actions as ica
                join integration_types as intt using(integration_type_id)
                where intt.start_date <= ica.action_timestamp
                    and intt.end_date > ica.action_timestamp
                    and (ica.is_synced or intt.integration_type = any(%(chat_types)s))
                group by instanceid
            )
            select t1.*, t2.labels, t3.inst_assignees, t4.inst_events, t5.conf_bridges, t6.synced_vendors
            from t1
            left join t2 using(taskid)
            left join t3 using(instanceid)
            left join t4 using(instanceid)
            left join t5 using(instanceid)
            left join t6 using(instanceid);
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')

    try:
        result = conn.fetch(query, query_params)

        instance_states = dict()
        for inst_id, org_inst_id, org_id, inst_time, task_id, level, last_alert_timestamp, next_alert_timestamp, \
            status, res_on, start_date, title, task_timezone, task_time, repeat, text_msg, urgency_level, alert, \
            rel_task_id, created_by, service_id, service_name, integ_id, trig_method, snapshots, voice_messages, \
                vendor_url, task_labels, inst_assignees, inst_events, inst_conf, synced_vendors in result:

            task_details = {var_names.task_id: task_id,
                            var_names.start_date: start_date,
                            var_names.created_by: created_by,
                            var_names.task_title: title,
                            var_names.task_timezone: task_timezone,
                            var_names.task_time: task_time,
                            var_names.repeat: repeat,
                            var_names.text_msg: text_msg,
                            var_names.urgency_level: urgency_level,
                            var_names.to_alert: alert,
                            var_names.service_id: service_id,
                            var_names.service_name: service_name,
                            var_names.integration_id: integ_id,
                            var_names.related_task_id: rel_task_id,
                            var_names.trigger_method: trig_method,
                            var_names.snapshots: snapshots,
                            var_names.voice_messages: voice_messages,
                            var_names.vendor_url: vendor_url}

            labels = Task.standardize_labels(task_labels)
            instance_states[inst_id] = InstanceState(inst_id, org_inst_id, org_id, inst_time, current_timestamp, level,
                                                     last_alert_timestamp, next_alert_timestamp, status,
                                                     InstanceState.create_instance_assignees(inst_assignees)
                                                     if inst_assignees is not None else None,
                                                     Task(task_id, task_details, [], labels),
                                                     Event.create_events(inst_id, inst_events),
                                                     conference_bridges=inst_conf,
                                                     synced_vendors=synced_vendors,
                                                     resolved_on=res_on)

        return instance_states
    except psycopg2.DatabaseError:
        raise
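
# Illustrative usage of get_instances (a sketch; the ids are hypothetical). This is the
# display-agnostic variant, e.g. for fetching the open instances created through a
# particular integration:
#
#     open_for_integration = get_instances(conn, datetime.datetime.utcnow(),
#                                           org_id=42, integration_id=9,
#                                           active=True)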


def get_active_instances_for_monitoring(conn, timestamp):
    '''
    Gets the task instances that need monitoring. The data will be used for monitoring and handling
    live instances. This function should be used for internal purposes only.
    :param conn: db connection
    :param timestamp: the timestamp at which to check on
    :return: (dict) -> {instance ID: InstanceState object, ...}
    '''
    assert isinstance(timestamp, datetime.datetime)

    query = " select * from instances_to_monitor(%s); "
    query_params = (timestamp,)

    try:
        result = conn.fetch(query, query_params)

        instance_states = dict()
        for inst_id, inst_time, task_id, level, last_alert_timestamp, next_alert_timestamp, status, org_inst_id, \
            org_id, created_by, title, task_timezone, task_time, text_msg, urgency_level, alert, service_id, \
            integration_id, rel_task_id, task_status, snapshots, task_labels, inst_assignees, inst_events, \
                inst_updates, inst_subscribers, inst_conf, synced_vendors in result:

            details = {var_names.task_id: task_id,
                       var_names.created_by: created_by,
                       var_names.task_title: title,
                       var_names.task_timezone: task_timezone,
                       var_names.task_time: task_time,
                       var_names.text_msg: text_msg,
                       var_names.urgency_level: urgency_level,
                       var_names.to_alert: alert,
                       var_names.integration_id: integration_id,
                       var_names.related_task_id: rel_task_id,
                       var_names.status: task_status,
                       var_names.service_id: service_id,
                       var_names.snapshots: snapshots}

            instance_states[inst_id] = InstanceState(inst_id, org_inst_id, org_id, inst_time, timestamp, level,
                                                     last_alert_timestamp, next_alert_timestamp, status,
                                                     InstanceState.create_instance_assignees(inst_assignees),
                                                     Task(task_id, details, [], {var_names.tags: task_labels}),
                                                     Event.create_events(inst_id, inst_events),
                                                     updates=inst_updates, subscribers=inst_subscribers,
                                                     conference_bridges=inst_conf, synced_vendors=synced_vendors)
        return instance_states
    except psycopg2.DatabaseError:
        raise
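
# Illustrative usage of get_active_instances_for_monitoring (a sketch). The task monitor
# would typically call this on each tick and act on instances whose next_alert_timestamp
# has passed:
#
#     states = get_active_instances_for_monitoring(conn, datetime.datetime.utcnow())
#     for inst_id, state in states.items():
#         ...  # escalate / re-alert based on the InstanceState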


def get_alert_agnostic_active_instances_for_monitoring(conn, timestamp, inst_ids):
    '''
    Gets the task instances that need monitoring. The data will be used for monitoring and handling
    live instances. This function should be used for internal purposes only.
    :param conn: db connection
    :param timestamp: the timestamp at which to check on
    :param inst_ids: (list) of instance IDs
    :return: (dict) -> {instance ID: InstanceState object, ...}
    '''
    assert isinstance(timestamp, datetime.datetime)

    query = " select * from alert_agnostic_active_instances_for_monitoring(%s, %s); "
    query_params = (timestamp, helpers.get_int_list(inst_ids),)

    try:
        result = conn.fetch(query, query_params)

        instance_states = dict()
        for inst_id, inst_time, task_id, level, last_alert_timestamp, next_alert_timestamp, status, org_inst_id, \
            org_id, created_by, title, task_timezone, task_time, text_msg, urgency_level, alert, service_id, \
            integration_id, rel_task_id, task_status, snapshots, task_labels, inst_assignees, inst_events, \
                inst_updates, inst_subscribers, inst_conf, synced_vendors in result:

            details = {var_names.task_id: task_id,
                       var_names.created_by: created_by,
                       var_names.task_title: title,
                       var_names.task_timezone: task_timezone,
                       var_names.task_time: task_time,
                       var_names.text_msg: text_msg,
                       var_names.urgency_level: urgency_level,
                       var_names.to_alert: alert,
                       var_names.integration_id: integration_id,
                       var_names.related_task_id: rel_task_id,
                       var_names.status: task_status,
                       var_names.service_id: service_id,
                       var_names.snapshots: snapshots}

            instance_states[inst_id] = InstanceState(inst_id, org_inst_id, org_id, inst_time, timestamp, level,
                                                     last_alert_timestamp, next_alert_timestamp, status,
                                                     InstanceState.create_instance_assignees(inst_assignees),
                                                     Task(task_id, details, [], {var_names.tags: task_labels}),
                                                     Event.create_events(inst_id, inst_events),
                                                     updates=inst_updates, subscribers=inst_subscribers,
                                                     conference_bridges=inst_conf, synced_vendors=synced_vendors)
        return instance_states
    except psycopg2.DatabaseError:
        raise


def get_instance_for_workflow(conn, timestamp, instance_id):
    '''
    Gets the full details of an instance to handle workflows. This should only be used for internal purposes.
    :param conn: db connection
    :param timestamp: the timestamp at which to check on
    :param instance_id: ID of the instance to fetch
    :return: InstanceState object; None if the instance is not found
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(instance_id, int)

    query = " select * from instance_for_workflow(%s, %s); "
    query_params = (timestamp, instance_id,)

    try:
        result = conn.fetch(query, query_params)

        for inst_id, inst_time, task_id, level, last_alert_timestamp, next_alert_timestamp, status, org_inst_id, \
            org_id, created_by, title, task_timezone, task_time, text_msg, urgency_level, alert, service_id, \
            service_name, integration_id, rel_task_id, task_status, task_trigger_info, task_labels, inst_assignees, \
                inst_events, inst_updates, inst_subscribers, inst_conf, synced_vendors in result:

            details = {var_names.task_id: task_id,
                       var_names.created_by: created_by,
                       var_names.task_title: title,
                       var_names.task_timezone: task_timezone,
                       var_names.task_time: task_time,
                       var_names.text_msg: text_msg,
                       var_names.urgency_level: urgency_level,
                       var_names.to_alert: alert,
                       var_names.integration_id: integration_id,
                       var_names.related_task_id: rel_task_id,
                       var_names.status: task_status,
                       var_names.service_id: service_id,
                       var_names.service_name: service_name,
                       var_names.source_payload: task_trigger_info}

            return InstanceState(inst_id, org_inst_id, org_id, inst_time, timestamp, level,
                                 last_alert_timestamp, next_alert_timestamp, status,
                                 InstanceState.create_instance_assignees(inst_assignees),
                                 Task(task_id, details, [], {var_names.tags: task_labels}),
                                 Event.create_events(inst_id, inst_events),
                                 updates=inst_updates, subscribers=inst_subscribers,
                                 conference_bridges=inst_conf, synced_vendors=synced_vendors)
        return None
    except psycopg2.DatabaseError:
        raise


def get_org_active_instances_basic_list(conn, timestamp, organization_id):
    '''
    Get the active instances of an organization.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param organization_id: ID of the organization whose instances should be retrieved
    :return: (list of list) -> [ [task title, org inst ID, inst ID], ... ]
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(organization_id, int)

    query = '''
            select title, organization_instanceid, instanceid
            from task_instances as ti
            join tasks using (taskid)
            where ti.organization_id = %(org_id)s
                and ti.instance_timestamp <= %(timestamp)s
                and ti.is_open = true
                and tasks.start_timestamp <= %(timestamp)s
                and tasks.end_timestamp > %(timestamp)s;
            '''
    query_params = {'timestamp': timestamp, 'org_id': organization_id}
    try:
        result = conn.fetch(query, query_params)
        data = []
        for title, org_inst_id, inst_id in result:
            data.append([title, org_inst_id, inst_id])
        return data
    except psycopg2.DatabaseError:
        raise


def get_user_active_action_codes(conn, timestamp, phone_number):
    '''
    Gets the action codes that can be expected from a user through their phone at a point in time.
    :param conn: db connection
    :param timestamp: timestamp when the codes are being requested for
    :param phone_number: (str) of phone number
    :return: (list) of dictionaries containing instance id, user_id and action codes
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert string_validator.is_phone_number(phone_number)

    query = '''
            select ia.instanceid, users.organization_id, orp.permissions as org_perm,
                users.user_id, usp.permissions as user_perm, users.iso_country_code, users.country_code,
                acknowledgement_code, resolution_code, escalation_code
            from instance_assignments as ia
            join users on ia.user_policyid = users.policyid
            join user_permissions as usp on usp.user_id = users.user_id
            join organization_permissions as orp on orp.organization_id = users.organization_id
            where ia.start_timestamp <= %(timestamp)s
                and ia.end_timestamp > %(timestamp)s
                and users.start_timestamp <= %(timestamp)s
                and users.end_timestamp > %(timestamp)s
                and users.phone = %(phone)s
                and usp.start_timestamp <= %(timestamp)s
                and usp.end_timestamp > %(timestamp)s
                and orp.start_timestamp <= %(timestamp)s
                and orp.end_timestamp > %(timestamp)s;
            '''
    query_params = {'timestamp': timestamp, 'phone': phone_number}
    try:
        result = conn.fetch(query, query_params)
        data = []
        for id_, org_id, org_perm, user_id, user_perm, iso_code, phone_code,\
                ack_code, resolve_code, escalate_code in result:
            data.append({var_names.instance_id: id_,
                         var_names.organization_id: org_id,
                         var_names.organization_permissions: org_perm,
                         var_names.user_id: user_id,
                         var_names.user_permissions: user_perm,
                         var_names.iso_country_code: iso_code,
                         var_names.country_code: phone_code,
                         var_names.action_codes: (ack_code, resolve_code, escalate_code)})
        return data
    except psycopg2.DatabaseError:
        raise
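
# Illustrative usage of get_user_active_action_codes (a sketch; the phone number is
# hypothetical). This is what an inbound SMS/voice handler would typically call to map a
# sender's reply code to an instance action:
#
#     codes = get_user_active_action_codes(conn, datetime.datetime.utcnow(),
#                                          '+15550123456')
#     for entry in codes:
#         ack_code, resolve_code, escalate_code = entry[var_names.action_codes]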


def log_alert_events(conn, alert_logs):
    '''
    Log alert events.
    :param conn: db connection
    :param alert_logs: (list) of AlertLog objects
    :errors: DatabaseError
    '''
    logs = []
    for alert in alert_logs:
        assert isinstance(alert, AlertLog)
        logs.append(alert.get_db_values())

    query = '''
            insert into alert_event_logs (
                log_date, log_timestamp, side, event_method,
                event_type, organization_id, user_id, policyid,
                sender_iso_country_code, sender_country_code, sender_phone_number, recipient_iso_country_code,
                recipient_country_code, recipient_phone_number, vendor, vendor_event_id,
                instanceid, attack, attack_type, message,
                processing_succeeded, processing_log
            ) values (
                %s, %s, %s, %s,
                %s, %s, %s, %s,
                %s, %s, %s, %s,
                %s, %s, %s, %s,
                %s, %s, %s, %s,
                %s, %s
            );
            '''
    try:
        if len(alert_logs) > 0:
            conn.execute_batch(query, logs)
    except psycopg2.DatabaseError:
        raise


def get_instance_assignment_history(conn, timestamp, start_date, end_date, with_user_id=None,
                                    with_organization_id=None, with_policy_id=None):
    '''
    Get the history/timeline of all assignments that have been made for an instance.
    :param conn: db connection
    :param timestamp: timestamp when the request was made
    :param start_date: the start date of the period
    :param end_date: the end date of the period
    :param with_user_id: (int) the user_id of the assignee to fetch data for
    :param with_organization_id: (int) the id of the organization to fetch data for
    :param with_policy_id: (list) of policy ids to fetch data for
    :return: (dict of list) -> { inst ID: [ [user pol ID, for pol ID, start time, end time], ... ], ...}
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(start_date, datetime.date)
    assert isinstance(end_date, datetime.date)

    query_params = {'timestamp': timestamp, 'start_date': start_date, 'end_date': end_date}
    conditions = []

    if with_user_id is not None:
        assert isinstance(with_user_id, int)
        conditions.append('''
            pol.policyid in (select policyid from users
                where start_timestamp <= %(timestamp)s and end_timestamp > %(timestamp)s and user_id = %(usr_id)s
        )''')
        query_params['usr_id'] = with_user_id

    if with_organization_id is not None:
        assert isinstance(with_organization_id, int)
        conditions.append(" ti.organization_id = %(org_id)s ")
        query_params['org_id'] = with_organization_id

    if with_policy_id is not None:
        conditions.append(" pol.policyid = any(%(pid)s) ")
        query_params['pid'] = helpers.get_int_list(with_policy_id)

    query = '''
            select ti.instanceid, json_agg(json_build_object(
                'user_policyid', user_policyid,
                'for_policyid', for_policyid,
                'valid_start', start_timestamp,
                'valid_end', end_timestamp
            )) as all_assignees
            from task_instances as ti
            join instance_assignments as ia using(instanceid)
            join policies as pol
            on (ia.user_policyid = pol.policyid or ia.for_policyid = pol.policyid)
                and pol.start_timestamp <= ia.start_timestamp
                and pol.end_timestamp > ia.start_timestamp
            where ia.start_timestamp >= %(start_date)s
                and ia.end_timestamp <= %(end_date)s
                {0}
            group by ti.instanceid;
            '''.format('' if len(conditions) == 0 else ' and ' + ' and '.join(conditions))
    try:
        result = conn.fetch(query, query_params)

        assignees_history = dict()
        for id_, assg_ in result:
            assignees_history[id_] = [[item[var_names.user_policyid], item[var_names.for_policyid],
                                       item[var_names.valid_start], item[var_names.valid_end]]
                                      for item in assg_]
        return assignees_history
    except psycopg2.DatabaseError:
        raise
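
# Illustrative usage of get_instance_assignment_history (a sketch; ids and dates are
# hypothetical). Returns, per instance, the timeline of who was assigned and for which policy:
#
#     history = get_instance_assignment_history(
#         conn, datetime.datetime.utcnow(),
#         datetime.date(2024, 1, 1), datetime.date(2024, 1, 31),
#         with_user_id=7)
#     for inst_id, assignments in history.items():
#         for user_pol_id, for_pol_id, valid_start, valid_end in assignments:
#             ...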


def user_incident_parent_policy_count(conn, start_date, end_date, user_id):
    '''
    Gets the number of incidents that were assigned to a user from each policy that the user is a part of.
    :param conn: db connection
    :param start_date: start date of the period to check for
    :param end_date: end date of the period to check for
    :param user_id: user_id of the user to check for
    :return: (list) of list [[policy id, display name, incident count], ...]
    '''
    assert isinstance(start_date, datetime.date)
    assert isinstance(end_date, datetime.date)
    assert isinstance(user_id, int)

    query = '''
            with t1 as (
                select distinct instanceid, for_policyid
                from instance_assignments
                where start_timestamp >= %(start_date)s
                    and end_timestamp < %(end_date)s
                    and user_policyid in (
                        select policyid from users
                        where user_id = %(user_name)s
                            and start_timestamp <= %(end_date)s
                            and end_timestamp > %(start_date)s
                    )
            )
            , t2 as (
                select for_policyid, count(instanceid) as incident_count
                from t1
                group by for_policyid
            )
            select t2.for_policyid, policy_name, t2.incident_count
            from t2
            join policies on t2.for_policyid = policies.policyid
            '''
    query_params = {'start_date': start_date, 'end_date': end_date, 'user_name': user_id}
    try:
        result = conn.fetch(query, query_params)
        data = []
        for policy_id, display_name, incident_count in result:
            data.append([policy_id, display_name, incident_count])
        return data
    except psycopg2.DatabaseError:
        raise
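
# Illustrative usage of user_incident_parent_policy_count (a sketch; the ids and dates are
# hypothetical). Useful for per-user reporting on which policies generated their incidents:
#
#     rows = user_incident_parent_policy_count(
#         conn, datetime.date(2024, 1, 1), datetime.date(2024, 2, 1), user_id=7)
#     for policy_id, policy_name, incident_count in rows:
#         print(policy_id, policy_name, incident_count)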


def get_organization_id_of_instance(conn, instance_id):
    '''
    Get the ID of the organization that a given instance is for.
    :param conn: db connection
    :param instance_id: (int) instance id
    :return: (int) organization id
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(instance_id, int)
    query = 'select organization_id from task_instances where instanceid = %s;'
    query_params = (instance_id,)
    try:
        result = conn.fetch(query, query_params)
        if len(result) == 1:
            return result[0][0]
        elif len(result) == 0:
            raise LookupError(errors.err_unknown_resource)
        else:
            raise SystemError(errors.err_internal_multiple_entries_found + ' Instance ID -' + str(instance_id))
    except psycopg2.DatabaseError:
        raise


def get_organization_instance_id_of_instance(conn, instance_id):
    '''
    Get the organization instance ID of an instance.
    :param conn: db connection
    :param instance_id: (int) instance id
    :return: (int) organization instance id
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(instance_id, int)
    query = 'select organization_instanceid from task_instances where instanceid = %s;'
    query_params = (instance_id,)
    try:
        result = conn.fetch(query, query_params)
        if len(result) == 1:
            return result[0][0]
        elif len(result) == 0:
            raise LookupError(errors.err_unknown_resource)
        else:
            raise SystemError(errors.err_internal_multiple_entries_found + ' Instance ID -' + str(instance_id))
    except psycopg2.DatabaseError:
        raise


def get_active_instance_task_ids_with_dedup_key(conn, timestamp, organization_id, dedup_key):
    '''
    Gets the task ids of active instances of an organization that have a given dedup_key.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param organization_id: ID of the organization this request is for
    :param dedup_key: the dedup key
    :return: (list) of task ids
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(organization_id, int)
    assert string_validator.is_not_sql_injection(dedup_key)
    query = '''
            with t1 as(
                select taskid from task_instances
                where organization_id = %s
                    and is_open = true
            )
            select taskid from task_labels
            where taskid in (select taskid from t1)
                and start_timestamp <= %s
                and end_timestamp > %s
                and label_type = %s
                and label = %s;
            '''
    query_params = (organization_id, timestamp, timestamp, var_names.dedup_key, dedup_key,)
    try:
        result = conn.fetch(query, query_params)
        ids = []
        for item in result:
            ids.append(item[0])
        return ids
    except psycopg2.DatabaseError:
        raise
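
# Illustrative usage of get_active_instance_task_ids_with_dedup_key (a sketch; the key is
# hypothetical). Typically used to decide whether an incoming event should be de-duplicated
# into an already-open instance instead of opening a new one:
#
#     task_ids = get_active_instance_task_ids_with_dedup_key(
#         conn, datetime.datetime.utcnow(), organization_id=42,
#         dedup_key='db-primary-disk-full')
#     if task_ids:
#         ...  # append the event to the existing task instead of creating a new one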


def get_related_task_ids(conn, timestamp, routing_id=None, integration_id=None, min_timestamp=None, active=True,
                         organization_id=None):
    '''
    Get the IDs of tasks that are related by routing id or integration id.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param routing_id: (int) routing id
    :param integration_id: (int) integration id to check for
    :param min_timestamp: earliest time at which the task occurrence (task start date + task time) may begin
    :param active: if True only gets tasks that are active at the time of query
    :param organization_id: ID of the organization
    :return: (list) of related task ids
    :errors: DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    query_params = {'timestamp': timestamp}
    conditions = []
    if routing_id is not None:
        assert isinstance(routing_id, int)
        conditions.append(" %(rout_id)s = any(routing_id) ")
        query_params['rout_id'] = routing_id

    if integration_id is not None:
        assert isinstance(integration_id, int)
        conditions.append(" integration_id = %(integ_id)s ")
        query_params['integ_id'] = integration_id

    if min_timestamp is not None:
        assert isinstance(min_timestamp, datetime.datetime)
        conditions.append(" task_start_date + task_time >= %(min_time)s ")
        query_params['min_time'] = min_timestamp

    if organization_id is not None:
        assert isinstance(organization_id, int)
        conditions.append(" organization_id = %(org_id)s ")
        query_params['org_id'] = organization_id

    if active:
        conditions.append(" end_timestamp > %(timestamp)s ")

    query = '''
            select taskid from tasks
            where start_timestamp <= %(timestamp)s
                {0};
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')
    try:
        result = conn.fetch(query, query_params)
        data = []
        for item in result:
            data.append(item[0])
        return data
    except psycopg2.DatabaseError:
        raise
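
# Usage sketch (illustrative only; ids are hypothetical). All filters are optional and
# combined with AND, so omitting them returns every task active at `timestamp`.
#
#     task_ids = get_related_task_ids(conn, timestamp, routing_id=7, organization_id=42)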


def redact_instance(conn, timestamp, instance_id, org_id, user_id):
    '''
    Redact an incident from the database completely.
    BE CAREFUL with this function.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param instance_id: ID of the instance to redact
    :param org_id: ID of the organization the instance belongs to
    :param user_id: ID of the user redacting the instance
    :errors: AssertionError, DatabaseError, LookupError
    '''
    assert isinstance(instance_id, int)
    query = " select redact_incident(%s, %s, %s, %s); "
    query_params = (instance_id, org_id, user_id, timestamp,)
    try:
        conn.execute(query, query_params)
    except psycopg2.IntegrityError as e:
        if e.pgcode == errorcodes.CHECK_VIOLATION:
            raise LookupError(errors.err_incident_not_found)
        else:
            raise
    except psycopg2.DatabaseError:
        raise
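
# Usage sketch (illustrative only; ids are hypothetical). This permanently removes the
# incident, so callers should verify the instance id before invoking it.
#
#     redact_instance(conn, timestamp, instance_id=555, org_id=42, user_id=9)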


def get_instance_source_details(conn, instance_id, org_id):
    '''
    Get the details of when and who created an instance. This will be needed
    to find the source details before past incidents can be retrieved.
    :param conn: db connection
    :param instance_id: (int) instance ID
    :param org_id: (int) organization ID
    :return: (dict) -> {inst timestamp: ..., created by: ..., service id: ..., integration id: ...}
    '''
    query = '''
            select instance_timestamp, created_by, serviceid, integration_id
            from task_instances as ti
            join tasks as td
                on td.taskid = ti.taskid
                    and td.start_timestamp <= ti.instance_timestamp
                    and td.end_timestamp > ti.instance_timestamp
            where instanceid = %s
                and ti.organization_id = %s;
            '''
    query_params = (instance_id, org_id,)
    try:
        result = conn.fetch(query, query_params)
        if len(result) == 0:
            raise LookupError(errors.err_unknown_resource)
        for time_, sender, serv_id, integ_id in result:
            return {var_names.instance_timestamp: time_,
                    var_names.created_by: sender,
                    var_names.service_id: serv_id,
                    var_names.integration_id: integ_id}
    except psycopg2.DatabaseError:
        raise
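
# Usage sketch (illustrative only; ids are hypothetical).
#
#     source = get_instance_source_details(conn, instance_id=555, org_id=42)
#     # source -> {instance timestamp, created by, service id, integration id}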


def get_instance_payload(conn, organization_id, org_inst_id):
    '''
    Gets the payload of an instance.
    :param conn: db connection
    :param organization_id: organization ID
    :param org_inst_id: org instance ID
    :return: (json) payload
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(organization_id, int)
    assert isinstance(org_inst_id, int)

    query = '''
            select trigger_info
            from task_instances as ti
            join tasks as td
                on td.taskid = ti.taskid
                    and td.start_timestamp <= ti.instance_timestamp
                    and td.end_timestamp > ti.instance_timestamp
            where ti.organization_id = %s
                and ti.organization_instanceid = %s;
            '''
    query_params = (organization_id, org_inst_id,)
    try:
        result = conn.fetch(query, query_params)
        if len(result) == 0:
            raise LookupError(errors.err_unknown_resource)
        else:
            payload = result[0][0]
            if payload is None:
                raise LookupError(errors.err_payload_not_found)
            else:
                return payload
    except psycopg2.DatabaseError:
        raise
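
# Usage sketch (illustrative only; ids are hypothetical).
#
#     payload = get_instance_payload(conn, organization_id=42, org_inst_id=1001)
#     # raises LookupError if the instance is unknown or has no stored trigger payload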


def get_instance_subscriber_dispatch_information(conn, timestamp, instance_id):
    '''
    Get the basic contact information (email, language and push tokens) of the subscribers of an open instance.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param instance_id: (int) ID of the instance
    :return: (dict) -> {org instance id, instance timestamp, task title, subscribers: [ { details }, ...]}
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(instance_id, int)

    query = '''
            with t1 as (
                select instanceid, organization_instanceid, instance_timestamp, tasks.title as title
                from task_instances as inst
                join tasks using(taskid)
                where instanceid = %(inst_id)s
                    and is_open = true
            )
            , t2 as (
                select instanceid, users.user_id, email, iso_country_code, phone, user_language
                from instance_subscribers as insu
                join users
                    on insu.subscriber_id = users.user_id
                where insu.instanceid = %(inst_id)s
                    and insu.start_timestamp <= %(timestamp)s
                    and insu.end_timestamp > %(timestamp)s
                    and users.start_timestamp <= %(timestamp)s
                    and users.end_timestamp > %(timestamp)s
            )
            , t3 as (
                select user_id, array_agg(push_token) as push_tokens
                from authentication_tokens
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                    and user_id in (select user_id from t2)
                group by user_id
            )
            , t4 as (
                select instanceid, json_agg(json_build_object(
                    'user_id', user_id,
                    'email', email,
                    'language', user_language,
                    'push_token', push_tokens
                )) as subscribers
                from t2
                left join t3 using(user_id)
                group by instanceid
            )
            select t1.organization_instanceid, instance_timestamp, title, t4.subscribers
            from t1 join t4 using(instanceid);
            '''
    query_params = {'timestamp': timestamp, 'inst_id': instance_id}
    try:
        result = conn.fetch(query, query_params)
        data = dict()
        for org_inst_, inst_time_, title_, subscribers_ in result:
            data = {
                var_names.organization_instance_id: org_inst_,
                var_names.instance_timestamp: inst_time_,
                var_names.task_title: title_,
                var_names.subscribers: subscribers_
            }
        return data
    except psycopg2.DatabaseError:
        raise
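
# Usage sketch (illustrative only; the instance id is hypothetical). The result is a single
# dict, or an empty dict if the instance is not open or has no subscribers.
#
#     info = get_instance_subscriber_dispatch_information(conn, timestamp, 555)
#     # info[var_names.subscribers] -> [{'user_id': ..., 'email': ..., 'language': ..., 'push_token': [...]}, ...]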


def get_instance_assignee_dispatch_info(conn, timestamp, instance_id):
    '''
    Get the basic contact information (email, language and push tokens) of the current assignees of an open instance.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param instance_id: (int) ID of the instance
    :return: (dict) -> {org instance id, instance timestamp, task title, assignees: [ { details }, ...]}
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(instance_id, int)

    query = '''
            with t1 as (
                select instanceid, organization_instanceid, instance_timestamp, tasks.title as title
                from task_instances as inst
                join tasks using(taskid)
                where instanceid = %(inst_id)s
                    and is_open = true
                    and tasks.start_timestamp <= %(timestamp)s
                    and tasks.end_timestamp > %(timestamp)s
            )
            , t2 as (
                select instanceid, users.user_id, email, iso_country_code, phone, user_language
                from instance_assignments as inas
                join users
                    on inas.user_policyid = users.policyid
                where inas.instanceid = %(inst_id)s
                    and inas.start_timestamp <= %(timestamp)s
                    and inas.end_timestamp > %(timestamp)s
                    and users.start_timestamp <= %(timestamp)s
                    and users.end_timestamp > %(timestamp)s
            )
            , t3 as (
                select user_id, array_agg(push_token) as push_tokens
                from authentication_tokens
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                    and user_id in (select user_id from t2)
                group by user_id
            )
            , t4 as (
                select instanceid, json_agg(json_build_object(
                    'user_id', user_id,
                    'email', email,
                    'language', user_language,
                    'push_token', push_tokens
                )) as assignees
                from t2
                left join t3 using(user_id)
                group by instanceid
            )
            select t1.organization_instanceid, instance_timestamp, title, t4.assignees
            from t1 join t4 using(instanceid);
            '''
    query_params = {'timestamp': timestamp, 'inst_id': instance_id}
    try:
        result = conn.fetch(query, query_params)
        data = dict()
        for org_inst_, inst_time_, title_, assignees_ in result:
            data = {
                var_names.organization_instance_id: org_inst_,
                var_names.instance_timestamp: inst_time_,
                var_names.task_title: title_,
                var_names.assignees: assignees_
            }
        return data
    except psycopg2.DatabaseError:
        raise
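
# Usage sketch (illustrative only; the instance id is hypothetical). Mirrors the subscriber
# variant above, but keyed on the instance's current assignments.
#
#     info = get_instance_assignee_dispatch_info(conn, timestamp, 555)
#     # info[var_names.assignees] -> [{'user_id': ..., 'email': ..., 'language': ..., 'push_token': [...]}, ...]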


def enable_instance_alerting(conn, timestamp, org_id, instance_id, to_enable=True):
    '''
    Enable or disable alerting for an instance.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param org_id: ID of the organization
    :param instance_id: ID of the instance
    :param to_enable: True if instance alerting should be enabled; False otherwise
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(org_id, int)
    assert isinstance(instance_id, int)
    assert isinstance(to_enable, bool)

    query = '''
            select enable_instance_alerting(%s, %s, %s, %s, %s);
            '''
    query_params = (timestamp, constants.end_timestamp, org_id, instance_id, to_enable,)
    try:
        conn.execute(query, query_params)
    except psycopg2.DatabaseError:
        raise
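
# Usage sketch (illustrative only; ids are hypothetical). Passing to_enable=False pauses
# alerting for the instance.
#
#     enable_instance_alerting(conn, timestamp, org_id=42, instance_id=555, to_enable=False)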


def get_linked_instances(conn, timestamp, organization_id, integration_type, ticket_id):
    '''
    Get the IDs of the instances that are associated with a linkable integration.
    :param conn: db connection
    :param timestamp: timestamp when this request is being made
    :param organization_id: (int) organization ID
    :param integration_type: integration the instance is linked with
    :param ticket_id: the ID of the ticket
    :return: (list of int) instance IDs
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(organization_id, int)
    assert integration_type in configs.allowed_linkable_integrations
    assert isinstance(ticket_id, str)

    query = '''
            select instanceid from linked_instances
            join task_instances as ti using(instanceid)
            where link_timestamp <= %(tmsp)s
                and ti.organization_id = %(org_id)s
                and linked_id = %(tkt_id)s
                and integration_type_id in (
                    select integration_type_id from integration_types
                    where start_date <= %(tmsp)s
                        and end_date > %(tmsp)s
                        and integration_type = %(integ_type)s
                );
            '''
    query_params = {'tmsp': timestamp, 'org_id': organization_id, 'tkt_id': ticket_id, 'integ_type': integration_type}
    try:
        result = conn.fetch(query, query_params)
        data = []
        for item in result:
            data.append(item[0])
        return data
    except psycopg2.DatabaseError:
        raise
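
# Usage sketch (illustrative only): the organization id and ticket id are hypothetical, and
# the integration type must be a member of configs.allowed_linkable_integrations.
#
#     instance_ids = get_linked_instances(conn, timestamp, 42, 'jira', 'OPS-1234')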


def get_organization_pending_instance_ids(conn, organization_id, max_timestamp):
    '''
    Get the IDs of incidents that have been pending for a while. They are identified by when they were created.
    :param conn: db connection
    :param organization_id: ID of the organization
    :param max_timestamp: the latest creation time an incident may have to be considered pending
    :return: (list) of instance IDs
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(organization_id, int)
    assert isinstance(max_timestamp, datetime.datetime)

    query = '''
            select instanceid from task_instances
            where organization_id = %s
                and instance_timestamp <= %s
                and is_open;
            '''
    query_params = (organization_id, max_timestamp,)
    try:
        result = conn.fetch(query, query_params)
        data = []
        for item in result:
            data.append(item[0])
        return data
    except psycopg2.DatabaseError:
        raise
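
# Usage sketch (illustrative only; the organization id and cut-off are hypothetical).
#
#     cutoff = timestamp - datetime.timedelta(hours=4)
#     pending_ids = get_organization_pending_instance_ids(conn, 42, cutoff)
#     # -> ids of still-open instances created at or before the cut-off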


def get_instance_status_update_details(conn, start_time, end_time, instance_ids=None):
    '''
    Get the details needed for sending notifications to instance subscribers. This is treated differently from
    business subscribers because notifications are sent to instance subscribers for every status update,
    not just when a business service is impacted.
    :param conn: db connection
    :param start_time: minimum timestamp for data validity
    :param end_time: maximum timestamp for data validity
    :param instance_ids: (list) of instance IDs to check for
    :return: (list of dict) status update details for each matching instance
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(start_time, datetime.datetime)
    assert isinstance(end_time, datetime.datetime)
    query_params = {'last_time': start_time, 'curr_time': end_time}

    if instance_ids is None:
        condition = '''
            ti.is_open or (
                not ti.is_open
                    and ti.instance_timestamp > %(last_time)s - interval '2 DAYS'
                    and ti.resolved_on > %(last_time)s
                    and ti.resolved_on <= %(curr_time)s
            )
        '''
    else:
        condition = " ti.instanceid in (%(inst_ids)s) "
        query_params['inst_ids'] = helpers.get_int_list(instance_ids)

    query = '''
            with t1 as (
                select ti.instanceid, ti.organization_instanceid, ti.instance_timestamp, taskid, resolved_on,
                    case
                        when resolved_on is null then %(curr_time)s
                        else resolved_on - interval '50 milliseconds'
                    end as check_time
                from task_instances as ti
                where {0}
            )
            , t2 as (
                select t1.instanceid, organization_instanceid, instance_timestamp, resolved_on,
                    tasks.organization_id, tasks.title, array_agg(users.policyid) as inst_sub
                from t1
                join tasks
                    on tasks.taskid = t1.taskid
                        and tasks.start_timestamp <= t1.check_time
                        and tasks.end_timestamp > t1.check_time
                join instance_subscribers as sub
                    on sub.instanceid = t1.instanceid
                        and sub.start_timestamp <= t1.check_time
                        and sub.end_timestamp > t1.check_time
                join users
                    on users.user_id = sub.subscriber_id
                        and users.start_timestamp <= t1.check_time
                        and users.end_timestamp > t1.check_time
                group by t1.instanceid, organization_instanceid, t1.instance_timestamp, resolved_on,
                    tasks.title, tasks.organization_id
            )
            , t3 as (
                select instanceid, json_agg(json_build_object(
                    'timestamp', update_timestamp,
                    'status_update', status_update
                )) as inst_upd
                from instance_updates
                where instanceid in (select instanceid from t2)
                    and update_timestamp > %(last_time)s
                    and update_timestamp <= %(curr_time)s
                group by instanceid
            )
            select t2.instanceid, t2.organization_instanceid, t2.instance_timestamp, t2.resolved_on,
                t2.organization_id, t2.title, t2.inst_sub, t3.inst_upd
            from t2
            left join t3 using(instanceid);
            '''.format(condition)
    try:
        result = conn.fetch(query, query_params)
        data = []
        for inst_id, org_inst_id, inst_time, resolved_on, org_id, tsk_title, inst_subscribers, inst_updates in result:
            if inst_updates is not None:
                for item in inst_updates:
                    item[var_names.timestamp] = times.get_timestamp_from_string(item[var_names.timestamp])

            data.append({
                var_names.instance_id: inst_id,
                var_names.organization_instance_id: org_inst_id,
                var_names.instance_timestamp: inst_time,
                var_names.resolved_on: resolved_on,
                var_names.organization_id: org_id,
                var_names.task_title: tsk_title,
                var_names.status_update: inst_updates,
                var_names.subscribers: inst_subscribers
            })
        return data
    except psycopg2.DatabaseError:
        raise
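
# Usage sketch (illustrative only; the reporting window is hypothetical). When instance_ids
# is omitted, the window covers open instances plus instances resolved within it.
#
#     rows = get_instance_status_update_details(conn, start_time, end_time)
#     for row in rows:
#         updates = row[var_names.status_update] or []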
