# By: Riasat Ullah
# This file contains functions for handling the analytics to be provided for teams.

from analytics import analytics_tools
from analytics.instance_analyzer import InstanceAnalyzer
from dbqueries import db_analytics, db_teams
from utils import errors, var_names


class TeamAnalytics(object):
    '''
    Builds the analytics shown for teams: daily and aggregate metrics for
    incident counts, response times, off-hour interruptions and escalation
    counts over a reporting period, optionally filtered to selected teams.
    '''

    def __init__(self, conn, organization_id, start_date, end_date, timezone, urgency_levels=None, team_refs=None,
                 tags=None, user_id=None):
        '''
        :param conn: open database connection
        :param organization_id: ID of the organization being analyzed
        :param start_date: start of the reporting period
        :param end_date: end of the reporting period
        :param timezone: timezone used when bucketing instances into days
        :param urgency_levels: (list or None) restrict instances to these urgency levels
        :param team_refs: (list or None) team ref IDs to report on; None means all teams
        :param tags: (list or None) restrict instances to these tags
        :param user_id: (int or None) restrict instances to this user
        :raises LookupError: when teams were requested but the organization has
            no team components in the period
        '''
        self.conn = conn
        self.organization_id = organization_id
        self.start_date = start_date
        self.end_date = end_date
        self.timezone = timezone
        self.urgency_levels = urgency_levels
        self.team_refs = team_refs
        self.tags = tags
        self.user_id = user_id
        self.all_team_components = db_teams.get_organization_team_components_for_analytics(
            self.conn, start_date, end_date, organization_id)

        # NOTE(review): this only catches the "organization has no team
        # components at all" case; it does not verify that each requested ref
        # actually exists in all_team_components — confirm that is intended.
        if team_refs is not None and not self.all_team_components:
            raise LookupError(errors.err_unknown_resource)

        # we do not filter by the team services and policies in the db query for retrieving the instances because
        # it will not be possible to identify the ownership of the components by time validity
        self.instances = db_analytics.get_instances_for_analysis(
            self.conn, start_date, end_date, organization_id, urgency_levels, only_team_components=True,
            tags=self.tags, user_id=self.user_id
        )
        self.final_metrics = self.get_metrics_data_format()

    @staticmethod
    def get_metrics_data_format():
        '''
        Get the dict structure in which the final metrics will be returned.
        Every metric carries a daily series and an aggregate series, each with
        parallel label/data lists.
        :return: (dict of dict)
        '''
        def empty_metric():
            # build a fresh structure per metric so no lists/dicts are shared
            return {var_names.daily: {var_names.labels: [], var_names.data: []},
                    var_names.aggregate: {var_names.labels: [], var_names.data: []}}

        return {
            var_names.incidents: empty_metric(),
            var_names.response_time: empty_metric(),
            var_names.interruption_hours: empty_metric(),
            var_names.escalation_count: empty_metric(),
        }

    def get_metrics(self):
        '''
        Get all the team metrics, both the daily metrics of the selected teams
        and the aggregate metrics for comparison.
        :return: (dict of dict) of metrics
        '''
        self.prepare_daily_metrics()
        self.prepare_aggregate_metrics()
        return self.final_metrics

    def prepare_daily_metrics(self):
        '''
        Prepare the daily metrics for the selected teams and store them under
        the "daily" slot of each metric in self.final_metrics.
        '''
        analyzer = InstanceAnalyzer(self.timezone, self.instances)
        analyzer.create_off_hour_interruption_column()
        analyzer.create_escalation_count_column()

        if self.team_refs is not None:
            analyzer.filter_by_teams(self.all_team_components, self.team_refs)

        # pair each output metric with the daily series that feeds it
        daily_series = (
            (var_names.incidents, analyzer.get_daily_instance_count()),
            (var_names.response_time, analyzer.get_daily_response_effort()),
            (var_names.interruption_hours, analyzer.get_daily_count_of_instances_in_off_hours()),
            (var_names.escalation_count, analyzer.get_daily_count_of_escalated_instances()),
        )
        for metric_key, series in daily_series:
            period = analytics_tools.get_period_labels_and_values(self.start_date, self.end_date, series)
            self.final_metrics[metric_key][var_names.daily][var_names.labels] = period[0]
            self.final_metrics[metric_key][var_names.daily][var_names.data] = period[1]

    def prepare_aggregate_metrics(self):
        '''
        Prepare the aggregate metrics of the selected teams for comparison with each other. When no teams
        are selected, the aggregate metrics of all the teams of the organization are retrieved.
        '''
        incident_counts = {}
        response_counts = {}
        interruption_counts = {}
        escalation_counts = {}

        # analyze each team in isolation so its component ownership window is respected
        for ref in self.all_team_components:
            analyzer = InstanceAnalyzer(self.timezone, self.instances)
            analyzer.filter_by_teams(self.all_team_components, [ref])
            analyzer.create_off_hour_interruption_column()
            analyzer.create_escalation_count_column()

            incident_counts[ref] = analyzer.get_aggregate_instance_count()
            response_counts[ref] = analyzer.get_aggregate_response_effort(in_hours=True)
            interruption_counts[ref] = analyzer.get_aggregate_count_of_instances_in_off_hours()
            escalation_counts[ref] = analyzer.get_aggregate_count_of_escalated_instances()

        # no explicit selection means: compare every team of the organization
        refs_to_check = list(self.all_team_components) if self.team_refs is None else self.team_refs

        aggregate_sources = (
            (var_names.incidents, incident_counts),
            (var_names.response_time, response_counts),
            (var_names.interruption_hours, interruption_counts),
            (var_names.escalation_count, escalation_counts),
        )
        for metric_key, counts in aggregate_sources:
            self.final_metrics[metric_key][var_names.aggregate] = self.data_category_aggregate_metrics(
                counts, refs_to_check)

    def data_category_aggregate_metrics(self, data_dict, check_refs):
        '''
        This is a helper function that helps create the labels, data values and averages of the aggregate metrics.
        :param data_dict: (dict) of metric values keyed on team ref ID
        :param check_refs: (list) of team ref IDs to include in the output
        :return: (dict) with team-name labels, metric values sorted in
            descending order, and the average value
        '''
        # set for O(1) membership instead of scanning the list per key
        wanted = set(check_refs)
        sliced = {ref: val for ref, val in data_dict.items() if ref in wanted}
        # highest value first; sorted() is stable, so ties keep insertion order
        ord_items = sorted(sliced.items(), key=lambda item: item[1], reverse=True)
        # NOTE(review): the average spans every team in data_dict, not just
        # check_refs — presumably an org-wide benchmark; confirm with callers.
        if data_dict:
            average = analytics_tools.convert_nan_to_number(sum(data_dict.values()) / len(data_dict))
        else:
            average = 0
        return {
            var_names.labels: [self.all_team_components[ref][var_names.team_name] for ref, _ in ord_items],
            var_names.data: [analytics_tools.convert_nan_to_number(val) for _, val in ord_items],
            var_names.average: average
        }