# By: Riasat Ullah
# BusinessImpactAnalytics contains methods to produce business impact related metrics and analysis.
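#
# Typical usage (an illustrative sketch only; the connection, dates, timezone and rates
# below are placeholders, not values taken from this codebase):
#
#   bia = BusinessImpactAnalytics(conn, timestamp, organization_id,
#                                 start_date, end_date, timezone,
#                                 hourly_business_loss=5000, hourly_labor_cost=150)
#   metrics = bia.get_metrics()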

from analytics import analytics_tools
from analytics.instance_analyzer import InstanceAnalyzer
from dbqueries import db_analytics, db_teams
from utils import helpers, key_manager, var_names
import configuration as configs


class BusinessImpactAnalytics(object):

    def __init__(self, conn, timestamp, organization_id, start_date, end_date, timezone, team_refs=None,
                 hourly_business_loss=None, hourly_labor_cost=None, tags=None, user_id=None):
        self.conn = conn
        self.timestamp = timestamp
        self.organization_id = organization_id
        self.start_date = start_date
        self.end_date = end_date
        self.timezone = timezone
        self.hourly_business_loss = (hourly_business_loss if hourly_business_loss is not None
                                     else configs.standard_hourly_business_loss)
        self.hourly_labor_cost = (hourly_labor_cost if hourly_labor_cost is not None
                                  else configs.standard_hourly_labor_cost)
        self.tags = tags
        self.user_id = user_id

        self.team_refs = team_refs
        self.all_team_components = db_teams.get_organization_team_components_for_analytics(
            self.conn, start_date, end_date, organization_id)
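        # all_team_components is assumed to be a mapping whose values carry at least the team name,
        # the team ref id and a list of (service_id, ...) tuples under var_names.services; that is
        # how it is consumed in create_loudest_services_report below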

        # get_previous_periods is expected to return (start, end) tuples for the last 6 equivalent
        # periods, ordered oldest first with the current period as the final entry
        self.last_six_periods = analytics_tools.get_previous_periods(start_date, end_date, 6)

        # We do not filter by team services and policies in the DB query that retrieves the
        # instances, because that would make it impossible to attribute component ownership by
        # time validity. The query spans from the start of the earliest trend period through
        # end_date so that a single fetch serves both the current-period metrics and the trend.
        self.instances = db_analytics.get_instances_for_analysis(self.conn, self.last_six_periods[0][0],
                                                                 self.end_date, organization_id,
                                                                 only_team_components=True, tags=self.tags,
                                                                 user_id=self.user_id)

    def get_metrics(self):
        '''
        Get all the analytical metrics describing the business impact on the organization (or its teams).
        :return: (dict) of metrics
        '''
        return {
            **self.create_period_metrics_and_trend(),
            var_names.most_impacted_business_services: self.create_most_impacted_business_services_report(),
            var_names.loudest_services: self.create_loudest_services_report(),
            var_names.costliest_incidents: self.create_costliest_instances_report()
        }
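
    # Illustrative shape of the mapping returned by get_metrics (the real key strings come from
    # var_names and every number here is made up, so treat this purely as a sketch):
    #
    #   {
    #       'incidents': {'total': 42, 'trend': [31, 28, 35, 40, 37, 42]},
    #       'downtime': {'total': 310, 'trend': [...]},
    #       'business_impact': {'total': 6, 'trend': [...]},
    #       'business_impact_downtime': {'total': 95, 'trend': [...]},
    #       'business_cost': 1583, 'labor_cost': 620, 'total_cost': 2203,
    #       'most_impacted_business_services': [...],
    #       'loudest_services': [...],
    #       'costliest_incidents': [...]
    #   }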

    def create_period_metrics_and_trend(self):
        '''
        Create the metrics for the current period and the trend of those metrics over the last 6 equivalent periods.
        :return: (dict) of metrics
        '''
        # get the metrics for the current period first
        analyzer = InstanceAnalyzer(self.timezone, self.instances)
        analyzer.filter_by_period(self.start_date, self.end_date)

        curr_inst_count = analyzer.get_aggregate_instance_count()
        curr_impact_count = analyzer.get_aggregate_business_impacting_instance_count()
        curr_response_effort = analytics_tools.convert_nan_to_number(analyzer.get_aggregate_response_effort())
        curr_business_downtime = analytics_tools.convert_nan_to_number(analyzer.get_aggregate_business_downtime())

        # calculate the monetary costs
        business_loss = round((curr_business_downtime / 60) * self.hourly_business_loss)
        labor_cost = round((curr_response_effort / 60) * self.hourly_labor_cost)
        total_cost = business_loss + labor_cost
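
        # Worked example of the arithmetic above (illustrative numbers only):
        #   curr_business_downtime = 90 minutes, curr_response_effort = 240 minutes,
        #   hourly_business_loss = 1000, hourly_labor_cost = 120
        #   business_loss = round((90 / 60) * 1000) = 1500
        #   labor_cost = round((240 / 60) * 120) = 480
        #   total_cost = 1980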

        # gather data for the trend
        trend_inst_count = []
        trend_impact_count = []
        trend_response_effort = []
        trend_business_downtime = []

        # build the trend from the earlier periods; the current period's values computed above are appended last
        for i in range(len(self.last_six_periods) - 1):
            start_, end_ = self.last_six_periods[i]
            analyzer = InstanceAnalyzer(self.timezone, self.instances)
            analyzer.filter_by_period(start_, end_)

            trend_inst_count.append(analyzer.get_aggregate_instance_count())
            trend_impact_count.append(analyzer.get_aggregate_business_impacting_instance_count())
            trend_response_effort.append(analytics_tools.convert_nan_to_number(
                analyzer.get_aggregate_response_effort()))
            trend_business_downtime.append(analytics_tools.convert_nan_to_number(
                analyzer.get_aggregate_business_downtime()))

        trend_inst_count.append(curr_inst_count)
        trend_impact_count.append(curr_impact_count)
        trend_response_effort.append(curr_response_effort)
        trend_business_downtime.append(curr_business_downtime)

        # note: the generic downtime metric reflects the aggregate response effort, while the
        # downtime of business-impacting instances is reported separately below
        return {
            var_names.incidents: {var_names.total: curr_inst_count, var_names.trend: trend_inst_count},
            var_names.downtime: {var_names.total: curr_response_effort, var_names.trend: trend_response_effort},
            var_names.business_impact: {var_names.total: curr_impact_count, var_names.trend: trend_impact_count},
            var_names.business_impact_downtime: {var_names.total: curr_business_downtime,
                                                 var_names.trend: trend_business_downtime},
            var_names.business_cost: business_loss,
            var_names.labor_cost: labor_cost,
            var_names.total_cost: total_cost
        }

    def create_most_impacted_business_services_report(self):
        '''
        Create the report for the business services that were impacted the most. The top 5 are added to the report
        and are ordered by the cost (descending).
        :return: (list of dict) of metrics
        '''
        analyzer = InstanceAnalyzer(self.timezone, self.instances)
        analyzer.filter_by_period(self.start_date, self.end_date)
        impacted_instances = analyzer.get_business_impacting_instances()

        sub_data = {}
        for item in impacted_instances:
            counted_bus_ids = set()
            for imp_bus in item[var_names.impacted_business_services]:
                bus_id = imp_bus[var_names.business_service_id]

                # impacted duration in minutes
                imp_duration = (imp_bus[var_names.valid_end]
                                - imp_bus[var_names.valid_start]).total_seconds() / 60
                # business loss for the impacted window plus labor cost for the whole instance
                imp_cost = (round((imp_duration / 60) * self.hourly_business_loss)
                            + round((item[var_names.duration] / 60) * self.hourly_labor_cost))

                if bus_id not in sub_data:
                    sub_data[bus_id] = {
                        var_names.business_service_name: imp_bus[var_names.business_service_name],
                        var_names.business_service_ref_id: imp_bus[var_names.business_service_ref_id],
                        var_names.count: 0,
                        var_names.duration: 0,
                        var_names.total_cost: 0,
                        var_names.services: []
                    }

                # count each instance at most once per impacted business service
                if bus_id not in counted_bus_ids:
                    sub_data[bus_id][var_names.count] += 1
                    counted_bus_ids.add(bus_id)

                sub_data[bus_id][var_names.duration] += imp_duration
                sub_data[bus_id][var_names.total_cost] += imp_cost

                # Add service as a dictionary with name and ref_id
                service_entry = {
                    var_names.service_name: item[var_names.service_name],
                    var_names.service_ref_id: item[var_names.service_ref_id]
                }

                # Check if this service is already in the list (by ref_id)
                service_exists = any(
                    s.get(var_names.service_ref_id) == item[var_names.service_ref_id]
                    for s in sub_data[bus_id][var_names.services]
                )

                if not service_exists:
                    sub_data[bus_id][var_names.services].append(service_entry)

        new_data = helpers.sorted_list_of_dict(list(sub_data.values()),
                                               sort_by_key=var_names.total_cost,
                                               descending=True)[:5]
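
        # Each entry is shaped roughly like this (key strings come from var_names; the fields are
        # the ones built up above):
        #   {business_service_name, business_service_ref_id, count, duration (minutes),
        #    total_cost, services: [{service_name, service_ref_id}, ...]}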

        return new_data

    def create_loudest_services_report(self):
        '''
        Create the report for the loudest services. The top 5 are added to the report and are ordered by
        the number of instances that have occurred on them (descending).
        :return: (list of dict) of metrics
        '''
        analyzer = InstanceAnalyzer(self.timezone, self.instances)
        analyzer.filter_by_period(self.start_date, self.end_date)
        service_details = analyzer.get_loudest_services_details(5)
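        # get_loudest_services_details(5) is assumed to return per-service aggregates for the five
        # noisiest services, including at least the service id, duration and downtime (in minutes)
        # used below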

        for item in service_details:
            srv_id = item[var_names.service_id]
            item[var_names.labor_cost] = round(
                (item[var_names.duration] / 60) * self.hourly_labor_cost)
            item[var_names.business_cost] = round(
                (item[var_names.downtime] / 60) * self.hourly_business_loss)

            # Build the list of teams that own this service (team name and team ref id)
            service_teams = []
            for team_comp in self.all_team_components.values():
                if srv_id in [x[0] for x in team_comp[var_names.services]]:
                    service_teams.append({
                        var_names.team_name: team_comp[var_names.team_name],
                        var_names.team_ref_id: team_comp[var_names.team_ref_id]
                    })

            item[var_names.teams] = service_teams
            # the internal service id is only needed for the team lookup above
            del item[var_names.service_id]

        return service_details

    def create_costliest_instances_report(self):
        '''
        Create the report for the costliest instances. The top 5 are added to the report
        and are ordered by the cost (descending).
        :return: (list of dict) of details of the costliest incidents
        '''
        analyzer = InstanceAnalyzer(self.timezone, self.instances)
        analyzer.filter_by_period(self.start_date, self.end_date)
        impacted_instances = analyzer.get_business_impacting_instances()

        new_data = []
        for item in impacted_instances:
            bus_services = []
            total_cost = 0

            for imp_bus in item[var_names.impacted_business_services]:
                bus_services.append({
                    var_names.business_service_name: imp_bus[var_names.business_service_name],
                    var_names.business_service_ref_id: imp_bus[var_names.business_service_ref_id]
                })

                imp_duration = (imp_bus[var_names.valid_end]
                                - imp_bus[var_names.valid_start]).total_seconds() / 60
                total_cost += (round((imp_duration / 60) * self.hourly_business_loss)
                               + round((item[var_names.duration] / 60) * self.hourly_labor_cost))

            service_ref_id = item[var_names.service_ref_id]

            new_data.append({
                var_names.organization_instance_id: item[var_names.organization_instance_id],
                var_names.task_title: item[var_names.task_title],
                var_names.created_on: item[var_names.regional_timestamp],
                var_names.duration: item[var_names.duration],
                var_names.urgency_level: item[var_names.urgency_level],
                var_names.service_name: item[var_names.service_name],
                var_names.service_ref_id: service_ref_id,
                var_names.impacted_business_services: bus_services,
                var_names.total_cost: total_cost
            })

        new_data = helpers.sorted_list_of_dict(new_data,
                                               sort_by_key=var_names.total_cost,
                                               descending=True)[:5]
        return new_data