#!/usr/bin/env python

import os
from os.path import join
import pprint
import re
import sys

from Debug import _line as line

from ConcatenatedLogStream import ConcatenatedLogStream

class FailoverMatches(ConcatenatedLogStream):
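    """
    Scan a set of log files, presented as one concatenated stream, for
    volume-failover events identified by sequences of regular expressions.
    """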

    def __init__(self,filename_array):
        ConcatenatedLogStream.__init__(self,filename_array)

    def get_matching_records_by_regex_sequence(self,regex_array,file_pointer):
        """
        Given a sequence of regular expressions (in regex_array), return a
        list of the first occurrences of the records containing these
        regular expressions.
        """
        matching_recs = []
        pointer_ = file_pointer
        for regex in regex_array:
            # each search starts from the same saved file position
            match_rec = self.get_one_matching_record(regex,pointer_)
            matching_recs.append(match_rec)
        return(matching_recs)

    def p_print(self,line_,matching_recs):
        # line_ avoids shadowing the imported line() helper
        pp = pprint.PrettyPrinter(indent=4)
        print line_+". ",
        pp.pprint(matching_recs)

    def match_generator(self,regex):
        """
        Yield each line of self.input_file that matches regex, together
        with the groups captured by the expression.
        """
        generator_ = self.line_generator()
        while True:
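            # Remember where the line about to be read begins, and stop
            # once that position reaches the offset of the last line.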
            self.file_pointer = self.input_file.tell()
            if self.file_pointer != 0:
                self.file_pointer -= 1
            if (self.file_pointer + 2) >= self.last_line_offset:
                break
            line_ = generator_.next()
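            # Show scan progress: the percentage of the stream not yet read.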
            print "%.2f%%   \r" % (((self.last_line_offset - self.input_file.tell()) / (self.last_line_offset * 1.0)) * 100.0),
            if not line_:
                break
            else:
                match_ = regex.match(line_)
                if match_:
                    groups_ = re.findall(regex,line_)
                    yield line_.strip("\n"), groups_

    def get_matching_records_by_regex_extremes(self,regex_array):
        """
        Function will:
        Find the record matching the first item of regex_array.
        Will save all records until the last item of regex_array.
        Will save the last line.
        Will remember the position of the beginning of the next line in
        self.input_file.
        """
        start_regex = regex_array[0]
        end_regex = regex_array[-1]

        all_recs = []
        generator_ = self.match_generator

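        # Build a fresh generator for start_regex; StopIteration here
        # means the stream holds no further events.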
        try:
            match_start,groups_ = generator_(start_regex).next()
        except StopIteration:
            return(None)

        if match_start is not None:
            all_recs.append([match_start,groups_])

            # Reuse one generator instead of creating a new one per line.
            line_gen = self.line_generator()
            line_ = line_gen.next()
            while line_:
                match_ = end_regex.match(line_)
                if match_ is not None:
                    groups_ = re.findall(end_regex,line_)
                    all_recs.append([line_,groups_])
                    return(all_recs)
                else:
                    all_recs.append([line_,[]])
                    line_ = line_gen.next()
        else:
            return(all_recs)

    def get_dpm_name(self, rec):
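        # rec is one findall() tuple from make_sac_log_line_regex:
        # (timestamp, DPM name, process, level, message after the tag).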
        return(rec[1])

    def get_vol_name(self,rec):
        regex = re.compile(r"\[(.*)\]")
        match_ = regex.search(rec[4])
        return(match_.group(1))

    def get_epoch(self,rec):
        epoch_ = self.time_to_epoch(self.add_year_to_log_time(rec[0]))
        return(epoch_)

    def get_lines_of_event(self,event):
        lines = []
        for rec in event:
            lines.append(rec[0])
        return(lines)

    def make_db(self, matching_recs):
        """
        Build self.db: one dictionary per failover event, holding the
        event's lines, DPM name, volume name, start/end times and
        duration.
        """
        self.db = []
        rec_num = 0
        print line()+". building event database"
        for event in matching_recs:
            start_rec = event[0]
            end_rec = event[-1]
            db_rec = {}
            db_rec["lines"] = self.get_lines_of_event(event)
            db_rec["DPM"] = self.get_dpm_name(start_rec[1][0])
            db_rec["volume_name"] = self.get_vol_name(start_rec[1][0])
            if self.get_dpm_name(end_rec[1][0]) != db_rec["DPM"]:
                print line()+". ERROR: DPM name on first line (%s) differ from DPM name on last line (%s)." % ( db_rec["DPM"], self.get_dpm_name(end_rec[1][0]) )
            epoch_start = self.get_epoch(start_rec[1][0])
            db_rec["start_time"] = self.epoch_to_time(epoch_start)
            epoch_end = self.get_epoch(end_rec[1][0])
            db_rec["end_time"] = self.epoch_to_time(epoch_end)
            db_rec["event_duration"] = epoch_end - epoch_start
            db_rec["event"] = "Volume failover"
            db_rec["rec_num"] = rec_num
            rec_num += 1

            self.db.append(db_rec)

if __name__ == "__main__":
    # Import Psyco if available
    try:
        import psyco
        psyco.log()
        psyco.profile(0.2)
    except ImportError:
        pass

    print line()+". len(sys.argv):",len(sys.argv)
    try:
        if len(sys.argv) > 2:
            filename_stem = sys.argv[2]
            print line()+". filename_stem:",filename_stem
    except AttributeError:
        sys.exit(1)

    try:
        if len(sys.argv) > 1:
            directory = sys.argv[1]
            print line()+". directory:",directory
        else:
            print "usage: "+sys.argv[0]+" dir [file-name-stem]"
            sys.exit(1)
    except AttributeError:
        sys.exit(1)

    try:
        if not filename_stem:
            filename_stem = "sac.log"
    except NameError:
        filename_stem = "sac.log"
    file_ext_list = [".gz"]
    sac_files = []
    files = [os.path.normcase(f) for f in os.listdir(directory)]
    print line()+". files:",files
    print ("%s. creating sac_files" % (line()))
    for f in files:
        if os.path.splitext(f)[0].find(filename_stem) != -1:
            if os.path.splitext(f)[1] in file_ext_list:
                f = join(directory,f)
                sac_files.append(f)
    sac_files.sort(reverse=True)

    print line()+". sac_files:",sac_files
    if sac_files:
        sac_files.append(sac_files[0])
        del sac_files[0]
    else:
        print ("%s. Error: no matching %s* files found in %s" % (line(), filename_stem,directory))
        sys.exit(2)


    concatenated_log_stream = FailoverMatches(sac_files)

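    # make_files_list() appears to re-derive the matching-file list from
    # the same directory and stem; it is printed here as a cross-check
    # against the list assembled above.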
    sac_files = concatenated_log_stream.make_files_list(directory,filename_stem)
    print line()+". sac_files:",sac_files

    log_stream = concatenated_log_stream
    log_stream.input_file.seek(0)
    offset, last_line_content = log_stream.last_line_loc_and_contents()
    print line()+". Last line: [%r] ; offset %i" % (last_line_content, offset)

    matching_recs = []
    print line()+". searching for end_timestamp"
    end_timestamp = log_stream.get_end_timestamp()

    log_stream.input_file.seek(0)

    def make_sac_log_line_regex(variable_text):
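        # Group 1 is a syslog-style timestamp (e.g. "Jan 3 10:22:01"),
        # groups 2-4 are the host/process/level fields, and group 5 is
        # the text following the bracketed tag.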
        text_ = "(\s*[a-zA-Z]{3}\s\d+\s\S+)\s+(.+?)\s+(.+?):(.+?):\s*\[%s\s*](.+)$" % (variable_text)
        regex = re.compile(text_)
        return(regex)

    print line()+". creating regular expressions"
    regex_array = []
    regex_array.append(make_sac_log_line_regex("daemon_send_failover_req"))
    regex_array.append(make_sac_log_line_regex("svm_send_volume_failover"))
    regex_array.append(make_sac_log_line_regex("vt_send_failover_to_vdev"))

    print line()+". finding matching records"
    while log_stream.input_file.tell() < log_stream.last_line_offset:
        recs = log_stream.get_matching_records_by_regex_extremes(regex_array)
        if recs is None:
            break
        matching_recs.append(recs)

    print
    log_stream.make_db(matching_recs)
    log_stream.p_print(line(),log_stream.db)
