import logging
import urllib2
from ftplib import FTP
import base64
import re
import os
import glob
import numpy as np
import datetime
from dateutil import rrule
from dateutil.parser import parse
import tempfile
import types

import matplotlib.pyplot as plt
from scipy.io.netcdf import netcdf_file

import gdal
import osr

from nansat import Nansat, Domain, Figure, Mosaic
from nansat.tools import add_logger

# load database manager
from . import L2Catalog

# load configuration
from .config import l3 as l3config

class L3PROC(object):
    '''Common methods for L3 processing of satellite data:
        * Set-up of visualisation schemas
        * Reading of configuration
        * Spatial/temporal averaging
        * Visualisation
    '''
    # default binning period
    binPeriod = 'daily'

    # schemas of visualisation:
    # how many images to make and with which presets
    fSchemas = {
        # default simplest schema, only one file with simplest preset
        '0': [{'preset': 0, 'width': None, 'extension': '.png'}],
        # three files (preview, full-res jpg with legend, full-res png w/o legend)
        'hab': [{'preset': 0, 'width': 140,  'extension': 's.jpg'},
                {'preset': 1, 'width': None, 'extension': '.jpg'},
                {'preset': 0, 'width': None, 'extension': '.png'},
                ],
        # three files (as 'hab', but the full-res jpg uses the smaller-font legend preset)
        'hab2': [{'preset': 0, 'width': 140,  'extension': 's.jpg'},
                 {'preset': 2, 'width': None, 'extension': '.jpg'},
                 {'preset': 0, 'width': None, 'extension': '.png'},
                 ],
        # two files (small preview and full size image) RS2ICE L3
        'rs2ice': [{'preset': 0, 'width': 140,  'extension': 's.png'},
                   {'preset': 3, 'width': None, 'extension': '.png'},
                  ],
        }

    # presets for visualisation: all parameters for Figure()
    # which colormap, if logarithm, where logo, etc..
    fPresets = {
        # default simplest preset
        0: {
            'legend': False,
            },
        # + legend with title (HAB)
        1: {
            'legend': True,
            'titleString': 'Algae Monitoring',
            'fontSize': 24,
            'numOfTicks': 8,
            'logoFileName': '/Home/antonk/sadcat/process/hab/NERSC_LOGO_SMALL.PNG',
            'logoLocation': [-1, -5],
            'logoSize': [200, 100],
            'CBAR_LOCATION_X': 0.05,
            'CBAR_WIDTH': 0.7,
            'TEXT_LOCATION_X': 0.05,
            'NAME_LOCATION_X': 0.05,
            },
        # + legend with title smaller font
        2: {
            'legend': True,
            'titleString': 'Algae Monitoring',
            'fontSize': 10,
            'numOfTicks': 5,
            'logoFileName': 'NERSC_LOGO_SMALL.PNG',
            'logoLocation': [-1, -5],
            'logoSize': [200, 100],
            'CBAR_LOCATION_X': 0.05,
            'CBAR_WIDTH': 0.7,
            'TEXT_LOCATION_X': 0.05,
            'NAME_LOCATION_X': 0.05,
            },
        # + legend (RS2ICE L3)
        3: {
            'legend': True,
            'fontSize': 18,
            'addDate': True,
            'CBAR_LOCATION_X': 0.2,
            'CBAR_LOCATION_Y': 0.3,
            'CBAR_WIDTH': 0.7,
            'CBAR_HEIGHT': 0.3,
            'NAME_LOCATION_X': 0.2,
            'NAME_LOCATION_Y': 0,
            'logoFileName': 'NERSC_LOGO_SMALL.PNG',
            'logoLocation': [0, -1],
            'logoSize': [150, 75],
            },
        }
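
    # Illustration (names local to this comment, not part of the module): a
    # schema entry and its preset are meant to be combined into the keyword
    # arguments of the figure-writing step, roughly like:
    #
    #   schema = fSchemas['hab'][1]                # full-res jpg with legend
    #   figParams = dict(fPresets[schema['preset']])
    #   # schema['width'] (if not None) and schema['extension'] then control
    #   # the size and file-name suffix of that particular output image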

    # parameters of temporal binning; 'duration' is the bin length in days
    tempBinningParams = {
        'hourly': {
            'dateSuffix': '_1h',
            'duration': 1/24.,
            'step': rrule.HOURLY,
            'interval': 1,
            },
        'daily': {
            'dateSuffix': '_1d',
            'duration': 1,
            'step': rrule.DAILY,
            'interval': 1,
            },
        'three_day': {
            'dateSuffix': '_3d',
            'duration': 3,
            'step': rrule.DAILY,
            'interval': 3,
            },
        'weekly': {
            'dateSuffix': '_7d',
            'duration': 7,
            'step': rrule.DAILY,
            'interval': 1,
            },
        'monthly': {
            'dateSuffix': '_1m',
            'duration': 30,          # approximate month length in days
            'step': rrule.MONTHLY,
            'interval': 1,
            }
        }
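
    # Illustration: 'step' and 'interval' feed straight into dateutil.rrule
    # when candidate bin start times are enumerated (see get_start_datetimes
    # below), e.g. for 'three_day' every third day is a potential bin start:
    #
    #   list(rrule.rrule(rrule.DAILY, interval=3,
    #                    dtstart=datetime.datetime(2012, 1, 1),
    #                    until=datetime.datetime(2012, 1, 10)))
    #   # --> Jan 1, Jan 4, Jan 7, Jan 10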

    def __init__(self, zoneName, l2CatalogName, l2Class=Nansat, logLevel=30):
        '''Initialize L3 processor:
            Make logger, get configuration, create Domain
        '''
        self.logger = add_logger('L3PROC', logLevel)
        # read configuration
        self.zone = l3config[zoneName]

        # create destination domain
        self.dstDomain = Domain(**self.zone['domain'])

        # create destination watermask (array)
        self.wm = Nansat(domain=self.dstDomain).watermask()[1]

        # catalog with L2-data
        self.dbName = l2CatalogName

        # L2-data opener
        self.l2Class = l2Class

        self.logger.debug(str(self.dstDomain))

    def l3(self, binPeriod='daily'):
        '''L3 processing:

        Get list of available files from the database
        Select the unprocessed ones
        Get list of dates from the unprocessed files
        For each date:
            select files for this date
            run averaging
            run visualisation
        '''
        # set parameters of temporal binning
        self.binPeriod = binPeriod
        self.tempBP = self.tempBinningParams[self.binPeriod]

        # get list of dates of images ready for processing
        iDateTimes, dbRows, dateField, nameField = self.get_image_datetimes(binPeriod)
        if len(iDateTimes) == 0:
            return

        # Get list of start dates for averaging
        sDateTimes = self.get_start_datetimes(iDateTimes, self.tempBP)

        # do processing for each starting date
        for sDateTime in sDateTimes:
            self.one_date_processing(sDateTime, dbRows, dateField, nameField)

    def get_image_datetimes(self, binPeriod='daily'):
        ''' Get list of images ready for processing

        Get list of available files from the database
        Select the unprocessed ones
        Get list of dates from the unprocessed files
        '''
        # set bin period from the input param
        self.binPeriod = binPeriod

        # get list of files from the database
        db = L2Catalog(self.dbName)
        dbRows, dbFields = db.get_sensor_list(db.sensor)
        db = None
        self.logger.debug('dbFields: %s' % str(dbFields))
        binField = dbFields.index(self.binPeriod)
        dateField = dbFields.index('sensstart')
        nameField = dbFields.index('name')

        self.logger.debug('binField: %d' % binField)
        # collect list of dates from unprocessed files
        iDateTimes = []
        for dbRow in dbRows:
            if dbRow[binField] == 0 or dbRow[binField] is None:
                self.logger.debug('dbRow %s' % str(dbRow[nameField]).strip())
                iDateTimes.append(dbRow[dateField])

        self.logger.info('All image dates: %s' % iDateTimes)

        return iDateTimes, dbRows, dateField, nameField

    def get_start_datetimes(self, iDates, tempBP):
        '''Get list of start dates for averaging'''
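        # Example: with the 'weekly' parameters (DAILY step, interval 1,
        # duration 7 days) an image taken at 2012-01-05 00:00 falls inside
        # every 7-day window starting 2011-12-30 .. 2012-01-05, so all
        # seven of those start dates are returned (rolling weekly bins).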

        # get first/last years
        iDates = sorted(iDates)
        yearf = iDates[0].year
        yearl = iDates[-1].year
        datef = datetime.datetime(yearf, 1, 1, 0)
        datel = datetime.datetime(yearl, 12, 31, 23)

        # make list of all starting dates for all years using rrule
        startDates = list(rrule.rrule(tempBP['step'],
                           dtstart=datef,
                           until=datel,
                           interval=tempBP['interval']))

        iDates = self._datetimes_to_cal1900(iDates)
        startDates = self._datetimes_to_cal1900(startDates)
        # among all starting dates find those containing image dates
        oDates = []
        for sdate in startDates:
            for idate in iDates:
                if idate >= sdate and idate < sdate + tempBP['duration'] * 86400:
                    oDates.append(sdate)

        # get list of unique starting datetimes
        oDates = sorted(set(oDates))
        oDates = self._cal1900_to_datetimes(oDates)
        self.logger.info('Starting dates: %s' % oDates)

        return oDates

    def _datetimes_to_cal1900(self, idatetimes):
        ''' Convert datetime to seconds since 00:00 1 January 1900'''
        total_seconds = lambda dd: dd.days * 24 * 3600 + dd.seconds

        d1900 = datetime.datetime(1900,1,1)
        return [total_seconds(x - d1900) for x in idatetimes]

    def _cal1900_to_datetimes(self, idatetimes):
        ''' Convert seconds since 00:00 1 January 1900 to datetime '''
        d1900 = datetime.datetime(1900,1,1)
        return [datetime.timedelta(x / 86400.) + d1900 for x in idatetimes]
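
    # Round-trip illustration for the two helpers above (values computed by
    # hand from the definitions): one day after the 1900 epoch is 86400
    # seconds, and the conversion is lossless for whole seconds:
    #
    #   self._datetimes_to_cal1900([datetime.datetime(1900, 1, 2)])
    #   # --> [86400]
    #   self._cal1900_to_datetimes([86400])
    #   # --> [datetime.datetime(1900, 1, 2, 0, 0)]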

    def one_date_processing(self, uDateTime, dbRows, dateField, nameField):
        '''Perform all processing for a given date'''

        # last instant of the binning period ('duration' is in days,
        # so e.g. the hourly duration 1/24. gives 3599 seconds)
        uDateTime2 = uDateTime + datetime.timedelta(
                seconds=self.tempBP['duration'] * 86400 - 1)
        self.logger.info('Processing period: %s --> %s'
                         % (uDateTime.isoformat(), uDateTime2.isoformat()))

        # create list of files for averaging
        files = []
        for dbRow in dbRows:
            dbRowDateTime = datetime.datetime(dbRow[dateField].year,
                                              dbRow[dateField].month,
                                              dbRow[dateField].day,
                                              dbRow[dateField].hour)
            if uDateTime <= dbRowDateTime <= uDateTime2:
                iFileName = os.path.join(
                        self.zone['iDir'],
                        self.zone['nameConvert'] % dbRow[nameField].strip())
                files.append(str(iFileName))
                self.logger.debug('Append %s' % str(iFileName))

        # if no files found - return
        if not files:
            return

        files.sort()
        self.logger.debug('Files for L3A processing: %s' % files)
        self.logger.info('Products: %s' % self.zone['products'])

        # get parameters for mosaicing
        maskName = self.zone.get('maskName', 'mask')
        doReproject = self.zone.get('doReproject', True)
        threads = self.zone.get('threads', 1)
        eResampleAlg = self.zone.get('eResampleAlg', 0)

        # AVERAGE files for this date
        l3aData = Mosaic(domain=self.dstDomain, logLevel=self.logger.level)
        l3aData.average(files=files,
                        bands=self.zone['products'],
                        maskName=maskName,
                        doReproject=doReproject,
                        nClass=self.l2Class,
                        eResampleAlg=eResampleAlg,
                        threads=threads)

        # add time to Mosaic
        l3aData.vrt._set_time(uDateTime)
        l3aData.set_metadata('start_date', uDateTime.isoformat())
        l3aData.set_metadata('stop_date', uDateTime2.isoformat())
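
        # The 'backends' zone option maps method names on this class to the
        # keyword arguments they are called with; a hypothetical configuration
        # using the export() backend defined below could look like:
        #   'backends': {'export': {'thrNameMask': './%s/%s%s.nc'}}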

        backends = self.zone['backends']
        status = 0
        for backendName in backends:
            self.logger.debug('beFunc: self.%s' % backendName)
            try:
                beFunc = getattr(self, backendName)
            except AttributeError:
                self.logger.warning('Unknown backend %s!' % backendName)
            else:
                self.logger.debug(str(backends[backendName]))
                status += beFunc(l3aData, **backends[backendName])

        # MARK processed files in the database
        if status == 0:
            db = L2Catalog(self.dbName)
            for filePath in files:
                fileName = os.path.split(filePath)[1]
                fileName = fileName.replace(self.zone['nameConvert'].replace('%s', ''), '')
                self.logger.info('Mark as processed: %s %s' % (self.binPeriod, fileName))
                db.set_field_value("'%s'" % fileName, self.binPeriod, 1)

    def export(self, data, thrNameMask='./%s/%s%s.nc',
                           products=None, metadata=None):
        '''Export averaged data to thredds netcdf'''
        # output file path
        thrName = self.make_thredds_path(data, thrNameMask)

        status = data.export2thredds(thrName, products, metadata)
        return status

    def make_thredds_path(self, data, thrNameMask):
        '''Make path for outputting the thredds file'''
        # create name for THR-file
        period = {'start_date': data.get_metadata('start_date'),
                  'stop_date': data.get_metadata('stop_date')
                 }

        for iKey, iDate in period.items():
            if self.binPeriod == 'hourly':
                dt = datetime.datetime.strptime(iDate,
                                                '%d-%b-%Y %H:%M:%S.%f')
                data.set_metadata(iKey,
                                  '%4d-%02d-%02d %02d:%02d:%02d UTC'
                                  % (dt.year, dt.month, dt.day,
                                     dt.hour, dt.minute, dt.second))
                period[iKey] = ('%4d%02d%02dT%02d%02d%02d'
                                % (dt.year, dt.month, dt.day,
                                   dt.hour, dt.minute, dt.second))
            else:
                try:
                    dt = datetime.datetime.strptime(iDate,
                                                    '%Y-%m-%dT%H:%M:%S')
                except ValueError:
                    # fall back to stripping dashes from the raw date string
                    period[iKey] = iDate.replace('-', '')
                else:
                    period[iKey] = '%4d%02d%02d' % (dt.year, dt.month, dt.day)

        thrName = thrNameMask % (self.binPeriod,
                                 period['start_date'], period['stop_date'])

        return thrName
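

# Minimal usage sketch (kept as a comment; 'north_sea' and 'l2catalog.db' are
# hypothetical placeholders -- valid zone names come from .config.l3 and the
# catalog name from the L2Catalog database):
#
#   proc = L3PROC('north_sea', 'l2catalog.db', l2Class=Nansat, logLevel=20)
#   proc.l3(binPeriod='daily')   # average and visualise all unprocessed days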
