#!/usr/bin/env python3
#-----------------------------------------------------------------------------#
#           Group on Data Assimilation Development - GDAD/CPTEC/INPE          #
#-----------------------------------------------------------------------------#
#BOP
#
# !SCRIPT:
#
# !DESCRIPTION:
#
# !CALLING SEQUENCE:
#
# !REVISION HISTORY:
# 09 out 2017 - J. G. de Mattos - Initial Version
# 12 abr 2018 - C. W. Eichholz  - Correção de Bug no método ptmap
# 13 abr 2018 - C. W. Eichholz  - Inclusão do método time_series
# 20 set 2018 - E. P. Vendrasco - Correção de bug na escolha dos níveis na função 
#                                 pgeomap, inclusão e correção de comentários sobre
#                                 o uso das funções, inclusão da função time_series_omfoma,
#                                 inclusão da escolha entre SMG e SMR e escolha de qual
#                                 outer loop está sendo utilizado.
# 21 set 2018 - C. W. Eichholz  - Correcao de bugs nos metodos time_series, ptmap. E Adição de
#                                 instrução para ignorar warnings deprecated
# 11 dec 2018 - C. W. Eichholz  - Correcao de bugs no metodo time_series
#
#
# !REMARKS:
#
#EOP
#-----------------------------------------------------------------------------#
#BOC

from ReadDiag.diag2python import diag2python as d2p
from mpl_toolkits.basemap import Basemap
from datetime import datetime, timedelta
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import gdad.dataSources as Ds
import os
import warnings
# Globally silence ALL warnings (revision history says this was added to
# hide DeprecationWarnings). NOTE(review): this also mutes warnings for any
# code that imports this module — consider narrowing to
# warnings.simplefilter('ignore', DeprecationWarning); confirm with users.
warnings.simplefilter('ignore')

class setcolor:
    """ANSI terminal escape sequences used to colourise console messages."""
    HEADER    = '\033[95m'  # bright magenta
    OKBLUE    = '\033[94m'  # bright blue
    OKGREEN   = '\033[92m'  # bright green
    WARNING   = '\033[93m'  # bright yellow
    FAIL      = '\033[91m'  # bright red
    ENDC      = '\033[0m'   # reset all attributes
    BOLD      = '\033[1m'   # bold text
    UNDERLINE = '\033[4m'   # underlined text

# Module-level scratch lists.
# NOTE(review): neither list is referenced anywhere in the visible code of
# this file — candidates for removal once external users are ruled out.
lista=[]
lista_1=[]

class open(object):
    """Reader for a GSI conventional diagnostic file via the Fortran
    ``diag2python`` bridge (``d2p``).

    NOTE(review): the class name shadows the ``open`` builtin inside this
    module; renaming would break external callers, so it is kept.
    """

    # Class-level defaults, overwritten per instance in __init__.
    FileName = None  # File name
    FNumber  = None  # File unit number to be opened
    nVars    = None  # Total of variables
    VarNames = None  # Name of variables
    ObsInfo  = None  # Dict: variable name -> int array (columns appear to be [kx, count] — TODO confirm)
    nObs     = None  # Number of observations for vName
    used     = False # Variable used (or not) in GSI

    def __init__(self,FileName):
        """Open *FileName* through d2p and cache per-variable metadata."""
        self.FileName   = FileName
        self.FNumber    = d2p.open(self.FileName)

        # d2p.open signals failure with -1; leave the object unusable.
        if (self.FNumber == -1):
            self.FNumber = None
            return

        # NOTE(review): name-mangled to _open__used; the public class
        # attribute 'used' above is never updated — confirm intent.
        self.__used = True

        #
        # Get extra informations
        #

        self.nVars      = d2p.getnvars(self.FNumber)
        vnames          = d2p.getvarnames(self.FNumber,self.nVars)
        self.VarNames   = []
        self.ObsInfo    = {}
        for i, name in enumerate(vnames):
            # Fortran strings come back as byte arrays; decode and trim.
            self.VarNames.append(name.tostring().decode('UTF-8').strip())
            # NOTE(review): 'ttt' is unused — getvarinfo presumably fills
            # the module-level d2p.array2d buffer that is copied below;
            # verify against the diag2python binding before cleaning up.
            ttt = d2p.getvarinfo(self.FNumber, self.VarNames[i])
            self.ObsInfo[self.VarNames[i]] = d2p.array2d.astype(int).copy()
            # Release the shared Fortran-side buffer.
            d2p.array2d = None

    def close(self):
        """Close the diagnostic file and reset every cached attribute.

        After this call the object holds no file state and must not be
        used until a new file is opened.
        """
        status = d2p.close(self.FNumber)
        # Drop all cached metadata in one sweep.
        self.FileName = None
        self.FNumber  = None
        self.nVars    = None
        self.VarNames = None
        self.ObsInfo  = None
        self.nObs     = None

    def GTable(self, VarName, kx, ObsLevel=None):
        """Return the raw observation table for (VarName, kx).

        Calls ``d2p.getobs``, which fills the module-level buffer
        ``d2p.array2d``; that buffer is copied out and released here.
        Updates ``self.nObs`` as a side effect.

        Parameters:
            VarName:  variable name (e.g. 'uv', 'ps', 't', 'q').
            kx:       observation source type.
            ObsLevel: accepted for interface compatibility; unused here.

        Returns:
            2-D array of observations, or None when none were found.
        """
        self.nObs = d2p.getobs(self.FNumber, VarName, kx)

        if self.nObs > 0:
            ObsTable    = d2p.array2d.copy()
            # Release the shared Fortran-side buffer.
            d2p.array2d = None
        else:
            # Report which query came up empty (consistent with GDict).
            print('No observation found for ObsName', VarName, 'and ObsType', kx)
            ObsTable = None

        return ObsTable


    def GDict(self, VarName, kx, ObsLevel=None):
        """Return a dictionary of observation fields for (VarName, kx).

        Keys (one per column of the underlying table):
            lat:   latitudes of the selected sources
            lon:   longitudes of the selected sources
            prs:   observation pressure level
            lev:   observation pressure layers
            time:  observation time (minutes relative to analysis time)
            pbqc:  input prebuffer qc or event mark
            iuse:  analysis usage flag (1 = use; -1 = monitoring)
            iusev: analysis usage flag (value)
            robs:  observation value
            diff:  obs - guess used in the analysis
            rmod:  model value (presumably — original description was blank)

        When *ObsLevel* is given, only rows whose integer 'lev' equals it
        are kept and each field is a Python list; otherwise each field is
        a NumPy column of the full table.

        Returns an empty dict when no observation matches.
        """
        # Column order matches the table layout produced by GTable.
        keys = ('lat', 'lon', 'prs', 'lev', 'time', 'pbqc',
                'iuse', 'iusev', 'robs', 'diff', 'rmod')

        # NOTE(review): getobs is invoked again inside GTable right below;
        # kept for safety because the binding mutates module-level state.
        self.nObs = d2p.getobs(self.FNumber, VarName, kx)
        __tmp = self.GTable(VarName, kx, ObsLevel)

        ObsDict = {}
        if self.nObs > 0:
            if ObsLevel is not None:
                # Keep only the rows at the requested level.
                rows = [row for row in __tmp if int(row[3]) == ObsLevel]
                for col, key in enumerate(keys):
                    ObsDict[key] = [row[col] for row in rows]
            else:
                for col, key in enumerate(keys):
                    ObsDict[key] = __tmp[:, col]

            # Release the shared Fortran-side buffer.
            d2p.array2d = None
        else:
            print('No observation found for ObsName', VarName, 'and ObsType', kx)

        return ObsDict


    def overview(self):

        variablesList = {}
        for var in self.VarNames:
            variablesTypes = []
            for kx in self.ObsInfo[var][:,0]:
                variablesTypes.append(kx)
            variablesList.update({var:variablesTypes})
        return variablesList

    def pfileinfo(self):

        for name in self.VarNames:
            print('Variavel Name :',name)
            print('              └── kx => ', end='', flush=True)
            for kx in self.ObsInfo[name][:,0]:
               print(kx,' ', end='', flush=True)
            print()


    def pcount(self, VarName):
        """Draw a bar chart of the observation count per kx source type
        for *VarName* (counts come from ObsInfo column 1)."""
        info      = self.ObsInfo[VarName]
        sources   = info[:, 0]
        counts    = info[:, 1]
        positions = np.arange(len(sources))

        plt.bar(positions, counts, align='center', alpha=0.5)
        plt.xticks(positions, sources)
        plt.ylabel('Quantidade de Observação')
        plt.xlabel('KX')
        plt.title('Variável : '+VarName)
        plt.xticks(rotation=45)

    def pgeomap(self, VarName, kx, WhatPlot, Data, Levinf, Levsup, sysname, OuterL=None, WhatLabel=None):

        '''
        Plot the selected variable for one source type (kx), date and
        pressure layer on a map.

        Example:
        a.pgeomap('ps', 187, diff, 2013010100, 1100, 100, 'SMR', '01')

        The example plots surface pressure (ps) from source 187 (ADPSFC)
        for date 2013010100, keeping the layer between 1100 and 100 hPa,
        for the SMR assimilation system with outer loop 01. The 'diff'
        option selects the quantity shown: observation minus background
        when the outer loop is 01, observation minus analysis otherwise.
        Other options:

        lat:   latitudes of the selected sources
        lon:   longitudes of the selected sources
        prs:   observation pressure level
        lev:   observation pressure layers
        time:  observation time (minutes relative to analysis time)
        pbqc:  input prebuffer qc or event mark
        iuse:  analysis usage flag (1 = use; -1 = monitoring)
        iusev: analysis usage flag (value)
        robs:  observation value
        rmod:  model value (presumably — original description was blank)

        '''

        ObsName=VarName
        __tmp = self.GTable(VarName, kx)

        # Normalise the requested field name to lower case.
        wp = None
        if WhatPlot is not None:
            wp = WhatPlot.lower()

        # The outer-loop number decides whether 'diff' means OmF or OmA.
        # NOTE(review): exit() aborts the whole interpreter — consider
        # raising ValueError instead; confirm no caller relies on this.
        if OuterL == None:
            print("ERROR - Please, provide the OuterLoop number, ex: 03")
            exit()
        else:
            if OuterL == "01":
                omflag = "OmF"
            else:    
                omflag = "OmA"

        # Map domain per assimilation system (SMG global, SMR regional).
        if sysname == 'SMG':
            loni=-180
            lonf=180
            lati=-90
            latf=90
        elif sysname == 'SMR':
            loni=-90           
            lonf=-25            
            lati=-60           
            latf=20            

        # Dict of variables and labels (table column index per field).
        # NOTE(review): 'isue' looks like a typo of 'iuse' (column 6), and
        # 'use' maps to column 7 which GDict names 'iusev' — confirm which
        # spellings callers actually pass before renaming these keys.
        VarDic    = {'pbqc':5, 'isue':6, 'use':7, 'robs':8, 'diff':9, 'rmod':10}
        legendDic = {'pbqc':'pbqc flag', 'isue':'iuse flag', 'use':'use flag', 'robs':VarName.upper() + ' (observation)', 'diff':VarName.upper() + ' ('+omflag+')', 'rmod':VarName.upper() + ' (model)'}
 
        # Define the variable to plot (default: 'diff').
        # NOTE(review): an unrecognised WhatPlot raises KeyError here.
        setVar = VarDic[wp] if (wp != None) else VarDic['diff']
        legend = legendDic[wp] if (wp != None) else legendDic['diff']

        if not __tmp is None:

            # Chosing plot size according to sysname
            if sysname == 'SMG':
                fig, ax = plt.subplots(1,figsize=(8,4))
            elif sysname == 'SMR':
                fig, ax = plt.subplots(1,figsize=(5,6))
                
            # Define map as cylindical equidistant
            m = Basemap(projection = 'cyl', # Projection map
                        llcrnrlat  =  lati, #Lower Left  CoRNeR Latitude
                        urcrnrlat  =  latf, #Upper Right CoRNeR Latitude
                        llcrnrlon  =  loni, #Lower Left  CoRNeR Longitude
                        urcrnrlon  =  lonf, #Upper Right CoRNeR Longitude
                        resolution = 'c')

            # draw coastlines, country boundaries
            m.drawcoastlines(linewidth=0.15, color = 'grey')
            m.drawcountries(linewidth=0.15, color = 'grey')

            # Fill continents with color
            #m.fillcontinents(color='coral',lake_color='aqua')

            # draw the edge of the map projection region (the projection limb)
            #m.drawmapboundary(fill_color='aqua')

            # draw parallels and meridians according to sysname
            if sysname == 'SMG':
                m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0], linewidth=0, fontsize=9)
                m.drawmeridians(np.arange(-180., 180., 60.), labels=[0, 0, 0, 1], linewidth=0, fontsize=9)
            elif sysname == 'SMR': 
                m.drawparallels(np.arange(lati-10, latf+10., 10.), labels=[1, 0, 0, 0], linewidth=0, fontsize=8)
                m.drawmeridians(np.arange(loni-10, lonf+10., 10.), labels=[0, 0, 0, 1], linewidth=0, fontsize=8)

            # Get lat/lon positions for all observations; wrap longitudes
            # from [0, 360) into [-180, 180).
            lon = __tmp[:, 1]
            lon1 = lon.copy()
            for n, l in enumerate(lon1):
               if l >= 180:
                  lon1[n]=lon1[n]-360.
            lon = lon1
            lat = __tmp[:, 0]
 
            lonstmp, latstmp = m(lon,lat)
 
            # Keep only the rows whose pressure (column 2) lies in the
            # requested layer [Levsup, Levinf].
            vals = __tmp[:, int(setVar)][(__tmp[:, 2].astype(int) >= int(Levsup)) & (__tmp[:, 2].astype(int) <= int(Levinf))]
            lats = latstmp[(__tmp[:, 2].astype(int) >= int(Levsup)) & (__tmp[:, 2].astype(int) <= int(Levinf))]
            lons = lonstmp[(__tmp[:, 2].astype(int) >= int(Levsup)) & (__tmp[:, 2].astype(int) <= int(Levinf))]

            print()
            print('Data: '+str(Data)+'; Level Inferior:'+str(Levinf)+'; Tipo: '+str(kx)+'; VarName: '+VarName+'; Total de Dados: '+str(len(vals)))
            print()
                      
            # Adjusting the plot position according to sysname
            if sysname == 'SMG':
                plt.subplots_adjust(left=0.10, bottom=0.0, right=0.999, top=0.999)
            elif sysname == 'SMR':
                plt.subplots_adjust(left=0.08, bottom=0.01, right=0.92, top=0.97)
                
            # Save the nice dark grey color as a variable
            almost_black = '#262626'
            cm = plt.cm.get_cmap('jet') # 'RdYlBu'
            geomap = plt.scatter(lons, lats, c=vals, cmap=cm, s=15, edgecolor=almost_black, linewidth=0.15)

            # Mappable 'maps' the values of s to an array of RGB colors defined by a color palette
            cbar = plt.colorbar(mappable = geomap, ax = ax, shrink=0.65)  
            cbar.set_label(legend)
            plt.title(VarName+'  '+str(kx)+' Data: '+str(Data)+' |'+' Convencionais | Filtro:'+WhatPlot, fontsize='9')
            
            # Save figure 
            plt.savefig('pgeomap_'+WhatPlot+'_'+VarName+'_'+str(kx)+'_'+str(Data)+'_ol'+OuterL+'.png')

            # Cleaning up
            plt.cla()
            plt.clf()
            plt.close('all')
            
            #plt.show()
            
            return

    def ptmap(self, VarName, kx, Data, sysname):
        
        '''
        Plot the spatial distribution of one variable, one marker style
        and colour per observation source (kx), for a given date.

        Example:
        a.ptmap('uv', [290, 224, 223], 2013010100, 'SMG')

        The example plots wind (uv) from sources 290 (ASCATW),
        224 (VADWND) and 223 (PROFLR) for date 2013010100. ``sysname``
        ('SMG' global, 'SMR' regional) selects the map domain and size.
        '''
        # Choose plot size and map domain according to sysname
        if sysname == 'SMG':
            fig = plt.figure(figsize=(8, 4), edgecolor='w')
            loni, lonf, lati, latf = -180, 180, -90, 90
        elif sysname == 'SMR': 
            fig = plt.figure(figsize=(5, 6), edgecolor='w')
            loni, lonf, lati, latf = -90, -25, -60, 20
        
        # Map style        
        map_land = 'white'
        map_land_lines = 'darkgray'
        map_oceans = 'white'
        m = Basemap(llcrnrlon=loni, urcrnrlon=lonf, llcrnrlat=lati, urcrnrlat=latf, projection='mill', resolution='c')
        m.drawcoastlines(linewidth=0.5, color=map_land_lines)
        m.drawmapboundary(fill_color=map_oceans)
        m.drawcountries(linewidth=0.15, color = 'grey')
        m.fillcontinents(color=map_land, lake_color=map_oceans)
        
        # draw parallels and meridians according to sysname
        if sysname == 'SMG':
            m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0], linewidth=0, fontsize=9)
            m.drawmeridians(np.arange(-180., 180., 60.), labels=[0, 0, 0, 1], linewidth=0, fontsize=9)
        elif sysname == 'SMR': 
            m.drawparallels(np.arange(lati-10, latf+10., 10.), labels=[1, 0, 0, 0], linewidth=0, fontsize=8)
            m.drawmeridians(np.arange(loni-10, lonf+10., 10.), labels=[0, 0, 0, 1], linewidth=0, fontsize=8)

        legend_labels = []
        
        data_available = 0
        for ivar in kx:
            try:
                __tmp = self.GTable(VarName, ivar)
                if __tmp is not None:
                    # Wrap longitudes from [0, 360) into [-180, 180).
                    plon = [(x - 360) if (x > 180) else x for x in __tmp[:, 1][:]]
                    plat = __tmp[:, 0]
                    xpt, ypt = m(plon, plat)

                    # One style/colour per source, looked up once.
                    color = Ds.getVarInfo(int(ivar), VarName, 'color')
                    m.plot(xpt, ypt, str(Ds.getVarInfo(int(ivar), VarName, 'symbol')), alpha=0.5, markersize=3, linewidth=1, color=color)
                    legend_labels.append(mpatches.Patch(color=color, label=VarName + '-' + str(ivar) + ' | ' + Ds.getVarInfo(int(ivar), VarName, 'instrument')))
                    data_available = 1

            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt and hid the actual cause.
            except Exception as err:
                print("ERROR in ptmap function.", err)
  
        if data_available != 0:
            # Adjusting the plot position according to sysname
            if sysname == 'SMG':
                plt.subplots_adjust(left=0.05, bottom=0.05, right=0.70, top=0.90, wspace=0, hspace=0)
                # Plot legend SMG
                plt.legend(handles=legend_labels, loc='upper right', bbox_to_anchor=(1.5, 1.0), fancybox=False, shadow=False, frameon=False, numpoints=1, prop={"size": 9})

            elif sysname == 'SMR': 
                plt.subplots_adjust(left=0.08, bottom=0.15, right=0.92, top=0.93)
                # Plot legend SMR
                plt.legend(handles=legend_labels, bbox_to_anchor=(0.5, -0.17), loc='lower center', fancybox=False, shadow=False, frameon=False, numpoints=1, prop={"size": 9})

            # Title
            plt.title('Distribuição Espacial dos Dados na Assimilação - '+str(Data), fontsize='10')
            
            # Save figure
            plt.savefig('ptmap'+'_'+VarName+'_'+str(Data)+'.png')
            
            # Cleaning up
            plt.cla()
            plt.clf()
            plt.close('all')

        return
        
        
    def pvmap(self, VarName, Data, sysname, use=None):

        '''
        Plot the selected variables for one date without distinguishing
        the source type, split into assimilated and monitored panels.

        Example:
        a.pvmap(['uv','ps','t','q'], 2013010100, 'SMR', use=[1, -1])

        The example plots wind (uv), surface pressure (ps), temperature
        (t) and humidity (q) for assimilated (use=1) and monitored
        (use=-1) data, for date 2013010100.
        '''

        # Default to both panels when the caller omits `use`
        # (the original crashed with TypeError on len(None)).
        if use is None:
            use = [1, -1]

        # Choose map domain and figure size according to sysname and the
        # number of panels. (The original created a throwaway figure per
        # branch before the len(use)-dependent one; only the latter was
        # ever used, so the dead creations were dropped.)
        if sysname == 'SMG':
            loni, lonf, lati, latf = -180, 180, -90, 90
            if len(use) == 1:
                fig = plt.figure(figsize=(6, 4), edgecolor='w')
            else:
                fig = plt.figure(figsize=(5, 6.5), edgecolor='w')
        elif sysname == 'SMR': 
            loni, lonf, lati, latf = -90, -25, -60, 20
            if len(use) == 1:
                fig = plt.figure(figsize=(6, 4), edgecolor='w')
            else:
                fig = plt.figure(figsize=(5, 8), edgecolor='w')

        # Map style   
        map_land = 'white'
        map_land_lines = 'darkgray'
        map_oceans = 'white'

        for n, idxuse in enumerate(use):

            if idxuse == 1:  data_status = 'Utilizadas'
            if idxuse == -1: data_status = 'Monitoradas'

            plt.subplot(len(use), 1, (n+1))
            plt.title('Variáveis '+ data_status +' na Assimilação - '+str(Data), fontsize='10')
            m = Basemap(llcrnrlon=loni, urcrnrlon=lonf, llcrnrlat=lati, urcrnrlat=latf, projection='mill', resolution='c')
            m.drawcoastlines(linewidth=0.5, color=map_land_lines)
            m.drawmapboundary(fill_color=map_oceans)
            m.fillcontinents(color=map_land, lake_color=map_oceans)
            
            # draw parallels and meridians according to sysname
            if sysname == 'SMG':
                m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0], linewidth=0, fontsize=9)
                m.drawmeridians(np.arange(-180., 180., 60.), labels=[0, 0, 0, 1], linewidth=0, fontsize=9)
            elif sysname == 'SMR': 
                m.drawparallels(np.arange(lati-10, latf+10., 10.), labels=[1, 0, 0, 0], linewidth=0, fontsize=8)
                m.drawmeridians(np.arange(loni-10, lonf+10., 10.), labels=[0, 0, 0, 1], linewidth=0, fontsize=8)

            __tmp_obsinfo = self.ObsInfo

            colors_palette = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22']
            setColor = 0
            legend_labels = []
            for xvar in VarName:
                # One colour per variable: every source type of the same
                # variable is plotted with the same colour.
                for xtype in __tmp_obsinfo[xvar][:, 0]:
                    __tmp = self.GTable(xvar, xtype)
                    if __tmp is not None:
                        # Keep only rows with the requested iuse flag and
                        # wrap longitudes from [0, 360) into [-180, 180).
                        plon_1 = __tmp[:, 1][__tmp[:, 6].astype(int) == int(idxuse)]
                        plon   = [(x - 360) if (x > 180) else x for x in plon_1]
                        plat   = __tmp[:, 0][__tmp[:, 6].astype(int) == int(idxuse)]
                        del plon_1

                        xpt, ypt = m(plon, plat)
                        m.plot(xpt, ypt, 's', alpha=1.0, markersize=1, markerfacecolor="None", color=colors_palette[setColor])

                legend_labels.append(mpatches.Patch(color=colors_palette[setColor], label=xvar))
                plt.legend(handles=legend_labels, numpoints=1, loc='best', bbox_to_anchor=(1.1, 0.6), fancybox=False, shadow=False, frameon=False, ncol=1, prop={"size": 10})
                setColor += 1

        plt.tight_layout()

        if len(use) == 1:
            plt.subplots_adjust(left=0.07, bottom=0.05, right=0.87, top=0.94, wspace=None, hspace=None)
        else:
            plt.subplots_adjust(left=0.0, bottom=0.04, right=0.95, top=0.94, wspace=None, hspace=0.22)

        # Save Figure
        plt.savefig('pvmap_'+'_'.join(sorted(VarName))+'_'+str(Data)+'.png')
        
        # Cleaning up
        plt.cla()
        plt.clf()
        plt.close('all')

        return


class gsiFunctions(object):

    def time_series(self, varName=None, varType=None, dateIni=None, dateFin=None,  path=None, nHour="06", vminOMA=None, vmaxOMA=None, vminSTD=0.0, vmaxSTD=14.0, Level=None, OuterL="01"):
        
        '''
        A função time_series realiza a plotagem da série temporal de diferentes variáveis, considerando diferentes intervalos de tempo. É possivel escolher entre fazer a série
        para um único nível ou para diversos níveis.

        Exemplo:

        dini = 2013010100 # Data inicial
        dfin = 2013010900 # Data final
        nHour = "06"      # Intervalo de tempo
        vName = 'uv'      # Variável
        vType = 224       # Fonte da variável
        vminOMA = -4.0    # Valor mínimo da escala Y para OmF ou OmA
        vmaxOMA = 4.0     # Valor máximo da escala Y para OmF ou OmA
        vminSTD = 0.0     # Valor mínimo da escala Y para o desvio-padrão
        vmaxSTD = 14.0    # Valor máximo da escala Y para o desvio-padrão
        Level = 1000      # Valor do nível a ser feita a série temporal, se não informar, será¡ feita para todos os níveis padrões
        OuterL = 01       # Número do outer loop
        
        gsiTools = gs.gsiFunctions()
        gsiTools.time_series(varName=vName, varType=vType, dateIni=dini, dateFin=dfin, vminOMA=None, vmaxOMA=None, vminSTD=None, vmaxSTD=None, Level=None)

        '''
        
        delta = nHour
        omflag = "OmF" if OuterL == "01" else "OmA"

        dataSource = '/scratchin/grupos/das/projetos/gdad/public/eval/stat/testcase/SMG/gsi/dataout/'
        data_path  = dataSource if path == None else path

        separator = " ====================================================================================================="

        print()
        print(separator)
        print(" Reading dataset in " + data_path)
        print(" Analyzing data of variable: " + varName + "  ||  type: " + str(varType) + "  ||  " + Ds.getVarInfo(varType, varName, 'instrument') + "  ||  check: " + omflag)
        print(separator)
        print()
        
        datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
        datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
        date  = datei

        levs_tmp, DayHour_tmp = [], []
        info_check = {}
        while (date <= datef):
            datefmt = date.strftime("%Y%m%d%H")
            DayHour_tmp.append(date.strftime("%d%H"))

            a = open(data_path + str(datefmt) + '/diag/pe%e.conv_'+OuterL)
            dataDict = a.GDict(varName, varType)
            info_check.update({date.strftime("%d%H"):True})

            if 'lev' in dataDict and Level == None:
                levs_tmp.extend(list(set(map(int,dataDict['lev']))))
                info_check.update({date.strftime("%d%H"):True})
                print(date.strftime(' Preparing data for: ' + "%Y-%m-%d:%H"), ' - Levels: ', sorted(list(set(map(int,dataDict['lev'])))), end='\n')
            else:
                if Level != None and info_check[date.strftime("%d%H")] == True:
                    levs_tmp.extend([Level])
                    print(date.strftime(' Preparing data for: ' + "%Y-%m-%d:%H"), ' - Level: ', Level , end='\n')
                else:
                    info_check.update({date.strftime("%d%H"):False})
                    print(date.strftime(setcolor.WARNING + ' Preparing data for: ' + "%Y-%m-%d:%H"), ' - No information on this date ' + setcolor.ENDC, end='\n')

            a.close()
            del(a)
            del(dataDict)

            date = date + timedelta(hours=int(delta))
            
        DayHour = [hr if (ix % int(len(DayHour_tmp) / 4)) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
        
        print()
        print(separator)
        print()
        
        list_meanByLevs, list_stdByLevs, list_countByLevs = [], [], []
        date = datei
        levs = sorted(list(set(levs_tmp)))
        levs_tmp.clear()
        del(levs_tmp[:])

        while (date <= datef):

            print(date.strftime(' Calculating for ' + "%Y-%m-%d:%H"))
            datefmt = date.strftime("%Y%m%d%H")

            try: 
                if info_check[date.strftime("%d%H")] == True:
                    a = open(data_path + str(datefmt) + '/diag/pe%e.conv_'+OuterL)
                    dataTable = a.GTable(varName, varType)
                    dataDict  = a.GDict(varName, varType)
                    dataByLevs, mean_dataByLevs, std_dataByLevs, count_dataByLevs = {}, {}, {}, {}
                    [dataByLevs.update({int(lvl): []}) for lvl in levs]
                    if Level != None:
                        [dataByLevs[int(v[3])].append(v[9]) for v in dataTable[:,:] if int(v[3]) == Level]
                    else:
                        [dataByLevs[int(v[3])].append(v[9]) for v in dataTable[:,:] ]
                
                for lv in levs:
                    if len(dataByLevs[lv]) != 0 and info_check[date.strftime("%d%H")] == True:
                       mean_dataByLevs.update({int(lv): np.mean(np.array(dataByLevs[lv]))})
                       std_dataByLevs.update({int(lv): np.std(np.array(dataByLevs[lv]))})
                       count_dataByLevs.update({int(lv): len(np.array(dataByLevs[lv]))})
                    else:
                       mean_dataByLevs.update({int(lv): -99})
                       std_dataByLevs.update({int(lv): -99})
                       count_dataByLevs.update({int(lv): -99})
            
            except:
                if info_check[date.strftime("%d%H")] == True:
                    print("ERROR in time_series function.")
                else:
                    print(setcolor.WARNING + "    >>> No information on this date (" + str(date.strftime("%Y-%m-%d:%H")) +") <<< " + setcolor.ENDC)

                for lv in levs:
                    mean_dataByLevs.update({int(lv): -99})
                    std_dataByLevs.update({int(lv): -99})
                    count_dataByLevs.update({int(lv): -99})

            if Level == None:
                list_meanByLevs.append(list(mean_dataByLevs.values()))
                list_stdByLevs.append(list(std_dataByLevs.values()))
                list_countByLevs.append(list(count_dataByLevs.values()))
            else:
                list_meanByLevs.append(mean_dataByLevs[int(Level)])
                list_stdByLevs.append(std_dataByLevs[int(Level)])
                list_countByLevs.append(count_dataByLevs[int(Level)])
 
            dataByLevs.clear()
            mean_dataByLevs.clear()
            std_dataByLevs.clear()
            count_dataByLevs.clear()

            if info_check[date.strftime("%d%H")] == True:
                a.close()
                del(a)
                del(dataTable)
            
            date_finale = date
            date = date + timedelta(hours=int(delta))

        print()
        print(separator)
        print()
        
        print(' Making Graphics...')

        y_axis      = np.arange(0, len(levs), 1)
        x_axis      = np.arange(0, len(DayHour), 1)

        mean_final  = np.ma.masked_array(np.array(list_meanByLevs), np.array(list_meanByLevs) == -99)
        std_final   = np.ma.masked_array(np.array(list_stdByLevs), np.array(list_stdByLevs) == -99)
        count_final = np.ma.masked_array(np.array(list_countByLevs), np.array(list_countByLevs) == -99)

        mean_limit = np.max(np.array([np.abs(np.min(mean_final)), np.abs(np.min(mean_final))]))

        if (vminOMA == None) and (vmaxOMA == None): vminOMA, vmaxOMA = -1*mean_limit, mean_limit

        date_title = str(datei.strftime("%d%b")) + '-' + str(date_finale.strftime("%d%b")) + ' ' + str(date_finale.strftime("%Y"))
        instrument_title = str(varName) + '-' + str(varType) + '  |  ' + Ds.getVarInfo(varType, varName, 'instrument')

        # Figure with more than one level - default levels: [600, 700, 800, 900, 1000]
        if Level == None:
            fig = plt.figure(figsize=(6, 9))
            plt.rcParams['axes.facecolor'] = 'None'
            plt.rcParams['hatch.linewidth'] = 0.3

            plt.subplot(3, 1, 1)
            ax = plt.gca()
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(mean_final.T), origin='lower', vmin=vminOMA, vmax=vmaxOMA, cmap='seismic', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
            plt.tight_layout()
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Mean ('+omflag+')')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)

            plt.subplot(3, 1, 2)
            ax = plt.gca()
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(std_final.T), origin='lower', vmin=vminSTD, vmax=vmaxSTD, cmap='Blues', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
            plt.tight_layout()
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Standard Deviation ('+omflag+')')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)

            plt.subplot(3, 1, 3)
            ax = plt.gca()
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(count_final.T), origin='lower', vmin=0.0, vmax=np.max(count_final), cmap='gist_heat_r', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Total Observations SAPU')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)
            plt.tight_layout()
        
        # Figure with only one level
        else:

            fig = plt.figure(figsize=(6, 4))
            fig, ax1 = plt.subplots(1, 1)
            plt.style.use('seaborn-ticks')

            plt.axhline(y=0.0,ls='solid',c='#d3d3d3')
            # plt.text(0, 0, 'Level='+str(Level) +'hPa', fontsize=12, color='lightgray')
            plt.annotate('Level='+str(Level) +'hPa', xy=(0.20, 0.995), xytext=(-9, -9), xycoords='axes fraction', textcoords='offset points', color='lightgray', fontweight='bold', fontsize='12',
            horizontalalignment='center', verticalalignment='center')

            ax1.plot(x_axis, list_meanByLevs, "b-", label="Mean ("+omflag+")")
            ax1.plot(x_axis, list_meanByLevs, "bo", label="Mean ("+omflag+")")
            ax1.set_xlabel('Date (DayHour)', fontsize=10)
            # Make the y-axis label, ticks and tick labels match the line color.
            ax1.set_ylim(vminOMA, vmaxOMA)
            ax1.set_ylabel('Mean ('+omflag+')', color='b', fontsize=10)
            ax1.tick_params('y', colors='b')
            plt.xticks(x_axis, DayHour)
            plt.axhline(y=np.mean(list_meanByLevs),ls='dotted',c='blue')
            
            ax2 = ax1.twinx()
            ax2.plot(x_axis, std_final, "r-", label="Std. Deviation ("+omflag+")")
            ax2.plot(x_axis, std_final, "rs", label="Std. Deviation ("+omflag+")")
            ax2.set_ylim(vminSTD, vmaxSTD)
            ax2.set_ylabel('Std. Deviation ('+omflag+')', color='r', fontsize=10)
            ax2.tick_params('y', colors='r')
            plt.axhline(y=np.mean(std_final),ls='dotted',c='red')

            ax3 = ax1.twinx()
            ax3.plot(x_axis, count_final, "g-", label="Total Observations")
            ax3.plot(x_axis, count_final, "g^", label="Total Observations")
            ax3.set_ylim(0, np.max(count_final) + (np.max(count_final)/8))
            ax3.set_ylabel('Total Observations', color='g', fontsize=10)
            ax3.tick_params('y', colors='g')
            ax3.spines["right"].set_position(("axes", 1.15))
            plt.yticks(rotation=90)
            plt.axhline(y=np.mean(count_final),ls='dotted',c='green')

            ax3.set_title(instrument_title, loc='left', fontsize=10)
            ax3.set_title(date_title, loc='right', fontsize=10)

            plt.xticks(x_axis, DayHour)
            plt.title(instrument_title, loc='left', fontsize=9)
            plt.title(date_title, loc='right', fontsize=9)
            plt.subplots_adjust(left=None, bottom=None, right=0.80, top=None)
            plt.tight_layout()

        print(' Done!')
        print()

        # Save Figure
        if Level == None:
            plt.savefig('time_series_'+str(varName) + '-' + str(varType)+'_'+omflag+'_all_levels.png', bbox_inches='tight', dpi=100)
        else:
            plt.savefig('time_series_'+str(varName) + '-' + str(varType)+'_'+omflag+'_'+ str(Level) +'hPa.png', bbox_inches='tight', dpi=100)

        #plt.show()

        # Cleaning up
        plt.cla()
        plt.clf()
        plt.close('all')

        return


    def time_series_omfoma(self, varName=None, varType=None, dateIni=None, dateFin=None,  path=None, nHour="06", vminOMA=-4.0, vmaxOMA=4.0, vminSTD=0.0, vmaxSTD=14.0, InfLev=None, SupLev=None, OuterL=None):

        '''
        Plot a time series comparing OmF (observation minus first guess, outer
        loop 01) and OmA (observation minus analysis, outer loop OuterL) for a
        variable/source. Unlike time_series, this method draws OmF and OmA in
        the same figure, and is therefore intended for a single level or for a
        layer bounded by InfLev (higher pressure) and SupLev (lower pressure).

        Parameters
        ----------
        varName : str  - variable name (e.g. 'uv')
        varType : int  - observation source/type (e.g. 224)
        dateIni : int  - first analysis date, format YYYYMMDDHH
        dateFin : int  - last analysis date, format YYYYMMDDHH
        path    : str  - root of the GSI dataout tree (default: testcase path)
        nHour   : str  - interval between analyses, in hours
        vminOMA, vmaxOMA : float - y-axis limits for the OmF/OmA mean
        vminSTD, vmaxSTD : float - y-axis limits for the standard deviation
        InfLev  : int  - lower layer boundary in hPa (higher pressure)
        SupLev  : int  - upper layer boundary in hPa (lower pressure)
        OuterL  : str  - outer loop used for the OmA files (must be > "01")

        Example:

        dini  = 2013010100  # first date
        dfin  = 2013010900  # last date
        gsiTools = gs.gsiFunctions()
        gsiTools.time_series_omfoma(varName='uv', varType=224, dateIni=dini,
                                    dateFin=dfin, InfLev=1000, SupLev=900,
                                    OuterL='03')
        '''

        delta = nHour

        # OmA only exists for outer loops after the first one.
        if OuterL is None:
            print()
            print("ERROR in time_series, OuterL not informed")
            print()
            exit()
        elif OuterL == "01":
            print()
            print("ERROR in time_series, analysis OuterL must be greater than 01")
            print()
            exit()
        else:
            omflag = "OmA"

        if path is not None:
            data_path = path
        else:
            # e.g. <data_path>/2017010106/diag/pe%e.conv_01
            data_path = '/scratchin/grupos/assim_dados/home/gdad/public/eval/stat/testcase/SMG/gsi/dataout/'

        separator = " ====================================================================================================="

        print()
        print(separator)
        print(" Reading dataset in " + data_path)
        print(" Analyzing data of variable: " + varName + "  ||  type: " + str(varType) + "  ||  " + Ds.getVarInfo(varType, varName, 'instrument'))
        print(separator)
        print()

        datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
        datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
        date = datei

        # First pass over the period: collect the available levels and the
        # labels used on the time axis.
        levs_tmp, DayHour_tmp = [], []
        while (date <= datef):
            datefmt = date.strftime("%Y%m%d%H")
            DayHour_tmp.append(date.strftime("%d%H"))
            a = open(data_path + str(datefmt) + '/diag/pe%e.conv_' + OuterL)
            dataDict = a.GDict(varName, varType)

            if 'lev' in dataDict and InfLev is None:
                levs_tmp.extend(list(set(map(int, dataDict['lev']))))
                print(date.strftime(' Preparing data for: ' + "%Y%m%d%H"), ' - Levels: ', sorted(list(set(map(int, dataDict['lev'])))), end='\n')
            else:
                # BUGFIX: register the requested level itself; the old
                # hard-coded 1000 raised KeyError later for any other InfLev.
                levs_tmp.append(int(InfLev) if InfLev is not None else 1000)
                print(date.strftime(' Preparing data for: ' + "%Y%m%d%H"), ' - Level: ', InfLev, '-', SupLev, end='\n')

            a.close()
            del(a)
            date = date + timedelta(hours=int(delta))

        # Keep roughly four tick labels; max(1, ...) guards the modulo against
        # periods with fewer than 4 time steps (old code: ZeroDivisionError).
        step = max(1, int(len(DayHour_tmp) / 4))
        DayHour = [hr if (ix % step) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]

        print()
        print(separator)
        print()

        list_meanByLevs, list_stdByLevs, list_countByLevs = [], [], []
        list_meanByLevsb, list_stdByLevsb, list_countByLevsb = [], [], []
        date = datei
        levs = sorted(set(levs_tmp))

        # Second pass: accumulate mean/std/count per level for OmF ("a", the
        # first outer loop) and OmA ("b", the requested outer loop).
        while (date <= datef):

            print(date.strftime(' Calculating for ' + "%Y%m%d%H"))
            datefmt = date.strftime("%Y%m%d%H")

            # Initialize the accumulators BEFORE the try block so the except
            # handler below cannot itself raise NameError when open() fails.
            mean_dataByLevs, std_dataByLevs, count_dataByLevs = {}, {}, {}
            mean_dataByLevsb, std_dataByLevsb, count_dataByLevsb = {}, {}, {}
            a = b = None

            try:
                a = open(data_path + str(datefmt) + '/diag/pe%e.conv_01')
                b = open(data_path + str(datefmt) + '/diag/pe%e.conv_' + OuterL)
                dataTable = a.GTable(varName, varType)
                dataTableb = b.GTable(varName, varType)

                dataByLevs = {int(lvl): [] for lvl in levs}
                dataByLevsb = {int(lvl): [] for lvl in levs}
                if InfLev is not None:
                    # Collect residuals (column 9) inside the pressure layer
                    # [SupLev, InfLev]; column 3 holds the level.
                    for v in dataTable[:, :]:
                        if SupLev <= int(v[3]) <= InfLev:
                            dataByLevs[int(InfLev)].append(v[9])
                    for v in dataTableb[:, :]:
                        if SupLev <= int(v[3]) <= InfLev:
                            dataByLevsb[int(InfLev)].append(v[9])
                else:
                    for v in dataTable[:, :]:
                        dataByLevs[int(v[3])].append(v[9])

                # -99 is the missing-data sentinel (masked before plotting);
                # checking each side separately also avoids np.mean([]) NaNs
                # on the OmA side, which the old code only checked for OmF.
                for lv in levs:
                    if len(dataByLevs[lv]) != 0:
                        mean_dataByLevs[int(lv)] = np.mean(np.array(dataByLevs[lv]))
                        std_dataByLevs[int(lv)] = np.std(np.array(dataByLevs[lv]))
                        count_dataByLevs[int(lv)] = len(dataByLevs[lv])
                    else:
                        mean_dataByLevs[int(lv)] = -99
                        std_dataByLevs[int(lv)] = -99
                        count_dataByLevs[int(lv)] = -99
                    if len(dataByLevsb[lv]) != 0:
                        mean_dataByLevsb[int(lv)] = np.mean(np.array(dataByLevsb[lv]))
                        std_dataByLevsb[int(lv)] = np.std(np.array(dataByLevsb[lv]))
                        count_dataByLevsb[int(lv)] = len(dataByLevsb[lv])
                    else:
                        mean_dataByLevsb[int(lv)] = -99
                        std_dataByLevsb[int(lv)] = -99
                        count_dataByLevsb[int(lv)] = -99

            except Exception as err:
                # Keep going on a bad/missing date: flag every level missing.
                print("ERROR in time_series function:", err)
                for lv in levs:
                    mean_dataByLevs[int(lv)] = -99
                    std_dataByLevs[int(lv)] = -99
                    count_dataByLevs[int(lv)] = -99
                    mean_dataByLevsb[int(lv)] = -99
                    std_dataByLevsb[int(lv)] = -99
                    count_dataByLevsb[int(lv)] = -99

            if InfLev is None:
                list_meanByLevs.append(list(mean_dataByLevs.values()))
                list_stdByLevs.append(list(std_dataByLevs.values()))
                list_countByLevs.append(list(count_dataByLevs.values()))
            else:
                list_meanByLevs.append(mean_dataByLevs[int(InfLev)])
                list_stdByLevs.append(std_dataByLevs[int(InfLev)])
                list_countByLevs.append(count_dataByLevs[int(InfLev)])
                list_meanByLevsb.append(mean_dataByLevsb[int(InfLev)])
                list_stdByLevsb.append(std_dataByLevsb[int(InfLev)])
                list_countByLevsb.append(count_dataByLevsb[int(InfLev)])

            # Close the diagnostic files; they are None when open() failed
            # (the old unconditional a.close()/del(dataTable) crashed there).
            if a is not None:
                a.close()
            if b is not None:
                b.close()

            date = date + timedelta(hours=int(delta))

        print()
        print(separator)
        print()

        print(' Making Graphics...')

        y_axis = np.arange(0, len(levs), 1)
        x_axis = np.arange(0, len(DayHour), 1)

        # Mask the -99 sentinel so missing slots are not drawn.
        mean_final   = np.ma.masked_array(np.array(list_meanByLevs), np.array(list_meanByLevs) == -99)
        std_final    = np.ma.masked_array(np.array(list_stdByLevs), np.array(list_stdByLevs) == -99)
        count_final  = np.ma.masked_array(np.array(list_countByLevs), np.array(list_countByLevs) == -99)
        mean_finalb  = np.ma.masked_array(np.array(list_meanByLevsb), np.array(list_meanByLevsb) == -99)
        std_finalb   = np.ma.masked_array(np.array(list_stdByLevsb), np.array(list_stdByLevsb) == -99)
        count_finalb = np.ma.masked_array(np.array(list_countByLevsb), np.array(list_countByLevsb) == -99)

        date_title = str(datei.strftime("%d%b")) + '-' + str(date.strftime("%d%b")) + ' ' + str(date.strftime("%Y"))
        instrument_title = str(varName) + '-' + str(varType) + '  |  ' + Ds.getVarInfo(varType, varName, 'instrument')

        # Figure with more than one level: three stacked panels (mean, std
        # deviation, counts), each over time (x) and vertical level (y).
        if InfLev is None:
            fig = plt.figure(figsize=(6, 9))
            plt.rcParams['axes.facecolor'] = 'None'
            plt.rcParams['hatch.linewidth'] = 0.3

            plt.subplot(3, 1, 1)
            ax = plt.gca()
            # Hatched backdrop marks the masked (missing) cells.
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(mean_final.T), origin='lower', vmin=vminOMA, vmax=vmaxOMA, cmap='seismic', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=0.5)
            plt.tight_layout()
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Mean ('+omflag+')')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)

            plt.subplot(3, 1, 2)
            ax = plt.gca()
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(std_final.T), origin='lower', vmin=vminSTD, vmax=vmaxSTD, cmap='Blues', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=0.5)
            plt.tight_layout()
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Standard Deviation ('+omflag+')')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)

            plt.subplot(3, 1, 3)
            ax = plt.gca()
            ax.add_patch(mpl.patches.Rectangle((-1,-1),(len(DayHour)+1),(len(levs)+3), hatch='xxxxx', color='black', fill=False, snap=False, zorder=0))
            plt.imshow(np.flipud(count_final.T), origin='lower', vmin=0.0, vmax=np.max(count_final), cmap='gist_heat_r', aspect='auto', zorder=1)
            plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
            plt.title(instrument_title, loc='left', fontsize=10)
            plt.title(date_title, loc='right', fontsize=10)
            plt.ylabel('Vertical Levels (hPa)')
            plt.xlabel('Total Observations')
            plt.yticks(y_axis, levs[::-1])
            plt.xticks(x_axis, DayHour)
            plt.tight_layout()

        # Figure with only one level/layer: OmF (left axis) vs OmA (right).
        else:

            fig, ax1 = plt.subplots(1, 1, figsize=(6, 4))
            plt.style.use('seaborn-ticks')

            plt.axhline(y=0.0,ls='solid',c='#d3d3d3')
            ax1.plot(x_axis, list_meanByLevs, "b-", label="Mean (OmF)")
            ax1.plot(x_axis, list_meanByLevs, "bo", label="Mean (OmF)")
            ax1.set_xlabel('Date (DayHour)', fontsize=10)
            # Make the y-axis label, ticks and tick labels match the line color.
            ax1.set_ylim(vminOMA, vmaxOMA)
            ax1.set_ylabel('Mean (OmF)', color='b', fontsize=10)
            ax1.tick_params('y', colors='b')
            plt.xticks(x_axis, DayHour)
            plt.axhline(y=np.mean(list_meanByLevs),ls='dotted',c='blue')

            ax2 = ax1.twinx()
            ax2.plot(x_axis, list_meanByLevsb, "r-", label="Mean ("+omflag+")")
            ax2.plot(x_axis, list_meanByLevsb, "rs", label="Mean ("+omflag+")")
            ax2.set_ylim(vminOMA, vmaxOMA)
            ax2.set_ylabel('Mean ('+omflag+')', color='r', fontsize=10)
            ax2.tick_params('y', colors='r')
            plt.axhline(y=np.mean(list_meanByLevsb),ls='dotted',c='red')

            plt.xticks(x_axis, DayHour)
            plt.title(instrument_title, loc='left', fontsize=9)
            plt.title(date_title, loc='right', fontsize=9)
            plt.subplots_adjust(left=None, bottom=None, right=0.80, top=None)

            plt.tight_layout()

        print(' Done!')
        print()

        # Save Figure
        if (InfLev is not None) or (SupLev is not None):
            plt.savefig('time_series_'+str(varName) + '-' + str(varType)+'_'+omflag+'_'+str(InfLev)+'-'+str(SupLev)+'hPa.png', bbox_inches='tight', dpi=100)
        else:
            plt.savefig('time_series_'+str(varName) + '-' + str(varType)+'_'+omflag+'_all_levels.png', bbox_inches='tight', dpi=100)

        # Cleaning up
        plt.cla()
        plt.clf()
        plt.close('all')

        return

    def comparison(self, varName=None, varType=None, dateIni=None, dateFin=None,  path=None, nHour="06", OuterOMA="03", Levinf=None, Levsup=None):

        '''
        Compare OmF (outer loop 01) against OmA (outer loop OuterOMA) for a
        variable/source over a period. The result is a time series showing the
        mean, the mean +/- standard deviation band and the total number of
        observations. The statistics can be restricted to a layer
        (Levinf-Levsup) or a single level (Levinf == Levsup); when neither is
        given, all available levels are used.

        Example:

        dini     = 2017010100 # first date
        dfin     = 2017010200 # last date
        varName  = "ps"
        varType  = 180
        Levinf   = 1000
        Levsup   = 900
        path     = '/stornext/online6/das/Avalia/SMR/EXP02/gsi/dataout/'
        OuterOMA = '03'

        gsiTools = gs.gsiFunctions()
        gsiTools.comparison(varName=varName, varType=varType, dateIni=dini, dateFin=dfin, path=path, OuterOMA=OuterOMA, Levinf=Levinf, Levsup=Levsup)

        '''

        layer = str(Levinf) + " " + str(Levsup) if (Levinf is not None) and (Levsup is not None) else "all levels"

        delta      = nHour
        dataSource = '/scratchin/grupos/das/projetos/gdad/public/eval/stat/testcase/SMG/gsi/dataout/'
        data_path  = dataSource if path is None else path

        separator = " ====================================================================================================="

        print()
        print(separator)
        print(" Reading dataset in " + data_path)
        print(" Analyzing variable: " + varName + "  ||  type: " + str(varType) + "  ||  " + Ds.getVarInfo(varType, varName, 'instrument') + "  ||  Layer: " + layer)
        print(separator)
        print()

        datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
        datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
        date  = datei

        # ==============================================================================================================
        # Building the dataset levels matrix ===========================================================================

        print(" Preparing...  ")

        lev_tmp_omf, DayHour_tmp = [], []
        while (date <= datef):
            datefmt = date.strftime("%Y%m%d%H")
            DayHour_tmp.append(date.strftime("%d%H"))
            dataOMF = open(data_path + str(datefmt) + '/diag/pe%e.conv_01')
            # Parse the diagnostics once per date (the old code called
            # GDict twice for the same file).
            levsHere = dataOMF.GDict(varName, varType)['lev']
            lev_tmp_omf.extend(levsHere)
            print(date.strftime('   > Checking levels in ' + "%Y%m%d%H"), sorted(list(set(map(int, levsHere)))))
            dataOMF.close()
            del(dataOMF)
            date = date + timedelta(hours=int(delta))

        data_levs = sorted(list(set(map(int, lev_tmp_omf))))
        # Keep roughly four tick labels; max(1, ...) guards the modulo against
        # periods with fewer than 4 time steps (old code: ZeroDivisionError).
        step = max(1, int(len(DayHour_tmp) / 4))
        DayHour = [hr if (ix % step) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
        date = datei

        # Pressure layer boundaries: LINF is the bottom (higher pressure),
        # LSUP the top; defaults span every available level.
        if (Levinf is not None) and (Levsup is not None):
            LINF, LSUP = int(Levinf), int(Levsup)
        else:
            LINF, LSUP = max(data_levs), min(data_levs)

        # ==============================================================================================================
        # Building the dataset matrix ==================================================================================

        print("\n Processing...  ")

        y_axis = np.arange(0, len(data_levs), 1)
        x_axis = np.arange(0, len(DayHour), 1)

        list_OMF_mean, list_OMF_std, list_OMF_count = [], [], []
        list_OMA_mean, list_OMA_std, list_OMA_count = [], [], []
        while (date <= datef):

            print(date.strftime('   > Verifying OmF and OmA values for ' + "%Y%m%d%H"))
            datefmt = date.strftime("%Y%m%d%H")

            dataOMF = open(data_path + str(datefmt) + '/diag/pe%e.conv_01')
            dataOMA = open(data_path + str(datefmt) + '/diag/pe%e.conv_' + OuterOMA)

            dataTableOMF = dataOMF.GTable(varName, varType)
            dataTableOMA = dataOMA.GTable(varName, varType)

            # Residuals (column 9) restricted to the layer [LSUP, LINF]
            # (column 3 holds the observation level).
            valuesOMF = dataTableOMF[:, 9][(dataTableOMF[:, 3].astype(int) >= LSUP) & (dataTableOMF[:, 3].astype(int) <= LINF)]
            valuesOMA = dataTableOMA[:, 9][(dataTableOMA[:, 3].astype(int) >= LSUP) & (dataTableOMA[:, 3].astype(int) <= LINF)]

            list_OMF_mean.append(np.mean(valuesOMF))
            list_OMA_mean.append(np.mean(valuesOMA))

            list_OMF_std.append(np.std(valuesOMF))
            list_OMA_std.append(np.std(valuesOMA))

            list_OMF_count.append(len(valuesOMF))
            list_OMA_count.append(len(valuesOMA))

            dataOMF.close()
            del(dataOMF)

            dataOMA.close()
            del(dataOMA)

            date = date + timedelta(hours=int(delta))

        # ==============================================================================================================
        # Plotting Dataset  ============================================================================================

        print()
        print("\n Plotting Results...  ")

        date_title = str(datei.strftime("%d%b")) + '-' + str(date.strftime("%d%b")) + ' ' + str(date.strftime("%Y"))
        instrument_title = str(varName) + '-' + str(varType) + '  |  ' + Ds.getVarInfo(varType, varName, 'instrument')

        # Shaded bands: mean +/- one standard deviation.
        OMF_inf = np.array(list_OMF_mean) - np.array(list_OMF_std)
        OMF_sup = np.array(list_OMF_mean) + np.array(list_OMF_std)
        OMA_inf = np.array(list_OMA_mean) - np.array(list_OMA_std)
        OMA_sup = np.array(list_OMA_mean) + np.array(list_OMA_std)

        fig, ax1 = plt.subplots(1, 1)
        plt.style.use('seaborn-ticks')

        ax1.plot(x_axis, list_OMA_mean, lw=2, label='OmA Mean', color='blue', zorder=2)
        ax1.fill_between(x_axis, OMA_inf, OMA_sup, label='OmA Std dev',  facecolor='blue', alpha=0.3, zorder=2)
        ax1.plot(x_axis, list_OMF_mean, lw=2, label='OmF Mean', color='red', zorder=1)
        ax1.fill_between(x_axis, OMF_inf, OMF_sup, label='OmF Std dev',  facecolor='red', alpha=0.3, zorder=1)
        ax1.set_ylabel('OmF | OmA', fontsize=12)
        ax1.set_xlabel('Date (DayHour)', fontsize=12)
        ax1.set_ylim(-20,20)
        ax1.legend(loc='lower right', ncol=2, fancybox=True, shadow=False, frameon=True, framealpha=1.0, fontsize='11', facecolor='white', edgecolor='lightgray')
        plt.grid(axis='y', color='lightgray', linestyle='-.', linewidth=0.5, zorder=0)

        # Secondary axis: observation counts for both outer loops.
        ax2 = ax1.twinx()
        ax2.plot(x_axis, list_OMA_count, lw=2, label='OmA', linestyle='--', color='darkgray', zorder=3)
        ax2.plot(x_axis, list_OMF_count, lw=2, label='OmF', linestyle=':', color='lightgray', zorder=3)
        ax2.set_ylabel('Total Observations (OmF | OmA)', fontsize=12)
        ax2.set_ylim(0, (np.max(list_OMA_count) + np.max(list_OMA_count)/5))
        ax2.legend(loc='upper left', ncol=2, fancybox=True, shadow=False, frameon=True, framealpha=1.0, fontsize='11', facecolor='white', edgecolor='lightgray')

        plt.xticks(x_axis, DayHour)
        plt.title(instrument_title, loc='left', fontsize=12)
        plt.title(date_title, loc='right', fontsize=12)

        if LINF == LSUP:
            t = plt.annotate('Level ' + str(LINF) + 'hPa', xy=(0.87, 0.995), xytext=(-9, -9), xycoords='axes fraction', textcoords='offset points', color='darkgray', fontweight='bold', fontsize='10',
            horizontalalignment='center', verticalalignment='center')
            t.set_bbox(dict(facecolor='whitesmoke', alpha=1.0, edgecolor='whitesmoke', boxstyle="square,pad=0.3"))
        else:
            t = plt.annotate('Layer ' + str(LINF) + 'hPa - ' + str(LSUP) + 'hPa', xy=(0.78, 0.995), xytext=(-9, -9), xycoords='axes fraction', textcoords='offset points', color='darkgray', fontweight='bold', fontsize='10',
            horizontalalignment='center', verticalalignment='center')
            t.set_bbox(dict(facecolor='whitesmoke', alpha=1.0, edgecolor='whitesmoke', boxstyle="square,pad=0.3"))

        plt.tight_layout()

        # Save Figure. LINF/LSUP are always defined integers here, so the old
        # third "all_levels" branch was unreachable and has been dropped.
        if LINF == LSUP:
            plt.savefig('comparison_'+str(varName) + '-' + str(varType) + '_level_'+ str(Levinf)+'_hPa.png', bbox_inches='tight', dpi=100)
        else:
            plt.savefig('comparison_'+str(varName) + '-' + str(varType) + '_levels_'+ str(LINF)+ '-'+ str(LSUP) +'hPa.png', bbox_inches='tight', dpi=100)

        print(' Done!')
        print()

        # Cleaning up
        plt.cla()
        plt.clf()
        plt.close('all')

        return

#EOC
#-----------------------------------------------------------------------------#


    def export_csv(self, varName=None, varType=None, dateIni=None, dateFin=None,  path=None, nHour="06", Level=None):

        '''
        Export the OmF statistics computed from the ReadDiag files to a CSV
        file named 'dataout_<varName>_<varType>.csv'. Each row holds one
        analysis time; for every level (or only the requested Level) the row
        carries the mean, the standard deviation and the observation count.

        Example:

        dini     = 2017010100 # first date
        dfin     = 2017010106 # last date
        varName  = "ps"
        varType  = 180
        Level    = None
        path     = '/stornext/online6/das/Avalia/SMR/EXP02/gsi/dataout/'

        gsiTools = gs.gsiFunctions()
        gsiTools.export_csv(varName=varName, varType=varType, dateIni=dini, dateFin=dfin, path=path, Level=None)

        '''

        delta      = nHour
        dataSource = '/scratchin/grupos/das/projetos/gdad/public/eval/stat/testcase/SMG/gsi/dataout/'
        data_path  = dataSource if path is None else path
        separator  = " ______________________________________________________________________________________________________"

        print()
        print(separator+'\n')
        print(" Reading dataset in " + data_path)
        print(" Analyzing variable: " + varName + "  ||  type: " + str(varType) + "  ||  " + Ds.getVarInfo(varType, varName, 'instrument'))
        print()

        datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
        datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
        date  = datei

        # ==============================================================================================================
        # Building the dataset levels matrix ===========================================================================

        print(" Preparing...  ")

        # First pass: find every level present in the period.
        lev_tmp_omf = []
        while (date <= datef):
            datefmt = date.strftime("%Y%m%d%H")
            dataOMF = open(data_path + str(datefmt) + '/diag/pe%e.conv_01')
            # Parse the diagnostics once per date (the old code called
            # GDict twice for the same file).
            levsHere = dataOMF.GDict(varName, varType)['lev']
            lev_tmp_omf.extend(levsHere)
            print(date.strftime('   > Checking levels in ' + "%Y%m%d%H"), sorted(list(set(map(int, levsHere)))))
            dataOMF.close()
            del(dataOMF)
            date = date + timedelta(hours=int(delta))

        data_levs = sorted(list(set(map(int, lev_tmp_omf))))
        date = datei

        # Highest-to-lowest pressure order, or just the requested level.
        get_levels = list(reversed(data_levs)) if Level is None else [int(Level)]

        # ==============================================================================================================
        # Building the dataset matrix ==================================================================================

        print("\n Processing...  ")

        # CSV header: datetime plus mean/std/count columns for each level.
        head_levs = ['datetime']
        for lev in get_levels:
            head_levs.append('mean'+str(lev))
            head_levs.append('std'+str(lev))
            head_levs.append('count'+str(lev))

        dset = []
        while (date <= datef):

            print(date.strftime('   > Verifying OmF values for ' + "%Y%m%d%H"))
            datefmt      = date.strftime("%Y%m%d%H")
            dataEXP      = open(data_path + str(datefmt) + '/diag/pe%e.conv_01')
            dataTableEXP = dataEXP.GTable(varName, varType)

            values_levs = [datefmt]
            for lev in get_levels:
                # Residuals (column 9) for the observations at this level
                # (column 3); statistics rounded to 2 decimal places.
                valuesEXP = dataTableEXP[:, 9][(dataTableEXP[:, 3].astype(int) == lev)]
                values_levs.append(str(np.round(np.mean(valuesEXP), 2)))
                values_levs.append(str(np.round(np.std(valuesEXP), 2)))
                values_levs.append(str(len(valuesEXP)))

            dset.append(values_levs)

            dataEXP.close()
            del(dataEXP)
            del(dataTableEXP)

            date = date + timedelta(hours=int(delta))

        # ==============================================================================================================
        # Save dataset into CSV File ===================================================================================

        print("\n Saving Dataset in CSV File...  ")

        # Local import keeps pandas optional for users that never export.
        import pandas as pd
        dataout_file = 'dataout_' + str(varName) + '_' + str(varType) + '.csv'
        pd.DataFrame.from_records(dset, columns=head_levs).to_csv(dataout_file, index=False)
        print(" Done \n")

        return


#EOC
#-----------------------------------------------------------------------------#
