 #! /bin/bash 
#--------------------------------------------------------------------#
#                    obsSatw - GDAD/CPTEC/INPE - 2017                #
#--------------------------------------------------------------------#
#BOP
#
# !DESCRIPTION:
#  Ferramenta de diagnostico da assimilacao de dados de vento 
#  de sensores em satélites artificiais na assimilacao de dados usando o GSI
#
# !INTERFACE:
#    ./run_obsSatw.sh LABELINI LABELFIM NHOURS OmA  GSI_system EXPE GSIOUTpath
#
#    Onde:
#         => LABELINI  : Data da analise [%y4%m2%d2%h2]
#         => LABELFIM  : Data da analise Final [%y4%m2%d2%h2]
#         => NHOURS    : Incremento entre analises [+Nh] - N = numero de horas: Ex. "6"
#         => OmF or OmA: Selecione o que deseja avaliar OmF ou OmA. 
#         => GSI_system: Label do sistema de modelagem com o GSI: Ex. "SMG" "RMS"
#         => EXPE      : Label do experimento onde se destina os dados de saida: Ex. "EXPE"
#         => GSIOUTpath: Path of the diag file of GSI dataout organized by date yyyymmddhh
#
#  !EXAMPLES:  
#    ./run_obsSatw.sh 2013010100 2013010818 CPT 62 28 06 OmA SMG EXPE $WORK_HOME/SMG/datainout/gsi/dataout
#
#  !TESTCASE:
#  Digite sem argumentos de entrada para o testcase do sistema hospedeiro, da forma:
#    ./run_obsSatw.sh              
#  Digite a palavra testcase no primeiro argumento para o testcase padrÃ£o (/shared/das/dist):
#    ./run_obsSatw.sh testcase
#
# !REVISION HISTORY:
#  22-06-2017 - Luiz Sapucci - Codigo Inicial baseado no run_humNphy.sh
#  25-03-2025 - Thaisa Lopes  - Versão inicial baseada na obsRadi para a EGEON usando o readDiag.Radi
#  25-03-2025 - Luiz Sapucci - aperfeiçoamento da versão para oestcase.
#
# !REMARKS:
#  O readDiag-1.0.0 faz a leitura dos dados do diag do gsi concatenados em um unico arquivo.
#
# !BUGS:
#  1- 
#
#EOP
#--------------------------------------------------------------------#
#BOC

#Teste...

# Descomente abaixo para debugar
#set -o xtrace

# Tipo de dados entre os convencionais
#obsType=Satw


# Verificando argumentos de entrada se testcase
if [ "${1}" = "testcase" ]
then
  export LABELI=2024020100 
  export LABELF=2024022918
  export FCT=12
  export OmFOmA="OmA"

##########################
  if [ -z "${2}" ]
  then
    export nome_sys="SMNA"
  else
    export nome_sys=${2} 
  fi
  export EXPE="testcase"
  
  export GSIout=/mnt/beegfs/luiz.sapucci/SMNA/EXP20/datainput/gsi/dataout
  
  echo -e ""
  echo -e "\033[34;1m > obsSatw: Ferramenta de diagnostico da assimilacao de dados de \033[m"
  echo -e "\033[34;1m >          Satelite usando o GSI \033[m"
  
  echo -e ""
  echo -e "\033[31;1m  ATENCAO !!! usando dados do /shared/dist/ \033[m"
  echo -e "\033[31;1m RODANDO COM PARAMETROS DO TESTCASE PADRÃƒO \033[m"
  echo -e ""
  echo -e "\033[33;1m ./run_obsSatw.sh ${LABELI} ${LABELF}  ${FCT} ${OmFOmA} ${nome_sys} ${EXPE} ${GSIout} \033[m"
  echo -e ""
else

# Verificando se ajuda
  if [ "${1}" = "ajuda" ]
    then
    echo -e ""
    echo -e "\033[33;1m Ferramenta de diagnostico da assimilacao de dados de \033[m"
    echo -e "\033[33;1m vento por satÃ©lites na assimilacao de dados usando o GSI\033[m"
    echo -e ""
    echo -e "\033[33;1m !INTERFACE:\033[m"
    echo -e "\033[33;1m ./run_obsSatw.sh LABELINI LABELFIM NHOURS OmA GSI_system EXPE GSIout \033[m"
    echo -e ""
    echo -e "\033[32;1m Digite com a palavra testcase no primeiro argumento para o testcase padrÃ£o:\033[m"
    echo -e "\033[32;1m ./run_obsSatw.sh testcase <GSI_system> \033[m"
    echo -e ""
    exit 
  fi
  
echo -e ""
echo -e "\033[34;1m > obsSatw: Ferramenta de diagnostico da assimilacao de dados de \033[m"
echo -e "\033[34;1m >          Satelite usando o GSI \033[m"

  
  # Verificando argumentos de entrada
  if [ -z "${1}" ]
  then
    export LABELI=2024020100 
  else
    export LABELI=${1}
  fi
  if [ -z "${2}" ]
  then
    export LABELF=2024022918 
  else
    export LABELF=${2} 
  fi
  if [ -z "${3}" ]
  then
    export FCT=12
  else
    export FCT=${3}
  fi
  if [ -z "${4}" ]
  then
    export OmFOmA="OmA"
  else
    export OmFOmA=${4}
  fi
  if [ -z "${5}" ]
  then
    export nome_sys="SMNA"
  else
    export nome_sys=${5}
  fi
  if [ -z "${6}" ]
  then
    export EXPE="EXPE"
  else
    export EXPE=${6}
  fi
  if [ -z "${7}" ]
  then
    export GSIout=/mnt/beegfs/luiz.sapucci/SMNA/EXP20/datainout/gsi/dataout
  else
    export GSIout=${7}
  fi

############################
  if [ $# -eq 0 ]
   then
     echo -e ""
     echo -e "\033[31;1m          !!! ATENCAO !!!\033[m"
     echo -e "\033[31;1m RODANDO COM PARAMETROS DO TESTCASE DO SISTEMA HOSPEDEIRO \033[m"
     echo -e ""
     echo -e "\033[33;1m ./run_obsSatw.sh ${LABELI} ${LABELF} ${FCT} ${OmFOmA} ${nome_sys} ${EXPE} ${GSIout}\033[m"
     echo -e ""
     echo -e "\033[31;1m ATENCAO!! CASO NÃO TENHA RODADO O TESTCASE DO SISTEMA DARÃ ERRO \033[m"
     echo -e "\033[31;1m NESSE CASO UTILIZE O TESTCASE PADRÃ„O DIGITANDO: "
     echo -e ""
     echo -e "\033[33;1m ./run_obsSatw.sh testcase <GSI_system>  \033[m"
     echo ""
     echo -ne "\033[31;1m > Deseja continuar? (S/N) \033[m"
     read resposta

    if [[ ${resposta} != "S" && ${resposta} != "s" ]]
    then
      echo ""
      echo -e "\033[34;1m  Saindo do protocolo!!! \033[m"
      echo ""
      exit
    fi

  fi

fi

# Verificando se esta logado no eslogin02
if [ ${HOSTNAME:0:7} = "eslogin" ];then
     echo "#####################################################################"
     echo "#                                                                   #"
     echo "#               Voce esta logado no ${HOSTNAME}                     #"
     echo "#                                                                   #"
     echo "# O python no tupa funciona corretamente apenas no eslogin02.       #"
     echo "# Logue nesse servidor antes de proceguir,se for o caso digitando   #"
     echo "# $ ssh eslogin02 -XC                                               #"
     echo "#                                                                   #"
     echo "#####################################################################"
     
     if [ ${HOSTNAME:8:9} = "02" ];then
       . /opt/modules/default/etc/modules.sh
       module swap PrgEnv-pgi PrgEnv-gnu >& /dev/null
       echo "#                                                                   #"
       echo "#####################################################################"
     else
       echo "# O python no tupa funciona corretamente apenas no eslogin02.       #"
       echo "# Logue nesse servidor antes de proceguir,se for o caso digitando   #"
       echo "# $ ssh eslogin02 -XC                                               #"
       echo "#                                                                   #"
       echo "#####################################################################"
     fi
fi

echo -e ""
echo -e "\033[34;1m CONFIGURACAO DA RODADA \033[m"
echo -e ""
echo -e "\033[34;1m > Data Inicial      : \033[m \033[31;1m${LABELI}\033[m"
echo -e "\033[34;1m > Data Final        : \033[m \033[31;1m${LABELF}\033[m"
echo -e "\033[34;1m > Intervalo do ciclo: \033[m \033[31;1m${FCT}\033[m"
echo -e "\033[34;1m > Opcao OmF ou OmA  : \033[m \033[31;1m${OmFOmA}\033[m"
echo -e "\033[34;1m > Sistema hospedeiro: \033[m \033[31;1m${nome_sys}\033[m"
echo -e "\033[34;1m > Experimento label : \033[m \033[31;1m${EXPE}\033[m"
echo -e "\033[34;1m > GSI dataout(Diags): \033[m \033[31;1m${GSIout}\033[m"

# Exportando variaveis do eval
echo ""
source  ../../../config_eval.sh vars_export ${nome_sys} ${EXPE}

if test ! -d ${obsSatw_work}/dataout/logfile; then
    echo "Criando a arvore de diretorio para esse experimento e protocolo: " ${nome_sys} ${EXPE}
    echo "../../../config_eval.sh configurar ${nome_sys} ${EXPE}"
    ../../../config_eval.sh configurar ${nome_sys} ${EXPE}
    echo "${obsSatw_home}/config_obsSatw.ksh configurar ${nome_sys} ${EXPE}"
    ${obsSatw_home}/config_obsSatw.ksh configurar ${nome_sys} ${EXPE}
    if test ! -d ${obsSatw_work}/dataout/logfile; then echo "${obsSatw_work}/dataout/logfile"; echo "Rodada cancelada. Dados inexistentes!!!" ;exit; fi
else 
   echo "Arvore de diretorio do experimento" ${EXPE} "ja existentente para o protocolo"; 
   echo ""
fi

# Exportando variaveis do obsSatw
source  ${obsSatw_home}/config_obsSatw.ksh vars_export ${nome_sys} ${EXPE}

echo -e "\033[33;1m Com os dados de saida do GSI lidos em \033[m"
echo -e "\033[33;1m ${GSIout} \033[m" 

mkdir -p ${obsSatw_work_run}
cd ${obsSatw_work_run}
echo 

#Arquivo de log da rodada
logfile=${obsSatw_logs}/obsSatw_${LABELI}_${LABELF}.log

echo ""
echo -e "\033[34;1m >>> Submetendo o dignostico \033[33;1m obsSatw \033[m no sistema ${nome_sys} \033[34;1m para o periodo \033[m \033[31;1m${LABELI} ${LABELF}\033[m \033[m"
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%" > ${logfile}
echo " >>> Submetendo o dignostico \033[33;1m obsSatw no sistema ${nome_sys} para o periodo ${LABELI} ${LABELF}" >> ${logfile}
echo "" 
echo -e "\033[36;1m >>>> Arquivo de log dessa rodada:\033[m " 
echo -e "\033[33;1m ${logfile} \033[m "
date

##########################################################################################
# Gerando as figuras da distribuicao espacial/estatistica OmF/OmA e dados rejeitados
# gerando o script do python para gerar as figuras selecione o que quer fazer nessa rodada

cat << EOF > ./runPython.py
#! /usr/bin/env python3

import gsidiag as gd
from datetime import datetime, timedelta
import pandas as pd
import matplotlib.pyplot as plt

import numpy as np
import numpy.ma as ma

import matplotlib.patches as mpatches
import matplotlib as mpl
from gsidiag.datasources import getVarInfo

DIRdiag = "${GSIout}"

varName = 'uv'
varType = 245
dateIni='${LABELI}' 
dateFin='${LABELF}' 
nHour = '${FCT}'          
vminOMA = -2.0       
vmaxOMA = 2.0        
vminSTD = 0.0        
vmaxSTD = 4.0       
Level = 1000
Lay = 15           
SingleL = 'OneL'


datei = datetime.strptime(str(dateIni), '%Y%m%d%H')
datef = datetime.strptime(str(dateFin), '%Y%m%d%H')
dates = [dates.strftime('%Y%m%d%H') for dates in pd.date_range(datei, datef,freq='12H').tolist()]

print(dates)

# Caminhos dos arquivos diagnosticos
paths, pathsc = [], []
    
OuterL = "01"        
[paths.append(DIRdiag+"/"+dt+"/diag_conv_"+OuterL+"."+dt) for dt in dates]
    
OuterLc = "03"
[pathsc.append(DIRdiag+"/"+dt+"/diag_conv_"+OuterLc+"."+dt) for dt in dates]

print(paths)
print(pathsc)
    
    
# Leitura dos arquivos de diagnosticos
read = True
    
if read:
    gdf_list = []
    print('')
    
    print('Aguarde, o tempo total estimado para a leitura dos arquivos e de ' +
    str(int((float(len(paths))*20)/60)) + ' minutos e ' +
    str(int((float(len(paths))*20)%60)) + ' segundos.')
    
    print('')
    
    for path, pathc in zip(paths, pathsc):
        print('Reading ' + path)
            
        gdf = gd.read_diag(path, pathc)
            
        gdf_list.append(gdf)
    print("Done!")
gdf_list	
	
# Fazendo o loop para as datas do periodo para os plots espaciais

varNames=['uv']
param = "obs"
varTypesT= [245,246,247] 

numer = range(len(dates))

print('Numero de datas: ', numer)

for i in range(len(dates)):
    tidx = i
#    print('%%%%%%%%%%%%%% PUP PII %%%%%%%%%%%%%%%%%%%%')
#    print('>> Gerando as figuras para a data',dates[i])
#    print("vartypesT antes : ", varTypesT)
#    print('pfileinfo: ')
#    gdf_list[tidx].pfileinfo()
#    for j in varNames:
#        varName = j    
#        for kx in varTypesT: # loop sobre os valores de kx da temperatura
#            kxoverview = gdf_list[tidx].overview()[varName]; # kxoverview e uma lista com os kx da variavel j
#            if kx not in kxoverview: # se o kx da lista kxList nao estiver dentro da lista overview...
#                varTypesT.remove(kx);
		
#        for k in varTypesT:
#           varType = k
#           print("Plotando para a variavel: ", varName, "Tipo: ",varType)
#           gd.plot_diag.plot(gdf_list[tidx],varName=varName,varType=varType,param=param,area=[-100,-60,-30,15],mask="((iuse == 1) & (lat>-60.0 and lat<15) & (lon>260.0 and lon<330))",legend='true')
#           figname=varName+"_"+str(varType)+"_"+param+"_"+dates[i]+"_"+"plot.png"
#           plt.tight_layout()
#           plt.savefig(figname)
#           plt.close() 
#           gdf_lis = []
#           gdf_lis.append(gdf_list[i])
#           gd.plot_diag.statcount(gdf_list,  varName=varName, varType=varType, noiqc=True, dateIni=dates[i], dateFin=dates[i], nHour=nHour, figTS=False, figMap=True)
#           plt.close()



#    varTypes = [245,246,247]
#    varName = 'uv'
#    print("vartypes Vento antes: ", varTypes)
#    for kx in varTypes: # loop sobre os valores de kx
#        kxoverview = gdf_list[0].overview()['uv']; # kxoverview e uma lista com os kx da variavel uv
#        if kx not in kxoverview: # se o kx da lista kxList nao estiver dentro da lista overview...
#            varTypes.remove(kx); # remove o kx da lista kxList
#    print("Plotando para a variavel: ", varName, "Tipo: ",varTypes)  
#    gd.plot_diag.ptmap(gdf_list[tidx],varName=varName,varType=varTypes)
#    figname=varName+"_"+param+"_"+dates[i]+"_"+"plot.png"
#    plt.tight_layout()
#    plt.savefig(figname)
#    plt.close()
#    gd.plot_diag.statcount(gdf_lis,  varName=varName, varType=varType,noiqc=True, dateIni=dates[i], dateFin=dates[i], nHour=nHour, figTS=False, figMap=True)
#    plt.close()
#    print('')
 
#### Figuras do time series
#Loop para as variaveis e 
print('%%%%%%%%%%%%%% PUP PII %%%%%%%%%%%%%%%%%%%%')
print('>> Gerando as figuras da serie temporal')

varNames=['uv']
varTypes= [245,246,247]
vminOMA = -8.0       
vmaxOMA = 8.0        
vminSTD = 0.0        
vmaxSTD = 8.0

mask='((iuse == 1) & (lat>-40.0 and lat<-20.0) & (lon>300.0 and lon<330.0))'

for j in varNames:
    varName = j    
    for kx in varTypes: # loop sobre os valores de kx do vento
        kxoverview = gdf_list[tidx].overview()[varName]; # kxoverview e uma lista com os kx da variavel j
        if kx not in kxoverview: # se o kx da lista kxList nao estiver dentro da lista overview...
            varTypes.remove(kx);

    for k in varTypes:
        varType = k           	      
        print("Variavel: ", varName, "Tipo: ",varType)	    
        #ASSIMILADO
        # Atmosfera em tres camadas 
        # Media 500 com espessura de 200 (700-300)
        # label Figure: "_layer_700-300hPa.png"
        # Figuras geradas: OmF, OmA e OmFOmA OmFOmA_StdDev
        SingleL = "OneL"
        Level = 500
        Lay = 200
        gd.plot_diag.time_series(gdf_list,varName=varName, varType=varType, mask=mask, dateIni=dateIni, dateFin=dateFin, nHour=nHour, vminOMA=vminOMA, vmaxOMA=vmaxOMA, vminSTD=vminSTD, vmaxSTD=vmaxSTD, Level=Level, Lay=Lay, SingleL=SingleL,Clean=False)
	
        plt.title("Assimilados")
        plt.tight_layout()
        plt.savefig(f'Assimilado_500_camada_700-300_{varType}.png')
        plt.show()
	
	
	#REJEITADO
        # Atmosfera em tres camadas 
        # Media 500 com espessura de 200 (700-300)
        # label Figure: "_layer_700-300hPa.png"
        # Figuras geradas: OmF, OmA e OmFOmA OmFOmA_StdDev
        SingleL = "OneL"
        Level = 500
        Lay = 200
        gd.plot_diag.time_series(gdf_list,varName=varName, varType=varType, mask='((iuse == -1) & (lat>-40.0 and lat<-20.0) & (lon>300.0 and lon<330.0))', dateIni=dateIni, dateFin=dateFin, nHour=nHour, vminOMA=vminOMA, vmaxOMA=vmaxOMA, vminSTD=vminSTD, vmaxSTD=vmaxSTD, Level=Level, Lay=Lay, SingleL=SingleL,Clean=False)
	
        plt.title("Rejeitados")
        plt.tight_layout()
        plt.savefig(f'Rejeitado_500_camada_700-300_{varType}.png')
        plt.show()
	

        # Atmosfera em tres camadas 
        # Alta 150 com espessura de 150 (300-0)
        # label Figure: "_layer_300-0hPa.png"
        # Figuras geradas: OmF, OmA e OmFOmA OmFOmA_StdDev
#        SingleL = "OneL"
#        Level = 150
#        Lay = 150
#        gd.plot_diag.time_series(gdf_list,varName=varName, varType=varType, dateIni=dateIni, dateFin=dateFin, nHour=nHour, vminOMA=vminOMA, vmaxOMA=vmaxOMA, vminSTD=vminSTD, vmaxSTD=vmaxSTD, Level=Level, Lay=Lay, SingleL=SingleL,Clean=False)

print('')

print('%%%%%%%%%%%%%%%%%%%%%% STATCOUNT %%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('>> Statcount para cada kx')

#varTypes = [245,246,247]

#for varType in varTypes:
#    gd.plot_diag.statcount(gdf_list,varName=varName,varType=varType,noiqc=True,dateIni=dateIni,dateFin=dateFin,nHour=nHour,figTS=True,figMap=False,markersize=4.80)

#    plot.tight_layout()
#    plt.savefig(f'{varType}.png')
#    plt.close()


print('%%%%%%%%%%%%%%%%%%%%%% DISTRIBUICAO ESPACIAL %%%%%%%%%%%%%%%%%%%%%%%%%%')
print('>> OMF-OMA')

import geopandas as gpd

varName = 'uv'
varTypes = [245, 246, 247]
param = 'oma-omf'
nivel = None  # por exemplo, 250 hPa


# Mascara de area
# mask = '((iuse == 1) & (lat>-27.0 and lat<-22.0) & (lon>312.0 and lon<320.0))'
#area = [-48, -27, -40, -22]  # America do Sul
# minVal = -5
#maxVal = 5

# Sua lista de datas em formato string
#datei = datetime.strptime(str(dateIni), '%Y%m%d%H')
#datef = datetime.strptime(str(dateFin), '%Y%m%d%H')
#dates = [d.strftime('%Y%m%d%H') for d in pd.date_range(datei, datef, freq='12H').tolist()]

#for tidx in range(len(gdf_list)):
#    for varType in varTypes:
#        data_str = datetime.strptime(dates[tidx], '%Y%m%d%H').strftime('%Y-%m-%d %H:%M')

#        if param == 'oma-omf':
#            df0 = gdf_list[tidx].obsInfo[varName].loc[varType]
#            df0['oma-omf'] = df0['oma'] - df0['omf']
#
#            plt.style.use('seaborn-v0_8')
#            fig = plt.figure(figsize=(12, 6))
#            ax = fig.add_subplot(1, 1, 1)

#            path = gpd.datasets.get_path('naturalearth_lowres')
#            world = gpd.read_file(path)
#            ax = world.plot(ax=ax, facecolor='lightgrey', edgecolor='k')

#            ax.set_xlabel('Longitude')
#            ax.set_ylabel('Latitude')

#            if area:
#                ax.set_xlim([area[0], area[2]])
#                ax.set_ylim([area[1], area[3]])
#            else:
#                ax.set_xlim([-180, 180])
#                ax.set_ylim([-90, 90])
#
#            try:
#                if mask is None:
#                    ax = df0.plot(param, ax=ax, vmin=minVal, vmax=maxVal, cmap='jet',
#                                  legend=True, legend_kwds={'shrink': 0.5}, markersize=4.80)
#                else:
#                    df = df0.query(mask)
#                    ax = df.plot(param, ax=ax, vmin=minVal, vmax=maxVal, cmap='jet',
#                                 legend=True, legend_kwds={'shrink': 0.5}, markersize=4.80)
#
#                ax.set_title(f'{varName} - kx {varType} - Nivel {nivel} hPa - {data_str}: {param}')

#            except:
#                ax = None
#                print("++++++++++++++++++++++++++ ERROR: file reading --> plot ++++++++++++++++++++++++++")
#                print("    >>> No information on this date <<< ")

#        else:
#            gd.plot_diag.plot(gdf_list[tidx],
#                              varName=varName,
#                              varType=varType,
#                              param=param,
#                              minVal=minVal,
#                              maxVal=maxVal,
#                              mask=mask,
#                              area=area,
#                              legend='true',
#                              markersize=4.80)

#        figname = f'{varName}_{param}_varType{varType}_{data_str.replace(":", "").replace(" ", "_")}_plot_recorte_America.png'
#        plt.tight_layout()
#        plt.savefig(figname)
#        plt.show()



print('%%%%%%%%%%%%%%%%%%%% MATRIZ ASSIMILADOS %%%%%%%%%%%%%%%%%%%%%%%%%%')

# === Parametros comuns ===

Level = 'Zlevs'
Lay = 15
varTypes = [245, 246, 247]
kx_data_assim = {}  # Dicionario onde os dados serao armazenados


dates = pd.date_range(start=pd.to_datetime(dateIni, format='%Y%m%d%H'),
                      end=pd.to_datetime(dateFin, format='%Y%m%d%H'),
                      freq=f'{nHour}H')

# === Parametros necessarios: certifique-se de que essas variaveis estejam definidas ===
# varName, vminOMA, vmaxOMA, vminSTD, vmaxSTD, SingleL, gdf_list

#mask= "((iuse == 1) & (lat>-60.0 and lat<15) & (lon>260.0 and lon<330))" #  teste usando america do sul
# mask= '(iuse == 1)'

# mask ---> Akara
mask = '((iuse == 1) & (lat>-40.0 and lat<-20.0) & (lon>300.0 and lon<330.0))'
#mask= '(iuse == 1)'

# === Loop para obter as matrizes de cada KX ===
for varType in varTypes:
    TAB_kx = gd.plot_diag.time_series(gdf_list,
                                      varName=varName,
                                      varType=varType,
                                      mask=mask, 
                                      dateIni=dateIni,
                                      dateFin=dateFin,
                                      nHour=nHour,
                                      vminOMA=vminOMA,
                                      vmaxOMA=vmaxOMA,
                                      vminSTD=vminSTD,
                                      vmaxSTD=vmaxSTD,
                                      Level=Level,
                                      Lay=Lay,
                                      SingleL=SingleL,
                                      Clean=False
    )
    kx_data_assim[varType] = TAB_kx[1]  # Armazena apenas a matriz (niveis x tempos)

# === Inicializa a soma com o primeiro KX (transposto) ===
soma_Kx_assim = kx_data_assim[varTypes[0]].T.filled(0).copy()
print(f'KX = {varTypes[0]}')
print(soma_Kx_assim)

# === Soma os demais KX (tambem transpostos) ===
for kx_escolhido in varTypes[1:]:
    data = kx_data_assim[kx_escolhido].T.filled(0)
    soma_Kx_assim += data
    print(f'KX = {kx_escolhido}')
    print(data)
    print('Soma parcial:')
    print(soma_Kx_assim)

# === Verificacao de um nivel especifico (ex: nivel 4) ===
print("Soma total (todos os niveis, tempos e KX):", int(np.sum(soma_Kx_assim)))



# === Plotagem da serie temporal por nivel ===
plt.figure(figsize=(12, 6))
for i, linha_nivel in enumerate(soma_Kx_assim):
   # if not np.all(linha_nivel == 0):  # Plota apenas niveis com dados
        plt.plot(dates, linha_nivel, label=f'Levels {i}', marker='o')

plt.xlabel("Tempo (Data e Hora)")
plt.ylabel("Quantidade de Observacoes Assimiladas")
plt.title("Evolucao Temporal por Nivel - Soma dos KX 245, 246 e 247")
plt.xticks(rotation=45)
plt.legend()
plt.grid(True)

plt.tight_layout()
plt.savefig(f'Matriz_ASSIMILADO{varType}.png')
plt.show()



print('%%%%%%%%%%%%%%%%%%%% ASSIMILADOS KX SEPARADOS %%%%%%%%%%%%%%%%%%%%%%%%%%')
# ===== Parametros de entrada =====
varTypes = [245, 246, 247]

# Datas e titulos
datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
date_title = f"{datei.strftime('%d%b')}-{datef.strftime('%d%b')} {datef.strftime('%Y')}"
dates = [d.strftime('%m%d%H') for d in pd.date_range(datei, datef, freq=f'{nHour}H')]

# Niveis verticais e eixos
levs = list(map(int, gdf_list[0].zlevs))
y_axis = np.arange(0, len(levs), 1)
x_axis = np.arange(0, len(dates), 1)

# === Loop por KX para dados assimilados ===
for varType in varTypes:
    print(f'KX {varType} - matriz bruta:')
    matriz_assim = kx_data_assim[varType].T.filled(0)
    print(matriz_assim)
    print()

    # Inversao e mascara
    matriz_assim_invertida = np.array(matriz_assim)[::-1]
    matriz_plot = np.ma.masked_array(matriz_assim_invertida, matriz_assim_invertida == 0)

    # Calculo dos valores
    vmax_val = int(np.max(matriz_assim))          # valor maximo para escala de cores
    total_assimilado = int(np.sum(matriz_assim))  # valor total da matriz

    # Titulo do grafico
    instrument_title = f'{varName} - {varType} | {getVarInfo(varType, varName, "instrument")} [ASSIMILADAS]'

    # Plot
    fig = plt.figure(figsize=(8, 5))
    plt.rcParams['axes.facecolor'] = 'None'
    plt.rcParams['hatch.linewidth'] = 0.3

    plt.subplot(1, 1, 1)
    ax = plt.gca()

    # Fundo com hachura
    ax.add_patch(mpatches.Rectangle(
        (-1, -1),
        len(dates) + 1,
        len(levs) + 3,
        hatch='xxxxx',
        color='black',
        fill=False,
        snap=False,
        zorder=0
    ))

    # Imagem com coloracao
    im = plt.imshow(
        matriz_plot,
        origin='lower',
        vmin=0,
        vmax=vmax_val,
        cmap='hot',
        aspect='auto',
        zorder=1,
        interpolation='none'
    )

    # Barra de cores com soma total
    cbar = plt.colorbar(im, orientation='horizontal', pad=0.18, shrink=1.0)
    cbar.set_label(f'Soma total de observacoes assimiladas (KX {varType}): {total_assimilado:,}')

    # Titulos e eixos
    plt.title(instrument_title, loc='left', fontsize=10)
    plt.title(date_title, loc='right', fontsize=10)
    plt.ylabel('Niveis verticais (hPa)')
    plt.xlabel('Tempo (MMDDHH)')

    plt.yticks(y_axis, levs)
    plt.xticks(x_axis, dates)
    
    
    DayHour_tmp = dates
    if(len(DayHour_tmp)>4):
        DayHour = [hr if (ix % int(len(DayHour_tmp)/4)) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
    else:
        DayHour = DayHour_tmp
    major_ticks = [DayHour.index(dh) for dh in filter(None, DayHour)]
    ax.set_xticks(major_ticks)
    
#    ax.set_xticks([dates.index(d) for d in dates])

    plt.tight_layout()
    plt.savefig(f'ASSIMILADAS_KX_SEPARADO{varType}_{varName}.png')
    plt.show()


print('%%%%%%%%%%%%%%%%%%%%%% ASSIMILADOS TOTAL %%%%%%%%%%%%%%%%%%%%%%%%%%%')

# Datas e titulos
datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
date_title = f"{datei.strftime('%d%b')}-{datef.strftime('%d%b')} {datef.strftime('%Y')}"
instrument_title = f'{varName} - {varTypes} | {getVarInfo(varType, varName, "instrument")} [ASSIMILADAS]'

# Lista de datas para o eixo X
dates = [d.strftime('%m%d%H') for d in pd.date_range(datei, datef, freq=f'{nHour}H')]
x_axis = np.arange(0, len(dates), 1)
levs = list(map(int, gdf_list[0].zlevs))
y_axis = np.arange(0, len(levs), 1)

print('y_axis = ', levs)
print()
print('x_axis = ', dates)
print()

# Inverte a matriz e aplica mascara para zeros
print('soma_Kx_assim = ', soma_Kx_assim)
print()
soma_Kx_assim_invertida = np.array(soma_Kx_assim)[::-1]
soma_Kx_assim_final = np.ma.masked_array(soma_Kx_assim_invertida, soma_Kx_assim_invertida == 0)
print('soma_Kx_assim_invert = ', soma_Kx_assim_invertida)
print()

# === Calculo correto ===
vmax_val = int(np.max(soma_Kx_assim))         # Escala de cor baseada no valor maximo
total_assimilado = int(np.sum(soma_Kx_assim))  # Soma total da matriz (valor correto que voce quer mostrar)

# === Plotando a matriz ===
fig = plt.figure(figsize=(8, 5))
plt.rcParams['axes.facecolor'] = 'None'
plt.rcParams['hatch.linewidth'] = 0.3

plt.subplot(1, 1, 1)
ax = plt.gca()

# Fundo com hachura
ax.add_patch(mpatches.Rectangle(
    (-1, -1),
    len(dates) + 1,
    len(levs) + 3,
    hatch='xxxxx',
    color='black',
    fill=False,
    snap=False,
    zorder=0
))

# Imagem com escala de cor
plt.imshow(
    soma_Kx_assim_final,
    origin='lower',
    vmin=0,
    vmax=vmax_val,
    cmap='tab20b',
    aspect='auto',
    zorder=1,
    interpolation='none'
)

# Barra de cores com soma total
cbar = plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
cbar.set_label(f'Soma total de observacoes assimiladas: {total_assimilado:,}')

# Titulos e eixos
plt.title(instrument_title, loc='left', fontsize=10)
plt.title(date_title, loc='right', fontsize=10)
plt.ylabel('Niveis verticais (hPa)')
plt.xlabel('Tempo (MMDDHH)')

plt.yticks(y_axis, levs)
plt.xticks(x_axis, dates)

DayHour_tmp = dates
if(len(DayHour_tmp)>4):
    DayHour = [hr if (ix % int(len(DayHour_tmp)/4)) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
else:
    DayHour = DayHour_tmp
major_ticks = [DayHour.index(dh) for dh in filter(None, DayHour)]
ax.set_xticks(major_ticks)

#ax.set_xticks([dates.index(d) for d in dates])

plt.tight_layout()
plt.savefig(f'ASSIMILADAS_TOTAL_{varName}.png')
plt.show()



print('%%%%%%%%%%%%%%%%%%%%% MATRIZ REJEITADO %%%%%%%%%%%%%%%%%%%%%%%%%%%')

# === Parametros ===
Level = 'Zlevs'
Lay = 15
varTypes = [245, 246, 247]
kx_data_rej = {}

# === Datas e frequencia ===
dates = pd.date_range(start=pd.to_datetime(dateIni, format='%Y%m%d%H'),
                      end=pd.to_datetime(dateFin, format='%Y%m%d%H'),
                      freq=f'{nHour}H')

# === Mascara e area para America do Sul (rejeitados)
#mask_rej_sul = '((iuse == -1) & (lat > -60.0) & (lat < 15.0) & (lon > 260.0) & (lon < 330.0))'
#area_sul = [-100, -60, -20, 15]

#Akara
mask = '((iuse == -1) & (lat>-40.0 and lat<-20.0) & (lon>300.0 and lon<330.0))'

#lim_qm = 4 # Carol: inseri por causa da classificacao dos rejeitados (olhar funcao statcount no arquivo main)
## olhar noiqc no arquivo gsiparm.anl dentro da pasta de dados do experimento
## EXP20: noiqc   = .true.
## Sendo assim e preciso considerar:

noiqc = True

if(noiqc):
    lim_qm = 8
    if(varName == 'ps'):
        lim_qm = 7
else:
    lim_qm = 4

# mask ---> AMERICA DO SUL
#mask= "(iuse==-1) & ((idqc > 15 or idqc <= 0) or (idqc > 0 and idqc < "+str(lim_qm)+")) & (lat>-60.0 and lat<15) & (lon>260.0 and lon<330)"
# Carol: o mask acima considera a classificacao dos dados rejeitados como esta na funcao statcount e tambem considera latitude
# e longitude da america do sul. Na minha versao do readDiag funcionou. Depois verificar se funciona para a regiao do Akara.

# mask ---> Akara
mask= "(iuse==-1) & ((idqc > 15 or idqc <= 0) or (idqc > 0 and idqc < "+str(lim_qm)+")) & (lat>-27.0 and lat<-22.0) & (lon>312.0 and lon<320.0))"



# === Loop para cada KX
for varType in varTypes:
    TAB_kx = gd.plot_diag.time_series(gdf_list,
                                      varName=varName,
                                      varType=varType,
                                      mask=mask, 
                                      dateIni=dateIni,
                                      dateFin=dateFin,
                                      nHour=nHour,
                                      vminOMA=vminOMA,
                                      vmaxOMA=vmaxOMA,
                                      vminSTD=vminSTD,
                                      vmaxSTD=vmaxSTD,
                                      Level=Level,
                                      Lay=Lay,
                                      SingleL=SingleL,
                                      Clean=False
    )
    kx_data_rej[varType] = TAB_kx[1]  # Matriz niveis x tempo

#soma_Kx_rej = kx_data_rej[varTypes[0]].T.filled(0).copy()
#for kx in varTypes[1:]:
    #soma_Kx_rej += kx_data_rej[kx].T.filled(0) #  Carol: Aqui estava -> kx_data_rej_sul ---> kx_data_rej (correto) ERRO!
    
soma_Kx_rej = kx_data_rej[varTypes[0]].T.filled(0).copy()
print(f'KX = {varTypes[0]}')
print(soma_Kx_rej)



# === Soma os demais KX (tambem transpostos) ===
for kx in varTypes[1:]:
    data = kx_data_rej[kx].T.filled(0)
    soma_Kx_rej += data
    print(f'KX = {kx}')
    print(data)
    print('Soma parcial:')
    print(soma_Kx_rej)

# === Verificacao de um nivel especifico (ex: nivel 4) ===
print("Soma total (todos os niveis, tempos e KX):", int(np.sum(soma_Kx_rej)))



# === Plotagem da serie temporal por nivel ===
plt.figure(figsize=(12, 6))
for i, linha_nivel in enumerate(soma_Kx_rej):
    #if not np.all(linha_nivel == 0):  # Plota apenas niveis com dados
        plt.plot(dates, linha_nivel, label=f'Nivel {i}', marker='o')

plt.xlabel("Tempo (Data e Hora)")
plt.ylabel("Quantidade de Observacoes Rejeitadas")
plt.title("Evolucao Temporal por Nivel - Soma dos KX 245, 246 e 247")
plt.xticks(rotation=45)
plt.legend()
plt.grid(True)

plt.tight_layout()
plt.savefig(f'Matriz_REJEITADO{varType}.png')
plt.show()



print('%%%%%%%%%%%%%%%%%%%%% REJEITADO TOTAL %%%%%%%%%%%%%%%%%%%%%%%%%%%')


# Datas e titulos
datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
date_title = str(datei.strftime("%d%b")) + '-' + str(datef.strftime("%d%b")) + ' ' + str(datef.strftime("%Y"))
instrument_title = f'{varName} - {varTypes} | {getVarInfo(varType, varName, "instrument")} [REJEITADAS]'

# Lista de datas para o eixo X
dates = [d.strftime('%m%d%H') for d in pd.date_range(datei, datef, freq=f'{nHour}H')]

# Niveis verticais e eixos
levs = list(map(int, gdf_list[0].zlevs))
y_axis = np.arange(0, len(levs), 1)
x_axis = np.arange(0, len(dates), 1)

print('y_axis = ', levs)
print()
print('x_axis = ', dates)
print()

# === Inverte e mascara a matriz ===
print('soma_Kx_rej = ', soma_Kx_rej)
print()
soma_Kx_rej_invertida = np.array(soma_Kx_rej)[::-1]
soma_Kx_rej_final = np.ma.masked_array(soma_Kx_rej_invertida, soma_Kx_rej_invertida == 0)
print('soma_Kx_rej_invert = ', soma_Kx_rej_invertida)
print()

# === Calculo dos valores ===
vmax_val = int(np.max(soma_Kx_rej))         # para a escala de cor
total_rejeitado = int(np.sum(soma_Kx_rej))  # valor total a ser exibido

# === Plotando a matriz ===
fig = plt.figure(figsize=(8, 5))
plt.rcParams['axes.facecolor'] = 'None'
plt.rcParams['hatch.linewidth'] = 0.3

plt.subplot(1, 1, 1)
ax = plt.gca()

# Fundo com hachura
ax.add_patch(mpatches.Rectangle(
    (-1, -1),
    len(dates) + 1,
    len(levs) + 3,
    hatch='xxxxx',
    color='black',
    fill=False,
    snap=False,
    zorder=0
))

# Imagem com coloracao baseada no valor maximo
plt.imshow(
    soma_Kx_rej_final,
    origin='lower',
    vmin=0,
    vmax=vmax_val,
    cmap='hot',
    aspect='auto',
    zorder=1,
    interpolation='none'
)

# Barra de cores com a soma total no texto
cbar = plt.colorbar(orientation='horizontal', pad=0.18, shrink=1.0)
cbar.set_label(f'Soma total de observacoes rejeitadas (iuse == -1): {total_rejeitado:,}')

# Titulos e eixos
plt.title(instrument_title, loc='left', fontsize=10)
plt.title(date_title, loc='right', fontsize=10)
plt.ylabel('Niveis verticais (hPa)')
plt.xlabel('Tempo (MMDDHH)')

plt.yticks(y_axis, levs)
plt.xticks(x_axis, dates)

DayHour_tmp = dates
if(len(DayHour_tmp)>4):
    DayHour = [hr if (ix % int(len(DayHour_tmp)/4)) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
else:
    DayHour = DayHour_tmp
major_ticks = [DayHour.index(dh) for dh in filter(None, DayHour)]
ax.set_xticks(major_ticks)

#major_ticks = [dates.index(dh) for dh in filter(None, dates)]
#ax.set_xticks(major_ticks)

plt.tight_layout()
plt.savefig(f'REJEITADAS_TOTAL_{varName}.png')
plt.show()


print('%%%%%%%%%%%%%%%%%%%%% REJEITADO KX SEPARADO %%%%%%%%%%%%%%%%%%%%%%%%%%%')

# === Parametros de entrada ===
varTypes = [245, 246, 247]

# Datas e titulos
datei = datetime.strptime(str(dateIni), "%Y%m%d%H")
datef = datetime.strptime(str(dateFin), "%Y%m%d%H")
date_title = f"{datei.strftime('%d%b')}-{datef.strftime('%d%b')} {datef.strftime('%Y')}"

dates = [d.strftime('%m%d%H') for d in pd.date_range(datei, datef, freq=f'{nHour}H')]
x_axis = np.arange(0, len(dates), 1)
levs = list(map(int, gdf_list[0].zlevs))
y_axis = np.arange(0, len(levs), 1)

# === Loop por cada KX ===
for varType in varTypes:
    print(f"\n KX = {varType}")

    # Obtem a matriz rejeitada desse KX (nivel x tempo \u2192 transpoe para tempo x nivel)
    matriz_kx = kx_data_rej[varType].T.filled(0)
    print(matriz_kx)

    # Inverte e mascara
    matriz_invertida = matriz_kx[::-1]
    matriz_plot = np.ma.masked_array(matriz_invertida, matriz_invertida == 0)

    # Calculo de valores
    vmax_val = int(np.max(matriz_kx))
    soma_total = int(np.sum(matriz_kx))

    # Titulo do instrumento
    instrument_title = f'{varName} - {varType} | {getVarInfo(varType, varName, "instrument")} [REJEITADAS]'

    # === Plot ===
    fig = plt.figure(figsize=(8, 5))
    plt.rcParams['axes.facecolor'] = 'None'
    plt.rcParams['hatch.linewidth'] = 0.3

    plt.subplot(1, 1, 1)
    ax = plt.gca()

    # Fundo com hachura
    ax.add_patch(mpatches.Rectangle(
        (-1, -1),
        len(dates) + 1,
        len(levs) + 3,
        hatch='xxxxx',
        color='black',
        fill=False,
        snap=False,
        zorder=0
    ))

    # Matriz colorida
    im = plt.imshow(
        matriz_plot,
        origin='lower',
        vmin=0,
        vmax=vmax_val,
        cmap='tab20b', 
        aspect='auto',
        zorder=1,
        interpolation='none'
    )

    # Barra de cores
    cbar = plt.colorbar(im, orientation='horizontal', pad=0.18, shrink=1.0)
    cbar.set_label(f'Total de rejeitadas (KX {varType}): {soma_total:,}')

    # Titulos e eixos
    plt.title(instrument_title, loc='left', fontsize=10)
    plt.title(date_title, loc='right', fontsize=10)
    plt.ylabel('Niveis verticais (hPa)')
    plt.xlabel('Tempo (MMDDHH)')

    plt.yticks(y_axis, levs)
    plt.xticks(x_axis, dates)
    
    
    DayHour_tmp = dates
    if(len(DayHour_tmp)>4):
        DayHour = [hr if (ix % int(len(DayHour_tmp)/4)) == 0 else '' for ix, hr in enumerate(DayHour_tmp)]
    else:
        DayHour = DayHour_tmp
    major_ticks = [DayHour.index(dh) for dh in filter(None, DayHour)]
    ax.set_xticks(major_ticks)
    
#   ax.set_xticks([dates.index(d) for d in dates])

    plt.tight_layout()
    plt.savefig(f'REJEITADAS_KX_SEPARADO{varType}_{varName}.png')
    plt.show()

#print('%%%%%%%%%%%%%%%%%%%%% PUP PII %%%%%%%%%%%%%%%%%%%%%%%%%%%')
#print('>> Gerando as figuras aceitos e rejeitados serie temporal')


#varName=['uv1','uv2','uv3']

#for i in varNames:
#    varName = i
    
#    if i == 'uv1':
#        varName = 'uv'             
#        varType= 245  

#    if i == 'uv2':
#        varName = 'uv'             
#        varType= 246
	
#    if i == 'uv3':
#        varName = 'uv'             
#        varType= 247


#    print("Variavel: ", varName, "Tipo: ",varType)	    
#    gd.plot_diag.statcount(gdf_list,  varName=varName, varType=varType, noiqc=True, dateIni=dateIni, dateFin=dateFin, nHour=nHour, figTS=True, figMap=False)
#    plt.close() 

print("")
print("----------------- Fim da rodada do Python --------------------")

EOF

cd ${obsSatw_work_run}/

#criando diretorio das figuras do periodo e temporarios para gerar gif animado
PERIOD=${LABELI}${LABELF}
mkdir -p ${obsSatw_figu}/${PERIOD} 
mkdir -p ${obsSatw_page}/${PERIOD} 
tempANL=${obsSatw_figu}/${PERIOD}/tempANL/
mkdir -p ${tempANL} 

rm -rf runPythonFig.py

sed -e "s/LABELI/$LABELIfig/g" -e "s/LABELF/$LABELfig/g" runPython.py > runPythonFig.py
 

chmod 755 runPythonFig.py
  
#Para rodar na maquina local sem submeter descomente as duas linhas abaixo e comente a submissão na egeon
### Activando o ambiente do ReadDiag
#  source /home/luiz.sapucci/.conda/envs/readDiag/bin/activate
#  ./runPython.py >> ${logfile}

###########################
# Submetendo o processo na EGEON

HMS=`date +'%F.%H%M'`

cat << EOF > ./runPython.sbt
#! /bin/bash
#SBATCH --job-name=Satwind
#SBATCH --nodes=1
#SBATCH --partition=PESQ1
#SBATCH --tasks-per-node=128
#SBATCH --time=04:00:00
#SBATCH --output=${obsSatw_work_run}/runPythonFig.${HMS}.sbt.log

cd ${obsSatw_work_run}

echo "Activando o ambiente ReadDiag na egeon"
source /home/$USER/.conda/envs/readDiag.Radi/bin/activate
echo "Rodando o runPythonFig.py com o python em:"
echo "/home/$USER/.conda/envs/readDiag.Radi/bin/python"
mpirun -np 1 /home/$USER/.conda/envs/readDiag.Radi/bin/python -u runPythonFig.py >> runPythonFig.${HMS}.sbt.out 2>&1


EOF

chmod 755 runPython.sbt

  
  echo "" 
  echo -e "\033[36;1m >>>> Para acompanhar o processo sendo rodado digite em outro terminal:\033[m " 
  echo -e "\033[33;1m tail -f ${logfile} \033[m "
  echo "" 

  echo -e "\033[33;1m tail -f ${obsSatw_work_run}/runPythonFig.${HMS}.sbt.out \033[m "
  echo "" 

  sbatch -W runPython.sbt

  # se desejar subnmeter no nó de submissão faça
  #source /home/luiz.sapucci/.conda/envs/readDiag/bin/activate
  #./runPythonFig.py >> ${logfile}

##############################################################################
 #############################################################################
 
# Movendo figuras geradas para ${obsSatw_figu}
echo 
echo -e "\033[36;1m >>>> Movendo figuras geradas para obsSatw_figu \033[m"
ls *.png
mv *_plot.png ${tempANL}
mv TotalObs*.png ${tempANL}
cp time_series_*.png ${obsSatw_page}/${PERIOD} 
mv *.png ${obsSatw_figu}/${PERIOD}
  

echo ""
echo " Gerando as figs animados usando convert"
echo " Processo computacionalmente custoso!! Aguarde..."
echo ""

# Copiando as figuras para montar o gif
cd ${tempANL}

#Espacial
  # Gerando o gif da ANL
  contfile=`ls uv_245_obs_*_plot.png   2> /dev/null | wc -l` 
  if [ $contfile -ne 0 ]; then
    ${convert} -dispose Background -delay 20 -loop 3 -density 288 uv_245_obs_*_plot.png ../${obsType}_uv_245_${PERIOD}.gif
  fi 
  contfile=`ls uv_246_obs_*_plot.png   2> /dev/null | wc -l` 
  if [ $contfile -ne 0 ]; then
    ${convert} -dispose Background -delay 20 -loop 3 -density 288 uv_246_obs_*_plot.png ../${obsType}_uv_246_${PERIOD}.gif
  fi
  contfile=`ls uv_247_obs_*_plot.png   2> /dev/null | wc -l` 
  if [ $contfile -ne 0 ]; then
    ${convert} -dispose Background -delay 20 -loop 3 -density 288 uv_247_obs_*_plot.png ../${obsType}_uv_247_${PERIOD}.gif
  fi
 

#Dados Aceitos/Rejeitados/Monitorados
  ${convert} -dispose Background -delay 20 -loop 3 -density 288 TotalObs_uv-245_*.png ../TotOb_Satw_uv_245_${PERIOD}.gif
  ${convert} -dispose Background -delay 20 -loop 3 -density 288 TotalObs_uv-246_*.png ../TotOb_Satw_uv_246_${PERIOD}.gif
  ${convert} -dispose Background -delay 20 -loop 3 -density 288 TotalObs_uv-247_*.png ../TotOb_Satw_uv_247_${PERIOD}.gif



cd ${obsSatw_figu}/${PERIOD}
#rm -rf ${tempANL}
cp ${obsSatw_figu}/${PERIOD}/*_${PERIOD}.gif ${obsSatw_page}/${PERIOD} 
  
echo -e "\033[36;1m >>> Arquivos Figs do periodo: no NIS e FTP\033[m"
echo -e "\033[33;1m http://ftp1.cptec.inpe.br/pesquisa/das/${USER}/${nome_sys}/evalu/${EXPE}/diag/obsSatw/ \033[m "
echo -e "\033[33;1m ${obsSatw_page}/${PERIOD}/ \033[m "
ls ${obsSatw_page}/${PERIOD}/

echo 
echo -e "\033[36;1m >>>> Binarios quando disponiveis estarao:\033[m " 
echo -e "\033[33;1m ${obsSatw_bina}/ \033[m "
echo -e "\033[36;1m >>>> Arquivos de tabelas em:\033[m "
echo -e "\033[33;1m ${obsSatw_tabe}/ \033[m "
echo -e "\033[36;1m >>>> Arquivos de logs em:\033[m "
echo -e "\033[33;1m ${obsSatw_logs}/ \033[m "
echo -e "\033[36;1m >>>> Figuras em:\033[m "
echo -e "\033[33;1m ${obsSatw_figu}/ \033[m "
echo ""

exit 0
