#!/usr/bin/env python
# coding: utf-8

# # Reconstructing Barents Sea Ice cover with linear response functions
# 

#import multiprocessing.popen_spawn_posix
#from dask_jobqueue import SGECluster
#from dask.distributed import Client
#from dask.distributed import progress
import xarray as xr
import numpy as np
#from eofs.xarray import Eof
import os
import glob
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
#import cartopy.crs as ccrs
#import dask.array as da
from scipy import signal,stats,linalg,interpolate
from scipy.optimize import curve_fit
#from scipy import stats
import crf_utils as cutils
from importlib import reload
import time
import sys
sys.path.append('NorEM_utils')
#import NorESM_utils as nutils
from NorESM_utils import NorESM_utils as nutils
#reload(crf_monthly)
import crf_calc_predictor_icevar as ccalc
from matplotlib.colors import from_levels_and_colors
sys.path.append('pyunicorn_timeseries')
from pyunicorn_timeseries.surrogates import Surrogates
# # Response functions in CMIP6 piControl and obs
machine = 'nird'
if machine in ['home']:
    dpath = '/home/aleksi/Documents/NORCE/Projects/APPLICATE/Barents_predictability/from_ppi/crf_savepath2/'
elif machine in ['nird']:
    dpath = '/trd-project3/NS9874K/from_ppi/crf_savepath2/'
elif machine in ['ppi']:
    dpath = '/lustre/storeC-ext/applicate/dm_upload/anummelin/crf_savepath2/'
cases     = ['piControl'] #,'control-1950','omip1']
freq      = 'mon'
variables = ['tos','siconc']
mlists    = {}
#
# Build, for each case, the list of models that have post-processed files for
# EVERY variable in `variables`. The main loop below opens these files with
# xr.open_dataset unconditionally, so only models with files can be kept.
#
# NOTE(review): the original version (a) popped entries from the list while
# iterating it, silently skipping the element after each removal, (b) was
# missing the '/' between case and model in the glob path (every other path in
# this script is built as dpath+case+'/'+model), so the glob never matched,
# and (c) removed models when files WERE found, which is inconsistent with the
# unconditional reads further down. All three are fixed here.
for case in cases:
    fpath = dpath + case + '/'
    mlist = None
    for var in variables:
        # models that have at least one processed file for this variable
        have = [model for model in os.listdir(fpath)
                if len(glob.glob(fpath + model + '/' + case + '_' + model + '_*-' + var + '*.nc')) > 0]
        # keep only models available for all variables seen so far
        mlist = set(have) if mlist is None else set(have) & set(mlist)
    #
    mlists[case] = sorted(mlist)
    print(mlists[case])


# ----------------------------------------------------------------------------
# Run configuration
# ----------------------------------------------------------------------------
recalculate         = False  # True forces recalculation, useful for testing or new production
save_predictor      = True   # False is useful for testing
calculate_surrogate = False  # takes 5-10 min per model depending on the length of the timeseries
normalized          = True   # if True the sea ice timeseries is normalized by its std -> response is STD/STD
# choose the predictor / predictand pair
predictor = 'tos'     # alternative: 'hfx'
icevar    = 'siconc'  # alternatives: 'tos', 'sivol'
#
# Surrogate computation runs in parallel; choose a backend:
#   'dask' uses the dask cluster (computational nodes)
#   'loky' runs on the login node - the work is light, so this is often faster
parallel_engine = 'loky'
percentiles     = [5,25,50,75,95]
filtered        = False   # True enables low-pass filtering of the timeseries
filter_length   = 10*12   # filter window in months (ten years)
n_surrogates    = 100
#
# result containers, keyed by '<model>_<case>_<ens>_<variable>'
G_all                = {}
Gstep_all            = {}
predictor_all        = {}
icevar_mean          = {}
icevar_anom          = {}
icevar_reconstructed = {}
icevar_combined      = {}
icevar_combined2     = {}
slopes_all           = {}
intercepts_all       = {}
rvals_all            = {}
pvals_all            = {}
lat_line_all         = {}
lon_line_all         = {}
#
# lag window used for the response-function estimation (in months)
minlag  = 0
maxlags = np.arange(1*12,11*12,12)
#
# latitude bands of the predictor sections
latnew = np.arange(60,74,2)
nX     = len(latnew)
#
# longitude end points of the sections
lon0 = list(np.array([-20,-18,-16,-14,-10, -8, -6, -4])+13)
lon1 = [  8, 10, 12, 14, 16, 18, 20, 22]

# observations are only available for the tos -> siconc pair
if predictor in ['tos'] and icevar in ['siconc']:
    cases.append('obs')
    mlists['obs'] = ['OI-SST']

# data path for the saved post-processed predictor and icevar
_savepaths = {
    'home': '/home/aleksi/Documents/NORCE/Projects/APPLICATE/Barents_predictability/from_ppi/crf_savepath2',
    'nird': '/trd-project3/NS9874K/from_ppi/crf_savepath2/',
    'ppi':  '/lustre/storeC-ext/applicate/dm_upload/anummelin/crf_savepath2/',
}
if machine in _savepaths:
    savepath = _savepaths[machine]


ens = 'ens'
#
# Main loop: for every case/model pair, load the pre-computed predictor and
# target (ice) timeseries, estimate the linear response function G, reconstruct
# the ice timeseries from the predictor, and score the reconstruction against
# the original anomalies. Results are stored in the *_all dictionaries under
# the key '<model>_<case>_<ens>_<variable>'.
#
for case in cases:
    for mod,model in enumerate(mlists[case]):
        print(case,model)
        #
        # Read the post-processed predictor from file (assumed to have been
        # calculated and saved beforehand).
        #
        savepathm = savepath+'/'+case+'/'+model
        outname   = '/'+case+'_'+model+'_'+'predictor-'+predictor+'_short_orig_add_13.nc'
        # open the predictor file once and reuse the handle (the original code
        # opened the same NetCDF file twice)
        predictor_ds   = xr.open_dataset(savepathm+outname)
        predictor_data = predictor_ds.predictor_data.values
        if icevar in ['tos']:
            print('predicting temperature!')
            # when predicting temperature, the target is the last (northernmost)
            # latitude band of the predictor itself
            BSice_mean = predictor_ds.predictor_data.isel(latnew=-1)
        else:
            BSice_mean = xr.open_dataset(savepathm+'/'+case+'_'+model+'_'+'icevar-'+icevar+'.nc')[icevar]
        #
        ntime = BSice_mean.shape[0]
        print('processed predictor and icevar already existed for: '+case+'  '+model)
        #
        # calculate response function and reconstructed timeseries
        print('Calculate response function')
        G, Gstep, BSice_reconstructed, BSice_combined, BSice_anom = cutils.crf_monthly2(predictor_data,BSice_mean,minlag=0,maxlags=maxlags,filtered=filtered,filter_length=filter_length,normalized=normalized)
        #
        # correlate the target anomalies with the combined/reconstructed series
        print('Calculate correlation')
        slopes,rvals,pvals,intercepts,BSice_combined2 = cutils.correlate_Y_Y_combined_Y_reconstructed(BSice_anom,BSice_combined,BSice_reconstructed,ensemble_length=min(int(ntime/12//2),50))
        #
        # stash the results; the predictor is keyed by the predictor name,
        # everything else by the ice-variable name
        key = model+'_'+case+'_'+ens+'_'+icevar
        slopes_all[key]           = slopes
        intercepts_all[key]       = intercepts
        rvals_all[key]            = rvals
        pvals_all[key]            = pvals
        G_all[key]                = G
        Gstep_all[key]            = Gstep
        icevar_reconstructed[key] = BSice_reconstructed
        icevar_combined[key]      = BSice_combined
        icevar_combined2[key]     = BSice_combined2
        icevar_anom[key]          = BSice_anom
        icevar_mean[key]          = BSice_mean.values
        predictor_all[model+'_'+case+'_'+ens+'_'+predictor] = predictor_data
        #

# ## Calculate smoothed G
# 

# In[36]:


# Smooth the response function G to average out its seasonal variability.
# For every model two smoothed variants are stored back into G_all:
#   '<key>_smooth' - seasonal-cycle-aware median (interpolated per start month)
#   '<key>_mean'   - plain median over the maxlags axis
if predictor in ['tos']:
    for case in cases:
        for mod,model in enumerate(mlists[case]):
            #ens = sorted(data[model+'_'+case+'_enslist'])[0]
            print(case,model,ens)
            # GNEW: there is quite some seasonal variability in the response function, we will average it out in the following
            # Gnew has shape G[:,0,:,:,2].shape + (12,): the trailing axis is the
            # calendar month offset used for the interpolation below.
            Gnew = np.nan*np.ones(G_all[model+'_'+case+'_'+ens+'_'+icevar][:,0,:,:,2].shape+tuple([12]))
            for mo in range(12):
                for j in range(Gnew.shape[0]):
                    for month in range(12):
                        # Median over the maxlags axis of every 12th lag starting at
                        # `month`, then interpolate from annual back to monthly
                        # resolution (out-of-range lags are left as NaN).
                        # NOTE(review): index roles assumed to be j=section,
                        # mo=target month, month=lag phase - confirm against
                        # cutils.crf_monthly2's output layout.
                        f1 = interpolate.interp1d(np.arange(month/12,max(maxlags)/12+month/12,1), np.nanmedian(G_all[model+'_'+case+'_'+ens+'_'+icevar][j,:,month::12,mo,2],0),bounds_error=False)
                        Gnew[j,:,mo,month] = f1(np.arange(0,max(maxlags)/12,1/12))
            #
            # median over the 12 lag-phase estimates, then broadcast back over the
            # maxlags axis so the shape matches the unsmoothed G
            Gnew = np.nanmedian(Gnew,-1)[:,np.newaxis,:,:]
            Gnew = np.tile(Gnew,(1,len(maxlags),1,1))
            # zero-fill the NaNs left by the bounded interpolation
            Gnew[np.where(np.isnan(Gnew))]=0
            G_all[model+'_'+case+'_'+ens+'_'+icevar+'_smooth'] = Gnew
            #
            # simpler variant: median over the different window-length estimates
            Gnew2 =np.nanmedian(G_all[model+'_'+case+'_'+ens+'_'+icevar][:,:,:,:,2],1)[:,np.newaxis,:,:]
            Gnew2 = np.tile(Gnew2,(1,len(maxlags),1,1))
            G_all[model+'_'+case+'_'+ens+'_'+icevar+'_mean'] = Gnew2


# ### Reconstruct the timeseries with the smoothed G

#something is wrong here - combine_reconstructed does not work properly with the data
return_slopes=False
# Redo the reconstruction, this time using the smoothed response functions
# computed above ('_smooth' and '_mean' flavours of G).
if predictor in ['tos']:
    for case in cases:
        for mod,model in enumerate(mlists[case]):
            #ens = sorted(data[model+'_'+case+'_enslist'])[0]
            print(case,model,ens)
            key_ice  = model+'_'+case+'_'+ens+'_'+icevar
            key_pred = model+'_'+case+'_'+ens+'_'+predictor
            ntime = predictor_all[key_pred].shape[0]
            anom  = icevar_anom[key_ice]
            # GNEW: seasonal variability of the response function averaged out
            # GNEW2: all window-length estimates averaged - perhaps the most reasonable option
            Y_reconstructed  = cutils.Y_from_X_and_G(predictor_all[key_pred],G_all[key_ice+'_smooth'],minlag=0,maxlags=maxlags)
            Y_reconstructed2 = cutils.Y_from_X_and_G(predictor_all[key_pred],G_all[key_ice+'_mean'],minlag=0,maxlags=maxlags)
            Y_combined       = cutils.combine_reconstructed(anom,Y_reconstructed)
            Y_combined2      = cutils.combine_reconstructed(anom,Y_reconstructed2)
            #
            slopes,rvals,pvals,intercepts,Y_combined3 = cutils.correlate_Y_Y_combined_Y_reconstructed(anom,Y_combined,Y_reconstructed,ensemble_length=min(int(ntime/12//2),50),return_slopes=return_slopes)
            #
            # mask out correlations that are not significant at the 5% level
            rvals[np.where(pvals>0.05)] = np.nan
            rvals_all[key_ice+'_smooth']  = rvals
            slopes_all[key_ice+'_smooth'] = slopes
            # NOTE(review): '_smooths' (trailing s) reproduced from the original;
            # confirm downstream consumers expect this exact key
            icevar_reconstructed[key_ice+'_smooths'] = Y_reconstructed
            icevar_combined[key_ice+'_smooth']       = Y_combined
            icevar_combined2[key_ice+'_smooth']      = Y_combined3


# ## Reconstruction vs Prediction
# 
# There are two potential uses of the response functions. A good response function can be used to reconstruct the sea ice back in time, given observations of sea surface temperature. Another use case is to use the response function to make a forecast for the future ice cover. 
# 
# Here we assess the impact of lead time on the forecast error. This is done by reconstructing the sea ice timeseries and then comparing the reconstruction to one that is made from X month long forecasts.
# 
# Calculate the error also for persistence (i.e the sea ice being persistent) and lagged prediction.

# In[97]:


def linear_fit(x,a):
    """Line through the origin, y = a*x; used as a curve_fit model."""
    return x * a

def ideal_G(x,a,b,c):
    """Idealized response-function shape: a*log(x) - b*x - c."""
    log_term = a * np.log(x)
    return log_term - b * x - c

def ideal_G2(x,a,b):
    """Two-parameter idealized response: a*log(x) - b."""
    return np.log(x) * a - b

def ideal_G3(x,a):
    """One-parameter idealized response: a*log(x)."""
    return np.log(x) * a


# The integral of ideal_G is 
# 
# $$ \int G(\tau) = a\tau[log(\tau)-1]-0.5b\tau^2-c\tau = [alog(\tau)-0.5b\tau-(c+a)]\tau $$
# 
# The integral of ideal_G2 is 
# 
# $$ \int [G_2(\tau)] d\tau = \int [a log(\tau) - b ]d\tau = a \tau log(\tau)-\tau(a+b) $$

# ## MONTHLY RESOLUTION

# In[ ]:


#LET'S TRY TO DO MONTHLY RESOLUTION HERE
# Assess forecast error as a function of lead time: reconstruct the sea ice
# and compare against forecasts built from extrapolated predictors, as well as
# against persistence and lagged-regression baselines.
mo=2        # target calendar month (0-based; presumably March - TODO confirm)
mode='valid'
GPminlag = 12*3   # minimum history (months) for the Gaussian-process fit of the predictor
target_all    = {}
lagpred_all   = {}
lagpred_r_all = {}
lagpred_s_all = {}
err_all       = {}
for case in cases[:1]:
    # NOTE(review): the [27:] slice skips the first 27 models - looks like a
    # restart/debug leftover; confirm before production runs.
    for mod,model in enumerate(mlists[case][27:]):
        print(case,model)
        #ens    = sorted(data[model+'_'+case+'_enslist'])[0]
        ntime0 = predictor_all[model+'_'+case+'_'+ens+'_'+predictor].shape[0]
        maxlag = max(maxlags)
        if mode in ['full']:
            ntimes = np.arange(maxlag//2,ntime0-mo) #make the prediction every year - maybe increase the frequency?
        elif mode in ['valid']:
            ntimes = np.arange(maxlag,ntime0-mo)
        #
        # error arrays, all (lead time, section):
        #   err*        forecast error with GPR-extrapolated (no suffix),
        #               zero-padded (0) and persistence-padded (1) predictors
        #   *_ideal     same but using the fitted analytic G (ideal_G)
        #   err_pers    persistence of the ice itself
        #   err_lag0    lagged linear regression on the predictor
        err       = np.ones((maxlag,nX))*np.nan
        err0       = np.ones((maxlag,nX))*np.nan
        err1       = np.ones((maxlag,nX))*np.nan
        err_ideal = np.ones((maxlag,nX))*np.nan
        err0_ideal = np.ones((maxlag,nX))*np.nan
        err1_ideal = np.ones((maxlag,nX))*np.nan
        err_pers  = np.ones(maxlag)*np.nan
        err_lag0  = np.ones((maxlag,nX))*np.nan
        #lagpred0   = np.ones((ntime0//12,maxlag//24,nX))*np.nan
        #lagpred0   = np.ones((ntime0//12+maxlag//12,len(ntimes),nX))*np.nan
        # prediction arrays (year, lead time, section) plus their correlations
        # (*_r) and amplitude-correction slopes (*_s)
        lagpred    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred0    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred1    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred_r  = np.ones((maxlag,nX))*np.nan
        lagpred0_r  = np.ones((maxlag,nX))*np.nan
        lagpred1_r  = np.ones((maxlag,nX))*np.nan
        lagpred_s  = np.ones((maxlag,nX))*np.nan
        lagpred0_s  = np.ones((maxlag,nX))*np.nan
        lagpred1_s  = np.ones((maxlag,nX))*np.nan
        #
        lagpred_ideal    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred_r_ideal  = np.ones((maxlag,nX))*np.nan
        lagpred_s_ideal  = np.ones((maxlag,nX))*np.nan
        #
        lagpred0_ideal    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred0_r_ideal  = np.ones((maxlag,nX))*np.nan
        #
        lagpred1_ideal    = np.ones((ntime0//12+maxlag//12,maxlag,nX))*np.nan
        lagpred1_r_ideal  = np.ones((maxlag,nX))*np.nan
        #
        lagpred_rlag0  = np.ones((maxlag,nX))*np.nan
        lagpred_rpers  = np.ones((maxlag))*np.nan
        #
        # use the normalized sea ice as a target
        #target_detrended = signal.detrend(icevar_anom[model+'_'+case+'_'+ens+'_'+icevar][mo::12])
        #target           = target_detrended/np.std(target_detrended)
        # detrend each calendar month separately (reshape to (years, 12)), then
        # normalize by the per-month std and flatten back to a monthly series
        target_detrended = signal.detrend(np.reshape(icevar_anom[model+'_'+case+'_'+ens+'_'+icevar],(-1,12)),axis=0)
        target           = (target_detrended/np.nanstd(target_detrended,axis=0)).flatten()
        # PERSISTENCE I.E. LAGGED AUTOCORRELATION OF ICE (START WITH 1 MONTH LAG)
        # .min() of the 2x2 corrcoef matrix picks the off-diagonal correlation
        for j in range(1,maxlag):
            err_pers[j-1]      = np.nanmedian(abs(target[maxlag+mo::12]-target[maxlag+mo-j:-j][::12]))
            lagpred_rpers[j-1] = np.corrcoef(target[maxlag+mo::12],target[maxlag+mo-j:-j][::12]).min()
        # loop over sections
        for l in range(nX):
            print(l)
            # PICK-UP THE G FOR PREDICTION ICE COVER ON MONTH MO
            G0           = np.nanmedian(G_all[model+'_'+case+'_'+ens+'_'+icevar+'_smooth'][l,:,:,mo],0)
            poptG, pcovG = curve_fit(ideal_G, np.arange(1,maxlag-12+1), G0[:-12]) #ignore the last 12 months when doing the fit
            G0_ideal     = ideal_G(np.arange(1,maxlag+1),*poptG)
            # use the normalized predictor
            pred   = predictor_all[model+'_'+case+'_'+ens+'_'+predictor][:,l]
            pred   = signal.detrend(np.reshape(pred,(-1,12)),axis=0)
            pred   = (pred/np.nanstd(pred,axis=0)).flatten()
            #
            #THIS IS A VERY SIMPLE APPROACH, LOOP OVER EACH YEAR AND THE LAGS
            #
            #gp = cutils.GPR_simple_fit(0,ntrain,np.arange(ntrain).reshape(-1,1),pred[:ntrain].reshape(-1,1))
            #
            # For each forecast year: fit a GP to the preceding maxlag months of
            # the predictor, then for each lead time build a predictor series
            # whose last `lag` months are (a) GP-extrapolated, (b) zeros, or
            # (c) the last observed value held constant, and convolve with G.
            # NOTE(review): the convolution windows mix `y` (0-based) and
            # `year` (starts at maxlag//12) so the window length stays constant
            # at maxlag - presumably intentional; verify.
            for y,year in enumerate(range(maxlag//12,ntime0//12)):
                gp = cutils.GPR_simple_fit(GPminlag,maxlag,np.arange(maxlag).reshape(-1,1),pred[year*12-maxlag:year*12].reshape(-1,1))
                #gp2 = cutils.GPR_simple_fit(GPminlag,maxlag,np.arange(maxlag).reshape(-1,1),target[year*12-maxlag:year*12].reshape(-1,1))
                for lag in range(maxlag):
                    if lag>0:
                        y_pred = cutils.GPR_simple_predict(gp,np.arange(maxlag+lag).reshape(-1,1))[0]
                        #lagpred_GPR[year,lag,l] = cutils.GPR_simple_predict(gp2,np.arange(maxlag+lag).reshape(-1,1))[0]
                        dum  = np.concatenate([pred[mo+y*12+1:mo+year*12-lag+1],y_pred[-lag:,0]])
                        dum0 = np.concatenate([pred[mo+y*12+1:mo+year*12-lag+1],np.zeros(lag)])
                        dum1 = np.concatenate([pred[mo+y*12+1:mo+year*12-lag+1],pred[mo+year*12-lag]*np.ones(lag)])
                    else:
                        dum = dum0 = dum1 = pred[mo+y*12+1:mo+year*12-lag+1]
                    #
                    lagpred[year,lag,l] = np.convolve(dum,G0,mode='valid')
                    lagpred_ideal[year,lag,l] = np.convolve(dum,G0_ideal,mode='valid')
                    #
                    lagpred0[year,lag,l] = np.convolve(dum0,G0,mode='valid')
                    lagpred0_ideal[year,lag,l] = np.convolve(dum0,G0_ideal,mode='valid')
                    #
                    lagpred1[year,lag,l] = np.convolve(dum1,G0,mode='valid')
                    lagpred1_ideal[year,lag,l] = np.convolve(dum1,G0_ideal,mode='valid')
                    #
                    #lagpred[year,lag,l] = np.convolve(np.concatenate([pred[mo+y*12+1:mo+year*12-lag+1],pred[mo+year*12-lag]*np.zeros(lag)]),G0,mode='valid')
                    #lagpred_ideal[year,lag,l] = np.convolve(np.concatenate([pred[mo+y*12+1:mo+year*12-lag+1],pred[mo+year*12-lag]*np.zeros(lag)]),G0_ideal,mode='valid')
            #
            # baseline: lagged linear regression of the target on the predictor
            for j in range(maxlag):
                if j==0:
                    slope2, intercept2, r_value2, p_value2, std_err2 = stats.linregress(pred[mo+maxlag-j::12], target[mo::12][maxlag//12:])
                    err_lag0[j,l]      = np.nanmedian(abs(target[mo::12][maxlag//12:]-(slope2*pred[mo+maxlag-j::12]+intercept2)))
                else:
                    slope2, intercept2, r_value2, p_value2, std_err2 = stats.linregress(pred[mo+maxlag-j:-j:12], target[mo::12][maxlag//12:])
                    err_lag0[j,l]      = np.nanmedian(abs(target[mo::12][maxlag//12:]-(slope2*pred[mo+maxlag-j:-j:12]+intercept2)))
                #
                lagpred_rlag0[j,l] = r_value2
            #
            # score the forecasts: fit an amplitude-correction slope (forced
            # non-negative), then store correlation, slope and median |error|
            for j,jj in enumerate(range(maxlag-12)):
                # response function lacks some amplitude so we will allow for correcting that
                popt, pcov             = curve_fit(linear_fit, lagpred[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3]) #ignore the last 12 months when doing the fit
                popt_ideal, pcov_ideal = curve_fit(linear_fit, lagpred_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3])
                #
                popt0, pcov             = curve_fit(linear_fit, lagpred0[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3]) #ignore the last 12 months when doing the fit
                popt0_ideal, pcov_ideal = curve_fit(linear_fit, lagpred0_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3])
                popt1, pcov             = curve_fit(linear_fit, lagpred1[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3]) #ignore the last 12 months when doing the fit
                popt1_ideal, pcov_ideal = curve_fit(linear_fit, lagpred1_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:],bounds=[0,1E3])
                # calculate the correlation and error
                lagpred_r[j,l]   = np.corrcoef(lagpred[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                lagpred0_r[j,l]  = np.corrcoef(lagpred0[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                lagpred1_r[j,l]  = np.corrcoef(lagpred1[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                #
                lagpred_s[j,l]  = popt[0]
                lagpred0_s[j,l] = popt0[0]
                lagpred1_s[j,l] = popt1[0]
                err[j,l]        = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt[0]*lagpred[(maxlag+j)//12:len(target[mo::12]),j,l]))
                err0[j,l]       = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt0[0]*lagpred0[(maxlag+j)//12:len(target[mo::12]),j,l]))
                err1[j,l]       = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt1[0]*lagpred1[(maxlag+j)//12:len(target[mo::12]),j,l]))
                #
                lagpred_r_ideal[j,l]   = np.corrcoef(lagpred_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                lagpred0_r_ideal[j,l]  = np.corrcoef(lagpred0_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                lagpred1_r_ideal[j,l]  = np.corrcoef(lagpred1_ideal[(maxlag+j)//12:len(target[mo::12]),j,l],target[mo::12][(maxlag+j)//12:]).min()
                #
                lagpred_s_ideal[j,l]  = popt_ideal[0]
                err_ideal[j,l]        = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt_ideal[0]*lagpred_ideal[(maxlag+j)//12:len(target[mo::12]),j,l]))
                err0_ideal[j,l]       = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt0_ideal[0]*lagpred0_ideal[(maxlag+j)//12:len(target[mo::12]),j,l]))
                err1_ideal[j,l]       = np.nanmedian(abs(target[mo::12][(maxlag+j)//12:]-popt1_ideal[0]*lagpred1_ideal[(maxlag+j)//12:len(target[mo::12]),j,l]))
                #lagpred_rlag0[j,l]    = r_value2
        #
        # stash per-model results; '_0'/'_1' = zero-/persistence-padded
        # predictors, '_ideal' = analytic G fit
        err_all[case+'_'+model+'_'+ens]                = err
        err_all[case+'_'+model+'_'+ens+'_0']           = err0
        err_all[case+'_'+model+'_'+ens+'_1']           = err1
        err_all[case+'_'+model+'_'+ens+'_ideal']       = err_ideal
        err_all[case+'_'+model+'_'+ens+'_0_ideal']     = err0_ideal
        err_all[case+'_'+model+'_'+ens+'_1_ideal']     = err1_ideal
        err_all[case+'_'+model+'_'+ens+'_persistence'] = err_pers
        err_all[case+'_'+model+'_'+ens+'_lag0']        = err_lag0
        lagpred_all[case+'_'+model+'_'+ens]            = lagpred
        lagpred_all[case+'_'+model+'_'+ens+'_ideal']   = lagpred_ideal
        lagpred_r_all[case+'_'+model+'_'+ens]          = lagpred_r
        lagpred_r_all[case+'_'+model+'_'+ens+'_0']     = lagpred0_r
        lagpred_r_all[case+'_'+model+'_'+ens+'_1']     = lagpred1_r
        lagpred_r_all[case+'_'+model+'_'+ens+'_ideal'] = lagpred_r_ideal
        lagpred_r_all[case+'_'+model+'_'+ens+'_0_ideal']     = lagpred0_r_ideal
        lagpred_r_all[case+'_'+model+'_'+ens+'_1_ideal']     = lagpred1_r_ideal
        lagpred_r_all[case+'_'+model+'_'+ens+'_persistence'] = lagpred_rpers
        lagpred_r_all[case+'_'+model+'_'+ens+'_lag0']  = lagpred_rlag0
        lagpred_s_all[case+'_'+model+'_'+ens]          = lagpred_s
        lagpred_s_all[case+'_'+model+'_'+ens+'_ideal'] = lagpred_s_ideal
        target_all[case+'_'+model+'_'+ens]             = target[mo::12]



