#%% Python script for "Why a slowdown of the AMOC does not enhance global surface warming"
# by Levke Caesar, 25/02/2020
#%% Import modules that are used
from __future__ import division 
import numpy as np # mathematical functions
from scipy import stats # load the scipy package
from scipy.interpolate import griddata # load the function to regrid data
import statsmodels.api as sm # load the statsmodel package needed for the lowess smoother
import matplotlib as mpl # for plotting
from netCDF4 import Dataset # to load netcdf files
import matplotlib.pyplot as plt # use Matplotlib to plot the data
#import pyunicorn.timeseries # to create the surrogates and test for significance of the correlation coefficients
# Details about how to install the pyunicorn package can be found here http://www.pik-potsdam.de/~donges/pyunicorn/installing.html
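# A minimal numpy-only sketch of the surrogate idea (an assumption, not the
# pyunicorn API): phase-randomised Fourier surrogates keep the power spectrum
# of a series but scramble its phases, so correlating many surrogates with a
# second series yields a null distribution for the observed correlation.
def fourier_surrogate(x, rng=np.random):
    fx = np.fft.rfft(x - np.mean(x))
    phases = rng.uniform(0.0, 2.0*np.pi, len(fx))
    phases[0] = 0.0 # keep the zero-frequency (mean) component unchanged
    return np.fft.irfft(np.abs(fx)*np.exp(1j*phases), n=len(x)) + np.mean(x)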
mpl.rcParams['pdf.fonttype'] = 42 # to make the writing in pdf and ps files adjustable afterwards
mpl.rcParams['ps.fonttype'] = 42
from scipy import signal
import os
import xlrd # to read the Excel files (note: .xlsx support requires xlrd < 2.0)

#%% Define function that removes linear trend from data
def remove_linear_trend(time, data):

    from scipy import stats

    slope, intercept, r_value, p_value, slope_std_error = stats.linregress(time, data)
    data_linear = intercept + slope*time
    data_lin = data - data_linear
    
    return data_lin
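
# Illustrative self-check (not part of the analysis): detrending an exactly
# linear series should return values that are all close to zero.
_t_check = np.arange(10.0)
assert np.allclose(remove_linear_trend(_t_check, 2.0*_t_check + 1.0), 0.0)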
 
# load lowess smoother from the statsmodel package, more information here: https://www.statsmodels.org/dev/generated/statsmodels.nonparametric.smoothers_lowess.lowess.html
lowess = sm.nonparametric.lowess   
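# Note: lowess(endog, exog, frac, it) returns an (N, 2) array with the sorted
# exog values in column 0 and the smoothed endog values in column 1, hence the
# [:,1] after every call below.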

#%% go to the directory where the data is stored
os.chdir('C:\\Users\\lcaesar\\Documents\\Promotion\\ERL Paper\\Submission\\Website\\')
#%% load HadCRUT data downloaded from http://www.metoffice.gov.uk/hadobs/hadcrut4/data/current/download.html on 11/26/2018
# used is the median anomaly of the 100 ensemble members
year = np.arange(1947,2016,1) # time range 1947-2015 (maximum time range for which all AMOC indices are available)
T = np.loadtxt('Data\\HadCRUT4_Global_Mean.txt', skiprows=10)[:,0:2]
T = griddata(T[:,0],T[:,1]-T[0,1],year) # subtract the 1850 value from the second column (temperature anomaly) so that all anomalies are given with respect to 1850 (see paper)
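# griddata with 1-D sample points performs piecewise-linear interpolation onto
# the target axis; illustrative check with a linear ramp:
assert np.allclose(griddata(np.array([0.,1.,2.]), np.array([0.,2.,4.]), np.array([0.5,1.5])), [1.,3.])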

#%% load AMOC proxies as used by Chen & Tung (2018), Fig.3 downloaded from https://www.nature.com/articles/s41586-018-0320-y#Sec17
wb = xlrd.open_workbook('AMOC_indices.xlsx') 
sheet = wb.sheet_by_index(0) #1st sheet: AMOC indices

amoc_year = np.asarray(sheet.col_values(0,6))
amoc_ishii = sheet.col_values(1,6)
amoc_scripps = sheet.col_values(2,714)
amoc_ishii_scripps = np.asarray(amoc_ishii[0:708] + amoc_scripps[0:132]) # combine them as Chen and Tung did, since both datasets have periods of missing data; these are Python lists, so + concatenates them (708 + 132 = 840 values)
amoc_en4 = np.asarray(sheet.col_values(3,6))

# regrid data to the annual time axis 
amoc_ishii_scripps = griddata(amoc_year[0:840],amoc_ishii_scripps,year)
amoc_en4 = griddata(amoc_year,amoc_en4,year)

# smooth data with a 10 year lowess smoother (the smoothing parameter has to be given as a fraction of the length of the time series)
amoc_ishii_scripps_lowess = lowess(amoc_ishii_scripps, year, 10/len(year), it=5)[:,1]     
amoc_en4_lowess = lowess(amoc_en4, year, 10/len(year), it=5)[:,1]    

# load the MDV (multidecadal variability) of the global temperatures as used by Chen and Tung
sheet2 = wb.sheet_by_index(1)
mdv_year = np.asarray(sheet2.col_values(0,8))
mdv_t = np.asarray(sheet2.col_values(4,8))
mdv_t = mdv_t[97:163]     # select the 66 years 1947-2012
mdv_year = mdv_year[97:163]

#%% Import our AMOC index
sg_index_with_time = np.loadtxt('sg_index_with_time.txt')
year_sg = sg_index_with_time[:,1]
sg_i = sg_index_with_time[:,0]-np.mean(sg_index_with_time[:,0])
sg_i = griddata(year_sg ,sg_i,year)
sg_i_lowess = lowess(sg_i, year, 10/len(year), it=5)[:,1] 

#%% Import historical forcing
historical_forcing = np.loadtxt('historical_forcing_sum.txt', skiprows=5)
delta_Q_rad_year = historical_forcing[:,0]
delta_Q_rad = historical_forcing[:,1]
# the forcing time series ends in 2012, thus regrid to the time period 1947-2012
delta_Q_rad = griddata(delta_Q_rad_year,delta_Q_rad,year[0:66])

#%% Detrend the data

results = np.zeros((10,6))
slope_t = np.zeros(6)

# the different values for the feedback parameter lambda in W/(m^2 K)
lambda_ = (0, 1.3, 1.5, 1.9, 2.3, 3.0)
for t in (0,1,2,3,4,5):
    # t = 0: not taking radiative forcing into account, i.e. T' = T
    if t == 0:
        slope_t[t] = 0
    else:
        slope_t[t] = 1/lambda_[t]
# T' = T - 1/lambda*delta_Q_rad is calculated in the correlation loop below
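
# Worked example of the radiative correction T' = T - delta_Q_rad/lambda:
# with lambda = 2.3 W/(m^2 K), a forcing of 1.15 W/m^2 corresponds to an
# equilibrium warming of 1.15/2.3 = 0.5 K, which is subtracted from T.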

#%% the correlation analysis is done with two options: either removing or not removing the linear trend of the data before calculating the correlation
# remove linear trend  
#amoc_ishii_scripps_det = remove_linear_trend(year, amoc_ishii_scripps)
#amoc_en4_det = remove_linear_trend(year, amoc_en4)
#sg_i_det = remove_linear_trend(year, sg_i)

# don't remove linear trend
amoc_ishii_scripps_det = amoc_ishii_scripps
amoc_en4_det = amoc_en4
sg_i_det = sg_i

## determine the slope of the linear trend for the Caesar et al. (2018) AMOC index and the GMST; requested by one of the reviewers, not used otherwise
#slope_sg_i_det, intercept_sg_i_det, r_value_sg_i_det, p_value_sg_i_det, slope_std_error_sg_i_det = stats.linregress(year, sg_i)
#slope_T, intercept_T, r_value_T, p_value_T, slope_std_error_T = stats.linregress(year[0:66], T[0:66])

# Smooth data with the LOWESS filter
amoc_ishii_scripps_det_l = lowess(amoc_ishii_scripps_det, year, 10/len(year), it=5)[:,1]    
amoc_en4_det_l = lowess(amoc_en4_det, year, 10/len(year), it=5)[:,1]   
sg_i_det_l = lowess(sg_i_det, year, 10/len(year), it=5)[:,1]    
#mdv_t_l = lowess(mdv_t, mdv_year, 10/len(mdv_year), it=5)[:,1]   # not necessary as the MDV series is already smoothed (mdv_t was already restricted to 1947-2012 above)

# calculate the correlation coefficients
correlation = np.zeros((5,6))
p_value = np.zeros((5,6))

for t in (0,1,2,3,4,5):
    T_dash = T[0:66] - (slope_t[t])*delta_Q_rad # remove radiative forcing, slope_t=0 -> no radiative forcing is removed
    # remove linear trend AFTER radiative forcing is removed
    #T_dash_det = remove_linear_trend(year[0:66], T_dash)   
    # don't remove linear trend
    T_dash_det = T_dash
    # Smooth data 
    T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]      

    # calculate correlation
    #full time span
    original_data = np.zeros((4,len(year[0:66])))
    # create array with all time series
    for i in range (0,66): 
        original_data[0,i] = ((T_dash_det_l[i]) - np.mean(T_dash_det_l))/np.std(T_dash_det_l)
        original_data[1,i] = ((amoc_ishii_scripps_det_l[i]) - np.mean(amoc_ishii_scripps_det_l)) / np.std(amoc_ishii_scripps_det_l)
        original_data[2,i] = ((amoc_en4_det_l[i]) - np.mean(amoc_en4_det_l)) / np.std(amoc_en4_det_l)
        original_data[3,i] = ((sg_i_det_l[i]) - np.mean(sg_i_det_l))/ np.std(sg_i_det_l)
    
##    years 1975-1998
#    original_data = np.zeros((4,len(year[28:52])))
#    # create array with all time series
#    for i in range (28,52): 
#        original_data[0,i-28] = (T_dash_det_l[i])
#        original_data[1,i-28] = (amoc_ishii_scripps_det_l[i])
#        original_data[2,i-28] = (amoc_en4_det_l[i])
#        original_data[3,i-28] = (sg_i_det_l[i])
#        
#    # years 1975-end (2012)
#    original_data = np.zeros((4,len(year[28:66])))
#    # create array with all time series
#    for i in range(28,66): 
#        original_data[0,i-28] = (T_dash_det_l[i])
#        original_data[1,i-28] = (amoc_ishii_scripps_det_l[i])
#        original_data[2,i-28] = (amoc_en4_det_l[i])
#        original_data[3,i-28] = (sg_i_det_l[i])
             
    # calculate the correlation for the original data 
    correlation[0,t] = lambda_[t]
    p_value[0,t] = lambda_[t]

    for i in range(0,4):
        correlation[i+1,t] = stats.pearsonr(original_data[0,:], original_data[i,:])[0]
        p_value[i+1,t] = stats.pearsonr(original_data[0,:], original_data[i,:])[1]
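
# Illustrative summary of the table just filled: each column corresponds to one
# value of lambda (stored in row 0); rows 1-4 give r of T' with itself,
# A_ISHII+Scripps, A_EN4 and the HadISST-based index (row 1 is 1.0 by construction).
print(np.round(correlation, 2))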
        
#%% Correlation with MDV from Chen and Tung:

#full time span
original_data = np.zeros((4,len(year[0:66])))
correlation = np.zeros((5,1)) # note: this overwrites the correlation table from the previous cell
p_value = np.zeros((5,1))

# create array with all time series
for i in range (0,66): 
    original_data[0,i] = ((mdv_t[i]) - np.mean(mdv_t))/np.std(mdv_t)
    original_data[1,i] = ((amoc_ishii_scripps_det_l[i]) - np.mean(amoc_ishii_scripps_det_l)) / np.std(amoc_ishii_scripps_det_l)
    original_data[2,i] = ((amoc_en4_det_l[i]) - np.mean(amoc_en4_det_l)) / np.std(amoc_en4_det_l)
    original_data[3,i] = ((sg_i_det_l[i]) - np.mean(sg_i_det_l))/ np.std(sg_i_det_l)

#    # years 1975-1998
#    original_data = np.zeros((4,len(year[28:52])))
#    # create array with all time series
#for i in range (28,52): 
#    original_data[0,i-28] = (mdv_t[i])
#    original_data[1,i-28] = (amoc_ishii_scripps_det_l[i])
#    original_data[2,i-28] = (amoc_en4_det_l[i])
#    original_data[3,i-28] = (sg_i_det_l[i])
    
#    # years 1975-end (2012)
#    original_data = np.zeros((4,len(year[28:66])))
#    # create array with all time series
#for i in range(28,66): 
#    original_data[0,i-28] = (mdv_t[i])
#    original_data[1,i-28] = (amoc_ishii_scripps_det_l[i])
#    original_data[2,i-28] = (amoc_en4_det_l[i])
#    original_data[3,i-28] = (sg_i_det_l[i])
         
for i in range(0,4):
    correlation[i+1] = stats.pearsonr(original_data[0,:], original_data[i,:])[0]
    p_value[i+1] = stats.pearsonr(original_data[0,:], original_data[i,:])[1]

#%% load RAPID data (annual means)

data = 'Data\\moc_transports_2018_yearmean.nc'
fh = Dataset(data, mode='r')
time_moc = fh.variables['time'][:] 
moc = fh.variables['moc_mar_hc10'][:] # overturning transport in Sv
fh.close()
#%%
year_moc = [2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018]
slope, intercept, r_value, p_value, slope_std_error = stats.linregress(time_moc, moc)
slope2, intercept2, r_value, p_value, slope_std_error = stats.linregress(time_moc[1:12], moc[1:12])
annual_slope = slope2 * 365 # time_moc is given in days, so convert the trend to Sv per year

#%% Create figure 1 for the Reply to the Comment
mpl.rc('xtick', labelsize=8) 
mpl.rc('ytick', labelsize=8) 
mpl.rc("figure", facecolor="white")

fig, axes = plt.subplots(figsize=(19/2.54,8/2.54)) # 1 inch = 2.54 cm
ax1 = plt.subplot()
#ax1.axvspan(1975, 1998, color='lightgrey', alpha=0.3)
#plt.axvline(x=1975, color='lightgrey', linestyle='-', linewidth=1, alpha=0.3)
#plt.axvline(x=1998, color='lightgrey', linestyle='-', linewidth=1, alpha=0.3)
ax1.fill_between([1975, 1998],-1,1, alpha=0.3, facecolor='lightgrey')
plt.plot(mdv_year,((mdv_t)),color='magenta',linewidth=1.5,label=(r"Non-linear detrended HadCRUT4.6 GMST $\Delta$T"))
#plt.ylim(-0.55,0.38)
plt.ylim(-0.45,0.3)
plt.ylabel(r"GMST anomaly $\Delta$T (K)", fontsize=8, color='black', rotation=90, labelpad=0)

ax2 = ax1.twinx()
plt.plot(year,(amoc_en4_det),color='blue',linewidth=0.5, alpha=0.3)
#plt.plot(year,(amoc_en4_det_l),color='blue',linewidth=1,label=(r'EN4; r=' + str(results[2*2,4].round(2)) + ' (' + str(np.min(results[2*2,:].round(2))) + ', ' + str(np.max(results[2*2,:].round(2))) +')' ))
plt.plot(year,(amoc_en4_det_l),color='blue',linewidth=1.5,label=(r'A$_{EN4}$; r=0.40' ))
plt.plot(year,(amoc_ishii_scripps_det),color='skyblue',linewidth=0.5, alpha=0.3)
#plt.plot(year,(amoc_ishii_scripps_det_l),color='cyan',linewidth=1,label=(r'ISHII+Scripps; r=' + str(results[2*1,4].round(2))  + ' (' + str(np.min(results[2*1,:].round(2))) + ', ' + str(np.max(results[2*1,:].round(2))) +')' ))
plt.plot(year,(amoc_ishii_scripps_det_l),color='skyblue',linestyle='--',linewidth=1.5,label=(r'A$_{ISHII+Scripps}$; r=0.61' ))
# conversion factor between AMOC index in K and AMOC strength in Sv is 3.8 Sv/K, see Caesar et al. (2018)
plt.plot(year,sg_i_det*3.8,color='navy',linewidth=0.5, alpha=0.3)
#plt.plot(year,sg_i_det_l*3.8,color='navy',linewidth=1,label=(r'AMOC Index; r=' + str(results[2*3,4].round(2)) + ' (' + str(np.min(results[2*3,:].round(2))) + ', ' + str(np.max(results[2*3,:].round(2))) +')' ))
plt.plot(year,(sg_i_det_l*3.8),color='navy',linestyle='-.',linewidth=1.5,label=(r'A$_{HadISST}$; r=0.41' ))
plt.ylim(-4.3,3)
plt.xlim(1946,2016)
plt.axhline(y=0, color='black', linestyle='-', linewidth=1, alpha=0.3)
plt.ylabel('AMOC (Sv)', fontsize=8, color='black', rotation=270, labelpad=8)
ax2.tick_params(labelbottom=True)

lines1, labels1 = ax1.get_legend_handles_labels()
lines2, labels2 = ax2.get_legend_handles_labels()
ax1.legend(lines1 + lines2, labels1 + labels2, loc='lower left', fontsize=5, frameon=False)
plt.show()
fig.savefig('Figure_1_Reply.png', format='png', dpi=1200 ,bbox_inches='tight')


#%% Create figure 1
mpl.rc('xtick', labelsize=8) 
mpl.rc('ytick', labelsize=8) 
mpl.rc("figure", facecolor="white")

fig, axes = plt.subplots(figsize=(19/2.54,8/2.54)) # 1 inch = 2.54 cm
ax1 = plt.subplot()
#ax1.axvspan(1975, 1998, color='lightgrey', alpha=0.3)
#plt.axvline(x=1975, color='lightgrey', linestyle='-', linewidth=1, alpha=0.3)
#plt.axvline(x=1998, color='lightgrey', linestyle='-', linewidth=1, alpha=0.3)
ax1.fill_between([1975, 1998],-1,1, alpha=0.3, facecolor='lightgrey')
plt.plot(mdv_year,((mdv_t)),color='magenta',linewidth=1.5,label=(r"Non-linear detrended HadCRUT4.6 GMST $\Delta$T"))
t=0
T_dash = T[0:66] - (slope_t[t])*delta_Q_rad
# remove linear trend
T_dash_det = remove_linear_trend(year[0:66], T_dash)    
#T_dash_det = T_dash    
# Smooth data 
T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]    
plt.plot(year[0:66],((T_dash_det[0:66])),color='orange',linewidth=1, alpha=0.3)
plt.plot(year[0:66],((T_dash_det_l)),color='orange',linewidth=1.5,label=(r"Linear detrended HadCRUT4.6 GMST $\Delta$T"))
t=4
T_dash = T[0:66] - (slope_t[t])*delta_Q_rad
# remove linear trend
T_dash_det = remove_linear_trend(year[0:66], T_dash)    
#T_dash_det = T_dash    
# Smooth data 
T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]    
plt.plot(year[0:66],((T_dash_det[0:66])),color='red',linewidth=1, alpha=0.3)
plt.plot(year[0:66],((T_dash_det_l)),color='red',linewidth=1.5,label=(r"Radiatively corrected HadCRUT4.6 GMST $\Delta$T'"))
plt.ylim(-0.55,0.38)
plt.ylabel(r"GMST anomaly $\Delta$T (K)", fontsize=8, color='black', rotation=90, labelpad=0)

ax2 = ax1.twinx()
plt.plot(year,(amoc_en4_det),color='blue',linewidth=0.5, alpha=0.3)
#plt.plot(year,(amoc_en4_det_l),color='blue',linewidth=1,label=(r'EN4; r=' + str(results[2*2,4].round(2)) + ' (' + str(np.min(results[2*2,:].round(2))) + ', ' + str(np.max(results[2*2,:].round(2))) +')' ))
plt.plot(year,(amoc_en4_det_l),color='blue',linewidth=1.5,label=(r'A$_{EN4}$; r=0.57 (0.42, 0.63)' ))
plt.plot(year,(amoc_ishii_scripps_det),color='skyblue',linewidth=0.5, alpha=0.3)
#plt.plot(year,(amoc_ishii_scripps_det_l),color='cyan',linewidth=1,label=(r'ISHII+Scripps; r=' + str(results[2*1,4].round(2))  + ' (' + str(np.min(results[2*1,:].round(2))) + ', ' + str(np.max(results[2*1,:].round(2))) +')' ))
plt.plot(year,(amoc_ishii_scripps_det_l),color='skyblue',linestyle='--',linewidth=1.5,label=(r'A$_{ISHII+Scripps}$; r=0.49 (0.28, 0.62)' ))
# conversion factor between AMOC index in K and AMOC strength in Sv is 3.8 Sv/K, see Caesar et al. (2018)
plt.plot(year,sg_i_det*3.8,color='navy',linewidth=0.5, alpha=0.3)
#plt.plot(year,sg_i_det_l*3.8,color='navy',linewidth=1,label=(r'AMOC Index; r=' + str(results[2*3,4].round(2)) + ' (' + str(np.min(results[2*3,:].round(2))) + ', ' + str(np.max(results[2*3,:].round(2))) +')' ))
plt.plot(year,(sg_i_det_l*3.8),color='navy',linestyle='-.',linewidth=1.5,label=(r'A$_{HadISST}$; r=0.22 (0.01, 0.65)' ))
slope2, intercept2, r_value, p_value, slope_std_error = stats.linregress(time_moc[1:12], moc[1:12])
plt.plot(year_moc[1:12],time_moc[1:12]*slope2+2.2,color='black',linewidth=2.5,label='Smeed et al., 2018 RAPID measurements') # the constant 2.2 Sv is a vertical offset for display on this axis

plt.ylim(-4.3,3)
plt.xlim(1946,2016)
plt.axhline(y=0, color='black', linestyle='-', linewidth=1, alpha=0.3)
plt.ylabel('AMOC (Sv)', fontsize=8, color='black', rotation=270, labelpad=8)
ax2.tick_params(labelbottom=True)

ax3 = ax1.twiny()

#plt.plot(mdv_year[97:163],((mdv_t[97:163])),color='magenta',linewidth=0.5, alpha=0.3)
#plt.plot(mdv_year,((mdv_t)),color='magenta',linewidth=1.5,label=(r"Non-linear detrended HadCRUT4.6 GMST $\Delta$T"))

t=0
T_dash = T[0:66] - (slope_t[t])*delta_Q_rad
# remove linear trend
T_dash_det = remove_linear_trend(year[0:66], T_dash)    
#T_dash_det = T_dash    
# Smooth data 
T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]    
plt.plot(year[0:66],((T_dash_det[0:66])),color='orange',linewidth=0.5, alpha=0.3)
plt.plot(year[0:66],((T_dash_det_l)),color='orange',linewidth=1.5,label=(r"Linear detrended HadCRUT4.6 GMST $\Delta$T"))
t=4
T_dash = T[0:66] - (slope_t[t])*delta_Q_rad
# remove linear trend
T_dash_det = remove_linear_trend(year[0:66], T_dash)    
#T_dash_det = T_dash    
# Smooth data 
T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]    
plt.plot(year[0:66],((T_dash_det[0:66])),color='red',linewidth=0.5, alpha=0.3)
plt.plot(year[0:66],((T_dash_det_l)),color='red',linewidth=1.5,label=(r"Radiatively corrected HadCRUT4.6 GMST $\Delta$T'"))
plt.xlim(1946,2016)


lines1, labels1 = ax1.get_legend_handles_labels()
lines2, labels2 = ax2.get_legend_handles_labels()
ax1.legend(lines1 + lines2, labels1 + labels2, loc='lower left', fontsize=5, frameon=False)
plt.show()
fig.savefig('Figure_1.png', format='png', dpi=1200 ,bbox_inches='tight')

#%% load Ocean heat content as used by Chen & Tung (2018), for bar plot ocean data taken from Cheng et al. (2017)
wb = xlrd.open_workbook('OHC.xlsx') 
sheet = wb.sheet_by_index(0) # this sheet contains the time series for global ocean heat content
ohc_year = np.asarray(sheet.col_values(0,5))
ohc_0_1500 = np.asarray(sheet.col_values(1,5))
ohc_0_200 = np.asarray(sheet.col_values(2,5))

# calculate ocean heat uptake rate 
ohc_0_1500_int = np.zeros(len(ohc_0_1500)-1)
for i in range(0, len(ohc_0_1500)-1):
    ohc_0_1500_int[i] = ohc_0_1500[i+1] - ohc_0_1500[i]
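
# The loop above is just a first difference; illustrative cross-check:
assert np.allclose(ohc_0_1500_int, np.diff(ohc_0_1500))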
       
#%% load RAPID data downloaded from http://www.rapid.ac.uk/rapidmoc/rapid_data/datadl.php
import datetime
data = 'moc_transports.nc'
fh = Dataset(data, mode='r')
time_moc = fh.variables['time'][:] 
moc = fh.variables['moc_mar_hc10'][:] # overturning transport in Sv
fh.close()

time_date_moc = np.zeros((len(time_moc)), dtype=datetime.date)
for l in range (0, len(time_moc)):
    # add 2004*365 + 121 + 91 days so that fromordinal yields dates starting on 01/04/2004, the start of the RAPID record
    time_date_moc[l] = datetime.date.fromordinal(int(time_moc[l]) + 2004*365 + 121 + 91) 
year_moc = np.zeros(len(time_moc)) 
# convert the dates to decimal years
for l in range (0, len(time_moc)): 
    year_moc[l] = time_date_moc[l].year+1/12*(time_date_moc[l].month-1)+1/12*1/30*(time_date_moc[l].day-1)
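
# Illustrative check of the decimal-year formula with a known date:
# 1 July 2010 (month 7, day 1) maps to 2010 + 6/12 + 0 = 2010.5.
_d_check = datetime.date(2010, 7, 1)
assert abs(_d_check.year + 1/12*(_d_check.month-1) + 1/12*1/30*(_d_check.day-1) - 2010.5) < 1e-9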

# regrid both time series to a common monthly axis from the beginning of 2005 to 2014
time = np.arange(2005,2014,1/12)    
moc_m = griddata(year_moc,moc,time)    
ohc_0_1500_int_m = griddata(ohc_year[:-1],ohc_0_1500_int,time)   
# calculate the linear slope of both time series
slope, intercept, r_value, p_value, slope_std_error = stats.linregress(time, moc_m)
slope_ohc, intercept_o, r_value, p_value_ohc, slope_std_error = stats.linregress(time,ohc_0_1500_int_m)

#%% Create figure 3
mpl.rc('xtick', labelsize=8) 
mpl.rc('ytick', labelsize=8) 
mpl.rc("figure", facecolor="white")

fig, axes = plt.subplots(figsize=(18.3/2.54,6/2.54),nrows=1, ncols=3) 
fig.subplots_adjust(wspace=0.3, hspace=0.2)

ax3 = plt.subplot2grid((1, 3), (0, 0), colspan=3)
N = 2
ind = np.arange(2)
width=.7
# the numbers for the ocean heat uptake in the different ocean regions are calculated and listed at the bottom of the 3rd sheet of OHC.xlsx
p1 = plt.bar(ind, (26.4+17,10.3+32.4), width, color='navy')
p2 = plt.bar(ind, (17,32.4), width, color='red')
# hypothetical numbers including a northward heat transport of 14 ZJ in case the AMOC did not slow down
p1 = plt.bar(2, (10.3+32.4-(32.4-9.5)), width, color='navy', alpha=0.4, bottom=32.4-9.5)
p2 = plt.bar(2, (32.4-9.5), width, color='red',alpha=0.4)
plt.xticks([0,1,2], ('2000-2004', '2005-2014', '2005-2014 \n without AMOC weakening'), size=6)
plt.ylabel('OHC (ZJ)', size=7)
plt.ylim(0,55)
plt.xlim(-0.5,3.5)
plt.text(-0.35, 51, 'Atlantic Ocean OHC 0-2000m', color='navy', size=6)
plt.text(-0.35, 48., 'Southern Ocean OHC 0-2000m', color='red', size=6)
plt.show()
fig.savefig('Figure_3.jpg', format='jpg', dpi=600 ,bbox_inches='tight')

#%% Create figure 2 new
mpl.rc('xtick', labelsize=8) 
mpl.rc('ytick', labelsize=8) 
mpl.rc("figure", facecolor="white")

fig, axes = plt.subplots(figsize=(19/2.54,8/2.54),nrows=1, ncols=3) 
fig.subplots_adjust(wspace=0.3, hspace=0.2)
ax1 = plt.subplot2grid((1, 3), (0, 0), colspan=3)
plt.plot(year_moc,moc,color='blue',linewidth=1, alpha=0.3)
plt.plot(time,slope*time + intercept,color='blue',linewidth=2, label='RAPID AMOC')
plt.ylim(-5,45)
plt.ylabel('AMOC strength (Sv)', size=8)
plt.xlabel('Year', size=7)
ax2 = ax1.twinx()
plt.plot(ohc_year[:-1],ohc_0_1500_int,linewidth=1, color='darkorange', alpha=0.5)
plt.plot(time,(slope_ohc*time + intercept_o),color='darkorange',linewidth=2 ,label='Rate of ocean heat uptake')
plt.ylim(-2,4)
plt.ylabel('Ocean heat uptake rate (ZJ/year)', size=8, rotation=270, labelpad=8)
plt.xlim(2004.5,2014.5)

lines1, labels1 = ax1.get_legend_handles_labels()
lines2, labels2 = ax2.get_legend_handles_labels()
ax1.legend(lines1 + lines2, labels1 + labels2, loc='upper left', fontsize=7, frameon=False)

plt.show()
fig.savefig('Figure_2.jpg', format='jpg', dpi=600 ,bbox_inches='tight')

#%% ######################## Further Testing ########################
# Test the time lag between the temperature and the AMOC proxies
tau_max = np.zeros(3)
tau_min = np.zeros(3)

T_dash = T[0:66] - (slope_t[4])*delta_Q_rad
# remove linear trend
T_dash_det = remove_linear_trend(year[0:66], T_dash)    
# Smooth data 
T_dash_det_l = lowess(T_dash_det, year[0:66], 10/len(year[0:66]), it=5)[:,1]      
# calculate correlation
original_data = np.zeros((4,66)) # 66 years (1947-2012) for which all series are available; len(year) would add trailing zeros
# create array with all time series
for i in range (0,66):
    original_data[0,i] = (T_dash_det[i])
    original_data[1,i] = (amoc_ishii_scripps_det[i])
    original_data[2,i] = (amoc_en4_det[i])
    original_data[3,i] = (sg_i_det[i])

for l in range(0,3):    
    corr_vec = signal.correlate(original_data[0,:], original_data[l+1,:],'full')/len(original_data[0,:])
    tau = np.zeros(len(corr_vec))
    for i in range (0,len(corr_vec)):
        tau[i] = int(len(corr_vec)/2) - i
    tau_max[l] = int(len(corr_vec)/2) - np.argmax(corr_vec)
    tau_min[l] = int(len(corr_vec)/2) - np.argmin(corr_vec)
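
# Illustrative check of the lag convention used above: if the second series is
# a copy of the first delayed by 3 samples, tau_max comes out as +3.
_n = np.arange(66)
_a = np.exp(-0.5*((_n - 30)/4.0)**2) # bump centred at sample 30
_b = np.exp(-0.5*((_n - 33)/4.0)**2) # same bump, delayed by 3 samples
_c = signal.correlate(_a, _b, 'full')
assert int(len(_c)/2) - np.argmax(_c) == 3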

#%%
fig, axes = plt.subplots(figsize=(4,3.2),nrows=1, ncols=1)
fig.subplots_adjust(wspace=0,hspace=0)

ax = plt.subplot(111)
cl = ('red','blue','green')
k = len(original_data[0,:])

# calculate the first-order (time lag 1) autocorrelation coefficient of each
# time series individually; the assumption is that the autocorrelation is
# first-order only (a Markov process), which gives the effective series length
# used to determine statistical significance
autoc_temp = np.corrcoef(original_data[0,1:k], original_data[0,0:k-1])[0,1]

for l in range(0,3):
    # first-order autocorrelation of the respective AMOC proxy
    autoc_amoc = np.corrcoef(original_data[l+1,1:k], original_data[l+1,0:k-1])[0,1]
    corr_vec = np.zeros((2*(k-1)-1,2))
    sig_corr_vec = np.zeros((2*(k-1)-1,2))

    t_value = 1.97 # corresponds to 0.95 statistical significance, two-sided
    # negative lags: the AMOC proxy leads the temperature by t years
    for t in range(0,k-1):
        N_eff = (k-t)*(1-autoc_amoc*autoc_temp)/(1+autoc_amoc*autoc_temp) # effective sample size (Bretherton et al. 1999)
        c = np.corrcoef(original_data[0,t:k], original_data[l+1,0:k-t])
        corr_vec[k-1-t-1,1] = c[0,1]
        corr_vec[k-1-t-1,0] = -t
        sig_corr_vec[k-1-t-1,1] = t_value/np.sqrt(N_eff)
        sig_corr_vec[k-1-t-1,0] = -t

    # positive lags: the temperature leads the AMOC proxy by t years
    for t in range(1,k-1):
        N_eff = (k-t)*(1-autoc_amoc*autoc_temp)/(1+autoc_amoc*autoc_temp)
        c = np.corrcoef(original_data[l+1,t:k], original_data[0,0:k-t])
        corr_vec[k-1+t-1,1] = c[0,1]
        corr_vec[k-1+t-1,0] = t
        sig_corr_vec[k-1+t-1,1] = t_value/np.sqrt(N_eff)
        sig_corr_vec[k-1+t-1,0] = t
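
    # Worked example of the effective-sample-size correction: with N = 66 and
    # lag-1 autocorrelations of 0.6 and 0.5, N_eff = 66*(1 - 0.3)/(1 + 0.3) ~ 35.5,
    # i.e. the significance threshold widens by a factor of sqrt(66/35.5) ~ 1.36.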


    
    corr_vec = signal.correlate(original_data[0,:], original_data[l+1,:],'full')/k
    tau = np.zeros(len(corr_vec))
    for i in range (0,len(corr_vec)):
        tau[i] = int(len(corr_vec)/2) - i
    tau_max[l] = int(len(corr_vec)/2) - np.argmax(corr_vec)
    tau_min[l] = int(len(corr_vec)/2) - np.argmin(corr_vec)
    plt.plot(sig_corr_vec[:,0],sig_corr_vec[:,1],color='red',linewidth=1)
    plt.plot(sig_corr_vec[:,0],-1*sig_corr_vec[:,1],color='red', label='95% Significance level',linewidth=1)
    plt.plot(tau,corr_vec, color = cl[l], label='Correlation function',linewidth=1)
    
plt.axhline(y=0.0, color='k', linestyle='dashed',linewidth=1)
plt.axvline(x=0.0, color='k', linestyle='dashed',linewidth=1)
plt.xlabel('Time lag (yr)', size=7)

plt.show()
