Skip to content

Commit

Permalink
see readme
Browse files Browse the repository at this point in the history
  • Loading branch information
Ruud van der Ent committed Jul 7, 2016
1 parent 0b4d343 commit bf25fd4
Show file tree
Hide file tree
Showing 5 changed files with 221 additions and 63 deletions.
4 changes: 2 additions & 2 deletions Con_E_Recyc_Output.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
@author: Ent00002
"""
#delayed runs
#import time
#time.sleep(55004)
import time
time.sleep(44000)

#%% Import libraries

Expand Down
13 changes: 9 additions & 4 deletions Con_E_Recyc_Plots.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,8 @@ def data_path(years,timetracking):
map.drawparallels(np.arange(-90,90,30),linewidth=0.1)

x_shift,y_shift = np.meshgrid(longitude,latitude)
x = x_shift - 0.75
y = y_shift - 0.75
x = x_shift
y = y_shift

# contour data over the map.
clevs = np.linspace(0,1.00,11)
Expand All @@ -227,7 +227,7 @@ def data_path(years,timetracking):

x_shift,y_shift = np.meshgrid(longitude,latitude)
x = x_shift - 0.75
y = y_shift - 0.75
y = y_shift + 0.75

lol = map.pcolormesh(x,y,eps_c_landtotal, latlon=True, cmap=cmap,vmin=0,vmax=1)
map.colorbar(lol,location='right',pad="5%", label = '(-)')
Expand All @@ -243,7 +243,12 @@ def data_path(years,timetracking):
plt.imshow(eps_c_landtotal)
plt.colorbar()

#%% Timetracking results
#%% Timetracking results
# these results are very crude and just meant for a quick check. For publications these have been calculated in more detail

#######################################
# possible check to do the same with daily output or timestep output
########################################

# for some reasons the sizes of the variables in the "variable explorer" of Spyder get wrongly displayed after running code below, but calling them from the memory works fine
if timetracking == 1:
Expand Down
129 changes: 72 additions & 57 deletions Con_P_Recyc_Plots.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,46 +76,46 @@ def data_path(years,timetracking):
P_total = np.squeeze(np.sum(np.sum(P_per_year_per_month,axis = 0), axis = 0))

# loading horizontal fluxes
#loading_HF2S = sio.loadmat(datapath[1])
#Fa_E_down_per_year_per_month = loading_HF2S['Fa_E_down_per_year_per_month']
#Fa_E_top_per_year_per_month = loading_HF2S['Fa_E_top_per_year_per_month']
#Fa_N_down_per_year_per_month = loading_HF2S['Fa_N_down_per_year_per_month']
#Fa_N_top_per_year_per_month = loading_HF2S['Fa_N_top_per_year_per_month']
#Fa_Vert_per_year_per_month = loading_HF2S['Fa_Vert_per_year_per_month']
#
#Fa_E_down_total = np.squeeze(np.sum(np.sum(Fa_E_down_per_year_per_month,axis = 0),axis = 0))
#Fa_E_top_total = np.squeeze(np.sum(np.sum(Fa_E_top_per_year_per_month,axis = 0),axis = 0))
#Fa_N_down_total = np.squeeze(np.sum(np.sum(Fa_N_down_per_year_per_month,axis = 0),axis = 0))
#Fa_N_top_total = np.squeeze(np.sum(np.sum(Fa_N_top_per_year_per_month,axis = 0),axis = 0))
#Fa_E_total = Fa_E_down_total + Fa_E_top_total
#Fa_N_total = Fa_N_down_total + Fa_N_top_total
#
## annual average fluxes per unit width (m3/m/a)
#Fa_N_total_m2a = np.zeros(np.shape(Fa_N_total))
#Fa_N_down_total_m2a = np.zeros(np.shape(Fa_N_down_total))
#Fa_N_top_total_m2a = np.zeros(np.shape(Fa_N_top_total))
#
#for i in range(len(latitude)):
# Fa_N_total_m2a[i,:] = (Fa_N_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)
# Fa_N_down_total_m2a[i,:] = (Fa_N_down_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)
# Fa_N_top_total_m2a[i,:] = (Fa_N_top_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)
#
#Fa_E_total_m2a = (Fa_E_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)
#Fa_E_down_total_m2a = (Fa_E_down_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)
#Fa_E_top_total_m2a = (Fa_E_top_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)
#
## fluxes for plotting
#FluxDispMatSmall = np.zeros([5,5])
#FluxDispMatSmall[3,2] = 1
#FluxDispMat = np.tile(FluxDispMatSmall,[np.floor(len(latitude)/5+1),np.floor(len(longitude)/5)+1])
#FluxDispMat = FluxDispMat[:len(latitude),:len(longitude)]
#
#Fa_E_total_m2a_part = Fa_E_total_m2a * FluxDispMat
#Fa_N_total_m2a_part = Fa_N_total_m2a * FluxDispMat
#Fa_E_down_total_m2a_part = Fa_E_down_total_m2a * FluxDispMat
#Fa_N_down_total_m2a_part = Fa_N_down_total_m2a * FluxDispMat
#Fa_E_top_total_m2a_part = Fa_E_top_total_m2a * FluxDispMat
#Fa_N_top_total_m2a_part = Fa_N_top_total_m2a * FluxDispMat
loading_HF2S = sio.loadmat(datapath[1])
Fa_E_down_per_year_per_month = loading_HF2S['Fa_E_down_per_year_per_month']
Fa_E_top_per_year_per_month = loading_HF2S['Fa_E_top_per_year_per_month']
Fa_N_down_per_year_per_month = loading_HF2S['Fa_N_down_per_year_per_month']
Fa_N_top_per_year_per_month = loading_HF2S['Fa_N_top_per_year_per_month']
Fa_Vert_per_year_per_month = loading_HF2S['Fa_Vert_per_year_per_month']

Fa_E_down_total = np.squeeze(np.sum(np.sum(Fa_E_down_per_year_per_month,axis = 0),axis = 0))
Fa_E_top_total = np.squeeze(np.sum(np.sum(Fa_E_top_per_year_per_month,axis = 0),axis = 0))
Fa_N_down_total = np.squeeze(np.sum(np.sum(Fa_N_down_per_year_per_month,axis = 0),axis = 0))
Fa_N_top_total = np.squeeze(np.sum(np.sum(Fa_N_top_per_year_per_month,axis = 0),axis = 0))
Fa_E_total = Fa_E_down_total + Fa_E_top_total
Fa_N_total = Fa_N_down_total + Fa_N_top_total

# annual average fluxes per unit width (m3/m/a)
Fa_N_total_m2a = np.zeros(np.shape(Fa_N_total))
Fa_N_down_total_m2a = np.zeros(np.shape(Fa_N_down_total))
Fa_N_top_total_m2a = np.zeros(np.shape(Fa_N_top_total))

for i in range(len(latitude)):
Fa_N_total_m2a[i,:] = (Fa_N_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)
Fa_N_down_total_m2a[i,:] = (Fa_N_down_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)
Fa_N_top_total_m2a[i,:] = (Fa_N_top_total[i,:] / (0.5*(L_N_gridcell[i] + L_S_gridcell[i])))/len(years)

Fa_E_total_m2a = (Fa_E_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)
Fa_E_down_total_m2a = (Fa_E_down_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)
Fa_E_top_total_m2a = (Fa_E_top_total / L_EW_gridcell) / len(years) # Annual average flux per unit width (m3/m/a)

# fluxes for plotting
FluxDispMatSmall = np.zeros([5,5])
FluxDispMatSmall[3,2] = 1
FluxDispMat = np.tile(FluxDispMatSmall,[np.floor(len(latitude)/5+1),np.floor(len(longitude)/5)+1])
FluxDispMat = FluxDispMat[:len(latitude),:len(longitude)]

Fa_E_total_m2a_part = Fa_E_total_m2a * FluxDispMat
Fa_N_total_m2a_part = Fa_N_total_m2a * FluxDispMat
Fa_E_down_total_m2a_part = Fa_E_down_total_m2a * FluxDispMat
Fa_N_down_total_m2a_part = Fa_N_down_total_m2a * FluxDispMat
Fa_E_top_total_m2a_part = Fa_E_top_total_m2a * FluxDispMat
Fa_N_top_total_m2a_part = Fa_N_top_total_m2a * FluxDispMat

# recycling metrics
rho_c_total = P_c_total / P_total
Expand Down Expand Up @@ -200,23 +200,18 @@ def data_path(years,timetracking):
map.drawparallels(np.arange(-90,90,30),linewidth=0.1)

x_shift,y_shift = np.meshgrid(longitude,latitude)
x = x_shift - 0.75
y = y_shift - 0.75
x = x_shift
y = y_shift

# contour data over the map
#Fa_E_total_m2a_part[Fa_E_total_m2a_part==0] = np.nan
#Fa_N_total_m2a_part[Fa_N_total_m2a_part==0] = np.nan
Fa_E_total_m2a_part[Fa_E_total_m2a_part==0] = np.nan
Fa_N_total_m2a_part[Fa_N_total_m2a_part==0] = np.nan
clevs = np.linspace(0,1.00,11)
clevs2 = np.linspace(0,1,1100)
cs = map.contourf(x_shift,y_shift,rho_c_landtotal,clevs2,linewidths=1.5,latlon=True, cmap=cmap)
cs = map.contourf(x,y,rho_c_landtotal,clevs2,linewidths=1.5,latlon=True, cmap=cmap)
map.colorbar(cs,location='right',pad="5%", ticks = clevs, label = '(-)')

# standard way of plotting (looks worse)
#cs = map.contourf(x,y,rho_c_landtotal,linewidths=1.5,latlon=True)
#map.colorbar(cs,location='right', label = '(-)')


#windplot = map.quiver(x,y,Fa_E_total_m2a_part,Fa_N_total_m2a_part,color = 'black',scale = 199999999,latlon=True,width = 0.002, headaxislength = 5, headwidth = 2)
windplot = map.quiver(x,y,Fa_E_total_m2a_part,Fa_N_total_m2a_part,color = 'black',scale = 199999999,latlon=True,width = 0.002, headaxislength = 5, headwidth = 2)
#adjust scale till arrows are visible

plt.title('Continental precipitation recycling ratio ' r'$\rho_c$', fontsize = 14)
Expand All @@ -238,7 +233,7 @@ def data_path(years,timetracking):
lol = map.pcolormesh(x,y,rho_c_landtotal, latlon=True, cmap=cmap,vmin=0,vmax=1)
map.colorbar(lol,location='right',pad="5%", label = '(-)')

#windplot = map.quiver(x,y,Fa_E_total_m2a_part,Fa_N_total_m2a_part,color = 'black',scale = 199999999,latlon=True,width = 0.002, headaxislength = 5, headwidth = 2)
windplot = map.quiver(x,y,Fa_E_total_m2a_part,Fa_N_total_m2a_part,color = 'black',scale = 199999999,latlon=True,width = 0.002, headaxislength = 5, headwidth = 2)
#adjust scale till arrows are visible

plt.title('Continental precipitation recycling ratio ' r'$\rho_c$', fontsize = 14)
Expand All @@ -250,7 +245,12 @@ def data_path(years,timetracking):
plt.imshow(rho_c_landtotal)
plt.colorbar

#%% Timetracking results
#%% Timetracking results
# these results are very crude and just meant for a quick check. For publications these have been calculated in more detail

#######################################
# possible check to do the same with daily output or timestep output
########################################

if timetracking == 1:
Sa_track_down_average = np.squeeze(np.mean(np.mean(Sa_track_down_per_year_per_month, axis=0), axis = 0)) #m3
Expand All @@ -264,19 +264,34 @@ def data_path(years,timetracking):
Sa_time_top_average = np.squeeze(np.mean(np.mean(Sa_time_top_per_year_per_month, axis=0), axis = 0)) / 86400 #d quick assesment
Sa_time_average = (( np.squeeze(np.mean(np.mean(Sa_time_down_per_year_per_month * Sa_track_down_per_year_per_month, axis=0), axis = 0))
+ np.squeeze(np.mean(np.mean(Sa_time_top_per_year_per_month * Sa_track_top_per_year_per_month,axis=0), axis = 0))
) / Sa_track_average / 86400) #d

) / Sa_track_average / 86400) #d
Sa_track_down_average = np.squeeze(np.mean(np.mean(Sa_track_down_per_year_per_month, axis=0), axis = 0)) #m3
Sa_track_top_average = np.squeeze(np.mean(np.mean(Sa_track_top_per_year_per_month, axis=0), axis = 0)) #m3

age_down_track_water_global_mean = (np.sum(np.sum(Sa_time_down_average[1:-1,:] * Sa_track_down_average[1:-1,:], axis = 0 ), axis = 0)
/ np.sum(np.sum(Sa_track_down_average[1:-1,:], axis = 0), axis = 0)) #d
age_top_track_water_global_mean = (np.sum(np.sum(Sa_time_top_average[1:-1,:] * Sa_track_top_average[1:-1,:], axis = 0 ), axis = 0)
/ np.sum(np.sum(Sa_track_top_average[1:-1,:], axis = 0), axis = 0)) #d
age_track_water_global_mean = (( np.sum(np.sum(Sa_time_down_average[1:-1,:] * Sa_track_down_average[1:-1,:], axis = 0), axis = 0)

# weighted with the volume of moisture in the atmosphere
age_track_water_global_mean_atmos_weight2 = (( np.sum(np.sum(Sa_time_down_average[1:-1,:] * Sa_track_down_average[1:-1,:], axis = 0), axis = 0)
+ np.sum(np.sum(Sa_time_top_average[1:-1,:] * Sa_track_top_average[1:-1,:], axis = 0 ), axis = 0) )
/ ( np.sum(np.sum(Sa_track_down_average[1:-1,:], axis = 0), axis = 0) + np.sum(np.sum(Sa_track_top_average[1:-1,:], axis = 0), axis = 0) )) #d


age_track_water_global_mean_atmos_weight = ( ( np.sum(Sa_time_down_per_year_per_month[:,:,1:-1,:] * Sa_track_down_per_year_per_month[:,:,1:-1,:])
+ np.sum(Sa_time_top_per_year_per_month[:,:,1:-1,:] * Sa_track_top_per_year_per_month[:,:,1:-1,:]) )
/ ( np.sum(Sa_track_down_per_year_per_month[:,:,1:-1,:]) + np.sum(Sa_track_top_per_year_per_month[:,:,1:-1,:]) ) ) / 86400 #d

# weighted with precipitation
age_track_water_global_mean_precip_weight = ( np.sum( P_time_per_year_per_month[:,:,1:-1,:] * P_track_per_year_per_month[:,:,1:-1,:] )
/ np.sum(P_track_per_year_per_month) / 86400 )# d

# weighted with area
age_track_water_global_mean_area_weight = ( np.sum(Sa_time_average[1:-1,:] * A_gridcell2D[1:-1,:])
/ np.sum(A_gridcell2D) )# d

# spatial figure time of tracked precipitation
P_c_time_total = (np.squeeze(np.sum(np.sum(P_time_per_year_per_month * P_track_per_year_per_month, axis =0), axis = 0))
/ P_c_total / 86400) #d

Expand Down
134 changes: 134 additions & 0 deletions Hor_Fluxes_Output.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 16 13:24:45 2016
@author: Ent00002
"""

#delayed runs
#import time
#time.sleep(7500)

#%% Import libraries

import matplotlib.pyplot as plt
import numpy as np
from netCDF4 import Dataset
get_ipython().magic(u'matplotlib inline')
import numpy.matlib
import scipy.io as sio
import calendar
import time
import datetime
from getconstants import getconstants
from timeit import default_timer as timer

#%% BEGIN OF INPUT (FILL THIS IN)
years = np.arange(2002,2009) #fill in the years (end-exclusive: processes 2002..2008)
yearpart = np.arange(0,366) # for a full (leap)year fill in 0:366 (zero-based day-of-year indices)

# Manage the extent of your dataset (FILL THIS IN)
# Define the latitude and longitude cell numbers to consider and corresponding lakes that should be considered part of the land
latnrs = np.arange(7,114)
lonnrs = np.arange(0,240)

# the lake numbers below belong to the ERA-Interim data on 1.5 degree starting at Northern latitude 79.5 and longitude -180
# paired index arrays; presumably lake_mask_1 holds latitude cell numbers and
# lake_mask_2 longitude cell numbers — TODO confirm against getconstants
lake_mask_1 = np.array([9,9,9,12,12,21,21,22,22,23,24,25,23,23,25,25,53,54,61,23,24,23,24,25,27,22,23,24,25,26,27,28,22,25,26,27,28,23,23,12,18])
lake_mask_2 = np.array([120+19,120+40,120+41,120+43,120+44,120+61,120+62,120+62,120+63,120+62,120+62,120+62,120+65,120+66,120+65,120+66,142-120,142-120,143-120,152-120,152-120,153-120,153-120,153-120,153-120,154-120,154-120,154-120,154-120,154-120,154-120,154-120,155-120,155-120,155-120,155-120,155-120,159-120,160-120,144-120,120+55])
lake_mask = np.transpose(np.vstack((lake_mask_1,lake_mask_2))) #recreate the arrays of the matlab model; shape (n_lakes, 2)

daily = 1 # 1 for writing out daily data, 0 for only monthly data

#END OF INPUT

#%% Datapaths (FILL THIS IN)
invariant_data = 'Interim_data/full/invariants.nc'# time-invariant ERA-Interim fields read by getconstants

def data_path(y, a, years):
    """Build the input/output .mat file paths for year *y*, day index *a*.

    Returns a 3-tuple:
      [0] per-day fluxes/storages input file,
      [1] monthly output file covering years[0]..years[-1],
      [2] daily output file for year *y*.
    """
    fluxes_storages_file = 'interdata/%s-%sfluxes_storages.mat' % (str(y), str(a))
    monthly_output_file = 'outputdata/Hor_Fluxes_full%s-%s.mat' % (str(years[0]), str(years[-1]))
    daily_output_file = 'outputdata/Hor_Fluxes_daily_full%s.mat' % (str(y),)
    return fluxes_storages_file, monthly_output_file, daily_output_file

#%% Runtime & Results
# Aggregate the per-timestep horizontal and vertical moisture fluxes (read from
# the per-day 'fluxes_storages' .mat files) into daily and year-by-month totals
# and write them to disk. Python 2 script (print statements).
# NOTE(review): indentation was lost in this listing; it has been reconstructed
# here from the code's syntax — verify against the original file.

start1 = timer()

# obtain the constants (grid geometry, land-sea mask, physical constants)
latitude,longitude,lsm,g,density_water,timestep,A_gridcell,L_N_gridcell,L_S_gridcell,L_EW_gridcell,gridcell = getconstants(latnrs,lonnrs,lake_mask,Dataset,invariant_data,np)

startyear = years[0]
# accumulators over all years: (year, month, lat, lon)
Fa_E_down_per_year_per_month = np.zeros((len(years),12,len(latitude),len(longitude)))
Fa_E_top_per_year_per_month = np.zeros((len(years),12,len(latitude),len(longitude)))
Fa_N_down_per_year_per_month = np.zeros((len(years),12,len(latitude),len(longitude)))
Fa_N_top_per_year_per_month = np.zeros((len(years),12,len(latitude),len(longitude)))
Fa_Vert_per_year_per_month = np.zeros((len(years),12,len(latitude),len(longitude)))

for i in range(len(years)):
    y = years[i]
    ly = int(calendar.isleap(y))  # 1 for leap years, 0 otherwise
    final_time = 364+ly  # last valid zero-based day-of-year index for year y

    # per-day accumulators for this year: (day, lat, lon)
    Fa_E_down_per_day = np.zeros((365+ly,len(latitude),len(longitude)))
    Fa_E_top_per_day = np.zeros((365+ly,len(latitude),len(longitude)))
    Fa_N_down_per_day = np.zeros((365+ly,len(latitude),len(longitude)))
    Fa_N_top_per_day = np.zeros((365+ly,len(latitude),len(longitude)))
    Fa_Vert_per_day = np.zeros((365+ly,len(latitude),len(longitude)))

    for j in range(len(yearpart)):
        start = timer()
        a = yearpart[j]  # zero-based day of year
        datapath = data_path(y,a,years)

        if a > final_time: # a = 365 and not a leapyear
            pass
        else:
            # load horizontal fluxes for this day (one input file per year-day)
            loading_FS = sio.loadmat(datapath[0],verify_compressed_data_integrity=False)
            Fa_E_top = loading_FS['Fa_E_top']
            Fa_N_top = loading_FS['Fa_N_top']
            Fa_E_down = loading_FS['Fa_E_down']
            Fa_N_down = loading_FS['Fa_N_down']
            Fa_Vert = loading_FS['Fa_Vert']

            # save per day: sum over the timesteps within the day (axis 0)
            Fa_E_down_per_day[a,:,:] = np.sum(Fa_E_down, axis =0)
            Fa_E_top_per_day[a,:,:] = np.sum(Fa_E_top, axis =0)
            Fa_N_down_per_day[a,:,:] = np.sum(Fa_N_down, axis =0)
            Fa_N_top_per_day[a,:,:] = np.sum(Fa_N_top, axis =0)
            Fa_Vert_per_day[a,:,:] = np.sum(Fa_Vert, axis =0)

        # timer
        end = timer()
        print 'Runtime output for day ' + str(a+1) + ' in year ' + str(y) + ' is',(end - start),' seconds.'

    # save daily fluxes on disk (one file per year)
    if daily == 1:
        sio.savemat(datapath[2],
                    {'Fa_E_down_per_day':Fa_E_down_per_day,'Fa_E_top_per_day':Fa_E_top_per_day,
                     'Fa_N_down_per_day':Fa_N_down_per_day,'Fa_N_top_per_day':Fa_N_top_per_day,
                     'Fa_Vert_per_day':Fa_Vert_per_day},do_compression=True)

    # aggregate this year's daily sums into monthly totals
    for m in range(12):
        first_day = int(datetime.date(y,m+1,1).strftime("%j"))
        last_day = int(datetime.date(y,m+1,calendar.monthrange(y,m+1)[1]).strftime("%j"))
        days = np.arange(first_day,last_day+1)-1 # -1 because Python is zero-based

        Fa_E_down_per_year_per_month[y-startyear,m,:,:] = (np.squeeze(np.sum(Fa_E_down_per_day[days,:,:], axis = 0)))
        Fa_E_top_per_year_per_month[y-startyear,m,:,:] = (np.squeeze(np.sum(Fa_E_top_per_day[days,:,:], axis = 0)))
        Fa_N_down_per_year_per_month[y-startyear,m,:,:] = (np.squeeze(np.sum(Fa_N_down_per_day[days,:,:], axis = 0)))
        Fa_N_top_per_year_per_month[y-startyear,m,:,:] = (np.squeeze(np.sum(Fa_N_top_per_day[days,:,:], axis = 0)))
        Fa_Vert_per_year_per_month[y-startyear,m,:,:] = (np.squeeze(np.sum(Fa_Vert_per_day[days,:,:], axis = 0)))

# save monthly fluxes on disk (all years in one file; NOTE(review): the original
# may have placed this inside the year loop so partial results are written each
# year — confirm against the original file; placed at top level here)
sio.savemat(datapath[1],
            {'Fa_E_down_per_year_per_month':Fa_E_down_per_year_per_month,'Fa_E_top_per_year_per_month':Fa_E_top_per_year_per_month,
             'Fa_N_down_per_year_per_month':Fa_N_down_per_year_per_month,'Fa_N_top_per_year_per_month':Fa_N_top_per_year_per_month,
             'Fa_Vert_per_year_per_month':Fa_Vert_per_year_per_month})

end1 = timer()
print 'The total runtime is',(end1-start1),' seconds.'

4 changes: 4 additions & 0 deletions readme.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@ I might be able to offer some kind of support if it is in my interest as well, b

Released under the GNU General Public License

WAM-2layers v2.4.07 | 7-7-2016
- added the Hor_fluxes output file
- small changes to the example plotting files

WAM-2layers v2.4.06 | 6-7-2016
- solve another bug in backward time tracking
- more accurate area size calculation
Expand Down

0 comments on commit bf25fd4

Please sign in to comment.