#Program determines the ACC strength
#
#The zonal volume transport through Drake Passage is vertically and
#meridionally integrated for every yearly-averaged section file and the
#resulting time series (in Sv) is written to a netCDF file.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename, depth_min_index, depth_max_index):
    """Read one yearly Drake Passage section file.

    Returns the latitude (deg), depth (m), layer thickness (m),
    meridional grid-cell length DY (m) and zonal velocity UVEL (m/s),
    restricted to the depth index range [depth_min_index, depth_max_index).
    """

    fh = netcdf.Dataset(filename, 'r')

    #First get the u-grid
    lat     = fh.variables['lat'][:]                                    #Latitude (deg)
    depth   = fh.variables['depth'][depth_min_index:depth_max_index]    #Depth (m)
    layer   = fh.variables['layer'][depth_min_index:depth_max_index]    #Layer thickness (m)
    grid_y  = fh.variables['DY'][:]                                     #Meridional grid cell length (m)
    u_vel   = fh.variables['UVEL'][depth_min_index:depth_max_index]     #Zonal velocity (m/s)

    fh.close()

    return lat, depth, layer, grid_y, u_vel

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

depth_min = 0       #Upper integration bound (m)
depth_max = 6000    #Lower integration bound (m)

#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Define empty array's
#NOTE(review): the local name 'time' shadows the imported time module;
#kept for consistency with the sibling scripts.
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as the last four digits
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

#-----------------------------------------------------------------------------------------

#Get all the relevant indices to determine the mass transport
fh = netcdf.Dataset(files[0], 'r')

depth = fh.variables['depth'][:]    #Depth (m)

fh.close()

#Get the dimensions of depth and latitude
depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1

#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lat, depth, layer_field, grid_x, u_vel = ReadinData(files[0], depth_min_index, depth_max_index)
grid_y = grid_x

for lat_i in range(len(lat)):
    #Get all the layers which have a maximum depth below given range
    if np.sum(layer_field[:, lat_i]) > depth_max:
        #Adjust the last layer so the column sums exactly to depth_max
        layer_field[-1, lat_i] -= (np.sum(layer_field[:, lat_i]) - depth_max)

#-----------------------------------------------------------------------------------------

#Define empty array's
transport_all = ma.masked_all(len(time))

for time_i in range(len(time)):
    #Now determine for each year
    print(time_i)

    #The freshly-read layer thickness is deliberately discarded: the
    #depth-adjusted layer_field from the first file is reused for all years
    #(the grid geometry does not change between yearly files).
    lat, depth, layer_field_old, grid_y, u_vel = ReadinData(files[time_i], depth_min_index, depth_max_index)

    #Determine the zonal transport through the section (m^3/s per cell)
    transport = u_vel * layer_field * grid_y

    #Determine the transport per depth layer (in Sv) and take sum to determine total transport
    transport_all[time_i] = np.sum(transport) / 1000000.0

#-----------------------------------------------------------------------------------------

print('Data is written to file')
fh = netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w')

fh.createDimension('time', len(time))

fh.createVariable('time', float, ('time'), zlib=True)
fh.createVariable('Transport', float, ('time'), zlib=True)

fh.variables['Transport'].long_name = 'Volume transport'

fh.variables['time'].units = 'Year'
fh.variables['Transport'].units = 'Sv'

#Writing data to correct variable
fh.variables['time'][:] = time
fh.variables['Transport'][:] = transport_all

fh.close()
#Plot the ACC strength
#
#Reads the volume-transport time series written by ACC_transport.py and
#plots it against model year.

from pylab import *
import numpy
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

depth_min = 0       #Must match the bounds used by ACC_transport.py
depth_max = 6000

#-----------------------------------------------------------------------------------------

fh = netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r')

time      = fh.variables['time'][:]         #Model year
transport = fh.variables['Transport'][:]    #Volume transport (Sv)

fh.close()


fig, ax = subplots()


ax.plot(time, transport, '-k', linewidth = 2.0)
ax.set_xlim(500, 600)
ax.set_ylim(90, 210)
ax.set_xlabel('Model year')
#Unit label fixed: 'sv' -> 'Sv' (consistent with the file's units attribute)
ax.set_ylabel('Volume transport (Sv)')
ax.set_xticks([500, 520, 540, 560, 580, 600])
ax.grid()

ax.set_title('ACC strength, E3SM Antarctic')

show()
#Program determines the AMOC strength
#
#The meridional volume transport across the 26N section is integrated over
#the upper depth_max metres for every yearly-averaged file and written as a
#time series (in Sv) to a netCDF file.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename, depth_min_index, depth_max_index):
    """Read one yearly 26N section file.

    Returns the longitude (deg), depth (m), layer thickness (m),
    zonal grid-cell length DX (m) and meridional velocity VVEL (m/s),
    restricted to the depth index range [depth_min_index, depth_max_index).
    """

    fh = netcdf.Dataset(filename, 'r')

    #First get the v-grid (the SALT field present in the files is not
    #needed for the transport and is therefore not read)
    lon     = fh.variables['lon'][:]                                    #Longitude (deg)
    depth   = fh.variables['depth'][depth_min_index:depth_max_index]    #Depth (m)
    layer   = fh.variables['layer'][depth_min_index:depth_max_index]    #Layer thickness (m)
    grid_x  = fh.variables['DX'][:]                                     #Zonal grid cell length (m)
    v_vel   = fh.variables['VVEL'][depth_min_index:depth_max_index]     #Meridional velocity (m/s)

    fh.close()

    return lon, depth, layer, grid_x, v_vel

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

depth_min = 0       #Upper integration bound (m)
depth_max = 1000    #Lower integration bound (m)

lat_FOV      = 26
section_name = 'FOV_section_26N'
#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Define empty array's
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as the last four digits
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

#-----------------------------------------------------------------------------------------

#Get all the relevant indices to determine the mass transport
fh = netcdf.Dataset(files[0], 'r')

depth = fh.variables['depth'][:]    #Depth (m)

fh.close()

#Get the dimensions of depth and latitude
depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1

#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lon, depth, layer_field, grid_x, v_vel = ReadinData(files[0], depth_min_index, depth_max_index)

for lon_i in range(len(lon)):
    #Get all the layers which have a maximum depth below given range
    if np.sum(layer_field[:, lon_i]) > depth_max:
        #Adjust the last layer so the column sums exactly to depth_max
        layer_field[-1, lon_i] -= (np.sum(layer_field[:, lon_i]) - depth_max)

#-----------------------------------------------------------------------------------------

#Define empty array's
transport_all = ma.masked_all(len(time))

for time_i in range(len(time)):
    #Now determine for each year
    print(time_i)

    #The freshly-read layer thickness is deliberately discarded: the
    #depth-adjusted layer_field from the first file is reused for all years
    #(the grid geometry does not change between yearly files).
    lon, depth, layer_field_old, grid_x, v_vel = ReadinData(files[time_i], depth_min_index, depth_max_index)

    #Determine the meridional transport (m^3/s per cell)
    transport = v_vel * layer_field * grid_x

    #Determine the transport per depth layer (in Sv) and take sum to determine total transport
    transport_all[time_i] = np.sum(transport) / 1000000.0

#-----------------------------------------------------------------------------------------

print('Data is written to file')
fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w')

fh.createDimension('time', len(time))

fh.createVariable('time', float, ('time'), zlib=True)
fh.createVariable('Transport', float, ('time'), zlib=True)

fh.variables['Transport'].long_name = 'Volume transport'

fh.variables['time'].units = 'Year'
fh.variables['Transport'].units = 'Sv'

#Writing data to correct variable
fh.variables['time'][:] = time
fh.variables['Transport'][:] = transport_all

fh.close()
#Program plots the AMOC strength
#
#Reads the volume-transport time series written by AMOC_transport.py and
#plots it against model year, with the observed range shaded.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

depth_min = 0       #Must match the bounds used by AMOC_transport.py
depth_max = 1000


#-----------------------------------------------------------------------------------------

fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r')

time      = fh.variables['time'][:]         #Model year
transport = fh.variables['Transport'][:]    #Volume transport (Sv)

fh.close()


fig, ax = subplots()

#Shaded band: 16 - 19 Sv (observed AMOC strength range at 26N)
ax.fill_between([-100, 2500], 16, 19, alpha=0.25, edgecolor='orange', facecolor='orange')


ax.plot(time, transport, '-k', linewidth = 2.0)
ax.set_xlim(500, 600)
ax.set_ylim(-2, 22)
ax.set_xlabel('Model year')
#Unit label fixed: 'sv' -> 'Sv' (consistent with the file's units attribute)
ax.set_ylabel('Volume transport (Sv)')
ax.set_xticks([500, 520, 540, 560, 580, 600])
ax.grid()

ax.set_title('AMOC strength, E3SM Antarctic')

show()
#Program plots the Atlantic Sector
#
#Time-mean salinity, temperature and zonal-velocity sections over the
#years [year_start, year_end], with a depth axis compressed below
#depth_crop for readability.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename):
    """Read one yearly Atlantic-sector section file.

    Returns latitude (deg), depth (m), temperature (deg C),
    salinity (g/kg), zonal velocity (m/s) and potential density (kg/m^3).
    """

    fh = netcdf.Dataset(filename, 'r')

    lat   = fh.variables['lat'][:]        #Latitude (deg)
    depth = fh.variables['depth'][:]      #Depth (m)
    temp  = fh.variables['TEMP'][:]       #Temperature (deg C)
    salt  = fh.variables['SALT'][:]       #Salinity (g / kg)
    u_vel = fh.variables['UVEL'][:]       #Zonal velocity (m / s)
    dens  = fh.variables['POT_DENS'][:]   #Potential density (kg / m^3)

    fh.close()

    return lat, depth, temp, salt, u_vel, dens

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

year_start = 500
year_end   = 599

#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/Atlantic_sector/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Define empty array's
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as the last four digits
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

time_start = (np.abs(time - year_start)).argmin()
time_end   = (np.abs(time - year_end)).argmin() + 1
files      = files[time_start:time_end]
#Fix: crop the time axis together with the file list; previously the data
#arrays below were sized with the uncropped length and the trailing years
#stayed masked, which only worked by accident of the masked mean.
time       = time[time_start:time_end]

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lat, depth, temp, salt, u_vel, dens = ReadinData(files[0])

#-----------------------------------------------------------------------------------------

#Define empty array's (one entry per selected file)
temp_all  = ma.masked_all((len(files), len(depth), len(lat)))
salt_all  = ma.masked_all((len(files), len(depth), len(lat)))
u_vel_all = ma.masked_all((len(files), len(depth), len(lat)))
dens_all  = ma.masked_all((len(files), len(depth), len(lat)))

for file_i in range(len(files)):
    #Now determine for each year
    print(files[file_i])

    lat, depth, temp, salt, u_vel, dens = ReadinData(files[file_i])

    #Save the data
    temp_all[file_i]  = temp
    salt_all[file_i]  = salt
    u_vel_all[file_i] = u_vel
    dens_all[file_i]  = dens

#Take the time mean
temp_all  = np.mean(temp_all, axis = 0)
salt_all  = np.mean(salt_all, axis = 0)
u_vel_all = np.mean(u_vel_all, axis = 0)
dens_all  = np.mean(dens_all, axis = 0)
#-----------------------------------------------------------------------------------------

#Compress the depth axis below depth_crop by factor_depth_crop
depth_crop        = 1000
factor_depth_crop = 4
depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r')
cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5))
cbar.set_label('Salinity (g kg$^{-1}$)')

ax.set_xlim(-71, 1)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-70, 1, 10))
ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.set_title('Salinity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, temp_all, levels = np.arange(-2, 20.01, 0.5), extend = 'both', cmap = 'Spectral_r')
cbar = colorbar(CS, ticks = np.arange(0, 20.01, 5))
cbar.set_label('Temperature ($^{\circ}$C)')

ax.set_xlim(-71, 1)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-70, 1, 10))
ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.set_title('Temperature, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-20, 20.01, 1), extend = 'both', cmap = 'RdBu_r')
cbar = colorbar(CS, ticks = np.arange(-20, 20.01, 5))
cbar.set_label('Zonal velocity (cm s$^{-1}$)')

#Overlay potential-density contours (1027 kg/m^3 emphasised)
CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2)
CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1)
ax.clabel(CS_1, inline=True, fontsize=10, manual = [(-10, 500)])


ax.set_xlim(-71, 1)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-70, 1, 10))
ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

#Title typo fixed: 'Antrctic' -> 'Antarctic'
ax.set_title('Zonal velocity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

show()
#-----------------------------------------------------------------------------------------
#Program plots sections along Drake Passage
#
#Time-mean salinity, temperature and zonal-velocity sections over the
#years [year_start, year_end], with a depth axis compressed below
#depth_crop for readability.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename):
    """Read one yearly Drake Passage section file.

    Returns latitude (deg), depth (m), temperature (deg C),
    salinity (g/kg), zonal velocity (m/s) and potential density (kg/m^3).
    """

    fh = netcdf.Dataset(filename, 'r')

    lat   = fh.variables['lat'][:]        #Latitude (deg)
    depth = fh.variables['depth'][:]      #Depth (m)
    temp  = fh.variables['TEMP'][:]       #Temperature (deg C)
    salt  = fh.variables['SALT'][:]       #Salinity (g / kg)
    u_vel = fh.variables['UVEL'][:]       #Zonal velocity (m / s)
    dens  = fh.variables['POT_DENS'][:]   #Potential density (kg / m^3)

    fh.close()

    return lat, depth, temp, salt, u_vel, dens

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------


year_start = 500
year_end   = 599

depth_min = 0
depth_max = 6000

#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Define empty array's
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as the last four digits
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

time_start = (np.abs(time - year_start)).argmin()
time_end   = (np.abs(time - year_end)).argmin() + 1
files      = files[time_start:time_end]
#Fix: crop the time axis together with the file list; previously the data
#arrays below were sized with the uncropped length and the trailing years
#stayed masked, which only worked by accident of the masked mean.
time       = time[time_start:time_end]

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lat, depth, temp, salt, u_vel, dens = ReadinData(files[0])

#-----------------------------------------------------------------------------------------

#Define empty array's (one entry per selected file)
temp_all  = ma.masked_all((len(files), len(depth), len(lat)))
salt_all  = ma.masked_all((len(files), len(depth), len(lat)))
u_vel_all = ma.masked_all((len(files), len(depth), len(lat)))
dens_all  = ma.masked_all((len(files), len(depth), len(lat)))

for file_i in range(len(files)):
    #Now determine for each year
    print(files[file_i])

    lat, depth, temp, salt, u_vel, dens = ReadinData(files[file_i])

    #Save the data
    temp_all[file_i]  = temp
    salt_all[file_i]  = salt
    u_vel_all[file_i] = u_vel
    dens_all[file_i]  = dens

#Take the time mean
temp_all  = np.mean(temp_all, axis = 0)
salt_all  = np.mean(salt_all, axis = 0)
u_vel_all = np.mean(u_vel_all, axis = 0)
dens_all  = np.mean(dens_all, axis = 0)
#-----------------------------------------------------------------------------------------

#Compress the depth axis below depth_crop by factor_depth_crop
depth_crop        = 1000
factor_depth_crop = 4
depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r')
cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5))
cbar.set_label('Salinity (g kg$^{-1}$)')

ax.set_xlim(-67, -54.5)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-65, -54, 5))
ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.set_title('Salinity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, temp_all, levels = np.arange(-2, 10.01, 0.25), extend = 'both', cmap = 'Spectral_r')
cbar = colorbar(CS, ticks = np.arange(-2, 10.01, 2))
cbar.set_label('Temperature ($^{\circ}$C)')

ax.set_xlim(-67, -54.5)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-65, -54, 5))
ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.set_title('Temperature, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

#-----------------------------------------------------------------------------------------

fig, ax = subplots()

ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50)

CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-40, 40.01, 2), extend = 'both', cmap = 'RdBu_r')
cbar = colorbar(CS, ticks = np.arange(-40, 40.01, 10))
cbar.set_label('Zonal velocity (cm s$^{-1}$)')

#Overlay potential-density contours (1027 kg/m^3 emphasised)
CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2)
CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1)

ax.set_xlim(-67, -54.5)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.set_ylabel('Depth (m)')

ax.set_xticks(np.arange(-65, -54, 5))
ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S'])

labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the ylabels back to true depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.set_title('Zonal velocity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')')

show()
#-----------------------------------------------------------------------------------------
#Program plots the F_ovS and the components
#
#Reads the FOV index time series (total, water-mass components and the
#azonal gyre component) at 34S and plots each against model year,
#together with the reanalysis reference values.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
from scipy import stats

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename):
    """Read the FOV index file for one section.

    Returns the time axis, the MOC strength (Sv), the total freshwater
    transport F_OV plus its four water-mass components (Sv), and the
    corresponding salinity and meridional-velocity averages per water mass
    (ASW, AIW, NADW, ABW).
    """

    fh = netcdf.Dataset(filename, 'r')

    time      = fh.variables['time'][:]
    transport = fh.variables['Transport'][:]   #MOC strength (Sv)
    FOV       = fh.variables['F_OV'][:]        #Fresh water
    FOV_ASW   = fh.variables['F_OV_ASW'][:]    #Fresh water
    FOV_AIW   = fh.variables['F_OV_AIW'][:]    #Fresh water
    FOV_NADW  = fh.variables['F_OV_NADW'][:]   #Fresh water
    FOV_ABW   = fh.variables['F_OV_ABW'][:]    #Fresh water
    salt_ASW  = fh.variables['SALT_ASW'][:]    #Salinity
    salt_AIW  = fh.variables['SALT_AIW'][:]    #Salinity
    salt_NADW = fh.variables['SALT_NADW'][:]   #Salinity
    salt_ABW  = fh.variables['SALT_ABW'][:]    #Salinity
    vel_ASW   = fh.variables['VVEL_ASW'][:]    #Meridional velocity
    vel_AIW   = fh.variables['VVEL_AIW'][:]    #Meridional velocity
    vel_NADW  = fh.variables['VVEL_NADW'][:]   #Meridional velocity
    vel_ABW   = fh.variables['VVEL_ABW'][:]    #Meridional velocity

    fh.close()

    return time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW


#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

section_name = 'section_34S'

#-----------------------------------------------------------------------------------------

time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW = ReadinData(directory+'Ocean/FOV_index_'+section_name+'.nc')

#Reanalysis reference values (Sv) for the total, the four water-mass
#components and the gyre component
FOV_rean, FOV_ASW_rean, FOV_AIW_rean, FOV_NADW_rean, FOV_ABW_rean, FOV_rean_gyre = -0.10138855319303171, -0.12769111454122556, 0.12011490376119702, -0.10644935101861515, 0.012637008605611988, 0.2136790553107374

fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_'+section_name+'.nc', 'r')

FOV_gyre = fh.variables['F_gyre'][:]    #Fresh water

fh.close()

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

graph_FOV_all   = plot(time, FOV, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM')
graph_FOV_gyre  = plot(time, FOV_gyre, '-r', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, E3SM')
graph_rean_all  = plot(time, np.zeros(len(time))+FOV_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis')
graph_rean_gyre = plot(time, np.zeros(len(time))+FOV_rean_gyre, '--', color = 'firebrick', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_ylim(-0.5, 0.5)
ax.set_xlim(500, 600)
ax.grid()
ax.set_xticks([500, 520, 540, 560, 580, 600])

#Shaded band: observed F_ovS range
ax.fill_between([-100, 600], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange')

#Two separate legends: model curves (lower left) and reanalysis (lower right)
graphs        = graph_FOV_all + graph_FOV_gyre
legend_labels = [l.get_label() for l in graphs]
legend_1      = ax.legend(graphs, legend_labels, loc='lower left', ncol=1, framealpha = 1.0, numpoints = 1)


graphs        = graph_rean_all + graph_rean_gyre
legend_labels = [l.get_label() for l in graphs]
legend_2      = ax.legend(graphs, legend_labels, loc = 'lower right', ncol=1, framealpha = 1.0, numpoints = 1)
ax.add_artist(legend_1)


ax.set_title('$F_{\mathrm{ovS}}$ and azonal (gyre) component ($F_{\mathrm{azS}}$), E3SM Antarctic')

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

graph_E3SM = plot(time, FOV_ASW, '-k', linewidth = 1.5, label = 'E3SM')
graph_rean = plot(time, np.zeros(len(time))+FOV_ASW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_ylim(-0.5, 0.5)
ax.set_xlim(500, 600)
ax.grid()
ax.set_xticks([500, 520, 540, 560, 580, 600])

graphs = graph_E3SM + graph_rean

legend_labels = [l.get_label() for l in graphs]
ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1)

ax.set_title('Atlantic Surface Water (ASW), E3SM Antarctic')

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

graph_E3SM = plot(time, FOV_AIW, '-k', linewidth = 1.5, label = 'E3SM')
graph_rean = plot(time, np.zeros(len(time))+FOV_AIW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_ylim(-0.5, 0.5)
ax.set_xlim(500, 600)
ax.grid()
ax.set_xticks([500, 520, 540, 560, 580, 600])

graphs = graph_E3SM + graph_rean

legend_labels = [l.get_label() for l in graphs]
ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1)

ax.set_title('Antarctic Intermediate Water (AIW), E3SM Antarctic')

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

graph_E3SM = plot(time, FOV_NADW, '-k', linewidth = 1.5, label = 'E3SM')
graph_rean = plot(time, np.zeros(len(time))+FOV_NADW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_ylim(-0.5, 0.5)
ax.set_xlim(500, 600)
ax.grid()
ax.set_xticks([500, 520, 540, 560, 580, 600])

graphs = graph_E3SM + graph_rean

legend_labels = [l.get_label() for l in graphs]
ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1)

ax.set_title('North Atlantic Deep Water (NADW), E3SM Antarctic')

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

graph_E3SM = plot(time, FOV_ABW, '-k', linewidth = 1.5, label = 'E3SM')
graph_rean = plot(time, np.zeros(len(time))+FOV_ABW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_ylim(-0.5, 0.5)
ax.set_xlim(500, 600)
ax.grid()
ax.set_xticks([500, 520, 540, 560, 580, 600])

graphs = graph_E3SM + graph_rean

legend_labels = [l.get_label() for l in graphs]
ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1)

ax.set_title('Antarctic Bottom Water (ABW), E3SM Antarctic')

show()
/ kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_60N' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + 
+#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) +transport_salt_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[file_i] = np.sum(transport) / 1000000.0 + + #Determine the total salinity transport + transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) +fh.createVariable('F_OV', float, ('time'), zlib=True) + +fh.variables['Transport'].longname = 'Volume transport' +fh.variables['F_OV'].longname = 'Fresh water transport' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' +fh.variables['F_OV'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all +fh.variables['F_OV'][:] = transport_salt_all + +fh.close() diff 
#Program plots the freshwater convergence (34S and 60N)
#Reads the previously generated FOV-index files and compares the E3SM
#time series with constant reanalysis reference values.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename):
    #Read the time axis and the overturning freshwater transport (F_OV)
    #from a FOV-index netCDF file

    fh = netcdf.Dataset(filename, 'r')

    time = fh.variables['time'][:]
    FOV = fh.variables['F_OV'][:]   #Fresh water

    fh.close()

    return time, FOV

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Both files share the same model-year axis, so `time` is simply overwritten
time, FOV_34S = ReadinData(directory+'Ocean/FOV_index_section_34S.nc')
time, FOV_60N = ReadinData(directory+'Ocean/FOV_index_section_60N.nc')
#-----------------------------------------------------------------------------------------

#Constant reanalysis reference values (Sv) for 34S and 60N
FOV_34S_rean, FOV_60N_rean = -0.10138855319303171, -0.027075354933136512
#-----------------------------------------------------------------------------------------

fig, ax = subplots()

#E3SM time series: southern section, northern section, and their convergence
graph_rcp_34S = ax.plot(time, FOV_34S, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM')
graph_rcp_60N = ax.plot(time, FOV_60N, '-b', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, E3SM')
graph_rcp_conver = ax.plot(time, FOV_34S - FOV_60N, '-r', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, E3SM')

#Reanalysis references plotted as constant dashed lines over the same period
graph_rean_34S = ax.plot(time, np.zeros(len(time))+FOV_34S_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis')
graph_rean_60N = ax.plot(time, np.zeros(len(time))+FOV_60N_rean, '--', color = 'cyan', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, Reanalysis')
graph_rean_conver = ax.plot(time, np.zeros(len(time))+FOV_34S_rean - FOV_60N_rean, '--', color = 'firebrick', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, Reanalysis')

ax.set_xlabel('Model year')
ax.set_ylabel('Freshwater transport (Sv)')
ax.set_xlim(500, 600)
ax.set_ylim(-0.5, 0.5)
ax.set_xticks([500, 520, 540, 560, 580, 600])
ax.grid()

#Shaded band between -0.28 and -0.05 Sv
#NOTE(review): presumably the observed F_ovS range — confirm against source
ax.fill_between([500, 600], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange')

#First legend: the E3SM curves
graphs = graph_rcp_34S + graph_rcp_60N + graph_rcp_conver
legend_labels = [l.get_label() for l in graphs]
legend_1 = ax.legend(graphs, legend_labels, loc='upper left', ncol=1, framealpha = 1.0, numpoints = 1)


#Second legend: the reanalysis references; re-add the first legend since the
#second ax.legend call would otherwise replace it
graphs = graph_rean_34S + graph_rean_60N + graph_rean_conver
legend_labels = [l.get_label() for l in graphs]
legend_2 = ax.legend(graphs, legend_labels, loc = 'upper right', ncol=1, framealpha = 1.0, numpoints = 1)
ax.add_artist(legend_1)


ax.set_title('Freshwater convergence, E3SM Antarctic')

show()
#Program determines the azonal (gyre) component at 34S
#The gyre freshwater transport is the covariance of the deviations of
#velocity and salinity from their zonal means, integrated over the section.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename, depth_min_index, depth_max_index):
    #Read one yearly-averaged section file and return the section geometry and
    #fields restricted to the depth-index range [depth_min_index, depth_max_index)

    fh = netcdf.Dataset(filename, 'r')

    lon = fh.variables['lon'][:]                                        #Longitude
    depth = fh.variables['depth'][depth_min_index:depth_max_index]      #Depth (m)
    layer = fh.variables['layer'][depth_min_index:depth_max_index]      #Layer thickness (m)
    grid_x = fh.variables['DX'][:]                                      #Zonal grid cell length (m)
    v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index]       #Meridional velocity (m/s)
    salt = fh.variables['SALT'][depth_min_index:depth_max_index]        #Salinity (g / kg)

    fh.close()

    return lon, depth, layer, grid_x, v_vel, salt

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Depth range (m) over which the gyre component is evaluated
depth_min = 0
depth_max = 6000

section_name = 'FOV_section_34S'
#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Allocate the time axis (one entry per yearly file)
#NOTE(review): this rebinds the imported `time` module name to an array
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as ..._year_YYYY.nc
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

#-----------------------------------------------------------------------------------------

#Get all the relevant indices to determine the mass transport
fh = netcdf.Dataset(files[0], 'r')

depth = fh.variables['depth'][:]    #Depth (m)

fh.close()

#Get the depth indices bracketing [depth_min, depth_max]
depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1

#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index)

#Cell areas (thickness x zonal length) per layer, used as integration weights
layer_field_area = ma.masked_all(shape(layer_field))

for depth_i in range(len(depth)):
    #Determine the surface area of each cell in this layer
    layer_field_area[depth_i] = layer_field[depth_i] * grid_x

#-----------------------------------------------------------------------------------------

#Allocate the output time series
transport_gyre_all = ma.masked_all(len(time))

for file_i in range(len(files)):
    #Now determine for each year (one file per model year)
    print(files[file_i])

    lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index)

    #Determine the zonal means
    #NOTE(review): unweighted np.mean here, unlike the DX-weighted zonal mean
    #used in FOV_index.py — confirm this is intentional
    v_vel_zonal = np.mean(v_vel, axis = 1)
    salt_zonal = np.mean(salt, axis = 1)

    v_vel_prime = ma.masked_all(np.shape(v_vel))
    salt_prime = ma.masked_all(np.shape(salt))

    for depth_i in range(len(depth)):
        #Determine the differences with respect to the zonal means
        v_vel_prime[depth_i] = v_vel[depth_i] - v_vel_zonal[depth_i]
        salt_prime[depth_i] = salt[depth_i] - salt_zonal[depth_i]

    #Now determine the azonal component (gyre, in Sv)
    transport_gyre_all[file_i] = (-1.0 / 35.0) * np.sum(v_vel_prime * salt_prime * layer_field_area) / 10**6.0

#-----------------------------------------------------------------------------------------

print('Data is written to file')
fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_section_34S.nc', 'w')

fh.createDimension('time', len(time))

fh.createVariable('time', float, ('time'), zlib=True)
fh.createVariable('F_gyre', float, ('time'), zlib=True)

fh.variables['F_gyre'].longname = 'Freshwater transport by gyre'

fh.variables['time'].units = 'Year'
fh.variables['F_gyre'].units = 'Sv'

#Writing data to correct variable
fh.variables['time'][:] = time
fh.variables['F_gyre'][:] = transport_gyre_all

fh.close()
#Program determines the FOV index for 34S and the difference components
#Besides the total F_OV, the section is partitioned into four water masses
#(ASW, AIW, NADW, ABW) and the freshwater transport, mean salinity and mean
#baroclinic velocity of each water mass are written out per model year.

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf

#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename, depth_min_index, depth_max_index):
    #Read one yearly-averaged section file and return the section geometry and
    #fields restricted to the depth-index range [depth_min_index, depth_max_index)

    fh = netcdf.Dataset(filename, 'r')

    lon = fh.variables['lon'][:]                                        #Longitude
    depth = fh.variables['depth'][depth_min_index:depth_max_index]      #Depth (m)
    layer = fh.variables['layer'][depth_min_index:depth_max_index]      #Layer thickness (m)
    grid_x = fh.variables['DX'][:]                                      #Zonal grid cell length (m)
    v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index]       #Meridional velocity (m/s)
    salt = fh.variables['SALT'][depth_min_index:depth_max_index]        #Salinity (g / kg)

    fh.close()

    return lon, depth, layer, grid_x, v_vel, salt

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Depth range (m) over which the index is evaluated
depth_min = 0
depth_max = 6000

section_name = 'FOV_section_34S'
#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Allocate the time axis (one entry per yearly file)
#NOTE(review): this rebinds the imported `time` module name to an array
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Model year is encoded in the file name as ..._year_YYYY.nc
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

#-----------------------------------------------------------------------------------------

#Get all the relevant indices to determine the mass transport
fh = netcdf.Dataset(files[0], 'r')

depth = fh.variables['depth'][:]    #Depth (m)

fh.close()

#Get the depth indices bracketing [depth_min, depth_max]
depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1

#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index)

#Normalise layer field per layer
layer_field_area = ma.masked_all(shape(layer_field))
grid_x_norm = ma.masked_all((len(depth), len(lon)))

for depth_i in range(len(depth)):
    #Determine the surface area of each cell in this layer
    layer_field_area[depth_i] = layer_field[depth_i] * grid_x

    #Normalise the length: mask land points (via the velocity mask) so the
    #ocean-only weights of each level sum to 1
    grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask)
    grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth)

#-----------------------------------------------------------------------------------------

#Allocate the output time series (totals plus the four water masses)
transport_all = ma.masked_all(len(time))
transport_salt_all = ma.masked_all(len(time))
transport_salt_ASW_all = ma.masked_all(len(time))
transport_salt_AIW_all = ma.masked_all(len(time))
transport_salt_NADW_all = ma.masked_all(len(time))
transport_salt_ABW_all = ma.masked_all(len(time))
salt_ASW_all = ma.masked_all(len(time))
salt_AIW_all = ma.masked_all(len(time))
salt_NADW_all = ma.masked_all(len(time))
salt_ABW_all = ma.masked_all(len(time))
vel_ASW_all = ma.masked_all(len(time))
vel_AIW_all = ma.masked_all(len(time))
vel_NADW_all = ma.masked_all(len(time))
vel_ABW_all = ma.masked_all(len(time))

for file_i in range(len(files)):
    #Now determine for each year (one file per model year)
    print(files[file_i])

    lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index)

    #Determine the meridional transport
    transport = v_vel * layer_field * grid_x

    #Determine the section averaged velocity (barotropic)
    vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x)

    #Determine the overturning velocity (baroclinic)
    vel_baroclinic = v_vel - vel_barotropic

    #Determine the zonal means (salinity as anomaly w.r.t. 35 g/kg)
    salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0
    transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1)

    #-----------------------------------------------------------------------------------------
    #Get the water properties: classify each (depth, lon) cell as
    #1 = ASW, 2 = AIW, 3 = NADW, 4 = ABW
    water_prop = ma.masked_all((len(depth), len(lon)))

    #North Atlantic Deep Water (NADW) has negative meridional velocities;
    #take the first level below 700 m with southward baroclinic transport
    #NOTE(review): [0][0] raises IndexError if no such level exists — unlike
    #the ABW search below, there is no fallback here; confirm this is safe
    depth_index_NADW = np.where((depth >= 700) & (transport_clin <= 0))[0][0]

    #Antarctic bottom water (ABW) is directly below the NADW, get the first index
    depth_index_ABW = np.where((depth >= 3000) & (transport_clin >= 0))[0]

    if len(depth_index_ABW) == 0:
        #Assume below 4000m depth the ABW
        depth_index_ABW = np.where(depth >= 4000)[0][0]
    else:
        depth_index_ABW = depth_index_ABW[0]

    #Later assignments overwrite earlier ones, so the order of the four
    #conditions below matters (e.g. AIW overrides ASW below 500 m)
    for depth_i in range(len(depth)):

        if depth_i < depth_index_NADW:
            #Surface water
            water_prop[depth_i] = 1.0

        if depth[depth_i] >= 500 and depth_i < depth_index_NADW:
            #Antarctic Intermediate water
            water_prop[depth_i] = 2.0

        if depth_i >= depth_index_NADW and depth_i < depth_index_ABW:
            #North Atlantic Deep Water (NADW)
            water_prop[depth_i] = 3.0

        if depth_i >= depth_index_ABW:
            #The ABW is defined below the NADW
            water_prop[depth_i] = 4.0

    #Mask out land points using the velocity mask
    water_prop = ma.masked_array(water_prop, mask = v_vel.mask)

    #-----------------------------------------------------------------------------------------
    #Area weights restricted to each water mass, normalised to sum to 1
    area_ASW = ma.masked_where(water_prop != 1.0, layer_field_area)
    area_AIW = ma.masked_where(water_prop != 2.0, layer_field_area)
    area_NADW = ma.masked_where(water_prop != 3.0, layer_field_area)
    area_ABW = ma.masked_where(water_prop != 4.0, layer_field_area)
    area_ASW = area_ASW / np.sum(area_ASW)
    area_AIW = area_AIW / np.sum(area_AIW)
    area_NADW = area_NADW / np.sum(area_NADW)
    area_ABW = area_ABW / np.sum(area_ABW)

    #Determine the spatial means (area-weighted) per water mass
    vel_ASW_all[file_i] = np.sum(vel_baroclinic * area_ASW)
    vel_AIW_all[file_i] = np.sum(vel_baroclinic * area_AIW)
    vel_NADW_all[file_i] = np.sum(vel_baroclinic * area_NADW)
    vel_ABW_all[file_i] = np.sum(vel_baroclinic * area_ABW)
    salt_ASW_all[file_i] = np.sum(salt * area_ASW)
    salt_AIW_all[file_i] = np.sum(salt * area_AIW)
    salt_NADW_all[file_i] = np.sum(salt * area_NADW)
    salt_ABW_all[file_i] = np.sum(salt * area_ABW)

    #Determine the zonally integrated baroclinic transport per water mass
    transport_ASW = np.sum(ma.masked_where(water_prop != 1.0, vel_baroclinic * layer_field * grid_x), axis = 1)
    transport_AIW = np.sum(ma.masked_where(water_prop != 2.0, vel_baroclinic * layer_field * grid_x), axis = 1)
    transport_NADW = np.sum(ma.masked_where(water_prop != 3.0, vel_baroclinic * layer_field * grid_x), axis = 1)
    transport_ABW = np.sum(ma.masked_where(water_prop != 4.0, vel_baroclinic * layer_field * grid_x), axis = 1)

    #Determine the transport per depth layer (in Sv) and take sum to determine total transport
    transport_all[file_i] = np.sum(transport) / 1000000.0

    #Determine the total salinity transport (freshwater-equivalent, in Sv)
    transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0
    transport_salt_ASW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ASW * salt_zonal) / 1000000.0
    transport_salt_AIW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_AIW * salt_zonal) / 1000000.0
    transport_salt_NADW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_NADW * salt_zonal) / 1000000.0
    transport_salt_ABW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ABW * salt_zonal) / 1000000.0

#-----------------------------------------------------------------------------------------

print('Data is written to file')
fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w')

fh.createDimension('time', len(time))

fh.createVariable('time', float, ('time'), zlib=True)
fh.createVariable('Transport', float, ('time'), zlib=True)
fh.createVariable('F_OV', float, ('time'), zlib=True)
fh.createVariable('F_OV_ASW', float, ('time'), zlib=True)
fh.createVariable('F_OV_AIW', float, ('time'), zlib=True)
fh.createVariable('F_OV_NADW', float, ('time'), zlib=True)
fh.createVariable('F_OV_ABW', float, ('time'), zlib=True)
fh.createVariable('SALT_ASW', float, ('time'), zlib=True)
fh.createVariable('SALT_AIW', float, ('time'), zlib=True)
fh.createVariable('SALT_NADW', float, ('time'), zlib=True)
fh.createVariable('SALT_ABW', float, ('time'), zlib=True)
fh.createVariable('VVEL_ASW', float, ('time'), zlib=True)
fh.createVariable('VVEL_AIW', float, ('time'), zlib=True)
fh.createVariable('VVEL_NADW', float, ('time'), zlib=True)
fh.createVariable('VVEL_ABW', float, ('time'), zlib=True)

fh.variables['Transport'].longname = 'Volume transport'
fh.variables['F_OV'].longname = 'Fresh water transport'
fh.variables['F_OV_ASW'].longname = 'Fresh water transport (Atlantic Surface Water)'
fh.variables['F_OV_AIW'].longname = 'Fresh water transport (Antarctic Intermediate Water)'
fh.variables['F_OV_NADW'].longname = 'Fresh water transport (North Atlantic Deep Water)'
fh.variables['F_OV_ABW'].longname = 'Fresh water transport (Antarctic Bottom Water)'
fh.variables['SALT_ASW'].longname = 'Salinity (Atlantic Surface Water)'
fh.variables['SALT_AIW'].longname = 'Salinity (Antarctic Intermediate Water)'
fh.variables['SALT_NADW'].longname = 'Salinity (North Atlantic Deep Water)'
fh.variables['SALT_ABW'].longname = 'Salinity (Antarctic Bottom Water)'
fh.variables['VVEL_ASW'].longname = 'Meridional velocity (Atlantic Surface Water)'
fh.variables['VVEL_AIW'].longname = 'Meridional velocity (Antarctic Intermediate Water)'
fh.variables['VVEL_NADW'].longname = 'Meridional velocity (North Atlantic Deep Water)'
fh.variables['VVEL_ABW'].longname = 'Meridional velocity (Antarctic Bottom Water)'

fh.variables['time'].units = 'Year'
fh.variables['Transport'].units = 'Sv'
fh.variables['F_OV'].units = 'Sv'
fh.variables['F_OV_ASW'].units = 'Sv'
fh.variables['F_OV_AIW'].units = 'Sv'
fh.variables['F_OV_NADW'].units = 'Sv'
fh.variables['F_OV_ABW'].units = 'Sv'
fh.variables['SALT_ASW'].units = 'g/kg'
fh.variables['SALT_AIW'].units = 'g/kg'
fh.variables['SALT_NADW'].units = 'g/kg'
fh.variables['SALT_ABW'].units = 'g/kg'
fh.variables['VVEL_ASW'].units = 'cm/s'
fh.variables['VVEL_AIW'].units = 'cm/s'
fh.variables['VVEL_NADW'].units = 'cm/s'
fh.variables['VVEL_ABW'].units = 'cm/s'

#Writing data to correct variable (velocities converted from m/s to cm/s)
fh.variables['time'][:] = time
fh.variables['Transport'][:] = transport_all
fh.variables['F_OV'][:] = transport_salt_all
fh.variables['F_OV_ASW'][:] = transport_salt_ASW_all
fh.variables['F_OV_AIW'][:] = transport_salt_AIW_all
fh.variables['F_OV_NADW'][:] = transport_salt_NADW_all
fh.variables['F_OV_ABW'][:] = transport_salt_ABW_all
fh.variables['SALT_ASW'][:] = salt_ASW_all
fh.variables['SALT_AIW'][:] = salt_AIW_all
fh.variables['SALT_NADW'][:] = salt_NADW_all
fh.variables['SALT_ABW'][:] = salt_ABW_all
fh.variables['VVEL_ASW'][:] = vel_ASW_all * 100.0
fh.variables['VVEL_AIW'][:] = vel_AIW_all * 100.0
fh.variables['VVEL_NADW'][:] = vel_NADW_all * 100.0
fh.variables['VVEL_ABW'][:] = vel_ABW_all * 100.0

fh.close()
#Generates the Atlantic sector fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month
#Place this file in the corresponding directory
#You probably need to change the interpolated output directory path

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
from scipy.interpolate import griddata

directory_data = '/pscratch/sd/a/abarthel/data/E3SMv2.1/20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso/archive/ocn/hist/'
directory = '../../Data/'

def RHO_0(T, S):
    #Reference density which is not pressure dependent
    #T: temperature (deg C), S: salinity (g/kg); returns density (kg/m^3)
    #NOTE(review): polynomial surface-pressure equation of state
    #(EOS-80 style) — coefficients not independently verified here

    rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) )

    return rho
#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Atlantic sector bounds (degrees)
lon_min = -50
lon_max = 20
lat_min = -71
lat_max = 1

files = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Allocate the time axis (one entry per monthly file)
#NOTE(review): this rebinds the imported `time` module name to an array
time = np.zeros(len(files))

for year_i in range(len(files)):
    #Year and month are encoded in the file name as ...YYYY-MM.nc
    date = files[year_i][-13:-3]
    year = int(date[0:4])
    month = int(date[5:7])

    time[year_i] = year + (month-1) / 12.0


#-----------------------------------------------------------------------------------------

#Read the pre-interpolated layer-thickness grid and restrict to the sector
fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

lon = fh.variables['lon'][:]
lat = fh.variables['lat'][:]

lon_min_index = (np.abs(lon - lon_min)).argmin()
lon_max_index = (np.abs(lon - lon_max)).argmin()+1
lat_min_index = (np.abs(lat - lat_min)).argmin()
lat_max_index = (np.abs(lat - lat_max)).argmin()+1

lon = lon[lon_min_index:lon_max_index]
lat = lat[lat_min_index:lat_max_index]

layer = fh.variables['timeMonthly_avg_layerThickness'][0]   #Layer thickness (m)

fh.close()

#Use general depth coordinate
depth = np.zeros(len(layer)+1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries by stacking layer thicknesses of one
    #reference water column
    #NOTE(review): [87, 255] is presumably a deep open-ocean grid point whose
    #column spans all layers — confirm
    depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255]

#Take the mean to find general depth array (mid-layer depths)
depth = 0.5 * (depth[1:] + depth[:-1])
layer = layer[:, lat_min_index:lat_max_index, lon_min_index:lon_max_index]

#-----------------------------------------------------------------------------------------

#One entry per model year
time_year = ma.masked_all(int(len(time)/12))

for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)):
    #Now determine for each year: regrid and average the 12 monthly files
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Loop over each month: remap the native MPAS output onto the
        #0.5x0.5 degree grid (overwrites Regrid_month.nc each iteration)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index]       #Salinity (g/kg)
        temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index]    #Temperature (deg C)
        u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index]               #Zonal velocity (m/s)

        fh.close()

        #Mask cells with non-positive layer thickness (land / below bathymetry)
        salt = ma.masked_where(layer <= 0.0, salt)
        temp = ma.masked_where(layer <= 0.0, temp)
        u_vel = ma.masked_where(layer <= 0.0, u_vel)
        dens = RHO_0(temp, salt)

        if file_i == 0:
            #Allocate the monthly arrays (12 months x depth x lat)
            salt_depth = ma.masked_all((12, len(depth), len(lat)))
            temp_depth = ma.masked_all((12, len(depth), len(lat)))
            u_vel_depth = ma.masked_all((12, len(depth), len(lat)))
            dens_depth = ma.masked_all((12, len(depth), len(lat)))

        #Get the zonal mean
        salt_depth[file_i] = np.mean(salt, axis = 2)
        temp_depth[file_i] = np.mean(temp, axis = 2)
        u_vel_depth[file_i] = np.mean(u_vel, axis = 2)
        dens_depth[file_i] = np.mean(dens, axis = 2)
    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its length
    #(365-day calendar, no leap years)
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Broadcast the month lengths to the data dimensions
    month_days_all = ma.masked_all((len(month_days), len(depth), len(lat)))

    for month_i in range(len(month_days)):
        month_days_all[month_i] = month_days[month_i]

    #Now set mask
    month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask)

    #Normalise the data so the weights sum to 1 along the month axis
    month_days_all = month_days_all / np.sum(month_days_all, axis = 0)

    #Determine the time mean over the months of choice
    salt_depth = np.sum(salt_depth * month_days_all, axis = 0)
    temp_depth = np.sum(temp_depth * month_days_all, axis = 0)
    u_vel_depth = np.sum(u_vel_depth * month_days_all, axis = 0)
    dens_depth = np.sum(dens_depth * month_days_all, axis = 0)
    #-----------------------------------------------------------------------------------------

    #Write the yearly-averaged, zonally-averaged sector fields
    filename = directory+'Data/Atlantic_sector/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lat', len(lat))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lat', float, ('lat'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('TEMP', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('UVEL', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('POT_DENS', float, ('depth', 'lat'), zlib=True)

    fh.variables['depth'].longname = 'Mid-level depth'
    fh.variables['lat'].longname = 'Array of latitudes'
    fh.variables['SALT'].longname = 'Zonally-averaged salinity'
    fh.variables['TEMP'].longname = 'Zonally-averaged potential temperature'
    fh.variables['UVEL'].longname = 'Zonally-averaged zonal velocity'
    fh.variables['POT_DENS'].longname = 'Zonally-averaged potential density'

    fh.variables['depth'].units = 'm'
    fh.variables['lat'].units = 'degrees N'
    fh.variables['SALT'].units = 'g/kg'
    fh.variables['TEMP'].units = 'deg C'
    fh.variables['UVEL'].units = 'm/s'
    fh.variables['POT_DENS'].units = 'kg/m^3'


    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['lat'][:] = lat
    fh.variables['SALT'][:] = salt_depth
    fh.variables['TEMP'][:] = temp_depth
    fh.variables['UVEL'][:] = u_vel_depth
    fh.variables['POT_DENS'][:] = dens_depth


    fh.close()
#Generates the Drake Passage fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable
#timeMonthly_avg_layerThickness for one particular month and place this file
#in the corresponding directory
#You probably need to change the interpolated output directory path

import glob, os
import numpy as np
import numpy.ma as ma
import netCDF4 as netcdf

directory_data	= '/pscratch/sd/a/abarthel/data/E3SMv2.1/20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso/archive/ocn/hist/'
directory	= '../../Data/'

def RHO_0(T, S):
    """Reference density (kg/m^3), not pressure dependent.

    T: potential temperature (deg C), S: salinity (g/kg).
    Polynomial equation of state evaluated at surface pressure
    (coefficients resemble EOS-80 -- TODO confirm exact source).
    """
    rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) )

    return rho

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Meridional extent and longitude of the Drake Passage section
lat_min		= -67
lat_max		= -54.9
lon_section	= -66.3

files	= glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Model time (year + month fraction) for each monthly file, parsed from the file name
#(note: do not import the stdlib "time" module here, this array would shadow it)
time = np.zeros(len(files))

for year_i in range(len(files)):
    date         = files[year_i][-13:-3]  #YYYY-MM-DD part of the file name
    year         = int(date[0:4])
    month        = int(date[5:7])

    time[year_i] = year + (month-1) / 12.0

#-----------------------------------------------------------------------------------------

fh	= netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

lon	= fh.variables['lon'][:]
lat	= fh.variables['lat'][:]

lon_index     = (np.abs(lon - lon_section)).argmin()
lat_min_index = (np.abs(lat - lat_min)).argmin()
lat_max_index = (np.abs(lat - lat_max)).argmin()+1

lon	= lon[lon_index]
lat	= lat[lat_min_index:lat_max_index]
layer	= fh.variables['timeMonthly_avg_layerThickness'][0]	#Layer thickness (m)

fh.close()

#Use general depth coordinate
depth = np.zeros(len(layer)+1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries by accumulating layer thicknesses
    #NOTE(review): (87, 255) is a fixed reference grid cell on the full 0.5 degree
    #grid -- confirm it is a deep-ocean column so all layers are valid there
    depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255]

#Take the mean to find general (mid-layer) depth array
depth = 0.5 * (depth[1:] + depth[:-1])
layer = layer[:, lat_min_index:lat_max_index, lon_index]

#Meridional grid-cell length (m) of the 0.5 degree grid, constant along the section
dy = 6371000 * 2 * np.pi * 0.5 / 360 + np.zeros(len(lat))

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time)/12))

for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)):
    #Now determine for each month
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Loop over each month: regrid onto the 0.5x0.5 grid (writes Regrid_month.nc)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        salt  = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_index]    #Salinity (g/kg)
        temp  = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_index] #Temperature (deg C)
        u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_index]             #Zonal velocity (m/s)

        fh.close()

        #Mask land points / vanished layers
        salt  = ma.masked_where(layer <= 0.0, salt)
        temp  = ma.masked_where(layer <= 0.0, temp)
        u_vel = ma.masked_where(layer <= 0.0, u_vel)
        dens  = RHO_0(temp, salt)

        if file_i == 0:
            #Empty arrays for the 12 months of this year
            salt_depth  = ma.masked_all((12, len(depth), len(lat)))
            temp_depth  = ma.masked_all((12, len(depth), len(lat)))
            u_vel_depth = ma.masked_all((12, len(depth), len(lat)))
            dens_depth  = ma.masked_all((12, len(depth), len(lat)))

        #Store the monthly section
        salt_depth[file_i]  = salt
        temp_depth[file_i]  = temp
        u_vel_depth[file_i] = u_vel
        dens_depth[file_i]  = dens

    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its number of days
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Fill the weights with the same dimensions as the data
    month_days_all = ma.masked_all((len(month_days), len(depth), len(lat)))

    for month_i in range(len(month_days)):
        month_days_all[month_i] = month_days[month_i]

    #Apply the data mask, then normalise so the weights sum to 1 over the year
    month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask)
    month_days_all = month_days_all / np.sum(month_days_all, axis = 0)

    #Determine the time mean over the months of choice
    salt_depth  = np.sum(salt_depth * month_days_all, axis = 0)
    temp_depth  = np.sum(temp_depth * month_days_all, axis = 0)
    u_vel_depth = np.sum(u_vel_depth * month_days_all, axis = 0)
    dens_depth  = np.sum(dens_depth * month_days_all, axis = 0)

    #-----------------------------------------------------------------------------------------
    #Write the yearly-averaged Drake Passage section to file

    filename = directory+'Data/Drake_Passage/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lat', len(lat))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lat', float, ('lat'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('DY', float, ('lat'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('TEMP', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('UVEL', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('POT_DENS', float, ('depth', 'lat'), zlib=True)

    fh.variables['depth'].longname    = 'Mid-level depth'
    fh.variables['lat'].longname      = 'Array of latitudes'
    fh.variables['layer'].longname    = 'Thickness of layer'
    fh.variables['DY'].longname       = 'y-spacing'
    fh.variables['SALT'].longname     = 'Salinity'
    fh.variables['TEMP'].longname     = 'Potential temperature'
    fh.variables['UVEL'].longname     = 'Zonal velocity'
    fh.variables['POT_DENS'].longname = 'Potential density'

    fh.variables['depth'].units    = 'm'
    fh.variables['lat'].units      = 'degrees N'
    fh.variables['layer'].units    = 'm'
    fh.variables['DY'].units       = 'm'
    fh.variables['SALT'].units     = 'g/kg'
    fh.variables['TEMP'].units     = 'deg C'
    fh.variables['UVEL'].units     = 'm/s'
    fh.variables['POT_DENS'].units = 'kg/m^3'

    #Writing data to correct variable
    fh.variables['depth'][:]    = depth
    fh.variables['lat'][:]      = lat
    fh.variables['layer'][:]    = layer
    fh.variables['DY'][:]       = dy
    fh.variables['SALT'][:]     = salt_depth
    fh.variables['TEMP'][:]     = temp_depth
    fh.variables['UVEL'][:]     = u_vel_depth
    fh.variables['POT_DENS'][:] = dens_depth

    fh.close()
#Generates the FOV fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable
#timeMonthly_avg_layerThickness for one particular month and place this file
#in the corresponding directory
#You probably need to change the interpolated output directory path

import glob, os
import numpy as np
import numpy.ma as ma
import netCDF4 as netcdf

directory_data	= '/pscratch/sd/a/abarthel/data/E3SMv2.1/20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso/archive/ocn/hist/'
directory	= '../../Data/'

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

files	= glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Model time (year + month fraction) for each monthly file, parsed from the file name
#(note: do not import the stdlib "time" module here, this array would shadow it)
time = np.zeros(len(files))

for year_i in range(len(files)):
    date         = files[year_i][-13:-3]  #YYYY-MM-DD part of the file name
    year         = int(date[0:4])
    month        = int(date[5:7])

    time[year_i] = year + (month-1) / 12.0

#-----------------------------------------------------------------------------------------

fh    = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

layer = fh.variables['timeMonthly_avg_layerThickness'][0]	#Layer thickness (m)

fh.close()

#Use general depth coordinate
depth = np.zeros(len(layer)+1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries by accumulating layer thicknesses
    #NOTE(review): (87, 255) is a fixed reference grid cell -- confirm it is a
    #deep-ocean column so all layers are valid there
    depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255]

#Take the mean to find general (mid-layer) depth array
depth = 0.5 * (depth[1:] + depth[:-1])

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time)/12))

for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)):
    #Now determine for each month
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Loop over each month: regrid onto the 0.5x0.5 grid (writes Regrid_month.nc)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_velocityMeridional')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        lon   = fh.variables['lon'][:]
        lat   = fh.variables['lat'][:]
        salt  = fh.variables['timeMonthly_avg_activeTracers_salinity'][0]	#Salinity (g/kg)
        v_vel = fh.variables['timeMonthly_avg_velocityMeridional'][0]		#Meridional velocity (m/s)

        fh.close()

        #Mask land points / vanished layers
        salt  = ma.masked_where(layer <= 0.0, salt)
        v_vel = ma.masked_where(layer <= 0.0, v_vel)

        for lat_section in [-34, 26, 60]:
            #Get the lat index of this section
            lat_index = (np.abs(lat - lat_section)).argmin()

            if lat_section == -34:
                #Section at 34S, start of Atlantic Sector
                lon_1, lon_2 = 250, 401
                section_name = 'FOV_section_34S'

                if year_i == int(np.min(time)):
                    #Get the layer and grid spacing for the section (first year only)
                    lon_34S   = lon[lon_1:lon_2]
                    layer_34S = layer[:, lat_index, lon_1:lon_2]
                    dx_34S    = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_34S))

            if lat_section == 26:
                #Section at 26N, RAPID array
                lon_1, lon_2 = 198, 335
                section_name = 'FOV_section_26N'

                if year_i == int(np.min(time)):
                    #Get the layer and grid spacing for the section (first year only)
                    lon_26N   = lon[lon_1:lon_2]
                    layer_26N = layer[:, lat_index, lon_1:lon_2]
                    dx_26N    = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_26N))

            if lat_section == 60:
                #Section at 60N (subpolar North Atlantic)
                lon_1, lon_2 = 230, 373
                section_name = 'FOV_section_60N'

                if year_i == int(np.min(time)):
                    #Get the layer and grid spacing for the section (first year only)
                    lon_60N   = lon[lon_1:lon_2]
                    layer_60N = layer[:, lat_index, lon_1:lon_2]
                    dx_60N    = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_60N))

            if file_i == 0 and lat_section == -34:
                #Make empty arrays for the months
                v_vel_34S = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_34S  = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if file_i == 0 and lat_section == 26:
                #Make empty arrays for the months
                v_vel_26N = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_26N  = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if file_i == 0 and lat_section == 60:
                #Make empty arrays for the months
                v_vel_60N = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_60N  = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if lat_section == -34:
                #Now save the data to the general array
                v_vel_34S[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_34S[file_i]  = salt[:, lat_index, lon_1:lon_2]

            if lat_section == 26:
                #Now save the data to the general array
                v_vel_26N[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_26N[file_i]  = salt[:, lat_index, lon_1:lon_2]

            if lat_section == 60:
                #Now save the data to the general array
                v_vel_60N[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_60N[file_i]  = salt[:, lat_index, lon_1:lon_2]

    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its number of days
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Fill the weights with the same dimensions as the data
    month_days_34S = ma.masked_all((len(month_days), len(depth), len(lon_34S)))
    month_days_26N = ma.masked_all((len(month_days), len(depth), len(lon_26N)))
    month_days_60N = ma.masked_all((len(month_days), len(depth), len(lon_60N)))

    for month_i in range(len(month_days)):
        month_days_34S[month_i] = month_days[month_i]
        month_days_26N[month_i] = month_days[month_i]
        month_days_60N[month_i] = month_days[month_i]

    #Apply the data mask, then normalise so the weights sum to 1 over the year
    month_days_34S = ma.masked_array(month_days_34S, mask = salt_34S.mask)
    month_days_26N = ma.masked_array(month_days_26N, mask = salt_26N.mask)
    month_days_60N = ma.masked_array(month_days_60N, mask = salt_60N.mask)

    month_days_34S = month_days_34S / np.sum(month_days_34S, axis = 0)
    month_days_26N = month_days_26N / np.sum(month_days_26N, axis = 0)
    month_days_60N = month_days_60N / np.sum(month_days_60N, axis = 0)

    #-----------------------------------------------------------------------------------------

    #Determine the time mean over the months of choice
    v_vel_34S = np.sum(v_vel_34S * month_days_34S, axis = 0)
    salt_34S  = np.sum(salt_34S * month_days_34S, axis = 0)
    v_vel_26N = np.sum(v_vel_26N * month_days_26N, axis = 0)
    salt_26N  = np.sum(salt_26N * month_days_26N, axis = 0)
    v_vel_60N = np.sum(v_vel_60N * month_days_60N, axis = 0)
    salt_60N  = np.sum(salt_60N * month_days_60N, axis = 0)

    #-----------------------------------------------------------------------------------------
    #Write the yearly-averaged 34S section to file

    filename = directory+'Data/FOV_section_34S/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_34S))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname   = 'Array of longtidues'
    fh.variables['DX'].longname    = 'x-spacing'
    fh.variables['VVEL'].longname  = 'Velocity in meridional direction'
    fh.variables['SALT'].longname  = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units   = 'degrees E'
    fh.variables['DX'].units    = 'm'
    fh.variables['VVEL'].units  = 'm/s'
    fh.variables['SALT'].units  = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_34S
    fh.variables['lon'][:]   = lon_34S
    fh.variables['DX'][:]    = dx_34S
    fh.variables['VVEL'][:]  = v_vel_34S
    fh.variables['SALT'][:]  = salt_34S

    fh.close()

    #-----------------------------------------------------------------------------------------
    #Write the yearly-averaged 26N section to file

    filename = directory+'Data/FOV_section_26N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_26N))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname   = 'Array of longtidues'
    fh.variables['DX'].longname    = 'x-spacing'
    fh.variables['VVEL'].longname  = 'Velocity in meridional direction'
    fh.variables['SALT'].longname  = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units   = 'degrees E'
    fh.variables['DX'].units    = 'm'
    fh.variables['VVEL'].units  = 'm/s'
    fh.variables['SALT'].units  = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_26N
    fh.variables['lon'][:]   = lon_26N
    fh.variables['DX'][:]    = dx_26N
    fh.variables['VVEL'][:]  = v_vel_26N
    fh.variables['SALT'][:]  = salt_26N

    fh.close()

    #-----------------------------------------------------------------------------------------
    #Write the yearly-averaged 60N section to file

    filename = directory+'Data/FOV_section_60N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_60N))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname   = 'Array of longtidues'
    fh.variables['DX'].longname    = 'x-spacing'
    fh.variables['VVEL'].longname  = 'Velocity in meridional direction'
    fh.variables['SALT'].longname  = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units   = 'degrees E'
    fh.variables['DX'].units    = 'm'
    fh.variables['VVEL'].units  = 'm/s'
    fh.variables['SALT'].units  = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_60N
    fh.variables['lon'][:]   = lon_60N
    fh.variables['DX'][:]    = dx_60N
    fh.variables['VVEL'][:]  = v_vel_60N
    fh.variables['SALT'][:]  = salt_60N

    fh.close()
#Generates the mixed layer depth fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable
#timeMonthly_avg_layerThickness for one particular month and place this file
#in the corresponding directory
#You probably need to change the interpolated output directory path

import glob, os
import math
import numpy as np
import numpy.ma as ma
import netCDF4 as netcdf

directory_data	= '/pscratch/sd/a/abarthel/data/E3SMv2.1/20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso/archive/ocn/hist/'
directory	= '../../Data/'

def Distance(lon1, lat1, lon2, lat2):
    """Returns distance (m) between two points on the globe.

    Coordinates need input in degrees; uses the haversine formula.
    """
    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2]) #Convert to radians

    #Haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a    = math.sin(dlat/2.0)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2.0)**2
    c    = 2.0 * math.asin(math.sqrt(a))
    r    = 6371000.0 # Radius of earth in meters

    return c * r #Distance between two points in meter

def GridCellComputer(longitude, latitude):
    """Determines the grid-cell lengths (m).

    Returns two 2-D arrays (lat, lon): zonal and meridional cell length.
    """
    #Define empty arrays for the lengths per grid cell
    grid_x = np.zeros((len(latitude), len(longitude)))
    grid_y = np.zeros((len(latitude), len(longitude)))

    for lat_i in range(len(latitude)):
        #Zonal length (m) is latitude dependent; meridional length is longitude independent
        #NOTE(review): 0.08333206 deg (~1/12 deg) spacing looks inconsistent with the
        #0.5x0.5 grid used elsewhere in these scripts -- confirm the intended resolution
        length_zonal_grid      = Distance(0.0, latitude[lat_i], 0.08333206, latitude[lat_i])
        length_meridional_grid = Distance(0.0, 0.0, 0.0, 0.08333206)

        grid_x[lat_i] = length_zonal_grid
        grid_y[lat_i] = length_meridional_grid

    return grid_x, grid_y

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Region of interest and reference depth level (m)
lat_min     = -71.25
lat_max     = 1.25
lon_min     = -70.25
lon_max     = 20.25
depth_level = 500

files = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Model time (year + month fraction) for each monthly file, parsed from the file name
#(note: do not import the stdlib "time" module here, this array would shadow it)
time = np.zeros(len(files))

for year_i in range(len(files)):
    date         = files[year_i][-13:-3]  #YYYY-MM-DD part of the file name
    year         = int(date[0:4])
    month        = int(date[5:7])

    time[year_i] = year + (month-1) / 12.0

#-----------------------------------------------------------------------------------------

fh  = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

lon = fh.variables['lon'][:]
lat = fh.variables['lat'][:]

lon_min_index = (np.abs(lon - lon_min)).argmin()
lon_max_index = (np.abs(lon - lon_max)).argmin()+1
lat_min_index = (np.abs(lat - lat_min)).argmin()
lat_max_index = (np.abs(lat - lat_max)).argmin()+1

lon   = lon[lon_min_index:lon_max_index]
lat   = lat[lat_min_index:lat_max_index]

layer = fh.variables['timeMonthly_avg_layerThickness'][0]	#Layer thickness (m)

fh.close()

#Use general depth coordinate
depth = np.zeros(len(layer)+1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries by accumulating layer thicknesses
    #NOTE(review): (87, 255) is a fixed reference grid cell on the full grid --
    #confirm it is a deep-ocean column so all layers are valid there
    depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255]

#Take the mean to find general (mid-layer) depth array
depth = 0.5 * (depth[1:] + depth[:-1])

depth_index = (np.abs(depth-depth_level)).argmin()

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time)/12))

for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)):
    #Now determine for each month
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Loop over each month: regrid onto the 0.5x0.5 grid (writes Regrid_month.nc)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_dThreshMLD,timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        mixed = fh.variables['timeMonthly_avg_dThreshMLD'][0, lat_min_index:lat_max_index, lon_min_index:lon_max_index]                           #Mixed layer depth (m)
        salt  = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, depth_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index]    #Salinity (g/kg)
        temp  = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, depth_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (deg C)

        fh.close()

        if file_i == 0:
            #Empty arrays for the 12 months of this year
            temp_month  = ma.masked_all((12, len(lat), len(lon)))
            salt_month  = ma.masked_all((12, len(lat), len(lon)))
            mixed_month = ma.masked_all((12, len(lat), len(lon)))

        #Save the monthly fields
        temp_month[file_i]  = temp
        salt_month[file_i]  = salt
        mixed_month[file_i] = mixed

    #------------------------------------------------------------------------------
    #Write the monthly fields of this year to file (no annual averaging here)

    filename = directory+'Data/Mixed_layer/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('month', 12)
    fh.createDimension('lat', len(lat))
    fh.createDimension('lon', len(lon))

    fh.createVariable('month', float, ('month'), zlib=True)
    fh.createVariable('lat', float, ('lat'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('TEMP', float, ('month', 'lat', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('month', 'lat', 'lon'), zlib=True)
    fh.createVariable('MXL', float, ('month', 'lat', 'lon'), zlib=True)

    fh.variables['month'].longname = 'Month'
    fh.variables['lat'].longname   = 'Array of latitudes'
    fh.variables['lon'].longname   = 'Array of longitudes'
    fh.variables['TEMP'].longname  = 'Potential temperature (500 m depth)'
    fh.variables['SALT'].longname  = 'Salinity (500 m depth)'
    fh.variables['MXL'].longname   = 'Mixed layer depth'

    fh.variables['lat'].units  = 'degrees N'
    fh.variables['lon'].units  = 'degrees E'
    fh.variables['TEMP'].units = 'degC'
    fh.variables['SALT'].units = 'g/kg'
    fh.variables['MXL'].units  = 'm'

    #Writing data to correct variable
    fh.variables['month'][:] = np.arange(1,13)
    fh.variables['lat'][:]   = lat
    fh.variables['lon'][:]   = lon
    fh.variables['TEMP'][:]  = temp_month
    fh.variables['SALT'][:]  = salt_month
    fh.variables['MXL'][:]   = mixed_month

    fh.close()
return c * r #Distance between two points in meter + +def GridCellComputer(longitude, latitude): + """Determines the area (m^2) per grid cell + returns 2-D array (lat, lon) with the area per box""" + + #Define empty array for latitude per grid cell and the Area covered by the Ocean + grid_x = np.zeros((len(latitude), len(longitude))) + grid_y = np.zeros((len(latitude), len(longitude))) + + for lat_i in range(len(latitude)): + + #Determining zonal length (m), is latitude dependent, therefore, take middle of grid cell + length_zonal_grid = Distance(0.0, latitude[lat_i], np.mean(np.diff(longitude)), latitude[lat_i]) + #Determining meriodinal length (m), is longitude independent + length_meridional_grid = Distance(0.0, 0.0, 0.0, np.mean(np.diff(latitude))) + + grid_x[lat_i] = length_zonal_grid + grid_y[lat_i] = length_meridional_grid + + return grid_x, grid_y, grid_x * grid_y +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lon_min = -110 +lon_max = 143 +lat_min = -80 +lat_max = 25.5 +depth_min = 0 +depth_max = 100 + + +files = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +grid_x, grid_y, area = GridCellComputer(lon, 
lat) + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] +area = area[lat_min_index:lat_max_index, lon_min_index:lon_max_index] +layer = fh.variables['timeMonthly_avg_layerThickness'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) + + +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 +depth = depth[depth_min_index:depth_max_index] +layer = layer[depth_min_index:depth_max_index] + +for lat_i in range(len(lat)): + for lon_i in range(len(lon)): + #Get all the layers which have a maximum depth below given range + if np.sum(layer[:, lat_i, lon_i]) > depth_max: + #Adjust the last layer + layer[-1, lat_i, lon_i] -= (np.sum(layer[:, lat_i, lon_i]) - depth_max) + +#Get the total vertical extent for each layer +total_layer = np.sum(layer, axis = 0) +volume = total_layer * area +area = ma.masked_array(area, mask = volume.mask) + +for depth_i in range(len(depth)): + #Normalise the field by its vertical extent + layer[depth_i] = layer[depth_i] / total_layer + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = 
glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, depth_min_index:depth_max_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + + if file_i == 0: + #Empty array + salt_depth = ma.masked_all((12, len(lat), len(lon))) + + #Get the vertical depth averaged salinity + salt_depth[file_i] = np.sum(salt * layer, axis = 0) + + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_all = ma.masked_all((len(month_days), len(lat), len(lon))) + + for month_i in range(len(month_days)): + month_days_all[month_i] = month_days[month_i] + + #Now set mask + month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask) + + #Normalise the data + month_days_all = month_days_all / np.sum(month_days_all, axis = 0) + + #Determine the time mean over the months of choice + salt_depth = np.sum(salt_depth * month_days_all, axis = 0) + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('lon', 
len(lon)) + fh.createDimension('lat', len(lat)) + + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('AREA', float, ('lat', 'lon'), zlib=True) + fh.createVariable('VOLUME', float, ('lat', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('lat', 'lon'), zlib=True) + + fh.variables['lon'].longname = 'Array of T-longitudes' + fh.variables['lat'].longname = 'Array of T-latitudes' + fh.variables['AREA'].longname = 'Area of T cells' + fh.variables['VOLUME'].longname = 'Volume of T cells' + fh.variables['SALT'].longname = 'Depth-averaged salinity' + + fh.variables['lon'].units = 'degrees E' + fh.variables['lat'].units = 'degrees N' + fh.variables['AREA'].units = 'm^2' + fh.variables['VOLUME'].units = 'm^3' + fh.variables['SALT'].units = 'g/kg' + + #Writing data to correct variable + fh.variables['lon'][:] = lon + fh.variables['lat'][:] = lat + fh.variables['AREA'][:] = area + fh.variables['VOLUME'][:] = volume + fh.variables['SALT'][:] = salt_depth + + fh.close() + + + + diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Field_generation_pycnocline_depth.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Field_generation_pycnocline_depth.py new file mode 100644 index 00000000..fea1704e --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Field_generation_pycnocline_depth.py @@ -0,0 +1,174 @@ +#Generates the 1027 kg/m^3 pycnocline depth fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = 
'/pscratch/sd/a/abarthel/data/E3SMv2.1/20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso/archive/ocn/hist/' +directory = '../../Data/' + +def RHO_0(T, S): + #Reference density which is not pressure dependent + + rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) ) + + return rho +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lon_min = -50 +lon_max = 20 +lat_min = -30 +lat_max = -25 + +files = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = 
(np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] + +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) +layer = layer[:, lat_min_index:lat_max_index, lon_min_index:lon_max_index] + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'20221116.CRYO1950.ne30pg2_SOwISC12to60E2r4.N2Dependent.submeso.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc -T . 
-O /global/homes/r/rvwesten/E3SM/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (deg C) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + temp = ma.masked_where(layer <= 0.0, temp) + dens = RHO_0(temp, salt) + + if file_i == 0: + #Empty array + dens_depth = ma.masked_all((12, len(lat), len(lon))) + + for lat_i in range(len(lat)): + + fig, ax = subplots() + + CS = ax.contour(lon, depth, dens[:, lat_i], levels = [1027]) + + close() + + for item in CS.collections: + for i in item.get_paths(): + v = i.vertices + x = v[:, 0] + y = v[:, 1] + + + for lon_i in range(len(lon)): + #Check the location for each position + if np.all(lon[lon_i] < x) or np.all(lon[lon_i] > x) or lon[lon_i] > 14.95: + continue + + x_index = (np.abs(lon[lon_i] - x)).argmin() + + if np.abs(lon[lon_i] - x[x_index]) > 0.1: + #Too far apart + continue + + + dens_depth[file_i, lat_i, lon_i] = y[x_index] + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/Pycnocline_depth/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('month', 12) + fh.createDimension('lat', len(lat)) + fh.createDimension('lon', len(lon)) + + fh.createVariable('month', float, ('month'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('PD_depth', float, ('month', 'lat', 'lon'), zlib=True) + + fh.variables['lat'].longname = 'Array of latitudes' + fh.variables['lon'].longname = 'Array of longitude' + 
fh.variables['PD_depth'].longname = 'Potential density (1027) depth' + + fh.variables['lat'].units = 'degrees N' + fh.variables['lon'].units = 'degrees E' + fh.variables['PD_depth'].units = 'm' + + + #Writing data to correct variable + fh.variables['month'][:] = np.arange(12)+1 + fh.variables['lat'][:] = lat + fh.variables['lon'][:] = lon + fh.variables['PD_depth'][:] = dens_depth + + + fh.close() + + + diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Grid_plot.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Grid_plot.py new file mode 100644 index 00000000..f42a90a4 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Grid_plot.py @@ -0,0 +1,49 @@ +#Program plots the resolution of the native grid + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +import matplotlib.tri as tri +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset('/global/cfs/cdirs/m4259/mapping_files/map_SOwISC12to60E2r4_to_0.5x0.5degree_bilinear.nc', 'r') + +lon = fh.variables['xc_a'][:] * 180 / np.pi +lat = fh.variables['yc_a'][:] * 180 / np.pi +area_a = fh.variables['area_a'][:] * (180 / np.pi)**2.0 + +fh.close() + +lon[lon > 180] = lon[lon > 180] - 360.0 + +print(np.max(np.sqrt(area_a))) + +fig, ax = plt.subplots(subplot_kw={'projection': ccrs.Robinson()}) + +CS = ax.tripcolor(lon, lat, np.sqrt(area_a), vmin=0, vmax=0.6, cmap='Spectral_r', transform=ccrs.PlateCarree()) + +divider = make_axes_locatable(ax) +ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) +fig.add_axes(ax_cb) + +cbar = 
colorbar(CS, ticks = np.arange(0, 0.61, 0.2), cax=ax_cb) +cbar.set_label('Horizontal resolution ($^{\circ}$)') + +ax.set_global() + +ax.add_feature(cfeature.LAND, zorder=100, edgecolor='k') +ax.coastlines() + +ax.set_title('Grid resolution, E3SM Antarctic') + +show() \ No newline at end of file diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Mixed_layer_plot.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Mixed_layer_plot.py new file mode 100644 index 00000000..a11c25fa --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Mixed_layer_plot.py @@ -0,0 +1,171 @@ +#Program plots the mixed layer depth climatology + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + lat = fh.variables['lat'][:] #Latitude + temp = fh.variables['TEMP'][:] #Temperature + salt = fh.variables['SALT'][:] #Salinity + mixed = fh.variables['MXL'][:] #Depth (m) + + fh.close() + + return lon, lat, temp, salt, mixed + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Mixed_layer/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + 
+#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, lat, temp, salt, mixed = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all = ma.masked_all((len(time)*12, len(lat), len(lon))) +salt_all = ma.masked_all((len(time)*12, len(lat), len(lon))) +mixed_all = ma.masked_all((len(time)*12, len(lat), len(lon))) + +for file_i in range(len(files)): + #Now determine for each month + print(file_i) + + lon, lat, temp, salt, mixed = ReadinData(files[file_i]) + + for month_i in range(len(mixed)): + #Add each month + temp_all[file_i*12+month_i] = temp[month_i] + salt_all[file_i*12+month_i] = salt[month_i] + mixed_all[file_i*12+month_i] = mixed[month_i] + + +temp_month = ma.masked_all((12, len(lat), len(lon))) +salt_month = ma.masked_all((12, len(lat), len(lon))) +mixed_month = ma.masked_all((12, len(lat), len(lon))) + +for month_i in range(12): + #Loop over each month + month_index = np.arange(month_i, len(mixed_all), 12) + temp_month[month_i] = np.mean(temp_all[month_index], axis = 0) + salt_month[month_i] = np.mean(salt_all[month_index], axis = 0) + mixed_month[month_i] = np.mean(mixed_all[month_index], axis = 0) + +#----------------------------------------------------------------------------------------- + +mixed_crop = 200 +factor_mixed_crop = 2 +mixed_month[mixed_month > mixed_crop] = ((mixed_month[mixed_month > mixed_crop] - mixed_crop) / factor_mixed_crop) + mixed_crop + +#----------------------------------------------------------------------------------------- + +month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + +for month_i in range(12): + + 
#----------------------------------------------------------------------------------------- + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, temp_month[month_i] - temp_month[0], levels = np.arange(-1, 1.01, 0.05), extend = 'both', cmap = 'RdBu_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = np.arange(-1, 1.01, 1), cax=ax_cb) + cbar.set_label('Temperature difference ($^{\circ}$C)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+' minus January, E3SM Antarctic') + + #----------------------------------------------------------------------------------------- + + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, salt_month[month_i] - salt_month[0], levels = np.arange(-0.1, 0.101, 0.005), extend = 'both', cmap = 'BrBG_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = np.arange(-0.1, 0.101, 0.1), cax=ax_cb) + cbar.set_label('Salinity difference (g kg$^{-1}$)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+' minus January, E3SM Antarctic') + + #----------------------------------------------------------------------------------------- + + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, mixed_month[month_i], levels = np.arange(0, 400.1, 
10), extend = 'max', cmap = 'Spectral_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = [0, 100, 200, 300, 400], cax=ax_cb) + cbar.ax.set_yticklabels([0, 100, 200, 400, 600]) + cbar.set_label('Mixed layer depth (m)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+', E3SM Antarctic') + + show() + + #----------------------------------------------------------------------------------------- + + + + diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Pycnocline_depth_plot.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Pycnocline_depth_plot.py new file mode 100644 index 00000000..ed14719d --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Pycnocline_depth_plot.py @@ -0,0 +1,120 @@ +#Program plots the pycnocline depth and Hovmoller diagram + +from pylab import * +import netCDF4 as netcdf +import glob, os +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + +#Making pathway to folder with all data +directory = '../../Data/' + +#------------------------------------------------------------------------------ +#--------------------------MAIN SCRIPT STARTS HERE----------------------------- +#------------------------------------------------------------------------------ + +#Only select for 27 years (similar to reanalysis) +time = np.zeros((526-500+1)*12) + +for year_i in range(500, 527): + #Loop over each year + + filename = directory+'Data/Pycnocline_depth/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + #------------------------------------------------------------------------------ + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] + lat = 
fh.variables['lat'][:] + depth = fh.variables['PD_depth'][:] + + fh.close() + + + if year_i == 500: + #Make empty array to save the data + depth_all = ma.masked_all((len(time), len(lat), len(lon))) + + for month_i in range(12): + #Loop over each month + time[(year_i-500)*12+month_i] = year_i + month_i / 12. + depth_all[(year_i-500)*12+month_i] = depth[month_i] + + +filename = directory+'Data/Pycnocline_depth/E3SM_data_year_'+str(500).zfill(4)+'_January.nc' + +fh = netcdf.Dataset(filename, 'r') + +lon_plot = fh.variables['lon'][:] +lat_plot = fh.variables['lat'][:] +depth_plot = fh.variables['PD_depth'][0] + +fh.close() + +#----------------------------------------------------------------------------------------- + +#Take the meridional mean +depth_all = np.mean(depth_all, axis = 1) + +fig, ax = subplots() + +CS = ax.contourf(lon, time, depth_all, levels = np.arange(0, 800.01, 25), extend = 'max', cmap = 'Spectral_r') +cbar = colorbar(CS, ticks = np.arange(0, 800.01, 100)) +cbar.set_label('Pycnocline depth (m)') + +fig, ax = subplots() + +ax.plot(lon, np.mean(depth_all, axis = 0), '-k', linewidth = 2.0) +ax.set_xlim(-50, 20) +ax.set_ylim(1000, 0) +ax.set_ylabel('Depth (m)') +ax.grid() +ax.set_title('Isopycnal depth, E3SM (500 - 526)') + +ax.set_xticks(np.arange(-50, 20.01, 10)) +ax.set_xticklabels(['50$^{\circ}$W', '40$^{\circ}$W', '30$^{\circ}$W', '20$^{\circ}$W', '10$^{\circ}$W','0$^{\circ}$', '10$^{\circ}$E', '20$^{\circ}$E']) + +ax2 = fig.add_axes([0.15, 0.50, 0.45, 0.30], projection = ccrs.PlateCarree()) + +CS = ax2.contourf(lon_plot, lat_plot, depth_plot, levels = np.arange(0, 800.1, 25), extend = 'max', cmap = 'Spectral_r', transform=ccrs.PlateCarree()) +cbar = colorbar(CS, ticks = np.arange(0, 800.01, 200), fraction=0.021, pad=0.04) +cbar.set_label('Isopycnal depth (m)', fontsize = 8) + +ax2.plot([-50, 20], [-30, -30], '--k', transform=ccrs.PlateCarree(), zorder = 100) +ax2.plot([-50, 20], [-25, -25], '--k', transform=ccrs.PlateCarree(), zorder = 100) + +gl = 
ax2.gridlines(draw_labels=False) +ax2.set_extent([-70, 30, -46, 1], ccrs.PlateCarree()) +ax2.coastlines('50m') +ax2.add_feature(cfeature.LAND, zorder=0) +ax2.set_title('Isopycnal depth, E3SM (January 500)', fontsize = 10) + +#----------------------------------------------------------------------------------------- + + +#Remove climatology +for month_i in range(12): + time_index = np.arange(month_i, len(time), 12) + depth_mean = np.mean(depth_all[time_index], axis = 0) + depth_all[time_index] = depth_all[time_index] - depth_mean + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +CS = ax.contourf(lon, time, depth_all, levels = np.arange(-50, 50.01, 5), extend = 'both', cmap = 'RdBu_r') +cbar = colorbar(CS, ticks = np.arange(-50, 50.01, 10)) +cbar.set_label('Pycnocline depth anomaly (m)') + +ax.set_xlim(-50, 20) +ax.set_ylim(500, 527-1/12.) +ax.set_ylabel('Model year') + +ax.set_xticks(np.arange(-50, 20.01, 10)) +ax.set_xticklabels(['50$^{\circ}$W', '40$^{\circ}$W', '30$^{\circ}$W', '20$^{\circ}$W', '10$^{\circ}$W','0$^{\circ}$', '10$^{\circ}$E', '20$^{\circ}$E']) + +ax.plot([15, -47.6], [504.9, 512.8], '-k', linewidth = 3.0) +ax.set_title('Isopycnal depth (30$^{\circ}$S - 25$^{\circ}$S), E3SM') + + +show() diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/SALT_SO_0_100m_plot.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/SALT_SO_0_100m_plot.py new file mode 100644 index 00000000..bea6b55f --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/SALT_SO_0_100m_plot.py @@ -0,0 +1,88 @@ +#Program plots the vertically averaged (upper 100 m) salinity in the Southern Oceaan + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy import stats +from scipy.stats import genextreme +from matplotlib.colors import LogNorm +from cartopy import crs as ccrs, feature as cfeature +from 
mpl_toolkits.axes_grid1 import make_axes_locatable + + +#Making pathway to folder with all data +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 100 +year_start = 500 +year_end = 599 + +files = glob.glob(directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- + +for file_i in range(len(files)): + print(files[file_i]) + fh = netcdf.Dataset(files[file_i], 'r') + + lon = fh.variables['lon'][:] + lat = fh.variables['lat'][:] + salt = fh.variables['SALT'][:] #Salinity + + fh.close() + + if file_i == 0: + salt_all = ma.masked_all((len(files), len(lat), len(lon))) + + salt_all[file_i] = salt + +salt_all = np.mean(salt_all, axis = 0) +#----------------------------------------------------------------------------------------- + +fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + +CS = ax.contourf(lon, lat, salt_all, levels = np.arange(33, 37.1, 0.1), extend = 'both', cmap = 'BrBG_r', transform=ccrs.PlateCarree()) + +divider = make_axes_locatable(ax) +ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) +fig.add_axes(ax_cb) + +cbar = colorbar(CS, ticks = np.arange(33, 37.1, 1), cax=ax_cb) +cbar.set_label('Salinity (g kg$^{-1}$)') + +gl = 
ax.gridlines(draw_labels=True) +gl.top_labels = False +gl.right_labels = False +ax.set_extent([-80, 130, -70, 25], ccrs.PlateCarree()) +ax.coastlines('50m') +ax.add_feature(cfeature.LAND, zorder=0) +ax.set_title('Salinity (0 - 100 m), E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')') + +show() + diff --git a/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Water_properties_34S_plot.py b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Water_properties_34S_plot.py new file mode 100644 index 00000000..f3fe44be --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Antarctic/Program/Ocean/Water_properties_34S_plot.py @@ -0,0 +1,288 @@ +#Program plots sections along 34S + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + #First get the u-grid + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 +year_start = 500 +year_end = 599 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + 
+files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_norm = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in range(len(depth)): + #Normalise each layer + layer_field_norm[depth_i] = layer_field[depth_i] / np.sum(layer_field[depth_i]) + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +vel_all = ma.masked_all((len(time), len(depth))) +vel_salt_all = ma.masked_all((len(time), len(depth))) +salt_all = ma.masked_all((len(time), len(depth), len(lon))) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, 
v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #----------------------------------------------------------------------------------------- + + #Save the meridional baroclinic transport + vel_all[file_i] = np.sum(vel_baroclinic * grid_x_norm, axis = 1) * 100.0 + vel_salt_all[file_i] = (-1.0 / 35.0) * transport_clin * salt_zonal / 10**6.0 + salt_all[file_i] = salt + +layer_norm = layer_field[:, 123] +layer_norm[-1] = layer_norm[-2] +vel_all = np.mean(vel_all, axis = 0) +vel_salt_all = np.mean(vel_salt_all, axis = 0) +vel_salt_all = vel_salt_all / layer_norm * 1000.0 +salt_all = np.mean(salt_all, axis = 0) + +#----------------------------------------------------------------------------------------- +#Get the water properties + +#North Atlantic Deep Water (NADW) has negative meridional velocities +depth_index_NADW = np.where((depth >= 500) & (vel_all <= 0))[0][0] + +#Antarctic bottom water (ABW) is directly below the NADW, get the first index +depth_index_ABW = np.where((depth >= 3000) & (vel_all >= 0))[0][0] + +#The Antarctic Intermediate water is between the NADW and 500 m +depth_index_AIW = np.where(depth >= 500)[0][0] + + +depth_top = np.zeros(len(depth)) + +for depth_i in range(1, len(depth)): + depth_top[depth_i] = depth_top[depth_i - 1] + layer_norm[depth_i - 1] + +depth_AIW = depth_top[depth_index_AIW] +depth_NADW = depth_top[depth_index_NADW] +depth_ABW = depth_top[depth_index_ABW] + +lon_AIW_index = np.where(salt_all[depth_index_AIW].mask == False)[0] +lon_NADW_index = 
np.where(salt_all[depth_index_NADW].mask == False)[0] +lon_ABW_index = np.where(salt_all[depth_index_ABW].mask == False)[0] +lon_AIW_1, lon_AIW_2 = lon[lon_AIW_index[0]], lon[lon_AIW_index[-1]] +lon_NADW_1, lon_NADW_2 = lon[lon_NADW_index[0]], lon[lon_NADW_index[-1]] +lon_ABW_1, lon_ABW_2 = lon[lon_ABW_index[0]], lon[lon_ABW_index[-1]] + +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +if depth_AIW > depth_crop: + depth_AIW = ((depth_AIW - depth_crop) / factor_depth_crop) + depth_crop +if depth_NADW > depth_crop: + depth_NADW = ((depth_NADW - depth_crop) / factor_depth_crop) + depth_crop +if depth_ABW > depth_crop: + depth_ABW = ((depth_ABW - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +cNorm = colors.Normalize(vmin=-1, vmax= 1) +scalarMap = cm.ScalarMappable(norm=cNorm, cmap='RdBu_r') #Using colormap +color_south = scalarMap.to_rgba(-0.5) +color_north = scalarMap.to_rgba(0.5) + +fig, ax = subplots() + +ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k') +plot(vel_all, depth, '-k', linewidth = 2.0) + +ax.set_xlim(-2, 2) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.grid() + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.fill_betweenx(depth, vel_all, where = vel_all >= 0.0, color = color_north, alpha = 0.50) +ax.fill_betweenx(depth, vel_all, where = vel_all <= 
0.0, color = color_south, alpha = 0.50) + +ax.set_xlabel('Meridional velocity (cm s$^{-1}$)') +ax.set_ylabel('Depth (m)') +ax.axvline(x = 0, linestyle = '--', color = 'k') + +ax.text(1.9, 350, 'ASW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 1350, 'NADW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) + +ax.set_title('Meridional velocity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')') +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-60, 20], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) +ax.plot([lon_AIW_1, lon_AIW_2], [depth_AIW, depth_AIW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_NADW_1, lon_NADW_2], [depth_NADW, depth_NADW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_ABW_1, lon_ABW_2], [depth_ABW, depth_ABW], linestyle = '--', linewidth = 2.0, color = 'k') + +CS = contourf(lon, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-60, 20) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-60, 21, 10)) +ax.set_xticklabels(['60$^{\circ}$W', '50$^{\circ}$W', '40$^{\circ}$W', '30$^{\circ}$W', '20$^{\circ}$W', '10$^{\circ}$W','0$^{\circ}$', '10$^{\circ}$E', '20$^{\circ}$E']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * 
factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + + +ax.text(-18, 350, 'ASW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 850, 'AIW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1350, 'NADW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1900, 'ABW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) + +ax.set_title('Salinity, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +cNorm = colors.Normalize(vmin=34, vmax= 36) +scalarMap = cm.ScalarMappable(norm=cNorm, cmap='BrBG_r') #Using colormap +color_fresh = scalarMap.to_rgba(34.5) +color_salt = scalarMap.to_rgba(35.5) + +fig, ax = subplots() + +ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot(vel_salt_all, depth, '-k', linewidth = 2.0) + +ax.set_xlim(-1.5, 1.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.grid() + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_xlabel(r'Freshwater transport (mSv m$^{-1}$)') +ax.set_ylabel('Depth (m)') +ax.axvline(x = 0, linestyle = '--', color = 'k') + +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all >= 0.0, color = color_fresh, alpha = 0.50) +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all <= 0.0, color = color_salt, alpha = 0.50) + +ax.text(1.45, 350, 'ASW', verticalalignment='center', 
horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1350, 'NADW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) + +ax.set_title('Freshwater transport, E3SM Antarctic ('+str(year_start)+' - '+str(year_end)+')') + +show() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport.py new file mode 100644 index 00000000..02f00e92 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport.py @@ -0,0 +1,110 @@ +#Program determines the ACC strength + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + #First get the u-grid + lat = fh.variables['lat'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_y = fh.variables['DY'][:] #Meridional grid cell length (m) + u_vel = fh.variables['UVEL'][depth_min_index:depth_max_index] #Zonal velocity (m/s) + + fh.close() + + return lat, depth, layer, grid_y, u_vel + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +#----------------------------------------------------------------------------------------- + +files = 
glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, layer_field, grid_y, u_vel = ReadinData(files[0], depth_min_index, depth_max_index) + +for lat_i in range(len(lat)): + #Get all the layers which have a maximum depth below given range + if np.sum(layer_field[:, lat_i]) > depth_max: + #Adjust the last layer + layer_field[-1, lat_i] -= (np.sum(layer_field[:, lat_i]) - depth_max) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) + +for time_i in range(len(time)): + #Now determine for each month + print(time_i) + + lat, depth, layer_field_old, grid_y, u_vel = ReadinData(files[time_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = u_vel * layer_field * grid_y + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[time_i] = np.sum(transport) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = 
netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) + +fh.variables['Transport'].long_name = 'Volume transport' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport_plot.py new file mode 100644 index 00000000..7d6770a7 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/ACC_transport_plot.py @@ -0,0 +1,63 @@ +#Plot the ACC strength + +from pylab import * +import numpy +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time = fh.variables['time'][:] +transport = fh.variables['Transport'][:] + +fh.close() + +fh = netcdf.Dataset('../../../E3SM_LR/Data/Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time_low = fh.variables['time'][:] +transport_low = fh.variables['Transport'][:] + +fh.close() + +fig, ax = subplots() + +ax.plot(time_low, transport_low, '-', color = 'gray', linewidth = 1.0) +ax.plot(time, transport, '-k', linewidth = 2.0) 
+ax.set_xlim(1, 60) +ax.set_ylim(90, 210) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) +ax.grid() + +ax.set_title('ACC strength, E3SM Arctic') + +fig, ax = subplots() + +ax.plot(time_low, transport_low, '-', color = 'gray', linewidth = 1.0) +ax.plot(time, transport, '-k', linewidth = 2.0) +ax.set_xlim(1, 200) +ax.set_ylim(90, 210) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) +ax.grid() + +ax.set_title('ACC strength, E3SM Arctic') + +show() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport.py new file mode 100644 index 00000000..3990142e --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport.py @@ -0,0 +1,113 @@ +#Program determines the AMOC strength + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + #First get the u-grid + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- 
+#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 1000 + +lat_FOV = 26 +section_name = 'FOV_section_26N' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel = ReadinData(files[0], depth_min_index, depth_max_index) + +for lon_i in range(len(lon)): + #Get all the layers which have a maximum depth below given range + if np.sum(layer_field[:, lon_i]) > depth_max: + #Adjust the last layer + layer_field[-1, lon_i] -= (np.sum(layer_field[:, lon_i]) - depth_max) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) + +for time_i in range(len(time)): + #Now determine for each month + print(time_i) + + lon, depth, layer_field_old, grid_x, v_vel = ReadinData(files[time_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the transport per depth layer (in 
Sv) and take sum to determine total transport + transport_all[time_i] = np.sum(transport) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) + +fh.variables['Transport'].long_name = 'Volume transport' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport_plot.py new file mode 100644 index 00000000..04bfedb3 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/AMOC_transport_plot.py @@ -0,0 +1,68 @@ +#Program plots the AMOC strength + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 1000 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time = fh.variables['time'][:] +transport = fh.variables['Transport'][:] + +fh.close() + +fh = 
netcdf.Dataset('../../../E3SM_LR/Data/Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time_low = fh.variables['time'][:] +transport_low = fh.variables['Transport'][:] + +fh.close() + +fig, ax = subplots() + +ax.fill_between([-100, 2500], 16, 19, alpha=0.25, edgecolor='orange', facecolor='orange') + +ax.plot(time_low, transport_low, '-', color = 'gray', linewidth = 1.0) +ax.plot(time, transport, '-k', linewidth = 2.0) +ax.set_xlim(1, 60) +ax.set_ylim(-2, 22) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) +ax.grid() + +ax.set_title('AMOC strength, E3SM Arctic') + +fig, ax = subplots() + +ax.fill_between([-100, 2500], 16, 19, alpha=0.25, edgecolor='orange', facecolor='orange') + +ax.plot(time_low, transport_low, '-', color = 'gray', linewidth = 1.0) +ax.plot(time, transport, '-k', linewidth = 2.0) +ax.set_xlim(1, 200) +ax.set_ylim(-2, 22) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) +ax.grid() + +ax.set_title('AMOC strength, E3SM Arctic') + +show() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Atlantic_sector_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Atlantic_sector_plot.py new file mode 100644 index 00000000..2983bea0 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Atlantic_sector_plot.py @@ -0,0 +1,183 @@ +#Program plots the Atlantic Sector + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + #Note that the monthly averages are also available, but these are the annual means + lat = fh.variables['lat'][:] #Latitude + depth = fh.variables['depth'][:] #Depth (m) + temp = fh.variables['TEMP_mean'][:] #Temperature (m/s) + 
salt = fh.variables['SALT_mean'][:] #Salinity (g / kg) + u_vel = fh.variables['UVEL_mean'][:] #Zonal velocity (m / s) + dens = fh.variables['POT_DENS_mean'][:] #Potential density (g / kg) + + fh.close() + + return lat, depth, temp, salt, u_vel, dens + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +year_start = 196 +year_end = 200 + +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Atlantic_sector/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, temp, salt, u_vel, dens = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all = ma.masked_all((len(time), len(depth), len(lat))) +salt_all = ma.masked_all((len(time), len(depth), len(lat))) +u_vel_all = ma.masked_all((len(time), len(depth), len(lat))) +dens_all = ma.masked_all((len(time), len(depth), len(lat))) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lat, depth, temp, salt, u_vel, dens = ReadinData(files[file_i]) + + #Save the data + 
temp_all[file_i] = temp + salt_all[file_i] = salt + u_vel_all[file_i] = u_vel + dens_all[file_i] = dens + +#Take the time mean +temp_all = np.mean(temp_all, axis = 0) +salt_all = np.mean(salt_all, axis = 0) +u_vel_all = np.mean(u_vel_all, axis = 0) +dens_all = np.mean(dens_all, axis = 0) +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Salinity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, temp_all, levels = np.arange(-2, 20.01, 0.5), extend = 'both', cmap = 'Spectral_r') +cbar = 
colorbar(CS, ticks = np.arange(0, 20.01, 5)) +cbar.set_label('Temperature ($^{\circ}$C)') + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Temperature, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-20, 20.01, 1), extend = 'both', cmap = 'RdBu_r') +cbar = colorbar(CS, ticks = np.arange(-20, 20.01, 5)) +cbar.set_label('Zonal velocity (cm s$^{-1}$)') + +CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2) +CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1) +ax.clabel(CS_1, inline=True, fontsize=10, manual = [(-10, 500)]) + + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = 
((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Zonal velocity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +show() +#----------------------------------------------------------------------------------------- diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Drake_Passage_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Drake_Passage_plot.py new file mode 100644 index 00000000..b4f351a8 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Drake_Passage_plot.py @@ -0,0 +1,186 @@ +#Program plots sections along Drake Passage + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + lat = fh.variables['lat'][:] #Latitude + depth = fh.variables['depth'][:] #Depth (m) + temp = fh.variables['TEMP'][:] #Temperature (m/s) + salt = fh.variables['SALT'][:] #Salinity (g / kg) + u_vel = fh.variables['UVEL'][:] #Zonal velocity (m / s) + dens = fh.variables['POT_DENS'][:] #Potential density (g / kg) + + fh.close() + + return lat, depth, temp, salt, u_vel, dens + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + + +year_start = 196 +year_end = 200 + +depth_min = 0 +depth_max = 6000 + +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = 
np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, temp, salt, u_vel, dens = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all = ma.masked_all((len(time), len(depth), len(lat))) +salt_all = ma.masked_all((len(time), len(depth), len(lat))) +u_vel_all = ma.masked_all((len(time), len(depth), len(lat))) +dens_all = ma.masked_all((len(time), len(depth), len(lat))) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lat, depth, temp, salt, u_vel, dens = ReadinData(files[file_i]) + + #Save the data + temp_all[file_i] = temp + salt_all[file_i] = salt + u_vel_all[file_i] = u_vel + dens_all[file_i] = dens + +#Take the time mean +temp_all = np.mean(temp_all, axis = 0) +salt_all = np.mean(salt_all, axis = 0) +u_vel_all = np.mean(u_vel_all, axis = 0) +dens_all = np.mean(dens_all, axis = 0) +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, 
depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Salinity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, temp_all, levels = np.arange(-2, 10.01, 0.25), extend = 'both', cmap = 'Spectral_r') +cbar = colorbar(CS, ticks = np.arange(-2, 10.01, 2)) +cbar.set_label('Temperature ($^{\circ}$C)') + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Temperature, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], 
y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-40, 40.01, 2), extend = 'both', cmap = 'RdBu_r') +cbar = colorbar(CS, ticks = np.arange(-40, 40.01, 10)) +cbar.set_label('Zonal velocity (cm s$^{-1}$)') + +CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2) +CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1) + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Zonal velocity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +show() +#----------------------------------------------------------------------------------------- diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_34S_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_34S_plot.py new file mode 100644 index 00000000..05aa94d0 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_34S_plot.py @@ -0,0 +1,183 @@ +#Program plots the F_ovS and the components + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy import stats + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + time = fh.variables['time'][:] + transport = fh.variables['Transport'][:] #MOC strength (Sv) + FOV = 
fh.variables['F_OV'][:] #Fresh water + FOV_ASW = fh.variables['F_OV_ASW'][:] #Fresh water + FOV_AIW = fh.variables['F_OV_AIW'][:] #Fresh water + FOV_NADW = fh.variables['F_OV_NADW'][:] #Fresh water + FOV_ABW = fh.variables['F_OV_ABW'][:] #Fresh water + salt_ASW = fh.variables['SALT_ASW'][:] #Salinity + salt_AIW = fh.variables['SALT_AIW'][:] #Salinity + salt_NADW = fh.variables['SALT_NADW'][:] #Salinity + salt_ABW = fh.variables['SALT_ABW'][:] #Salininty + vel_ASW = fh.variables['VVEL_ASW'][:] #Meridional velocity + vel_AIW = fh.variables['VVEL_AIW'][:] #Meridional velocity + vel_NADW = fh.variables['VVEL_NADW'][:] #Meridional velocity + vel_ABW = fh.variables['VVEL_ABW'][:] #Meridional velocity + + fh.close() + + return time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW + + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +section_name = 'section_34S' + +#----------------------------------------------------------------------------------------- + +time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW = ReadinData(directory+'Ocean/FOV_index_'+section_name+'.nc') + +FOV_rean, FOV_ASW_rean, FOV_AIW_rean, FOV_NADW_rean, FOV_ABW_rean, FOV_rean_gyre = -0.10138855319303171, -0.12769111454122556, 0.12011490376119702, -0.10644935101861515, 0.012637008605611988, 0.2136790553107374 + +fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_'+section_name+'.nc', 'r') + +FOV_gyre = fh.variables['F_gyre'][:] #Fresh water + +fh.close() + +#----------------------------------------------------------------------------------------- 
+#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_FOV_all = plot(time, FOV, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM') +graph_FOV_gyre = plot(time, FOV_gyre, '-r', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, E3SM') +graph_rean_all = plot(time, np.zeros(len(time))+FOV_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis') +graph_rean_gyre = plot(time, np.zeros(len(time))+FOV_rean_gyre, '--', color = 'firebrick', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 200) +ax.grid() +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) + +ax.fill_between([-100, 600], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange') + +graphs = graph_FOV_all + graph_FOV_gyre +legend_labels = [l.get_label() for l in graphs] +legend_1 = ax.legend(graphs, legend_labels, loc='lower left', ncol=1, framealpha = 1.0, numpoints = 1) + + +graphs = graph_rean_all + graph_rean_gyre +legend_labels = [l.get_label() for l in graphs] +legend_2 = ax.legend(graphs, legend_labels, loc = 'lower right', ncol=1, framealpha = 1.0, numpoints = 1) +ax.add_artist(legend_1) + + +ax.set_title('$F_{\mathrm{ovS}}$ and azonal (gyre) component ($F_{\mathrm{azS}}$), E3SM Arctic') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_ASW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_ASW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') 
+ +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 200) +ax.grid() +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Atlantic Surface Water (ASW), E3SM Arctic') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_AIW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_AIW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 200) +ax.grid() +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Antarctic Intermediate Water (AIW), E3SM Arctic') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_NADW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_NADW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 200) +ax.grid() 
+ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('North Atlantic Deep Water (NADW), E3SM Arctic') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_ABW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_ABW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 200) +ax.grid() +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Antarctic Bottom Water (ABW), E3SM Arctic') + +show() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_60_index.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_60_index.py new file mode 100644 index 00000000..21d89cf9 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_60_index.py @@ -0,0 +1,135 @@ +#Program determines the FOV index for 60N + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = 
fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_60N' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in 
range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) +transport_salt_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[file_i] = np.sum(transport) / 1000000.0 + + #Determine the total salinity transport + transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) +fh.createVariable('F_OV', float, ('time'), zlib=True) + +fh.variables['Transport'].longname = 'Volume transport' +fh.variables['F_OV'].longname = 'Fresh water transport' + 
+fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' +fh.variables['F_OV'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all +fh.variables['F_OV'][:] = transport_salt_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_convergence_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_convergence_plot.py new file mode 100644 index 00000000..71133fd6 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_convergence_plot.py @@ -0,0 +1,68 @@ +#Program plots the freshwater convergence (34S and 60N) + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + time = fh.variables['time'][:] + FOV = fh.variables['F_OV'][:] #Fresh water + + fh.close() + + return time, FOV + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +time, FOV_34S = ReadinData(directory+'Ocean/FOV_index_section_34S.nc') +time, FOV_60N = ReadinData(directory+'Ocean/FOV_index_section_60N.nc') +#----------------------------------------------------------------------------------------- + +FOV_34S_rean, FOV_60N_rean = -0.10138855319303171, -0.027075354933136512 +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_rcp_34S = ax.plot(time, FOV_34S, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM') +graph_rcp_60N = ax.plot(time, FOV_60N, '-b', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, E3SM') +graph_rcp_conver = ax.plot(time, FOV_34S 
- FOV_60N, '-r', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, E3SM') + +graph_rean_34S = ax.plot(time, np.zeros(len(time))+FOV_34S_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis') +graph_rean_60N = ax.plot(time, np.zeros(len(time))+FOV_60N_rean, '--', color = 'cyan', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, Reanalysis') +graph_rean_conver = ax.plot(time, np.zeros(len(time))+FOV_34S_rean - FOV_60N_rean, '--', color = 'firebrick', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_xlim(1, 200) +ax.set_ylim(-0.5, 0.5) +ax.set_xticks([1, 25, 50, 75, 100, 125, 150, 175, 200]) +ax.grid() + +ax.fill_between([0, 200], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange') + +graphs = graph_rcp_34S + graph_rcp_60N + graph_rcp_conver +legend_labels = [l.get_label() for l in graphs] +legend_1 = ax.legend(graphs, legend_labels, loc='upper left', ncol=1, framealpha = 1.0, numpoints = 1) + + +graphs = graph_rean_34S + graph_rean_60N + graph_rean_conver +legend_labels = [l.get_label() for l in graphs] +legend_2 = ax.legend(graphs, legend_labels, loc = 'upper right', ncol=1, framealpha = 1.0, numpoints = 1) +ax.add_artist(legend_1) + + +ax.set_title('Freshwater convergence, E3SM Arctic') +show() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_gyre.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_gyre.py new file mode 100644 index 00000000..15dc9739 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_gyre.py @@ -0,0 +1,121 @@ +#Program determines the azonal (gyre) component at 34S + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] 
#Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = 
ma.masked_all(shape(layer_field)) + +for depth_i in range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_gyre_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the zonal means + v_vel_zonal = np.mean(v_vel, axis = 1) + salt_zonal = np.mean(salt, axis = 1) + + v_vel_prime = ma.masked_all(np.shape(v_vel)) + salt_prime = ma.masked_all(np.shape(salt)) + + for depth_i in range(len(depth)): + #Determine the differences with respect to the zonal means + v_vel_prime[depth_i] = v_vel[depth_i] - v_vel_zonal[depth_i] + salt_prime[depth_i] = salt[depth_i] - salt_zonal[depth_i] + + #Now determine the azonal component (gyre, in Sv) + transport_gyre_all[file_i] = (-1.0 / 35.0) * np.sum(v_vel_prime * salt_prime * layer_field_area) / 10**6.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_section_34S.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('F_gyre', float, ('time'), zlib=True) + +fh.variables['F_gyre'].longname = 'Freshwater transport by gyre' + +fh.variables['time'].units = 'Year' +fh.variables['F_gyre'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['F_gyre'][:] = transport_gyre_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_index.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_index.py new file mode 100644 index 00000000..21acb79d --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/FOV_index.py @@ -0,0 
+1,261 @@ +#Program determines the FOV index for 34S and the difference components + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = 
(fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) +transport_salt_all = ma.masked_all(len(time)) +transport_salt_ASW_all = ma.masked_all(len(time)) +transport_salt_AIW_all = ma.masked_all(len(time)) +transport_salt_NADW_all = ma.masked_all(len(time)) +transport_salt_ABW_all = ma.masked_all(len(time)) +salt_ASW_all = ma.masked_all(len(time)) +salt_AIW_all = ma.masked_all(len(time)) +salt_NADW_all = ma.masked_all(len(time)) +salt_ABW_all = ma.masked_all(len(time)) +vel_ASW_all = ma.masked_all(len(time)) +vel_AIW_all = ma.masked_all(len(time)) +vel_NADW_all = ma.masked_all(len(time)) +vel_ABW_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - 
vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #----------------------------------------------------------------------------------------- + #Get the water properties + water_prop = ma.masked_all((len(depth), len(lon))) + + #North Atlantic Deep Water (NADW) has negative meridional velocities + depth_index_NADW = np.where((depth >= 700) & (transport_clin <= 0))[0][0] + + #Antarctic bottom water (ABW) is directly below the NADW, get the first index + depth_index_ABW = np.where((depth >= 3000) & (transport_clin >= 0))[0] + + if len(depth_index_ABW) == 0: + #Assume below 4000m depth the ABW + depth_index_ABW = np.where(depth >= 4000)[0][0] + else: + depth_index_ABW = depth_index_ABW[0] + + for depth_i in range(len(depth)): + + if depth_i < depth_index_NADW: + #Surface water + water_prop[depth_i] = 1.0 + + if depth[depth_i] >= 500 and depth_i < depth_index_NADW: + #Antarctic Intermediate water + water_prop[depth_i] = 2.0 + + if depth_i >= depth_index_NADW and depth_i < depth_index_ABW: + #North Atlantic Deep Water (NADW) + water_prop[depth_i] = 3.0 + + if depth_i >= depth_index_ABW: + #The ABW is defined below the NADW + water_prop[depth_i] = 4.0 + + water_prop = ma.masked_array(water_prop, mask = v_vel.mask) + + #----------------------------------------------------------------------------------------- + area_ASW = ma.masked_where(water_prop != 1.0, layer_field_area) + area_AIW = ma.masked_where(water_prop != 2.0, layer_field_area) + area_NADW = ma.masked_where(water_prop != 3.0, layer_field_area) + area_ABW = ma.masked_where(water_prop != 4.0, layer_field_area) + area_ASW = area_ASW / np.sum(area_ASW) + area_AIW = area_AIW / np.sum(area_AIW) + area_NADW = area_NADW / np.sum(area_NADW) + area_ABW = area_ABW / np.sum(area_ABW) + + #Determine the spatial means + vel_ASW_all[file_i] = np.sum(vel_baroclinic * area_ASW) + vel_AIW_all[file_i] 
= np.sum(vel_baroclinic * area_AIW) + vel_NADW_all[file_i] = np.sum(vel_baroclinic * area_NADW) + vel_ABW_all[file_i] = np.sum(vel_baroclinic * area_ABW) + salt_ASW_all[file_i] = np.sum(salt * area_ASW) + salt_AIW_all[file_i] = np.sum(salt * area_AIW) + salt_NADW_all[file_i] = np.sum(salt * area_NADW) + salt_ABW_all[file_i] = np.sum(salt * area_ABW) + + #Determine the means over the water masses + transport_ASW = np.sum(ma.masked_where(water_prop != 1.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_AIW = np.sum(ma.masked_where(water_prop != 2.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_NADW = np.sum(ma.masked_where(water_prop != 3.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_ABW = np.sum(ma.masked_where(water_prop != 4.0, vel_baroclinic * layer_field * grid_x), axis = 1) + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[file_i] = np.sum(transport) / 1000000.0 + + #Determine the total salinity transport + transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0 + transport_salt_ASW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ASW * salt_zonal) / 1000000.0 + transport_salt_AIW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_AIW * salt_zonal) / 1000000.0 + transport_salt_NADW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_NADW * salt_zonal) / 1000000.0 + transport_salt_ABW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ABW * salt_zonal) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) +fh.createVariable('F_OV', float, ('time'), zlib=True) +fh.createVariable('F_OV_ASW', float, ('time'), 
zlib=True) +fh.createVariable('F_OV_AIW', float, ('time'), zlib=True) +fh.createVariable('F_OV_NADW', float, ('time'), zlib=True) +fh.createVariable('F_OV_ABW', float, ('time'), zlib=True) +fh.createVariable('SALT_ASW', float, ('time'), zlib=True) +fh.createVariable('SALT_AIW', float, ('time'), zlib=True) +fh.createVariable('SALT_NADW', float, ('time'), zlib=True) +fh.createVariable('SALT_ABW', float, ('time'), zlib=True) +fh.createVariable('VVEL_ASW', float, ('time'), zlib=True) +fh.createVariable('VVEL_AIW', float, ('time'), zlib=True) +fh.createVariable('VVEL_NADW', float, ('time'), zlib=True) +fh.createVariable('VVEL_ABW', float, ('time'), zlib=True) + +fh.variables['Transport'].longname = 'Volume transport' +fh.variables['F_OV'].longname = 'Fresh water transport' +fh.variables['F_OV_ASW'].longname = 'Fresh water transport (Atlantic Surface Water)' +fh.variables['F_OV_AIW'].longname = 'Fresh water transport (Antarctic Intermediate Water)' +fh.variables['F_OV_NADW'].longname = 'Fresh water transport (North Atlantic Deep Water)' +fh.variables['F_OV_ABW'].longname = 'Fresh water transport (Antarctic Bottom Water)' +fh.variables['SALT_ASW'].longname = 'Salinity (Atlantic Surface Water)' +fh.variables['SALT_AIW'].longname = 'Salinity (Antarctic Intermediate Water)' +fh.variables['SALT_NADW'].longname = 'Salinity (North Atlantic Deep Water)' +fh.variables['SALT_ABW'].longname = 'Salinity (Antarctic Bottom Water)' +fh.variables['VVEL_ASW'].longname = 'Meridional velocity (Atlantic Surface Water)' +fh.variables['VVEL_AIW'].longname = 'Meridional velocity (Antarctic Intermediate Water)' +fh.variables['VVEL_NADW'].longname = 'Meridional velocity (North Atlantic Deep Water)' +fh.variables['VVEL_ABW'].longname = 'Meridional velocity (Antarctic Bottom Water)' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' +fh.variables['F_OV'].units = 'Sv' +fh.variables['F_OV_ASW'].units = 'Sv' +fh.variables['F_OV_AIW'].units = 'Sv' 
+fh.variables['F_OV_NADW'].units = 'Sv' +fh.variables['F_OV_ABW'].units = 'Sv' +fh.variables['SALT_ASW'].units = 'g/kg' +fh.variables['SALT_AIW'].units = 'g/kg' +fh.variables['SALT_NADW'].units = 'g/kg' +fh.variables['SALT_ABW'].units = 'g/kg' +fh.variables['VVEL_ASW'].units = 'cm/s' +fh.variables['VVEL_AIW'].units = 'cm/s' +fh.variables['VVEL_NADW'].units = 'cm/s' +fh.variables['VVEL_ABW'].units = 'cm/s' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all +fh.variables['F_OV'][:] = transport_salt_all +fh.variables['F_OV_ASW'][:] = transport_salt_ASW_all +fh.variables['F_OV_AIW'][:] = transport_salt_AIW_all +fh.variables['F_OV_NADW'][:] = transport_salt_NADW_all +fh.variables['F_OV_ABW'][:] = transport_salt_ABW_all +fh.variables['SALT_ASW'][:] = salt_ASW_all +fh.variables['SALT_AIW'][:] = salt_AIW_all +fh.variables['SALT_NADW'][:] = salt_NADW_all +fh.variables['SALT_ABW'][:] = salt_ABW_all +fh.variables['VVEL_ASW'][:] = vel_ASW_all * 100.0 +fh.variables['VVEL_AIW'][:] = vel_AIW_all * 100.0 +fh.variables['VVEL_NADW'][:] = vel_NADW_all * 100.0 +fh.variables['VVEL_ABW'][:] = vel_ABW_all * 100.0 + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Atlantic_sector.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Atlantic_sector.py new file mode 100644 index 00000000..bf981952 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Atlantic_sector.py @@ -0,0 +1,200 @@ +#Generates the Atlantic sector fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors 
+from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m1199/e3sm-arrm-simulations/E3SMv2.1B60to10rA02/ocn/hist/' +directory = '../../Data/' + +def RHO_0(T, S): + #Reference density which is not pressure dependent + + rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) ) + + return rho +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lon_min = -50 +lon_max = 20 +lat_min = -71 +lat_max = 1 + +files = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index 
= (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] + +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) +layer = layer[:, lat_min_index:lat_max_index, lon_min_index:lon_max_index] + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_ARRM10to60E2r1_to_0.5x0.5degree_bilinear.nc -T . 
-O /global/homes/r/rvwesten/E3SM_Arctic/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (deg C) + u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Zonal velocity (m/s) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + temp = ma.masked_where(layer <= 0.0, temp) + u_vel = ma.masked_where(layer <= 0.0, u_vel) + dens = RHO_0(temp, salt) + + if file_i == 0: + #Empty array + salt_depth = ma.masked_all((12, len(depth), len(lat))) + temp_depth = ma.masked_all((12, len(depth), len(lat))) + u_vel_depth = ma.masked_all((12, len(depth), len(lat))) + dens_depth = ma.masked_all((12, len(depth), len(lat))) + + #Get the zonal mean + salt_depth[file_i] = np.mean(salt, axis = 2) + temp_depth[file_i] = np.mean(temp, axis = 2) + u_vel_depth[file_i] = np.mean(u_vel, axis = 2) + dens_depth[file_i] = np.mean(dens, axis = 2) + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_all = ma.masked_all((len(month_days), len(depth), len(lat))) + + for month_i in range(len(month_days)): + month_days_all[month_i] = month_days[month_i] + + #Now set mask + month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask) + + #Normalise the data + month_days_all = month_days_all / np.sum(month_days_all, axis = 0) + + #Determine the time mean over the months of choice + 
salt_depth_year = np.sum(salt_depth * month_days_all, axis = 0) + temp_depth_year = np.sum(temp_depth * month_days_all, axis = 0) + u_vel_depth_year= np.sum(u_vel_depth * month_days_all, axis = 0) + dens_depth_year = np.sum(dens_depth * month_days_all, axis = 0) + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/Atlantic_sector/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('month', 12) + fh.createDimension('depth', len(depth)) + fh.createDimension('lat', len(lat)) + + fh.createVariable('month', float, ('month'), zlib=True) + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('SALT', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('TEMP', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('UVEL', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('POT_DENS', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('SALT_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('TEMP_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('UVEL_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('POT_DENS_mean', float, ('depth', 'lat'), zlib=True) + + fh.variables['depth'].longname = 'Mid-level depth' + fh.variables['lat'].longname = 'Array of latitudes' + fh.variables['SALT'].longname = 'Zonally-averaged salinity' + fh.variables['TEMP'].longname = 'Zonally-averaged potential temperature' + fh.variables['UVEL'].longname = 'Zonally-averaged zonal velocity' + fh.variables['POT_DENS'].longname = 'Zonally-averaged potential density' + fh.variables['SALT_mean'].longname = 'Zonally-averaged salinity (yearly mean)' + fh.variables['TEMP_mean'].longname = 'Zonally-averaged potential temperature (yearly mean)' + fh.variables['UVEL_mean'].longname = 'Zonally-averaged zonal velocity (yearly 
mean)' + fh.variables['POT_DENS_mean'].longname = 'Zonally-averaged potential density (yearly mean)' + + fh.variables['depth'].units = 'm' + fh.variables['lat'].units = 'degrees N' + fh.variables['SALT'].units = 'g/kg' + fh.variables['TEMP'].units = 'deg C' + fh.variables['UVEL'].units = 'm/s' + fh.variables['POT_DENS'].units = 'kg/m^3' + fh.variables['SALT_mean'].units = 'g/kg' + fh.variables['TEMP_mean'].units = 'deg C' + fh.variables['UVEL_mean'].units = 'm/s' + fh.variables['POT_DENS_mean'].units = 'kg/m^3' + + + #Writing data to correct variable + fh.variables['month'][:] = np.arange(12)+1 + fh.variables['depth'][:] = depth + fh.variables['lat'][:] = lat + fh.variables['SALT'][:] = salt_depth + fh.variables['TEMP'][:] = temp_depth + fh.variables['UVEL'][:] = u_vel_depth + fh.variables['POT_DENS'][:] = dens_depth + fh.variables['SALT_mean'][:] = salt_depth_year + fh.variables['TEMP_mean'][:] = temp_depth_year + fh.variables['UVEL_mean'][:] = u_vel_depth_year + fh.variables['POT_DENS_mean'][:] = dens_depth_year + + + fh.close() diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Drake.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Drake.py new file mode 100644 index 00000000..c879d760 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_Drake.py @@ -0,0 +1,192 @@ +#Generates the Drake Passage fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m1199/e3sm-arrm-simulations/E3SMv2.1B60to10rA02/ocn/hist/' +directory = '../../Data/' + +def 
RHO_0(T, S): + #Reference density which is not pressure dependent + + rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) ) + + return rho +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lat_min = -67 +lat_max = -54.9 +lon_section = -66.3 + +files = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +files = files[-60:] + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_index = (np.abs(lon - lon_section)).argmin() +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_index] +lat = lat[lat_min_index:lat_max_index] +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + +#Use 
general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) +layer = layer[:, lat_min_index:lat_max_index, lon_index] + + +dy = 6371000 * 2 * np.pi * 0.5 / 360 + np.zeros(len(lat)) + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_ARRM10to60E2r1_to_0.5x0.5degree_bilinear.nc -T . 
-O /global/homes/r/rvwesten/E3SM_Arctic/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_index] #Salinity (g/kg) + temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_index] #Temperature (deg C) + u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_index] #Zonal velocity (m/s) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + temp = ma.masked_where(layer <= 0.0, temp) + u_vel = ma.masked_where(layer <= 0.0, u_vel) + dens = RHO_0(temp, salt) + + if file_i == 0: + #Empty array + salt_depth = ma.masked_all((12, len(depth), len(lat))) + temp_depth = ma.masked_all((12, len(depth), len(lat))) + u_vel_depth = ma.masked_all((12, len(depth), len(lat))) + dens_depth = ma.masked_all((12, len(depth), len(lat))) + + #Get the zonal mean + salt_depth[file_i] = salt + temp_depth[file_i] = temp + u_vel_depth[file_i] = u_vel + dens_depth[file_i] = dens + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_all = ma.masked_all((len(month_days), len(depth), len(lat))) + + for month_i in range(len(month_days)): + month_days_all[month_i] = month_days[month_i] + + #Now set mask + month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask) + + #Normalise the data + month_days_all = month_days_all / np.sum(month_days_all, axis = 0) + + #Determine the time mean over the months of choice + salt_depth = np.sum(salt_depth * month_days_all, axis = 0) + temp_depth = np.sum(temp_depth * month_days_all, axis = 0) + u_vel_depth 
= np.sum(u_vel_depth * month_days_all, axis = 0) + dens_depth = np.sum(dens_depth * month_days_all, axis = 0) + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/Drake_Passage/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('depth', len(depth)) + fh.createDimension('lat', len(lat)) + + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('layer', float, ('depth', 'lat'), zlib=True) + fh.createVariable('DY', float, ('lat'), zlib=True) + fh.createVariable('SALT', float, ('depth', 'lat'), zlib=True) + fh.createVariable('TEMP', float, ('depth', 'lat'), zlib=True) + fh.createVariable('UVEL', float, ('depth', 'lat'), zlib=True) + fh.createVariable('POT_DENS', float, ('depth', 'lat'), zlib=True) + + fh.variables['depth'].longname = 'Mid-level depth' + fh.variables['lat'].longname = 'Array of latitudes' + fh.variables['layer'].longname = 'Thickness of layer' + fh.variables['DY'].longname = 'y-spacing' + fh.variables['SALT'].longname = 'Salinity' + fh.variables['TEMP'].longname = 'Potential temperature' + fh.variables['UVEL'].longname = 'Zonal velocity' + fh.variables['POT_DENS'].longname = 'Potential density' + + fh.variables['depth'].units = 'm' + fh.variables['lat'].units = 'degrees N' + fh.variables['layer'].units = 'm' + fh.variables['DY'].units = 'm' + fh.variables['SALT'].units = 'g/kg' + fh.variables['TEMP'].units = 'deg C' + fh.variables['UVEL'].units = 'm/s' + fh.variables['POT_DENS'].units = 'kg/m^3' + + + #Writing data to correct variable + fh.variables['depth'][:] = depth + fh.variables['lat'][:] = lat + fh.variables['layer'][:] = layer + fh.variables['DY'][:] = dy + fh.variables['SALT'][:] = salt_depth + fh.variables['TEMP'][:] = temp_depth + fh.variables['UVEL'][:] = u_vel_depth + fh.variables['POT_DENS'][:] = dens_depth + + + fh.close() + diff --git 
a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_FOV.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_FOV.py new file mode 100644 index 00000000..f46fc674 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_FOV.py @@ -0,0 +1,308 @@ +#Generates the FOV fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m1199/e3sm-arrm-simulations/E3SMv2.1B60to10rA02/ocn/hist/' +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +files = files[2340:] + +print(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in 
range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_ARRM10to60E2r1_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_Arctic/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_velocityMeridional') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + lon = fh.variables['lon'][:] + lat = fh.variables['lat'][:] + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0] #Salinity (g/kg) + v_vel = fh.variables['timeMonthly_avg_velocityMeridional'][0] #Meridional velocity (m/s) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + v_vel = ma.masked_where(layer <= 0.0, v_vel) + + for lat_section in [-34, 26, 60]: + #Get the lat index + lat_index = (np.abs(lat - lat_section)).argmin() + + if lat_section == -34: + #Section at 34S, start of Atlantic Sector + lon_1, lon_2 = 250, 401 + section_name = 'FOV_section_34S' + + if year_i == int(np.min(time)): + #Get the layer for the section + lon_34S = lon[lon_1:lon_2] + layer_34S = layer[:, lat_index, lon_1:lon_2] + dx_34S = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_34S)) + + if lat_section == 26: + #Section at 26N, 
RAPID array + lon_1, lon_2 = 198, 335 + section_name = 'FOV_section_26N' + + if year_i == int(np.min(time)): + #Get the layer for the section + lon_26N = lon[lon_1:lon_2] + layer_26N = layer[:, lat_index, lon_1:lon_2] + dx_26N = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_26N)) + if lat_section == 60: + #Section at 60N, RAPID array + lon_1, lon_2 = 230, 373 + section_name = 'FOV_section_60N' + + if year_i == int(np.min(time)): + #Get the layer for the section + lon_60N = lon[lon_1:lon_2] + layer_60N = layer[:, lat_index, lon_1:lon_2] + dx_60N = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_60N)) + if file_i == 0 and lat_section == -34: + #Make empty arrays for the months + v_vel_34S = ma.masked_all((12, len(depth), lon_2 - lon_1)) + salt_34S = ma.masked_all((12, len(depth), lon_2 - lon_1)) + + if file_i == 0 and lat_section == 26: + #Make empty arrays for the months + v_vel_26N = ma.masked_all((12, len(depth), lon_2 - lon_1)) + salt_26N = ma.masked_all((12, len(depth), lon_2 - lon_1)) + + if file_i == 0 and lat_section == 60: + #Make empty arrays for the months + v_vel_60N = ma.masked_all((12, len(depth), lon_2 - lon_1)) + salt_60N = ma.masked_all((12, len(depth), lon_2 - lon_1)) + + if lat_section == -34: + #Now save the data to the general array + v_vel_34S[file_i] = v_vel[:, lat_index, lon_1:lon_2] + salt_34S[file_i] = salt[:, lat_index, lon_1:lon_2] + + if lat_section == 26: + #Now save the data to the general array + v_vel_26N[file_i] = v_vel[:, lat_index, lon_1:lon_2] + salt_26N[file_i] = salt[:, lat_index, lon_1:lon_2] + + if lat_section == 60: + #Now save the data to the general array + v_vel_60N[file_i] = v_vel[:, lat_index, lon_1:lon_2] + salt_60N[file_i] = salt[:, lat_index, lon_1:lon_2] + + os.remove('Regrid_month.nc') + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 
28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_34S = ma.masked_all((len(month_days), len(depth), len(lon_34S))) + month_days_26N = ma.masked_all((len(month_days), len(depth), len(lon_26N))) + month_days_60N = ma.masked_all((len(month_days), len(depth), len(lon_60N))) + + for month_i in range(len(month_days)): + month_days_34S[month_i] = month_days[month_i] + month_days_26N[month_i] = month_days[month_i] + month_days_60N[month_i] = month_days[month_i] + + #Now set mask + month_days_34S = ma.masked_array(month_days_34S, mask = salt_34S.mask) + month_days_26N = ma.masked_array(month_days_26N, mask = salt_26N.mask) + month_days_60N = ma.masked_array(month_days_60N, mask = salt_60N.mask) + + #Normalise the data + month_days_34S = month_days_34S / np.sum(month_days_34S, axis = 0) + month_days_26N = month_days_26N / np.sum(month_days_26N, axis = 0) + month_days_60N = month_days_60N / np.sum(month_days_60N, axis = 0) + + #----------------------------------------------------------------------------------------- + + #Determine the time mean over the months of choice + v_vel_34S = np.sum(v_vel_34S * month_days_34S, axis = 0) + salt_34S = np.sum(salt_34S * month_days_34S, axis = 0) + v_vel_26N = np.sum(v_vel_26N * month_days_26N, axis = 0) + salt_26N = np.sum(salt_26N * month_days_26N, axis = 0) + v_vel_60N = np.sum(v_vel_60N * month_days_60N, axis = 0) + salt_60N = np.sum(salt_60N * month_days_60N, axis = 0) + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/FOV_section_34S/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('depth', len(depth)) + fh.createDimension('lon', len(lon_34S)) + + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('layer', float, ('depth', 'lon'), zlib=True) + 
fh.createVariable('DX', float, ('lon'), zlib=True) + fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True) + + fh.variables['depth'].longname = 'Depth from surface to midpoint of layer' + fh.variables['layer'].longname = 'Thickness of layer' + fh.variables['lon'].longname = 'Array of longtidues' + fh.variables['DX'].longname = 'x-spacing' + fh.variables['VVEL'].longname = 'Velocity in meridional direction' + fh.variables['SALT'].longname = 'Salinity' + + fh.variables['depth'].units = 'm' + fh.variables['layer'].units = 'm' + fh.variables['lon'].units = 'degrees E' + fh.variables['DX'].units = 'm' + fh.variables['VVEL'].units = 'm/s' + fh.variables['SALT'].units = 'g/kg' + + #Writing data to correct variable + fh.variables['depth'][:] = depth + fh.variables['layer'][:] = layer_34S + fh.variables['lon'][:] = lon_34S + fh.variables['DX'][:] = dx_34S + fh.variables['VVEL'][:] = v_vel_34S + fh.variables['SALT'][:] = salt_34S + + fh.close() + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/FOV_section_26N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('depth', len(depth)) + fh.createDimension('lon', len(lon_26N)) + + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('layer', float, ('depth', 'lon'), zlib=True) + fh.createVariable('DX', float, ('lon'), zlib=True) + fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True) + + fh.variables['depth'].longname = 'Depth from surface to midpoint of layer' + fh.variables['layer'].longname = 'Thickness of layer' + fh.variables['lon'].longname = 'Array of longtidues' + fh.variables['DX'].longname = 'x-spacing' + fh.variables['VVEL'].longname = 'Velocity in meridional direction' + 
fh.variables['SALT'].longname = 'Salinity' + + fh.variables['depth'].units = 'm' + fh.variables['layer'].units = 'm' + fh.variables['lon'].units = 'degrees E' + fh.variables['DX'].units = 'm' + fh.variables['VVEL'].units = 'm/s' + fh.variables['SALT'].units = 'g/kg' + + #Writing data to correct variable + fh.variables['depth'][:] = depth + fh.variables['layer'][:] = layer_26N + fh.variables['lon'][:] = lon_26N + fh.variables['DX'][:] = dx_26N + fh.variables['VVEL'][:] = v_vel_26N + fh.variables['SALT'][:] = salt_26N + + fh.close() + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/FOV_section_60N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('depth', len(depth)) + fh.createDimension('lon', len(lon_60N)) + + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('layer', float, ('depth', 'lon'), zlib=True) + fh.createVariable('DX', float, ('lon'), zlib=True) + fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True) + + fh.variables['depth'].longname = 'Depth from surface to midpoint of layer' + fh.variables['layer'].longname = 'Thickness of layer' + fh.variables['lon'].longname = 'Array of longtidues' + fh.variables['DX'].longname = 'x-spacing' + fh.variables['VVEL'].longname = 'Velocity in meridional direction' + fh.variables['SALT'].longname = 'Salinity' + + fh.variables['depth'].units = 'm' + fh.variables['layer'].units = 'm' + fh.variables['lon'].units = 'degrees E' + fh.variables['DX'].units = 'm' + fh.variables['VVEL'].units = 'm/s' + fh.variables['SALT'].units = 'g/kg' + + #Writing data to correct variable + fh.variables['depth'][:] = depth + fh.variables['layer'][:] = layer_60N + fh.variables['lon'][:] = lon_60N + fh.variables['DX'][:] = dx_60N + fh.variables['VVEL'][:] = v_vel_60N + 
fh.variables['SALT'][:] = salt_60N + + fh.close() + diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_MXL.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_MXL.py new file mode 100644 index 00000000..add7c9e4 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_MXL.py @@ -0,0 +1,184 @@ +#Generates the mixed layer depth fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m1199/e3sm-arrm-simulations/E3SMv2.1B60to10rA02/ocn/hist/' +directory = '../../Data/' + +def Distance(lon1, lat1, lon2, lat2): + """Returns distance (m) of two points located at the globe + coordinates need input in degrees""" + + lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) #Convert to radians + + #Haversine formula + dlon = lon2 - lon1 + dlat = lat2 - lat1 + a = math.sin(dlat/2.0)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2.0)**2 + c = 2.0 * math.asin(sqrt(a)) + r = 6371000.0 # Radius of earth in meters + + return c * r #Distance between two points in meter + +def GridCellComputer(longitude, latitude): + """Determines the area (m^2) per grid cell + returns 2-D array (lat, lon) with the area per box""" + + #Define empty array for latitude per grid cell and the Area covered by the Ocean + grid_x = np.zeros((len(latitude), len(longitude))) + grid_y = np.zeros((len(latitude), len(longitude))) + + for lat_i in range(len(latitude)): + + #Determining zonal length (m), is latitude dependent, therefore, take middle of grid cell + length_zonal_grid = 
Distance(0.0, latitude[lat_i], 0.08333206, latitude[lat_i]) + #Determining meriodinal length (m), is longitude independent + length_meridional_grid = Distance(0.0, 0.0, 0.0, 0.08333206) + + grid_x[lat_i] = length_zonal_grid + grid_y[lat_i] = length_meridional_grid + + return grid_x, grid_y + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lat_min = -71.25 +lat_max = 1.25 +lon_min = -70.25 +lon_max = 20.25 +depth_level = 500 + +files = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] + +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) + +depth_index = 
(np.abs(depth-depth_level)).argmin() + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_ARRM10to60E2r1_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_Arctic/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_dThreshMLD,timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + mixed = fh.variables['timeMonthly_avg_dThreshMLD'][0, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (m) + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, depth_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, depth_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (deg C) + + fh.close() + + if file_i == 0: + #Empty array + temp_month = ma.masked_all((12, len(lat), len(lon))) + salt_month = ma.masked_all((12, len(lat), len(lon))) + mixed_month = ma.masked_all((12, len(lat), len(lon))) + + #Save the vertical velocity + temp_month[file_i] = temp + salt_month[file_i] = salt + mixed_month[file_i] = mixed + #------------------------------------------------------------------------------ + + + filename = directory+'Data/Mixed_layer/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('month', 
12) + fh.createDimension('lat', len(lat)) + fh.createDimension('lon', len(lon)) + + fh.createVariable('month', float, ('month'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('TEMP', float, ('month', 'lat', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('month', 'lat', 'lon'), zlib=True) + fh.createVariable('MXL', float, ('month', 'lat', 'lon'), zlib=True) + + fh.variables['month'].longname = 'Month' + fh.variables['lat'].longname = 'Array of latitudes' + fh.variables['lon'].longname = 'Array of longitudes' + fh.variables['TEMP'].longname = 'Potential temperature (500 m depth)' + fh.variables['SALT'].longname = 'Salinity (500 m depth)' + fh.variables['MXL'].longname = 'Mixed layer depth' + + fh.variables['lat'].units = 'degrees N' + fh.variables['lon'].units = 'degrees E' + fh.variables['TEMP'].units = 'degC' + fh.variables['SALT'].units = 'g/kg' + fh.variables['MXL'].units = 'm' + + #Writing data to correct variable + fh.variables['month'][:] = np.arange(1,13) + fh.variables['lat'][:] = lat + fh.variables['lon'][:] = lon + fh.variables['TEMP'][:] = temp_month + fh.variables['SALT'][:] = salt_month + fh.variables['MXL'][:] = mixed_month + + fh.close() + diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_SALT.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_SALT.py new file mode 100644 index 00000000..6fba1d88 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Field_generation_SALT.py @@ -0,0 +1,221 @@ +#Generates the vertically-integrated salinity fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import 
math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m1199/e3sm-arrm-simulations/E3SMv2.1B60to10rA02/ocn/hist/' +directory = '../../Data/' + +def Distance(lon1, lat1, lon2, lat2): + """Returns distance (m) of two points located at the globe + coordinates need input in degrees""" + + lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) #Convert to radians + + #Haversine formula + dlon = lon2 - lon1 + dlat = lat2 - lat1 + a = math.sin(dlat/2.0)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2.0)**2 + c = 2.0 * math.asin(sqrt(a)) + r = 6371000.0 # Radius of earth in meters + + return c * r #Distance between two points in meter + +def GridCellComputer(longitude, latitude): + """Determines the area (m^2) per grid cell + returns 2-D array (lat, lon) with the area per box""" + + #Define empty array for latitude per grid cell and the Area covered by the Ocean + grid_x = np.zeros((len(latitude), len(longitude))) + grid_y = np.zeros((len(latitude), len(longitude))) + + for lat_i in range(len(latitude)): + + #Determining zonal length (m), is latitude dependent, therefore, take middle of grid cell + length_zonal_grid = Distance(0.0, latitude[lat_i], np.mean(np.diff(longitude)), latitude[lat_i]) + #Determining meriodinal length (m), is longitude independent + length_meridional_grid = Distance(0.0, 0.0, 0.0, np.mean(np.diff(latitude))) + + grid_x[lat_i] = length_zonal_grid + grid_y[lat_i] = length_meridional_grid + + return grid_x, grid_y, grid_x * grid_y +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lon_min = -110 +lon_max = 143 +lat_min = -80 +lat_max = 25.5 +depth_min = 0 +depth_max = 100 + + +files = 
glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +grid_x, grid_y, area = GridCellComputer(lon, lat) + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] +area = area[lat_min_index:lat_max_index, lon_min_index:lon_max_index] +layer = fh.variables['timeMonthly_avg_layerThickness'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) + + +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 +depth = depth[depth_min_index:depth_max_index] +layer = layer[depth_min_index:depth_max_index] + +for lat_i in range(len(lat)): + for lon_i in range(len(lon)): + #Get all the layers which have a maximum depth below given range + if np.sum(layer[:, lat_i, lon_i]) > depth_max: + #Adjust the last layer + layer[-1, lat_i, lon_i] -= (np.sum(layer[:, lat_i, lon_i]) - depth_max) + +#Get the total 
vertical extent for each layer +total_layer = np.sum(layer, axis = 0) +volume = total_layer * area +area = ma.masked_array(area, mask = volume.mask) + +for depth_i in range(len(depth)): + #Normalise the field by its vertical extent + layer[depth_i] = layer[depth_i] / total_layer + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'E3SMv2.1B60to10rA02.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_ARRM10to60E2r1_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_Arctic/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, depth_min_index:depth_max_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + + if file_i == 0: + #Empty array + salt_depth = ma.masked_all((12, len(lat), len(lon))) + + #Get the vertical depth averaged salinity + salt_depth[file_i] = np.sum(salt * layer, axis = 0) + + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_all = ma.masked_all((len(month_days), len(lat), len(lon))) + + for month_i in range(len(month_days)): + month_days_all[month_i] = month_days[month_i] + + #Now 
set mask + month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask) + + #Normalise the data + month_days_all = month_days_all / np.sum(month_days_all, axis = 0) + + #Determine the time mean over the months of choice + salt_depth = np.sum(salt_depth * month_days_all, axis = 0) + + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('lon', len(lon)) + fh.createDimension('lat', len(lat)) + + fh.createVariable('lon', float, ('lon'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('AREA', float, ('lat', 'lon'), zlib=True) + fh.createVariable('VOLUME', float, ('lat', 'lon'), zlib=True) + fh.createVariable('SALT', float, ('lat', 'lon'), zlib=True) + + fh.variables['lon'].longname = 'Array of T-longtidues' + fh.variables['lat'].longname = 'Array of T-latitudes' + fh.variables['AREA'].longname = 'Area of T cells' + fh.variables['VOLUME'].longname = 'Volume of T cells' + fh.variables['SALT'].longname = 'Depth-averaged salinity' + + fh.variables['lon'].units = 'degrees E' + fh.variables['lat'].units = 'degrees N' + fh.variables['AREA'].units = 'm^2' + fh.variables['VOLUME'].units = 'm^3' + fh.variables['SALT'].units = 'g/kg' + + #Writing data to correct variable + fh.variables['lon'][:] = lon + fh.variables['lat'][:] = lat + fh.variables['AREA'][:] = area + fh.variables['VOLUME'][:] = volume + fh.variables['SALT'][:] = salt_depth + + fh.close() + + + + diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Mixed_layer_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Mixed_layer_plot.py new file mode 100644 index 00000000..fa8d56e7 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Mixed_layer_plot.py @@ -0,0 +1,170 @@ +#Program plots the mixed layer depth climatology + +from pylab import * 
+import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + +	fh = netcdf.Dataset(filename, 'r') + +	lon 		= fh.variables['lon'][:]	#Longitude +	lat 		= fh.variables['lat'][:]	#Latitude +	temp		= fh.variables['TEMP'][:] 	#Temperature +	salt		= fh.variables['SALT'][:] 	#Salinity +	mixed		= fh.variables['MXL'][:] 	#Mixed layer depth (m) + +	fh.close() + +	return lon, lat, temp, salt, mixed + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Mixed_layer/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time 		= np.zeros(len(files)) + +for year_i in range(len(files)): +	date  = files[year_i][-7:-3] +	year  = int(date[0:4]) +	time[year_i] = year + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, lat, temp, salt, mixed = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all	= ma.masked_all((len(time)*12, len(lat), len(lon))) +salt_all	= ma.masked_all((len(time)*12, len(lat), len(lon))) +mixed_all	= ma.masked_all((len(time)*12, len(lat), len(lon))) + +for file_i in range(len(files)): +	#Now determine for each month +	print(file_i) + +	lon, lat, temp, salt, mixed = 
ReadinData(files[file_i]) + + for month_i in range(len(mixed)): + #Add each month + temp_all[file_i*12+month_i] = temp[month_i] + salt_all[file_i*12+month_i] = salt[month_i] + mixed_all[file_i*12+month_i] = mixed[month_i] + + +temp_month = ma.masked_all((12, len(lat), len(lon))) +salt_month = ma.masked_all((12, len(lat), len(lon))) +mixed_month = ma.masked_all((12, len(lat), len(lon))) + +for month_i in range(12): + #Loop over each month + month_index = np.arange(month_i, len(mixed_all), 12) + temp_month[month_i] = np.mean(temp_all[month_index], axis = 0) + salt_month[month_i] = np.mean(salt_all[month_index], axis = 0) + mixed_month[month_i] = np.mean(mixed_all[month_index], axis = 0) + +#----------------------------------------------------------------------------------------- + +mixed_crop = 200 +factor_mixed_crop = 2 +mixed_month[mixed_month > mixed_crop] = ((mixed_month[mixed_month > mixed_crop] - mixed_crop) / factor_mixed_crop) + mixed_crop + +#----------------------------------------------------------------------------------------- + +month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + +for month_i in range(12): + + #----------------------------------------------------------------------------------------- + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, temp_month[month_i] - temp_month[0], levels = np.arange(-1, 1.01, 0.05), extend = 'both', cmap = 'RdBu_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = np.arange(-1, 1.01, 1), cax=ax_cb) + cbar.set_label('Temperature difference ($^{\circ}$C)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + 
ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+' minus January, E3SM Arctic') + + #----------------------------------------------------------------------------------------- + + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, salt_month[month_i] - salt_month[0], levels = np.arange(-0.1, 0.101, 0.005), extend = 'both', cmap = 'BrBG_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = np.arange(-0.1, 0.101, 0.1), cax=ax_cb) + cbar.set_label('Salinity difference (g kg$^{-1}$)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+' minus January, E3SM Arctic') + + #----------------------------------------------------------------------------------------- + + fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + + CS = ax.contourf(lon, lat, mixed_month[month_i], levels = np.arange(0, 400.1, 10), extend = 'max', cmap = 'Spectral_r', transform=ccrs.PlateCarree()) + + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) + fig.add_axes(ax_cb) + + cbar = colorbar(CS, ticks = [0, 100, 200, 300, 400], cax=ax_cb) + cbar.ax.set_yticklabels([0, 100, 200, 400, 600]) + cbar.set_label('Mixed layer depth (m)') + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.set_extent([-70, 20, -71, 1], ccrs.PlateCarree()) + ax.coastlines('50m') + ax.add_feature(cfeature.LAND, zorder=0) + + + ax.set_title(month[month_i]+', E3SM Arctic') + show() + + #----------------------------------------------------------------------------------------- + + + + diff --git 
a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Rossby_wave_propagation.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Rossby_wave_propagation.py new file mode 100644 index 00000000..a6a8f638 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Rossby_wave_propagation.py @@ -0,0 +1,92 @@ +#Program determines the Rossby wave propagation + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + +#Making pathway to folder with all data +directory = '/global/homes/r/rvwesten/E3SM/Data/' + +def ReadinData(filename): + +	fh = netcdf.Dataset(filename, 'r') + +	lon 		= fh.variables['lon'][:]	#Longitude +	lat 		= fh.variables['lat'][:]	#Latitude +	temp		= fh.variables['TEMP'][:] 	#Temperature + +	fh.close() + +	return lon, lat, temp + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lat_min	= -30 +lat_max	= -25 +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Mixed_layer/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time 		= np.zeros(len(files)*12) + +for year_i in range(len(files)): +	date  = files[year_i][-7:-3] +	year  = int(date[0:4]) + +	for month_i in range(12): +		time[year_i*12+month_i] = year + month_i / 12.0 + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, lat, temp = 
ReadinData(files[0]) + +lat_min_index	= (np.abs(lat - lat_min)).argmin() +lat_max_index	= (np.abs(lat - lat_max)).argmin()+1 + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all	= ma.masked_all((len(time), len(lon))) + +for file_i in range(len(files)): +	#Now determine for each month +	print(file_i) + +	lon, lat, temp = ReadinData(files[file_i]) + +	for month_i in range(len(temp)): +		#Add each month +		temp_all[file_i*12+month_i]	= np.mean(temp[month_i, lat_min_index:lat_max_index], axis = 0) + +#Now remove the monthly mean +for month_i in range(12): +	time_index		= np.arange(month_i, len(time), 12) +	temp_mean		= np.mean(temp_all[time_index], axis = 0) +	temp_all[time_index]	= temp_all[time_index] - temp_mean +#----------------------------------------------------------------------------------------- + +fig, ax	= subplots() + +CS	= contourf(lon, time, temp_all, levels = np.arange(-1, 1.01, 0.05), extend = 'both', cmap = 'RdBu_r') +cbar	= colorbar(CS) + +ax.set_ylim(500, 527) + +show() + + + diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/SALT_SO_0_100m_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/SALT_SO_0_100m_plot.py new file mode 100644 index 00000000..f01ee482 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/SALT_SO_0_100m_plot.py @@ -0,0 +1,88 @@ +#Program plots the vertically averaged (upper 100 m) salinity in the Southern Ocean + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy import stats +from scipy.stats import genextreme +from matplotlib.colors import LogNorm +from cartopy import crs as ccrs, feature as cfeature +from mpl_toolkits.axes_grid1 import make_axes_locatable + + +#Making pathway to folder with all data +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- 
+#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 100 +year_start = 196 +year_end = 200 + +files = glob.glob(directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- + +for file_i in range(len(files)): + print(files[file_i]) + fh = netcdf.Dataset(files[file_i], 'r') + + lon = fh.variables['lon'][:] + lat = fh.variables['lat'][:] + salt = fh.variables['SALT'][:] #Salinity + + fh.close() + + if file_i == 0: + salt_all = ma.masked_all((len(files), len(lat), len(lon))) + + salt_all[file_i] = salt + +salt_all = np.mean(salt_all, axis = 0) +#----------------------------------------------------------------------------------------- + +fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()}) + +CS = ax.contourf(lon, lat, salt_all, levels = np.arange(33, 37.1, 0.1), extend = 'both', cmap = 'BrBG_r', transform=ccrs.PlateCarree()) + +divider = make_axes_locatable(ax) +ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes) +fig.add_axes(ax_cb) + +cbar = colorbar(CS, ticks = np.arange(33, 37.1, 1), cax=ax_cb) +cbar.set_label('Salinity (g kg$^{-1}$)') + +gl = ax.gridlines(draw_labels=True) +gl.top_labels = False +gl.right_labels = False +ax.set_extent([-80, 130, -70, 25], ccrs.PlateCarree()) +ax.coastlines('50m') +ax.add_feature(cfeature.LAND, zorder=0) 
+ax.set_title('Salinity (0 - 100 m), E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +show() + diff --git a/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Water_properties_34S_plot.py b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Water_properties_34S_plot.py new file mode 100644 index 00000000..797bfb89 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_Arctic/Program/Ocean/Water_properties_34S_plot.py @@ -0,0 +1,290 @@ +#Program plots sections along 34S + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + #First get the u-grid + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 +year_start = 196 +year_end = 200 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time 
= np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_norm = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in range(len(depth)): + #Normalise each layer + layer_field_norm[depth_i] = layer_field[depth_i] / np.sum(layer_field[depth_i]) + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +vel_all = ma.masked_all((len(time), len(depth))) +vel_salt_all = ma.masked_all((len(time), len(depth))) +salt_all = ma.masked_all((len(time), len(depth), len(lon))) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) 
+ vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #----------------------------------------------------------------------------------------- + + #Save the meridional baroclinic transport + vel_all[file_i] = np.sum(vel_baroclinic * grid_x_norm, axis = 1) * 100.0 + vel_salt_all[file_i] = (-1.0 / 35.0) * transport_clin * salt_zonal / 10**6.0 + salt_all[file_i] = salt + +layer_norm = layer_field[:, 123] +layer_norm[-1] = layer_norm[-2] +vel_all = np.mean(vel_all, axis = 0) +vel_salt_all = np.mean(vel_salt_all, axis = 0) +vel_salt_all = vel_salt_all / layer_norm * 1000.0 +salt_all = np.mean(salt_all, axis = 0) + + +#----------------------------------------------------------------------------------------- +#Get the water properties + +#North Atlantic Deep Water (NADW) has negative meridional velocities +depth_index_NADW = np.where((depth >= 500) & (vel_all <= 0))[0][0] + +#Antarctic bottom water (ABW) is directly below the NADW, get the first index +depth_index_ABW = np.where((depth >= 3000) & (vel_all >= 0))[0][0] + +#The Antarctic Intermediate water is between the NADW and 500 m +depth_index_AIW = np.where(depth >= 500)[0][0] + + +depth_top = np.zeros(len(depth)) + +for depth_i in range(1, len(depth)): + depth_top[depth_i] = depth_top[depth_i - 1] + layer_norm[depth_i - 1] + +depth_AIW = depth_top[depth_index_AIW] +depth_NADW = depth_top[depth_index_NADW] +depth_ABW = depth_top[depth_index_ABW] + +lon_AIW_index = np.where(salt_all[depth_index_AIW].mask == False)[0] +lon_NADW_index = np.where(salt_all[depth_index_NADW].mask == False)[0] +lon_ABW_index = np.where(salt_all[depth_index_ABW].mask == False)[0] +lon_AIW_1, lon_AIW_2 = lon[lon_AIW_index[0]], lon[lon_AIW_index[-1]] +lon_NADW_1, 
lon_NADW_2 = lon[lon_NADW_index[0]], lon[lon_NADW_index[-1]] +lon_ABW_1, lon_ABW_2 = lon[lon_ABW_index[0]], lon[lon_ABW_index[-1]] + +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +if depth_AIW > depth_crop: + depth_AIW = ((depth_AIW - depth_crop) / factor_depth_crop) + depth_crop +if depth_NADW > depth_crop: + depth_NADW = ((depth_NADW - depth_crop) / factor_depth_crop) + depth_crop +if depth_ABW > depth_crop: + depth_ABW = ((depth_ABW - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +cNorm = colors.Normalize(vmin=-1, vmax= 1) +scalarMap = cm.ScalarMappable(norm=cNorm, cmap='RdBu_r') #Using colormap +color_south = scalarMap.to_rgba(-0.5) +color_north = scalarMap.to_rgba(0.5) + +fig, ax = subplots() + +ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k') +plot(vel_all, depth, '-k', linewidth = 2.0) + +ax.set_xlim(-2, 2) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.grid() + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.fill_betweenx(depth, vel_all, where = vel_all >= 0.0, color = color_north, alpha = 0.50) +ax.fill_betweenx(depth, vel_all, where = vel_all <= 0.0, color = color_south, alpha = 0.50) + +ax.set_xlabel('Meridional velocity (cm s$^{-1}$)') +ax.set_ylabel('Depth (m)') +ax.axvline(x = 0, linestyle = '--', color = 'k') + +ax.text(1.9, 350, 'ASW', 
verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 1350, 'NADW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.9, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) + +ax.set_title('Meridional velocity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-60, 20], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) +ax.plot([lon_AIW_1, lon_AIW_2], [depth_AIW, depth_AIW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_NADW_1, lon_NADW_2], [depth_NADW, depth_NADW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_ABW_1, lon_ABW_2], [depth_ABW, depth_ABW], linestyle = '--', linewidth = 2.0, color = 'k') + +CS = contourf(lon, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-60, 20) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-60, 21, 10)) +ax.set_xticklabels(['60$^{\circ}$W', '50$^{\circ}$W', '40$^{\circ}$W', '30$^{\circ}$W', '20$^{\circ}$W', '10$^{\circ}$W','0$^{\circ}$', '10$^{\circ}$E', '20$^{\circ}$E']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + + +ax.text(-18, 350, 'ASW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) 
+ax.text(-18, 850, 'AIW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1350, 'NADW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1900, 'ABW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) + +ax.set_title('Salinity, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +cNorm = colors.Normalize(vmin=34, vmax= 36) +scalarMap = cm.ScalarMappable(norm=cNorm, cmap='BrBG_r') #Using colormap +color_fresh = scalarMap.to_rgba(34.5) +color_salt = scalarMap.to_rgba(35.5) + +fig, ax = subplots() + +ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot(vel_salt_all, depth, '-k', linewidth = 2.0) + +ax.set_xlim(-1.5, 1.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.grid() + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_xlabel(r'Freshwater transport (mSv m$^{-1}$)') +ax.set_ylabel('Depth (m)') +ax.axvline(x = 0, linestyle = '--', color = 'k') + +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all >= 0.0, color = color_fresh, alpha = 0.50) +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all <= 0.0, color = color_salt, alpha = 0.50) + +ax.text(1.45, 350, 'ASW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1350, 'NADW', 
verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) + +ax.set_title('Freshwater transport, E3SM Arctic ('+str(year_start)+' - '+str(year_end)+')') + +show() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport.py new file mode 100644 index 00000000..02f00e92 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport.py @@ -0,0 +1,110 @@ +#Program determines the ACC strength + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + +	fh = netcdf.Dataset(filename, 'r') + +	#First get the u-grid +	lat 	= fh.variables['lat'][:]	#Latitude +	depth   = fh.variables['depth'][depth_min_index:depth_max_index] 	#Depth (m) +	layer	= fh.variables['layer'][depth_min_index:depth_max_index] 	#Layer thickness (m) +	grid_y	= fh.variables['DY'][:] 					#Meridional grid cell length (m) +	u_vel 	= fh.variables['UVEL'][depth_min_index:depth_max_index] 	#Zonal velocity (m/s) + +	fh.close() + +	return lat, depth, layer, grid_y, u_vel + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min 	= 0 +depth_max	= 6000 + +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time 		= np.zeros(len(files)) + +for year_i in 
range(len(files)): +	date  = files[year_i][-7:-3] +	year  = int(date[0:4]) +	time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth   	= fh.variables['depth'][:]	#Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index 	= (fabs(depth_min - depth)).argmin() +depth_max_index 	= (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, layer_field, grid_y, u_vel = ReadinData(files[0], depth_min_index, depth_max_index) + +for lat_i in range(len(lat)): +	#Get all the layers which have a maximum depth below given range +	if np.sum(layer_field[:, lat_i]) > depth_max: +		#Adjust the last layer +		layer_field[-1, lat_i]	-= (np.sum(layer_field[:, lat_i]) - depth_max) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all		= ma.masked_all(len(time)) + +for time_i in range(len(time)): +	#Now determine for each month +	print(time_i) + +	lat, depth, layer_field_old, grid_y, u_vel = ReadinData(files[time_i], depth_min_index, depth_max_index) + +	#Determine the zonal transport +	transport	= u_vel * layer_field * grid_y + +	#Determine the transport per depth layer (in Sv) and take sum to determine total transport +	transport_all[time_i]	= np.sum(transport) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) + 
+fh.variables['Transport'].long_name = 'Volume transport' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport_plot.py new file mode 100644 index 00000000..655e20a7 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/ACC_transport_plot.py @@ -0,0 +1,43 @@ +#Plot the ACC strength + +from pylab import * +import numpy +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Ocean/ACC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time = fh.variables['time'][:] +transport = fh.variables['Transport'][:] + +fh.close() + +fig, ax = subplots() + +ax.plot(time, transport, '-k', linewidth = 2.0) +ax.set_xlim(1, 60) +ax.set_ylim(90, 210) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) +ax.grid() + +ax.set_title('ACC strength, LR-E3SM') + +show() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport.py new file mode 100644 index 00000000..3990142e --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport.py @@ -0,0 +1,113 @@ +#Program determines the AMOC strength + +from pylab import * +import numpy +import 
datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + #First get the u-grid + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 1000 + +lat_FOV = 26 +section_name = 'FOV_section_26N' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + 
+#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel = ReadinData(files[0], depth_min_index, depth_max_index) + +for lon_i in range(len(lon)): + #Get all the layers which have a maximum depth below given range + if np.sum(layer_field[:, lon_i]) > depth_max: + #Adjust the last layer + layer_field[-1, lon_i] -= (np.sum(layer_field[:, lon_i]) - depth_max) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) + +for time_i in range(len(time)): + #Now determine for each month + print(time_i) + + lon, depth, layer_field_old, grid_x, v_vel = ReadinData(files[time_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[time_i] = np.sum(transport) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) + +fh.variables['Transport'].long_name = 'Volume transport' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport_plot.py new file mode 100644 index 00000000..c783859f --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/AMOC_transport_plot.py @@ 
-0,0 +1,45 @@ +#Program plots the AMOC strength + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 1000 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Ocean/AMOC_transport_depth_'+str(depth_min)+'-'+str(depth_max)+'_m.nc', 'r') + +time = fh.variables['time'][:] +transport = fh.variables['Transport'][:] + +fh.close() + +fig, ax = subplots() + +ax.fill_between([-100, 2500], 16, 19, alpha=0.25, edgecolor='orange', facecolor='orange') + +ax.plot(time+0.5, transport, '-k', linewidth = 2.0) +ax.set_xlim(1, 60) +ax.set_ylim(-2, 22) +ax.set_xlabel('Model year') +ax.set_ylabel('Volume transport (sv)') +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) +ax.grid() + +ax.set_title('AMOC strength, LR-E3SM') + +show() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Atlantic_sector_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Atlantic_sector_plot.py new file mode 100644 index 00000000..23246d83 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Atlantic_sector_plot.py @@ -0,0 +1,183 @@ +#Program plots the Atlantic Sector + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + #Note that the monthly averages are also available, but these are the annual means + lat = fh.variables['lat'][:] #Latitude + depth = 
fh.variables['depth'][:] #Depth (m) + temp = fh.variables['TEMP_mean'][:] #Temperature (m/s) + salt = fh.variables['SALT_mean'][:] #Salinity (g / kg) + u_vel = fh.variables['UVEL_mean'][:] #Zonal velocity (m / s) + dens = fh.variables['POT_DENS_mean'][:] #Potential density (g / kg) + + fh.close() + + return lat, depth, temp, salt, u_vel, dens + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +year_start = 55 +year_end = 59 + +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Atlantic_sector/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, temp, salt, u_vel, dens = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all = ma.masked_all((len(time), len(depth), len(lat))) +salt_all = ma.masked_all((len(time), len(depth), len(lat))) +u_vel_all = ma.masked_all((len(time), len(depth), len(lat))) +dens_all = ma.masked_all((len(time), len(depth), len(lat))) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lat, 
depth, temp, salt, u_vel, dens = ReadinData(files[file_i]) + + #Save the data + temp_all[file_i] = temp + salt_all[file_i] = salt + u_vel_all[file_i] = u_vel + dens_all[file_i] = dens + +#Take the time mean +temp_all = np.mean(temp_all, axis = 0) +salt_all = np.mean(salt_all, axis = 0) +u_vel_all = np.mean(u_vel_all, axis = 0) +dens_all = np.mean(dens_all, axis = 0) +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Salinity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, temp_all, levels = 
np.arange(-2, 20.01, 0.5), extend = 'both', cmap = 'Spectral_r') +cbar = colorbar(CS, ticks = np.arange(0, 20.01, 5)) +cbar.set_label('Temperature ($^{\circ}$C)') + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Temperature, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-20, 20.01, 1), extend = 'both', cmap = 'RdBu_r') +cbar = colorbar(CS, ticks = np.arange(-20, 20.01, 5)) +cbar.set_label('Zonal velocity (cm s$^{-1}$)') + +CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2) +CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1) +ax.clabel(CS_1, inline=True, fontsize=10, manual = [(-10, 500)]) + + +ax.set_xlim(-71, 1) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-70, 1, 10)) +ax.set_xticklabels(['70$^{\circ}$S', '60$^{\circ}$S', '50$^{\circ}$S', '40$^{\circ}$S', '30$^{\circ}$S', '20$^{\circ}$S','10$^{\circ}$S', 'Eq']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if 
labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Zonal velocity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +show() +#----------------------------------------------------------------------------------------- diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Drake_Passage_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Drake_Passage_plot.py new file mode 100644 index 00000000..ebc83788 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Drake_Passage_plot.py @@ -0,0 +1,186 @@ +#Program plots sections along Drake Passage + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + lat = fh.variables['lat'][:] #Latitude + depth = fh.variables['depth'][:] #Depth (m) + temp = fh.variables['TEMP'][:] #Temperature (m/s) + salt = fh.variables['SALT'][:] #Salinity (g / kg) + u_vel = fh.variables['UVEL'][:] #Zonal velocity (m / s) + dens = fh.variables['POT_DENS'][:] #Potential density (g / kg) + + fh.close() + + return lat, depth, temp, salt, u_vel, dens + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + + +year_start = 1 +year_end = 5 + +depth_min = 0 +depth_max = 6000 + +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/Drake_Passage/E3SM_data_year_*.nc') +files.sort() + 
+#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +time_start = (np.abs(time - year_start)).argmin() +time_end = (np.abs(time - year_end)).argmin() + 1 +files = files[time_start:time_end] + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lat, depth, temp, salt, u_vel, dens = ReadinData(files[0]) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +temp_all = ma.masked_all((len(time), len(depth), len(lat))) +salt_all = ma.masked_all((len(time), len(depth), len(lat))) +u_vel_all = ma.masked_all((len(time), len(depth), len(lat))) +dens_all = ma.masked_all((len(time), len(depth), len(lat))) + +for file_i in range(len(files)): + #Now determine for each month + print(file_i) + + lat, depth, temp, salt, u_vel, dens = ReadinData(files[file_i]) + + #Save the data + temp_all[file_i] = temp + salt_all[file_i] = salt + u_vel_all[file_i] = u_vel + dens_all[file_i] = dens + +#Take the time mean +temp_all = np.mean(temp_all, axis = 0) +salt_all = np.mean(salt_all, axis = 0) +u_vel_all = np.mean(u_vel_all, axis = 0) +dens_all = np.mean(dens_all, axis = 0) +#----------------------------------------------------------------------------------------- + +depth_crop = 1000 +factor_depth_crop = 4 +depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop + +#----------------------------------------------------------------------------------------- + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], 
y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Salinity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, temp_all, levels = np.arange(-2, 10.01, 0.25), extend = 'both', cmap = 'Spectral_r') +cbar = colorbar(CS, ticks = np.arange(-2, 10.01, 2)) +cbar.set_label('Temperature ($^{\circ}$C)') + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Temperature, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + 
+#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-80, 10], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) + +CS = contourf(lat, depth, u_vel_all*100, levels = np.arange(-40, 40.01, 2), extend = 'both', cmap = 'RdBu_r') +cbar = colorbar(CS, ticks = np.arange(-40, 40.01, 10)) +cbar.set_label('Zonal velocity (cm s$^{-1}$)') + +CS_1 = ax.contour(lat, depth, dens_all, levels = [1027], colors = 'k', linewidths = 2) +CS_2 = ax.contour(lat, depth, dens_all, levels = [1025, 1025.25, 1025.5, 1025.75, 1026, 1026.25, 1026.5, 1026.75, 1027.25, 1027.5, 1027.75, 1028], colors = 'k', linewidths = 1) + +ax.set_xlim(-67, -54.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-65, -54, 5)) +ax.set_xticklabels(['65$^{\circ}$S', '60$^{\circ}$S', '55$^{\circ}$S']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_title('Zonal velocity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +show() +#----------------------------------------------------------------------------------------- diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_34S_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_34S_plot.py new file mode 100644 index 00000000..8e1fd0c7 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_34S_plot.py @@ -0,0 +1,184 @@ +#Program plots the F_ovS and the components + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy import stats + +#Making pathway to folder with all data +directory = '../../Data/' + +def 
ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + time = fh.variables['time'][:] + transport = fh.variables['Transport'][:] #MOC strength (Sv) + FOV = fh.variables['F_OV'][:] #Fresh water + FOV_ASW = fh.variables['F_OV_ASW'][:] #Fresh water + FOV_AIW = fh.variables['F_OV_AIW'][:] #Fresh water + FOV_NADW = fh.variables['F_OV_NADW'][:] #Fresh water + FOV_ABW = fh.variables['F_OV_ABW'][:] #Fresh water + salt_ASW = fh.variables['SALT_ASW'][:] #Salinity + salt_AIW = fh.variables['SALT_AIW'][:] #Salinity + salt_NADW = fh.variables['SALT_NADW'][:] #Salinity + salt_ABW = fh.variables['SALT_ABW'][:] #Salininty + vel_ASW = fh.variables['VVEL_ASW'][:] #Meridional velocity + vel_AIW = fh.variables['VVEL_AIW'][:] #Meridional velocity + vel_NADW = fh.variables['VVEL_NADW'][:] #Meridional velocity + vel_ABW = fh.variables['VVEL_ABW'][:] #Meridional velocity + + fh.close() + + return time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW + + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +section_name = 'section_34S' + +#----------------------------------------------------------------------------------------- + +time, transport, FOV, FOV_ASW, FOV_AIW, FOV_NADW, FOV_ABW, salt_ASW, salt_AIW, salt_NADW, salt_ABW, vel_ASW, vel_AIW, vel_NADW, vel_ABW = ReadinData(directory+'Ocean/FOV_index_'+section_name+'.nc') + +FOV_rean, FOV_ASW_rean, FOV_AIW_rean, FOV_NADW_rean, FOV_ABW_rean, FOV_rean_gyre = -0.10138855319303171, -0.12769111454122556, 0.12011490376119702, -0.10644935101861515, 0.012637008605611988, 0.2136790553107374 + +fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_'+section_name+'.nc', 'r') + +FOV_gyre = fh.variables['F_gyre'][:] #Fresh water + +fh.close() + 
+#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_FOV_all = plot(time, FOV, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM') +graph_FOV_gyre = plot(time, FOV_gyre, '-r', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, E3SM') +graph_rean_all = plot(time, np.zeros(len(time))+FOV_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis') +graph_rean_gyre = plot(time, np.zeros(len(time))+FOV_rean_gyre, '--', color = 'firebrick', linewidth = 1.5, label = '$F_{\mathrm{azS}}$, Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 60) +ax.grid() +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) + +ax.fill_between([-100, 600], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange') + +graphs = graph_FOV_all + graph_FOV_gyre +legend_labels = [l.get_label() for l in graphs] +legend_1 = ax.legend(graphs, legend_labels, loc='lower left', ncol=1, framealpha = 1.0, numpoints = 1) + + +graphs = graph_rean_all + graph_rean_gyre +legend_labels = [l.get_label() for l in graphs] +legend_2 = ax.legend(graphs, legend_labels, loc = 'lower right', ncol=1, framealpha = 1.0, numpoints = 1) +ax.add_artist(legend_1) + + +ax.set_title('$F_{\mathrm{ovS}}$ and azonal (gyre) component ($F_{\mathrm{azS}}$), LR-E3SM') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_ASW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, 
np.zeros(len(time))+FOV_ASW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 60) +ax.grid() +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Atlantic Surface Water (ASW), LR-E3SM') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_AIW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_AIW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 60) +ax.grid() +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Antarctic Intermediate Water (AIW), LR-E3SM') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_NADW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_NADW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') 
+ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 60) +ax.grid() +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('North Atlantic Deep Water (NADW), LR-E3SM') + +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_E3SM = plot(time, FOV_ABW, '-k', linewidth = 1.5, label = 'E3SM') +graph_rean = plot(time, np.zeros(len(time))+FOV_ABW_rean, '--', color = 'gray', linewidth = 1.5, label = 'Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_ylim(-0.5, 0.5) +ax.set_xlim(1, 60) +ax.grid() +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) + +graphs = graph_E3SM + graph_rean + +legend_labels = [l.get_label() for l in graphs] +ax.legend(graphs, legend_labels, loc='upper right', ncol=1, framealpha = 1.0, numpoints = 1) + +ax.set_title('Antarctic Bottom Water (ABW), LR-E3SM') + +show() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_60_index.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_60_index.py new file mode 100644 index 00000000..21d89cf9 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_60_index.py @@ -0,0 +1,135 @@ +#Program determines the FOV index for 60N + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = 
fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_60N' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in 
range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) +transport_salt_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[file_i] = np.sum(transport) / 1000000.0 + + #Determine the total salinity transport + transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) +fh.createVariable('F_OV', float, ('time'), zlib=True) + +fh.variables['Transport'].longname = 'Volume transport' +fh.variables['F_OV'].longname = 'Fresh water transport' + 
+fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' +fh.variables['F_OV'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all +fh.variables['F_OV'][:] = transport_salt_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_convergence_plot.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_convergence_plot.py new file mode 100644 index 00000000..4b00ef18 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_convergence_plot.py @@ -0,0 +1,69 @@ +#Program plots the freshwater convergence (34S and 60N) + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename): + + fh = netcdf.Dataset(filename, 'r') + + time = fh.variables['time'][:] + FOV = fh.variables['F_OV'][:] #Fresh water + + fh.close() + + return time, FOV + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +time, FOV_34S = ReadinData(directory+'Ocean/FOV_index_section_34S.nc') +time, FOV_60N = ReadinData(directory+'Ocean/FOV_index_section_60N.nc') +#----------------------------------------------------------------------------------------- + +FOV_34S_rean, FOV_60N_rean = -0.10138855319303171, -0.027075354933136512 +#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +graph_rcp_34S = ax.plot(time, FOV_34S, '-k', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, E3SM') +graph_rcp_60N = ax.plot(time, FOV_60N, '-b', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, E3SM') +graph_rcp_conver = ax.plot(time, FOV_34S - FOV_60N, 
'-r', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, E3SM') + +graph_rean_34S = ax.plot(time, np.zeros(len(time))+FOV_34S_rean, '--', color = 'gray', linewidth = 1.5, label = '$F_{\mathrm{ovS}}$, Reanalysis') +graph_rean_60N = ax.plot(time, np.zeros(len(time))+FOV_60N_rean, '--', color = 'cyan', linewidth = 1.5, label = '$F_{\mathrm{ovN}}$, Reanalysis') +graph_rean_conver = ax.plot(time, np.zeros(len(time))+FOV_34S_rean - FOV_60N_rean, '--', color = 'firebrick', linewidth = 1.5, label = '$\Delta F_{\mathrm{ov}}$, Reanalysis') + +ax.set_xlabel('Model year') +ax.set_ylabel('Freshwater transport (Sv)') +ax.set_xlim(1, 60) +ax.set_ylim(-0.5, 0.5) +ax.set_xticks([1, 10, 20, 30, 40, 50, 60]) +ax.grid() + +ax.fill_between([00, 100], -0.28, -0.05, alpha=0.25, edgecolor='orange', facecolor='orange') + +graphs = graph_rcp_34S + graph_rcp_60N + graph_rcp_conver +legend_labels = [l.get_label() for l in graphs] +legend_1 = ax.legend(graphs, legend_labels, loc='upper left', ncol=1, framealpha = 1.0, numpoints = 1) + + +graphs = graph_rean_34S + graph_rean_60N + graph_rean_conver +legend_labels = [l.get_label() for l in graphs] +legend_2 = ax.legend(graphs, legend_labels, loc = 'upper right', ncol=1, framealpha = 1.0, numpoints = 1) +ax.add_artist(legend_1) + + +ax.set_title('f) Freshwater convergence, LR-E3SM') + +show() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_gyre.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_gyre.py new file mode 100644 index 00000000..15dc9739 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_gyre.py @@ -0,0 +1,121 @@ +#Program determines the azonal (gyre) component at 34S + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + depth = 
fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = (fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) + +for 
depth_i in range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_gyre_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the zonal means + v_vel_zonal = np.mean(v_vel, axis = 1) + salt_zonal = np.mean(salt, axis = 1) + + v_vel_prime = ma.masked_all(np.shape(v_vel)) + salt_prime = ma.masked_all(np.shape(salt)) + + for depth_i in range(len(depth)): + #Determine the differences with respect to the zonal means + v_vel_prime[depth_i] = v_vel[depth_i] - v_vel_zonal[depth_i] + salt_prime[depth_i] = salt[depth_i] - salt_zonal[depth_i] + + #Now determine the azonal component (gyre, in Sv) + transport_gyre_all[file_i] = (-1.0 / 35.0) * np.sum(v_vel_prime * salt_prime * layer_field_area) / 10**6.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_gyre_section_34S.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('F_gyre', float, ('time'), zlib=True) + +fh.variables['F_gyre'].longname = 'Freshwater transport by gyre' + +fh.variables['time'].units = 'Year' +fh.variables['F_gyre'].units = 'Sv' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['F_gyre'][:] = transport_gyre_all + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_index.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_index.py new file mode 100644 index 00000000..21acb79d --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/FOV_index.py @@ -0,0 +1,261 @@ +#Program determines the FOV index for 34S 
and the difference components + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf + +#Making pathway to folder with all data +directory = '../../Data/' + +def ReadinData(filename, depth_min_index, depth_max_index): + + fh = netcdf.Dataset(filename, 'r') + + lon = fh.variables['lon'][:] #Longitude + depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m) + layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m) + grid_x = fh.variables['DX'][:] #Zonal grid cell length (m) + v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index] #Meridional velocity (m/s) + salt = fh.variables['SALT'][depth_min_index:depth_max_index] #Salinity (g / kg) + + fh.close() + + return lon, depth, layer, grid_x, v_vel, salt + +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +depth_min = 0 +depth_max = 6000 + +section_name = 'FOV_section_34S' +#----------------------------------------------------------------------------------------- + +files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-7:-3] + year = int(date[0:4]) + time[year_i] = year + +#----------------------------------------------------------------------------------------- + +#Get all the relevant indices to determine the mass transport +fh = netcdf.Dataset(files[0], 'r') + +depth = fh.variables['depth'][:] #Depth (m) + +fh.close() + +#Get the dimensions of depth and latitude +depth_min_index = (fabs(depth_min - depth)).argmin() +depth_max_index = 
(fabs(depth_max - depth)).argmin() + 1 + +#----------------------------------------------------------------------------------------- +#Determine the section length per depth layer +lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index) + +#Normalise layer field per layer +layer_field_area = ma.masked_all(shape(layer_field)) +grid_x_norm = ma.masked_all((len(depth), len(lon))) + +for depth_i in range(len(depth)): + #Determine the surface area + layer_field_area[depth_i] = layer_field[depth_i] * grid_x + + #Normalise the length + grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask) + grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth) + +#----------------------------------------------------------------------------------------- + +#Define empty array's +transport_all = ma.masked_all(len(time)) +transport_salt_all = ma.masked_all(len(time)) +transport_salt_ASW_all = ma.masked_all(len(time)) +transport_salt_AIW_all = ma.masked_all(len(time)) +transport_salt_NADW_all = ma.masked_all(len(time)) +transport_salt_ABW_all = ma.masked_all(len(time)) +salt_ASW_all = ma.masked_all(len(time)) +salt_AIW_all = ma.masked_all(len(time)) +salt_NADW_all = ma.masked_all(len(time)) +salt_ABW_all = ma.masked_all(len(time)) +vel_ASW_all = ma.masked_all(len(time)) +vel_AIW_all = ma.masked_all(len(time)) +vel_NADW_all = ma.masked_all(len(time)) +vel_ABW_all = ma.masked_all(len(time)) + +for file_i in range(len(files)): + #Now determine for each month + print(files[file_i]) + + lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index) + + #Determine the meridional transport + transport = v_vel * layer_field * grid_x + + #Determine the section averaged velocity (barotropic) + vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x) + + #Determine the overturning velocity (baroclinic) + vel_baroclinic = v_vel - vel_barotropic + + #Determine the zonal means + salt_zonal = 
np.sum(salt * grid_x_norm, axis = 1) - 35.0 + transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1) + + #----------------------------------------------------------------------------------------- + #Get the water properties + water_prop = ma.masked_all((len(depth), len(lon))) + + #North Atlantic Deep Water (NADW) has negative meridional velocities + depth_index_NADW = np.where((depth >= 700) & (transport_clin <= 0))[0][0] + + #Antarctic bottom water (ABW) is directly below the NADW, get the first index + depth_index_ABW = np.where((depth >= 3000) & (transport_clin >= 0))[0] + + if len(depth_index_ABW) == 0: + #Assume below 4000m depth the ABW + depth_index_ABW = np.where(depth >= 4000)[0][0] + else: + depth_index_ABW = depth_index_ABW[0] + + for depth_i in range(len(depth)): + + if depth_i < depth_index_NADW: + #Surface water + water_prop[depth_i] = 1.0 + + if depth[depth_i] >= 500 and depth_i < depth_index_NADW: + #Antarctic Intermediate water + water_prop[depth_i] = 2.0 + + if depth_i >= depth_index_NADW and depth_i < depth_index_ABW: + #North Atlantic Deep Water (NADW) + water_prop[depth_i] = 3.0 + + if depth_i >= depth_index_ABW: + #The ABW is defined below the NADW + water_prop[depth_i] = 4.0 + + water_prop = ma.masked_array(water_prop, mask = v_vel.mask) + + #----------------------------------------------------------------------------------------- + area_ASW = ma.masked_where(water_prop != 1.0, layer_field_area) + area_AIW = ma.masked_where(water_prop != 2.0, layer_field_area) + area_NADW = ma.masked_where(water_prop != 3.0, layer_field_area) + area_ABW = ma.masked_where(water_prop != 4.0, layer_field_area) + area_ASW = area_ASW / np.sum(area_ASW) + area_AIW = area_AIW / np.sum(area_AIW) + area_NADW = area_NADW / np.sum(area_NADW) + area_ABW = area_ABW / np.sum(area_ABW) + + #Determine the spatial means + vel_ASW_all[file_i] = np.sum(vel_baroclinic * area_ASW) + vel_AIW_all[file_i] = np.sum(vel_baroclinic * area_AIW) + vel_NADW_all[file_i] = 
np.sum(vel_baroclinic * area_NADW) + vel_ABW_all[file_i] = np.sum(vel_baroclinic * area_ABW) + salt_ASW_all[file_i] = np.sum(salt * area_ASW) + salt_AIW_all[file_i] = np.sum(salt * area_AIW) + salt_NADW_all[file_i] = np.sum(salt * area_NADW) + salt_ABW_all[file_i] = np.sum(salt * area_ABW) + + #Determine the means over the water masses + transport_ASW = np.sum(ma.masked_where(water_prop != 1.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_AIW = np.sum(ma.masked_where(water_prop != 2.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_NADW = np.sum(ma.masked_where(water_prop != 3.0, vel_baroclinic * layer_field * grid_x), axis = 1) + transport_ABW = np.sum(ma.masked_where(water_prop != 4.0, vel_baroclinic * layer_field * grid_x), axis = 1) + + #Determine the transport per depth layer (in Sv) and take sum to determine total transport + transport_all[file_i] = np.sum(transport) / 1000000.0 + + #Determine the total salinity transport + transport_salt_all[file_i] = (-1.0 / 35.0) * np.sum(transport_clin * salt_zonal) / 1000000.0 + transport_salt_ASW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ASW * salt_zonal) / 1000000.0 + transport_salt_AIW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_AIW * salt_zonal) / 1000000.0 + transport_salt_NADW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_NADW * salt_zonal) / 1000000.0 + transport_salt_ABW_all[file_i] = (-1.0 / 35.0) * np.sum(transport_ABW * salt_zonal) / 1000000.0 + +#----------------------------------------------------------------------------------------- + +print('Data is written to file') +fh = netcdf.Dataset(directory+'Ocean/FOV_index_'+section_name[4:]+'.nc', 'w') + +fh.createDimension('time', len(time)) + +fh.createVariable('time', float, ('time'), zlib=True) +fh.createVariable('Transport', float, ('time'), zlib=True) +fh.createVariable('F_OV', float, ('time'), zlib=True) +fh.createVariable('F_OV_ASW', float, ('time'), zlib=True) +fh.createVariable('F_OV_AIW', float, ('time'), 
zlib=True) +fh.createVariable('F_OV_NADW', float, ('time'), zlib=True) +fh.createVariable('F_OV_ABW', float, ('time'), zlib=True) +fh.createVariable('SALT_ASW', float, ('time'), zlib=True) +fh.createVariable('SALT_AIW', float, ('time'), zlib=True) +fh.createVariable('SALT_NADW', float, ('time'), zlib=True) +fh.createVariable('SALT_ABW', float, ('time'), zlib=True) +fh.createVariable('VVEL_ASW', float, ('time'), zlib=True) +fh.createVariable('VVEL_AIW', float, ('time'), zlib=True) +fh.createVariable('VVEL_NADW', float, ('time'), zlib=True) +fh.createVariable('VVEL_ABW', float, ('time'), zlib=True) + +fh.variables['Transport'].longname = 'Volume transport' +fh.variables['F_OV'].longname = 'Fresh water transport' +fh.variables['F_OV_ASW'].longname = 'Fresh water transport (Atlantic Surface Water)' +fh.variables['F_OV_AIW'].longname = 'Fresh water transport (Antarctic Intermediate Water)' +fh.variables['F_OV_NADW'].longname = 'Fresh water transport (North Atlantic Deep Water)' +fh.variables['F_OV_ABW'].longname = 'Fresh water transport (Antarctic Bottom Water)' +fh.variables['SALT_ASW'].longname = 'Salinity (Atlantic Surface Water)' +fh.variables['SALT_AIW'].longname = 'Salinity (Antarctic Intermediate Water)' +fh.variables['SALT_NADW'].longname = 'Salinity (North Atlantic Deep Water)' +fh.variables['SALT_ABW'].longname = 'Salinity (Antarctic Bottom Water)' +fh.variables['VVEL_ASW'].longname = 'Meridional velocity (Atlantic Surface Water)' +fh.variables['VVEL_AIW'].longname = 'Meridional velocity (Antarctic Intermediate Water)' +fh.variables['VVEL_NADW'].longname = 'Meridional velocity (North Atlantic Deep Water)' +fh.variables['VVEL_ABW'].longname = 'Meridional velocity (Antarctic Bottom Water)' + +fh.variables['time'].units = 'Year' +fh.variables['Transport'].units = 'Sv' +fh.variables['F_OV'].units = 'Sv' +fh.variables['F_OV_ASW'].units = 'Sv' +fh.variables['F_OV_AIW'].units = 'Sv' +fh.variables['F_OV_NADW'].units = 'Sv' +fh.variables['F_OV_ABW'].units = 'Sv' 
+fh.variables['SALT_ASW'].units = 'g/kg' +fh.variables['SALT_AIW'].units = 'g/kg' +fh.variables['SALT_NADW'].units = 'g/kg' +fh.variables['SALT_ABW'].units = 'g/kg' +fh.variables['VVEL_ASW'].units = 'cm/s' +fh.variables['VVEL_AIW'].units = 'cm/s' +fh.variables['VVEL_NADW'].units = 'cm/s' +fh.variables['VVEL_ABW'].units = 'cm/s' + +#Writing data to correct variable +fh.variables['time'][:] = time +fh.variables['Transport'][:] = transport_all +fh.variables['F_OV'][:] = transport_salt_all +fh.variables['F_OV_ASW'][:] = transport_salt_ASW_all +fh.variables['F_OV_AIW'][:] = transport_salt_AIW_all +fh.variables['F_OV_NADW'][:] = transport_salt_NADW_all +fh.variables['F_OV_ABW'][:] = transport_salt_ABW_all +fh.variables['SALT_ASW'][:] = salt_ASW_all +fh.variables['SALT_AIW'][:] = salt_AIW_all +fh.variables['SALT_NADW'][:] = salt_NADW_all +fh.variables['SALT_ABW'][:] = salt_ABW_all +fh.variables['VVEL_ASW'][:] = vel_ASW_all * 100.0 +fh.variables['VVEL_AIW'][:] = vel_AIW_all * 100.0 +fh.variables['VVEL_NADW'][:] = vel_NADW_all * 100.0 +fh.variables['VVEL_ABW'][:] = vel_ABW_all * 100.0 + +fh.close() diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Atlantic_sector.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Atlantic_sector.py new file mode 100644 index 00000000..b3e33af5 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Atlantic_sector.py @@ -0,0 +1,203 @@ +#Generates the Atlantic sector fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = 
'/global/cfs/cdirs/m4259/E3SMv2_1/20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis/ocn/hist/' +directory = '../../Data/' + +def RHO_0(T, S): + #Reference density which is not pressure dependent + + rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) ) + + return rho +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lon_min = -50 +lon_max = 20 +lat_min = -71 +lat_max = 1 + +files = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_min_index = (np.abs(lon - lon_min)).argmin() +lon_max_index = (np.abs(lon - lon_max)).argmin()+1 +lat_min_index = (np.abs(lat - lat_min)).argmin() 
+lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_min_index:lon_max_index] +lat = lat[lat_min_index:lat_max_index] + +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer thickness (m) + +fh.close() + +#Use general depth coordinate +depth = np.zeros(len(layer)+1) + +for depth_i in range(1, len(depth)): + #Generate the depth boundaries + depth[depth_i] = depth[depth_i-1] + layer[depth_i-1, 87, 255] + +#Take the mean to find general depth array +depth = 0.5 * (depth[1:] + depth[:-1]) +layer = layer[:, lat_min_index:lat_max_index, lon_min_index:lon_max_index] + +#----------------------------------------------------------------------------------------- + +time_year = ma.masked_all(int(len(time)/12)) + +for year_i in range(int(np.min(time)), int(np.min(time))+len(time_year)): + #Now determine for each month + print(year_i) + time_year[year_i - int(np.min(time))] = year_i + files_month = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc') + files_month.sort() + + for file_i in range(len(files_month)): + #Loop over each month + os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_EC30to60E2r2_to_0.5x0.5degree_bilinear.nc -T . 
-O /global/homes/r/rvwesten/E3SM_LR/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal') + + fh = netcdf.Dataset('Regrid_month.nc', 'r') + + salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Salinity (g/kg) + temp = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Temperature (deg C) + u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index] #Zonal velocity (m/s) + + fh.close() + + salt = ma.masked_where(layer <= 0.0, salt) + temp = ma.masked_where(layer <= 0.0, temp) + u_vel = ma.masked_where(layer <= 0.0, u_vel) + dens = RHO_0(temp, salt) + + if file_i == 0: + #Empty array + salt_depth = ma.masked_all((12, len(depth), len(lat))) + temp_depth = ma.masked_all((12, len(depth), len(lat))) + u_vel_depth = ma.masked_all((12, len(depth), len(lat))) + dens_depth = ma.masked_all((12, len(depth), len(lat))) + + #Get the zonal mean + salt_depth[file_i] = np.mean(salt, axis = 2) + temp_depth[file_i] = np.mean(temp, axis = 2) + u_vel_depth[file_i] = np.mean(u_vel, axis = 2) + dens_depth[file_i] = np.mean(dens, axis = 2) + #------------------------------------------------------------------------------ + #Now convert to yearly averages + month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.]) + + #Fill the array's with the same dimensions + month_days_all = ma.masked_all((len(month_days), len(depth), len(lat))) + + for month_i in range(len(month_days)): + month_days_all[month_i] = month_days[month_i] + + #Now set mask + month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask) + + #Normalise the data + month_days_all = month_days_all / np.sum(month_days_all, axis = 0) + + #Determine the time mean over the months of choice + 
salt_depth_year = np.sum(salt_depth * month_days_all, axis = 0) + temp_depth_year = np.sum(temp_depth * month_days_all, axis = 0) + u_vel_depth_year= np.sum(u_vel_depth * month_days_all, axis = 0) + dens_depth_year = np.sum(dens_depth * month_days_all, axis = 0) + #----------------------------------------------------------------------------------------- + + filename = directory+'Data/Atlantic_sector/E3SM_data_year_'+str(year_i).zfill(4)+'.nc' + + fh = netcdf.Dataset(filename, 'w') + + fh.createDimension('month', 12) + fh.createDimension('depth', len(depth)) + fh.createDimension('lat', len(lat)) + + fh.createVariable('month', float, ('month'), zlib=True) + fh.createVariable('depth', float, ('depth'), zlib=True) + fh.createVariable('lat', float, ('lat'), zlib=True) + fh.createVariable('SALT', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('TEMP', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('UVEL', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('POT_DENS', float, ('month', 'depth', 'lat'), zlib=True) + fh.createVariable('SALT_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('TEMP_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('UVEL_mean', float, ('depth', 'lat'), zlib=True) + fh.createVariable('POT_DENS_mean', float, ('depth', 'lat'), zlib=True) + + fh.variables['depth'].longname = 'Mid-level depth' + fh.variables['lat'].longname = 'Array of latitudes' + fh.variables['SALT'].longname = 'Zonally-averaged salinity' + fh.variables['TEMP'].longname = 'Zonally-averaged potential temperature' + fh.variables['UVEL'].longname = 'Zonally-averaged zonal velocity' + fh.variables['POT_DENS'].longname = 'Zonally-averaged potential density' + fh.variables['SALT_mean'].longname = 'Zonally-averaged salinity (yearly mean)' + fh.variables['TEMP_mean'].longname = 'Zonally-averaged potential temperature (yearly mean)' + fh.variables['UVEL_mean'].longname = 'Zonally-averaged zonal velocity (yearly 
mean)' + fh.variables['POT_DENS_mean'].longname = 'Zonally-averaged potential density (yearly mean)' + + fh.variables['depth'].units = 'm' + fh.variables['lat'].units = 'degrees N' + fh.variables['SALT'].units = 'g/kg' + fh.variables['TEMP'].units = 'deg C' + fh.variables['UVEL'].units = 'm/s' + fh.variables['POT_DENS'].units = 'kg/m^3' + fh.variables['SALT_mean'].units = 'g/kg' + fh.variables['TEMP_mean'].units = 'deg C' + fh.variables['UVEL_mean'].units = 'm/s' + fh.variables['POT_DENS_mean'].units = 'kg/m^3' + + + #Writing data to correct variable + fh.variables['month'][:] = np.arange(12)+1 + fh.variables['depth'][:] = depth + fh.variables['lat'][:] = lat + fh.variables['SALT'][:] = salt_depth + fh.variables['TEMP'][:] = temp_depth + fh.variables['UVEL'][:] = u_vel_depth + fh.variables['POT_DENS'][:] = dens_depth + fh.variables['SALT_mean'][:] = salt_depth_year + fh.variables['TEMP_mean'][:] = temp_depth_year + fh.variables['UVEL_mean'][:] = u_vel_depth_year + fh.variables['POT_DENS_mean'][:] = dens_depth_year + + + fh.close() + + + diff --git a/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Drake.py b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Drake.py new file mode 100644 index 00000000..02f4fa39 --- /dev/null +++ b/ocean/FOV_E3SM/E3SM_LR/Program/Ocean/Field_generation_Drake.py @@ -0,0 +1,190 @@ +#Generates the Drake Passage fields on the 0.5x0.5 rectangular grid +#Note that you also need the Layer grid file, first interpolate the variable timeMonthly_avg_layerThickness for one particular month +#Place this file in the corresponding directory +#You probably need to change the interpolated output directory path + +from pylab import * +import numpy +import datetime +import time +import glob, os +import math +import netCDF4 as netcdf +import matplotlib.colors as colors +from scipy.interpolate import griddata + +directory_data = '/global/cfs/cdirs/m4259/E3SMv2_1/20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis/ocn/hist/' +directory = 
'../../Data/' + +def RHO_0(T, S): + #Reference density which is not pressure dependent + + rho = (999.842594 + 6.793952 * 10**(-2.0) * T - 9.095290 * 10**(-3.0)*T**(2.0) + 1.001685 * 10**(-4.0) * T**(3.0) - 1.120083 * 10**(-6.0) * T**(4.0) + 6.536332 * 10**(-9.0) * T**(5.0)+ (8.25917 * 10**(-1.0) - 4.4490 * 10**(-3.0) * T + 1.0485 * 10**(-4.0) * T**(2.0) - 1.2580 * 10**(-6.0) * T**(3.0) + 3.315 * 10**(-9.0) * T**(4.0)) * S+ (- 6.33761 * 10**(-3.0) + 2.8441 * 10**(-4.0) * T - 1.6871 * 10**(-5.0) * T**(2.0) + 2.83258 * 10**(-7.0) * T**(3.0)) * S**(3.0/2.0)+ (5.4705 * 10**(-4.0) - 1.97975 * 10**(-5.0) * T + 1.6641 * 10**(-6.0) * T**(2.0) - 3.1203 * 10**(-8.0) * T**(3.0)) * S**(2.0) ) + + return rho +#----------------------------------------------------------------------------------------- +#--------------------------------MAIN SCRIPT STARTS HERE---------------------------------- +#----------------------------------------------------------------------------------------- + +lat_min = -67 +lat_max = -54.9 +lon_section = -66.3 + +files = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc') +files.sort() + +#----------------------------------------------------------------------------------------- + +#Define empty array's +time = np.zeros(len(files)) + +for year_i in range(len(files)): + date = files[year_i][-13:-3] + year = int(date[0:4]) + month = int(date[5:7]) + + time[year_i] = year + (month-1) / 12.0 + + +#----------------------------------------------------------------------------------------- + +fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r') + +lon = fh.variables['lon'][:] +lat = fh.variables['lat'][:] + +lon_index = (np.abs(lon - lon_section)).argmin() +lat_min_index = (np.abs(lat - lat_min)).argmin() +lat_max_index = (np.abs(lat - lat_max)).argmin()+1 + +lon = lon[lon_index] +lat = lat[lat_min_index:lat_max_index] +layer = fh.variables['timeMonthly_avg_layerThickness'][0] #Layer 
# --- Drake Passage section extraction (continuation) ---
# NOTE(review): reconstructed from a newline-stripped patch.  Relies on
# `directory`, `directory_data`, `layer`, `lat`, `lon_index`,
# `lat_min_index`, `lat_max_index`, `time` and `RHO_0` defined earlier
# in this file.

fh.close()

#Use general depth coordinate: depth[k] is the upper interface of layer k,
#built from the layer thicknesses of one reference water column (j = 87, i = 255)
depth = np.zeros(len(layer) + 1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries
    depth[depth_i] = depth[depth_i - 1] + layer[depth_i - 1, 87, 255]

#Take the mean of the two interfaces to find the mid-layer depth array
depth = 0.5 * (depth[1:] + depth[:-1])
layer = layer[:, lat_min_index:lat_max_index, lon_index]

#Meridional length (m) of a 0.5-degree grid cell (latitude independent)
dy = 6371000 * 2 * np.pi * 0.5 / 360 + np.zeros(len(lat))

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time) / 12))

for year_i in range(int(np.min(time)), int(np.min(time)) + len(time_year)):
    #Now determine for each month of this model year
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Regrid one month of native MPAS output to the 0.5 x 0.5 degree grid
        #NOTE(review): mapping-file and output paths are machine specific (NERSC);
        #adjust before running elsewhere
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_EC30to60E2r2_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_LR/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_activeTracers_temperature,timeMonthly_avg_velocityZonal')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        salt  = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, :, lat_min_index:lat_max_index, lon_index]    #Salinity (g/kg)
        temp  = fh.variables['timeMonthly_avg_activeTracers_temperature'][0, :, lat_min_index:lat_max_index, lon_index] #Temperature (deg C)
        u_vel = fh.variables['timeMonthly_avg_velocityZonal'][0, :, lat_min_index:lat_max_index, lon_index]             #Zonal velocity (m/s)

        fh.close()

        #Mask dry cells (non-positive layer thickness)
        salt  = ma.masked_where(layer <= 0.0, salt)
        temp  = ma.masked_where(layer <= 0.0, temp)
        u_vel = ma.masked_where(layer <= 0.0, u_vel)
        dens  = RHO_0(temp, salt)

        if file_i == 0:
            #Empty arrays, one slot per month
            salt_depth  = ma.masked_all((12, len(depth), len(lat)))
            temp_depth  = ma.masked_all((12, len(depth), len(lat)))
            u_vel_depth = ma.masked_all((12, len(depth), len(lat)))
            dens_depth  = ma.masked_all((12, len(depth), len(lat)))

        #Store this month's section
        #(comment fixed: the original said 'zonal mean', but no zonal
        # averaging is performed here)
        salt_depth[file_i]  = salt
        temp_depth[file_i]  = temp
        u_vel_depth[file_i] = u_vel
        dens_depth[file_i]  = dens

    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its number of days
    #NOTE(review): February = 28 assumes a no-leap model calendar -- confirm
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Fill the arrays with the same dimensions
    month_days_all = ma.masked_all((len(month_days), len(depth), len(lat)))

    for month_i in range(len(month_days)):
        month_days_all[month_i] = month_days[month_i]

    #Now set mask
    month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask)

    #Normalise the weights so they sum to one over the year
    month_days_all = month_days_all / np.sum(month_days_all, axis = 0)

    #Determine the day-weighted annual mean
    salt_depth  = np.sum(salt_depth  * month_days_all, axis = 0)
    temp_depth  = np.sum(temp_depth  * month_days_all, axis = 0)
    u_vel_depth = np.sum(u_vel_depth * month_days_all, axis = 0)
    dens_depth  = np.sum(dens_depth  * month_days_all, axis = 0)

    #-----------------------------------------------------------------------------------------
    #Write the annual-mean Drake Passage section to file

    filename = directory+'Data/Drake_Passage/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lat', len(lat))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lat', float, ('lat'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('DY', float, ('lat'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('TEMP', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('UVEL', float, ('depth', 'lat'), zlib=True)
    fh.createVariable('POT_DENS', float, ('depth', 'lat'), zlib=True)

    fh.variables['depth'].longname    = 'Mid-level depth'
    fh.variables['lat'].longname      = 'Array of latitudes'
    fh.variables['layer'].longname    = 'Thickness of layer'
    fh.variables['DY'].longname       = 'y-spacing'
    fh.variables['SALT'].longname     = 'Salinity'
    fh.variables['TEMP'].longname     = 'Potential temperature'
    fh.variables['UVEL'].longname     = 'Zonal velocity'
    fh.variables['POT_DENS'].longname = 'Potential density'

    fh.variables['depth'].units    = 'm'
    fh.variables['lat'].units      = 'degrees N'
    fh.variables['layer'].units    = 'm'
    fh.variables['DY'].units       = 'm'
    fh.variables['SALT'].units     = 'g/kg'
    fh.variables['TEMP'].units     = 'deg C'
    fh.variables['UVEL'].units     = 'm/s'
    fh.variables['POT_DENS'].units = 'kg/m^3'

    #Writing data to correct variable
    fh.variables['depth'][:]    = depth
    fh.variables['lat'][:]      = lat
    fh.variables['layer'][:]    = layer
    fh.variables['DY'][:]       = dy
    fh.variables['SALT'][:]     = salt_depth
    fh.variables['TEMP'][:]     = temp_depth
    fh.variables['UVEL'][:]     = u_vel_depth
    fh.variables['POT_DENS'][:] = dens_depth

    fh.close()
#Generates the FOV fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable
#timeMonthly_avg_layerThickness for one particular month
#Place this file in the corresponding directory
#You probably need to change the interpolated output directory path

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
from scipy.interpolate import griddata

#Native MPAS model output and local (regridded) data directories
directory_data = '/global/cfs/cdirs/m4259/E3SMv2_1/20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis/ocn/hist/'
directory = '../../Data/'

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

files = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#Decimal model time (year + fractional month), one entry per monthly file
time = np.zeros(len(files))

for year_i in range(len(files)):
    date = files[year_i][-13:-3]   #'YYYY-MM-DD' stamp in the file name
    year = int(date[0:4])
    month = int(date[5:7])

    time[year_i] = year + (month - 1) / 12.0

#-----------------------------------------------------------------------------------------

fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

layer = fh.variables['timeMonthly_avg_layerThickness'][0]  #Layer thickness (m)

fh.close()

#Use general depth coordinate, built from the layer thicknesses of one
#reference water column (j = 87, i = 255)
depth = np.zeros(len(layer) + 1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries
    depth[depth_i] = depth[depth_i - 1] + layer[depth_i - 1, 87, 255]

#Take the mean to find general (mid-layer) depth array
depth = 0.5 * (depth[1:] + depth[:-1])

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time) / 12))

for year_i in range(int(np.min(time)), int(np.min(time)) + len(time_year)):
    #Now determine for each month of this model year
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Regrid one month of native MPAS output (machine-specific paths)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_EC30to60E2r2_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_LR/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity,timeMonthly_avg_velocityMeridional')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        lon = fh.variables['lon'][:]
        lat = fh.variables['lat'][:]
        salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0]  #Salinity (g/kg)
        v_vel = fh.variables['timeMonthly_avg_velocityMeridional'][0]     #Meridional velocity (m/s)

        fh.close()

        #Mask dry cells (non-positive layer thickness)
        salt = ma.masked_where(layer <= 0.0, salt)
        v_vel = ma.masked_where(layer <= 0.0, v_vel)

        for lat_section in [-34, 26, 60]:
            #Get the lat index of this section
            lat_index = (np.abs(lat - lat_section)).argmin()

            if lat_section == -34:
                #Section at 34S, start of Atlantic Sector
                lon_1, lon_2 = 250, 401
                section_name = 'FOV_section_34S'

                if year_i == int(np.min(time)):
                    #Get the (time-invariant) grid for the section, first year only
                    lon_34S = lon[lon_1:lon_2]
                    layer_34S = layer[:, lat_index, lon_1:lon_2]
                    dx_34S = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_34S))

            if lat_section == 26:
                #Section at 26N, RAPID array
                lon_1, lon_2 = 198, 335
                section_name = 'FOV_section_26N'

                if year_i == int(np.min(time)):
                    lon_26N = lon[lon_1:lon_2]
                    layer_26N = layer[:, lat_index, lon_1:lon_2]
                    dx_26N = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_26N))

            if lat_section == 60:
                #Section at 60N (comment fixed: original copy-pasted 'RAPID array',
                #which is the 26N section)
                lon_1, lon_2 = 230, 373
                section_name = 'FOV_section_60N'

                if year_i == int(np.min(time)):
                    lon_60N = lon[lon_1:lon_2]
                    layer_60N = layer[:, lat_index, lon_1:lon_2]
                    dx_60N = 6371000 * 2 * np.pi * np.cos(lat[lat_index] * np.pi / 180) * 0.5 / 360 + np.zeros(len(lon_60N))

            if file_i == 0 and lat_section == -34:
                #Make empty arrays for the months
                v_vel_34S = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_34S = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if file_i == 0 and lat_section == 26:
                v_vel_26N = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_26N = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if file_i == 0 and lat_section == 60:
                v_vel_60N = ma.masked_all((12, len(depth), lon_2 - lon_1))
                salt_60N = ma.masked_all((12, len(depth), lon_2 - lon_1))

            if lat_section == -34:
                #Now save the data to the general array
                v_vel_34S[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_34S[file_i] = salt[:, lat_index, lon_1:lon_2]

            if lat_section == 26:
                v_vel_26N[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_26N[file_i] = salt[:, lat_index, lon_1:lon_2]

            if lat_section == 60:
                v_vel_60N[file_i] = v_vel[:, lat_index, lon_1:lon_2]
                salt_60N[file_i] = salt[:, lat_index, lon_1:lon_2]

    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its number of days
    #NOTE(review): February = 28 assumes a no-leap model calendar -- confirm
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Fill the arrays with the same dimensions
    month_days_34S = ma.masked_all((len(month_days), len(depth), len(lon_34S)))
    month_days_26N = ma.masked_all((len(month_days), len(depth), len(lon_26N)))
    month_days_60N = ma.masked_all((len(month_days), len(depth), len(lon_60N)))

    for month_i in range(len(month_days)):
        month_days_34S[month_i] = month_days[month_i]
        month_days_26N[month_i] = month_days[month_i]
        month_days_60N[month_i] = month_days[month_i]

    #Now set mask
    month_days_34S = ma.masked_array(month_days_34S, mask = salt_34S.mask)
    month_days_26N = ma.masked_array(month_days_26N, mask = salt_26N.mask)
    month_days_60N = ma.masked_array(month_days_60N, mask = salt_60N.mask)

    #Normalise the weights so they sum to one over the year
    month_days_34S = month_days_34S / np.sum(month_days_34S, axis = 0)
    month_days_26N = month_days_26N / np.sum(month_days_26N, axis = 0)
    month_days_60N = month_days_60N / np.sum(month_days_60N, axis = 0)

    #-----------------------------------------------------------------------------------------

    #Determine the day-weighted annual means
    v_vel_34S = np.sum(v_vel_34S * month_days_34S, axis = 0)
    salt_34S = np.sum(salt_34S * month_days_34S, axis = 0)
    v_vel_26N = np.sum(v_vel_26N * month_days_26N, axis = 0)
    salt_26N = np.sum(salt_26N * month_days_26N, axis = 0)
    v_vel_60N = np.sum(v_vel_60N * month_days_60N, axis = 0)
    salt_60N = np.sum(salt_60N * month_days_60N, axis = 0)

    #-----------------------------------------------------------------------------------------
    #Write the 34S section

    filename = directory+'Data/FOV_section_34S/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_34S))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname = 'Array of longitudes'  #typo 'longtidues' fixed
    fh.variables['DX'].longname = 'x-spacing'
    fh.variables['VVEL'].longname = 'Velocity in meridional direction'
    fh.variables['SALT'].longname = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units = 'degrees E'
    fh.variables['DX'].units = 'm'
    fh.variables['VVEL'].units = 'm/s'
    fh.variables['SALT'].units = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_34S
    fh.variables['lon'][:] = lon_34S
    fh.variables['DX'][:] = dx_34S
    fh.variables['VVEL'][:] = v_vel_34S
    fh.variables['SALT'][:] = salt_34S

    fh.close()

    #-----------------------------------------------------------------------------------------
    #Write the 26N section

    filename = directory+'Data/FOV_section_26N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_26N))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname = 'Array of longitudes'  #typo 'longtidues' fixed
    fh.variables['DX'].longname = 'x-spacing'
    fh.variables['VVEL'].longname = 'Velocity in meridional direction'
    fh.variables['SALT'].longname = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units = 'degrees E'
    fh.variables['DX'].units = 'm'
    fh.variables['VVEL'].units = 'm/s'
    fh.variables['SALT'].units = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_26N
    fh.variables['lon'][:] = lon_26N
    fh.variables['DX'][:] = dx_26N
    fh.variables['VVEL'][:] = v_vel_26N
    fh.variables['SALT'][:] = salt_26N

    fh.close()

    #-----------------------------------------------------------------------------------------
    #Write the 60N section

    filename = directory+'Data/FOV_section_60N/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('depth', len(depth))
    fh.createDimension('lon', len(lon_60N))

    fh.createVariable('depth', float, ('depth'), zlib=True)
    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('layer', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('DX', float, ('lon'), zlib=True)
    fh.createVariable('VVEL', float, ('depth', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('depth', 'lon'), zlib=True)

    fh.variables['depth'].longname = 'Depth from surface to midpoint of layer'
    fh.variables['layer'].longname = 'Thickness of layer'
    fh.variables['lon'].longname = 'Array of longitudes'  #typo 'longtidues' fixed
    fh.variables['DX'].longname = 'x-spacing'
    fh.variables['VVEL'].longname = 'Velocity in meridional direction'
    fh.variables['SALT'].longname = 'Salinity'

    fh.variables['depth'].units = 'm'
    fh.variables['layer'].units = 'm'
    fh.variables['lon'].units = 'degrees E'
    fh.variables['DX'].units = 'm'
    fh.variables['VVEL'].units = 'm/s'
    fh.variables['SALT'].units = 'g/kg'

    #Writing data to correct variable
    fh.variables['depth'][:] = depth
    fh.variables['layer'][:] = layer_60N
    fh.variables['lon'][:] = lon_60N
    fh.variables['DX'][:] = dx_60N
    fh.variables['VVEL'][:] = v_vel_60N
    #(the SALT data write and fh.close() for this file follow directly below)
#NOTE(review): the next two statements are the closing lines of the 60N
#section write from the per-year loop above (one indent level in the original)
fh.variables['SALT'][:] = salt_60N

fh.close()


#=========================================================================================
# Field_generation_SALT.py
#=========================================================================================
#Generates the vertically-integrated salinity fields on the 0.5x0.5 rectangular grid
#Note that you also need the Layer grid file, first interpolate the variable
#timeMonthly_avg_layerThickness for one particular month
#Place this file in the corresponding directory
#You probably need to change the interpolated output directory path

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
from scipy.interpolate import griddata

directory_data = '/global/cfs/cdirs/m4259/E3SMv2_1/20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis/ocn/hist/'
directory = '../../Data/'

def Distance(lon1, lat1, lon2, lat2):
    """Returns distance (m) of two points located at the globe;
    coordinates need input in degrees."""

    #Convert to radians
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])

    #Haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = math.sin(dlat / 2.0)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2.0)**2
    c = 2.0 * math.asin(sqrt(a))
    r = 6371000.0  #Radius of earth in meters

    #Distance between the two points in meter
    return c * r

def GridCellComputer(longitude, latitude):
    """Determines the area (m^2) per grid cell;
    returns 2-D arrays (lat, lon) of zonal length, meridional length and area."""

    #Define empty arrays for the grid-cell lengths
    grid_x = np.zeros((len(latitude), len(longitude)))
    grid_y = np.zeros((len(latitude), len(longitude)))

    for lat_i in range(len(latitude)):
        #Zonal length (m) is latitude dependent, therefore take middle of grid cell
        length_zonal_grid = Distance(0.0, latitude[lat_i], np.mean(np.diff(longitude)), latitude[lat_i])
        #Meridional length (m) is longitude independent
        length_meridional_grid = Distance(0.0, 0.0, 0.0, np.mean(np.diff(latitude)))

        grid_x[lat_i] = length_zonal_grid
        grid_y[lat_i] = length_meridional_grid

    return grid_x, grid_y, grid_x * grid_y

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Atlantic/Southern Ocean domain and vertical averaging range (m)
lon_min = -110
lon_max = 143
lat_min = -80
lat_max = 25.5
depth_min = 0
depth_max = 100


files = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Decimal model time (year + fractional month), one entry per monthly file
time = np.zeros(len(files))

for year_i in range(len(files)):
    date = files[year_i][-13:-3]   #'YYYY-MM-DD' stamp in the file name
    year = int(date[0:4])
    month = int(date[5:7])

    time[year_i] = year + (month - 1) / 12.0

#-----------------------------------------------------------------------------------------

fh = netcdf.Dataset(directory+'Data/Layer_grid.nc', 'r')

lon = fh.variables['lon'][:]
lat = fh.variables['lat'][:]

grid_x, grid_y, area = GridCellComputer(lon, lat)

#Clip everything to the requested domain
lon_min_index = (np.abs(lon - lon_min)).argmin()
lon_max_index = (np.abs(lon - lon_max)).argmin() + 1
lat_min_index = (np.abs(lat - lat_min)).argmin()
lat_max_index = (np.abs(lat - lat_max)).argmin() + 1

lon = lon[lon_min_index:lon_max_index]
lat = lat[lat_min_index:lat_max_index]
area = area[lat_min_index:lat_max_index, lon_min_index:lon_max_index]
layer = fh.variables['timeMonthly_avg_layerThickness'][0, :, lat_min_index:lat_max_index, lon_min_index:lon_max_index]  #Layer thickness (m)

fh.close()

#Use general depth coordinate, built from one reference column (j = 87, i = 255)
depth = np.zeros(len(layer) + 1)

for depth_i in range(1, len(depth)):
    #Generate the depth boundaries
    depth[depth_i] = depth[depth_i - 1] + layer[depth_i - 1, 87, 255]

#Take the mean to find general depth array
depth = 0.5 * (depth[1:] + depth[:-1])


depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1
depth = depth[depth_min_index:depth_max_index]
layer = layer[depth_min_index:depth_max_index]

for lat_i in range(len(lat)):
    for lon_i in range(len(lon)):
        #Get all the columns whose selected layers reach below depth_max
        if np.sum(layer[:, lat_i, lon_i]) > depth_max:
            #Trim the last layer so the column extends exactly to depth_max
            layer[-1, lat_i, lon_i] -= (np.sum(layer[:, lat_i, lon_i]) - depth_max)

#Get the total vertical extent for each column
total_layer = np.sum(layer, axis = 0)
volume = total_layer * area
area = ma.masked_array(area, mask = volume.mask)

for depth_i in range(len(depth)):
    #Normalise the thicknesses so they become vertical-averaging weights
    layer[depth_i] = layer[depth_i] / total_layer

#-----------------------------------------------------------------------------------------

time_year = ma.masked_all(int(len(time) / 12))

for year_i in range(int(np.min(time)), int(np.min(time)) + len(time_year)):
    #Now determine for each month of this model year
    print(year_i)
    time_year[year_i - int(np.min(time))] = year_i
    files_month = glob.glob(directory_data+'20220715.submeso.piControl.ne30pg2_EC30to60E2r2.chrysalis.mpaso.hist.am.timeSeriesStatsMonthly.'+str(year_i).zfill(4)+'-*.nc')
    files_month.sort()

    for file_i in range(len(files_month)):
        #Regrid one month of native MPAS output (machine-specific paths)
        os.system('ncremap -i '+files_month[file_i]+' -P mpas -m /global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_EC30to60E2r2_to_0.5x0.5degree_bilinear.nc -T . -O /global/homes/r/rvwesten/E3SM_LR/Program/Ocean -o Regrid_month.nc -v timeMonthly_avg_activeTracers_salinity')

        fh = netcdf.Dataset('Regrid_month.nc', 'r')

        salt = fh.variables['timeMonthly_avg_activeTracers_salinity'][0, depth_min_index:depth_max_index, lat_min_index:lat_max_index, lon_min_index:lon_max_index]  #Salinity (g/kg)

        fh.close()

        #Mask dry cells (non-positive layer thickness)
        salt = ma.masked_where(layer <= 0.0, salt)

        if file_i == 0:
            #Empty array, one slot per month
            salt_depth = ma.masked_all((12, len(lat), len(lon)))

        #Get the thickness-weighted vertical-mean salinity
        salt_depth[file_i] = np.sum(salt * layer, axis = 0)

    #------------------------------------------------------------------------------
    #Now convert to yearly averages, weighting each month by its number of days
    #NOTE(review): February = 28 assumes a no-leap model calendar -- confirm
    month_days = np.asarray([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])

    #Fill the arrays with the same dimensions
    month_days_all = ma.masked_all((len(month_days), len(lat), len(lon)))

    for month_i in range(len(month_days)):
        month_days_all[month_i] = month_days[month_i]

    #Now set mask
    month_days_all = ma.masked_array(month_days_all, mask = salt_depth.mask)

    #Normalise the weights so they sum to one over the year
    month_days_all = month_days_all / np.sum(month_days_all, axis = 0)

    #Determine the day-weighted annual mean
    salt_depth = np.sum(salt_depth * month_days_all, axis = 0)

    #-----------------------------------------------------------------------------------------
    #Write the annual-mean, depth-averaged salinity field

    filename = directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_data_year_'+str(year_i).zfill(4)+'.nc'

    fh = netcdf.Dataset(filename, 'w')

    fh.createDimension('lon', len(lon))
    fh.createDimension('lat', len(lat))

    fh.createVariable('lon', float, ('lon'), zlib=True)
    fh.createVariable('lat', float, ('lat'), zlib=True)
    fh.createVariable('AREA', float, ('lat', 'lon'), zlib=True)
    fh.createVariable('VOLUME', float, ('lat', 'lon'), zlib=True)
    fh.createVariable('SALT', float, ('lat', 'lon'), zlib=True)

    fh.variables['lon'].longname = 'Array of T-longitudes'  #typo 'longtidues' fixed
    fh.variables['lat'].longname = 'Array of T-latitudes'
    fh.variables['AREA'].longname = 'Area of T cells'
    fh.variables['VOLUME'].longname = 'Volume of T cells'
    fh.variables['SALT'].longname = 'Depth-averaged salinity'

    fh.variables['lon'].units = 'degrees E'
    fh.variables['lat'].units = 'degrees N'
    fh.variables['AREA'].units = 'm^2'
    fh.variables['VOLUME'].units = 'm^3'
    fh.variables['SALT'].units = 'g/kg'

    #Writing data to correct variable
    fh.variables['lon'][:] = lon
    fh.variables['lat'][:] = lat
    fh.variables['AREA'][:] = area
    fh.variables['VOLUME'][:] = volume
    fh.variables['SALT'][:] = salt_depth

    fh.close()


#=========================================================================================
# Grid_plot.py
#=========================================================================================
#Program plots the resolution of the native grid

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
import matplotlib.tri as tri
from cartopy import crs as ccrs, feature as cfeature
from mpl_toolkits.axes_grid1 import make_axes_locatable

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

fh = netcdf.Dataset('/global/cfs/cdirs/e3sm/diagnostics/mpas_analysis/maps/map_EC30to60E2r2_to_0.5x0.5degree_bilinear.nc', 'r')

#Source-grid cell centres (radians -> degrees) and cell areas (sr -> deg^2)
lon = fh.variables['xc_a'][:] * 180 / np.pi
lat = fh.variables['yc_a'][:] * 180 / np.pi
area_a = fh.variables['area_a'][:] * (180 / np.pi)**2.0

fh.close()

#Wrap longitudes to [-180, 180]
lon[lon > 180] = lon[lon > 180] - 360.0

print(np.max(np.sqrt(area_a)))

fig, ax = plt.subplots(subplot_kw={'projection': ccrs.Robinson()})
#(the ax.tripcolor call that fills this figure follows directly below)
#Plot sqrt(cell area) as a proxy for the local grid resolution (degrees)
CS = ax.tripcolor(lon, lat, np.sqrt(area_a), vmin=0, vmax=0.6, cmap='Spectral_r', transform=ccrs.PlateCarree())

divider = make_axes_locatable(ax)
ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes)
fig.add_axes(ax_cb)

cbar = colorbar(CS, ticks = np.arange(0, 0.61, 0.2), cax=ax_cb)
cbar.set_label('Horizontal resolution ($^{\circ}$)')

ax.set_global()

ax.add_feature(cfeature.LAND, zorder=100, edgecolor='k')
ax.coastlines()

ax.set_title('Grid resolution, LR-E3SM')

show()


#=========================================================================================
# SALT_SO_0_100m_plot.py
#=========================================================================================
#Program plots the vertically averaged (upper 100 m) salinity in the Southern Ocean
#(typo 'Oceaan' fixed)

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
from scipy import stats
from scipy.stats import genextreme
from matplotlib.colors import LogNorm
from cartopy import crs as ccrs, feature as cfeature
from mpl_toolkits.axes_grid1 import make_axes_locatable


#Making pathway to folder with all data
directory = '../../Data/'

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Vertical averaging range (m) and model years to average over
depth_min = 0
depth_max = 100
year_start = 1
year_end = 5

files = glob.glob(directory+'Data/SALT_SO_'+str(depth_min)+'_'+str(depth_max)+'m/E3SM_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Model year per yearly file (from the 'YYYY' stamp in the file name)
time = np.zeros(len(files))

for year_i in range(len(files)):
    date = files[year_i][-7:-3]
    year = int(date[0:4])

    time[year_i] = year

#Restrict to the requested year range
time_start = (np.abs(time - year_start)).argmin()
time_end = (np.abs(time - year_end)).argmin() + 1
files = files[time_start:time_end]

#-----------------------------------------------------------------------------------------

for file_i in range(len(files)):
    print(files[file_i])
    fh = netcdf.Dataset(files[file_i], 'r')

    lon = fh.variables['lon'][:]
    lat = fh.variables['lat'][:]
    salt = fh.variables['SALT'][:]  #Depth-averaged salinity

    fh.close()

    if file_i == 0:
        salt_all = ma.masked_all((len(files), len(lat), len(lon)))

    salt_all[file_i] = salt

#Time mean over the selected years
salt_all = np.mean(salt_all, axis = 0)
#-----------------------------------------------------------------------------------------

fig, ax = plt.subplots(subplot_kw={'projection': ccrs.PlateCarree()})

CS = ax.contourf(lon, lat, salt_all, levels = np.arange(33, 37.1, 0.1), extend = 'both', cmap = 'BrBG_r', transform=ccrs.PlateCarree())

divider = make_axes_locatable(ax)
ax_cb = divider.new_horizontal(size="5%", pad=0.1, axes_class=plt.Axes)
fig.add_axes(ax_cb)

cbar = colorbar(CS, ticks = np.arange(33, 37.1, 1), cax=ax_cb)
cbar.set_label('Salinity (g kg$^{-1}$)')

gl = ax.gridlines(draw_labels=True)
gl.top_labels = False
gl.right_labels = False
ax.set_extent([-80, 130, -70, 25], ccrs.PlateCarree())
ax.coastlines('50m')
ax.add_feature(cfeature.LAND, zorder=0)
ax.set_title('Salinity (0 - 100 m), LR-E3SM ('+str(year_start)+' - '+str(year_end)+')')

show()


#=========================================================================================
# Water_properties_34S_plot.py
#=========================================================================================
#Program plots sections along 34S

from pylab import *
import numpy
import datetime
import time
import glob, os
import math
import netCDF4 as netcdf
import matplotlib.colors as colors
#Making pathway to folder with all data
directory = '../../Data/'

def ReadinData(filename, depth_min_index, depth_max_index):
    """Read one yearly 34S-section file; return the grid and the
    velocity/salinity fields restricted to the given depth-index range."""

    fh = netcdf.Dataset(filename, 'r')

    #First get the u-grid
    lon = fh.variables['lon'][:]                                   #Longitude
    depth = fh.variables['depth'][depth_min_index:depth_max_index] #Depth (m)
    layer = fh.variables['layer'][depth_min_index:depth_max_index] #Layer thickness (m)
    grid_x = fh.variables['DX'][:]                                 #Zonal grid cell length (m)
    v_vel = fh.variables['VVEL'][depth_min_index:depth_max_index]  #Meridional velocity (m/s)
    salt = fh.variables['SALT'][depth_min_index:depth_max_index]   #Salinity (g/kg)

    fh.close()

    return lon, depth, layer, grid_x, v_vel, salt

#-----------------------------------------------------------------------------------------
#--------------------------------MAIN SCRIPT STARTS HERE----------------------------------
#-----------------------------------------------------------------------------------------

#Depth range (m) and model years to average over
depth_min = 0
depth_max = 6000
year_start = 55
year_end = 59

section_name = 'FOV_section_34S'
#-----------------------------------------------------------------------------------------

files = glob.glob(directory+'Data/'+section_name+'/E3SM_data_year_*.nc')
files.sort()

#-----------------------------------------------------------------------------------------

#Model year per yearly file (from the 'YYYY' stamp in the file name)
time = np.zeros(len(files))

for year_i in range(len(files)):
    date = files[year_i][-7:-3]
    year = int(date[0:4])
    time[year_i] = year

#Restrict to the requested year range
time_start = (np.abs(time - year_start)).argmin()
time_end = (np.abs(time - year_end)).argmin() + 1
files = files[time_start:time_end]
#-----------------------------------------------------------------------------------------

#Get all the relevant indices to determine the mass transport
fh = netcdf.Dataset(files[0], 'r')

depth = fh.variables['depth'][:]  #Depth (m)

fh.close()

#Get the dimensions of depth and latitude
depth_min_index = (fabs(depth_min - depth)).argmin()
depth_max_index = (fabs(depth_max - depth)).argmin() + 1

#-----------------------------------------------------------------------------------------
#Determine the section length per depth layer
lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[0], depth_min_index, depth_max_index)

#Normalise layer field per layer so each level's weights sum to one
layer_field_norm = ma.masked_all(shape(layer_field))
grid_x_norm = ma.masked_all((len(depth), len(lon)))

for depth_i in range(len(depth)):
    #Normalise each layer
    layer_field_norm[depth_i] = layer_field[depth_i] / np.sum(layer_field[depth_i])

    #Normalise the (wet) section length at this level
    grid_x_depth = ma.masked_array(grid_x, mask = v_vel[depth_i].mask)
    grid_x_norm[depth_i] = grid_x_depth / np.sum(grid_x_depth)

#-----------------------------------------------------------------------------------------

#Define empty arrays (per year)
vel_all = ma.masked_all((len(time), len(depth)))
vel_salt_all = ma.masked_all((len(time), len(depth)))
salt_all = ma.masked_all((len(time), len(depth), len(lon)))

for file_i in range(len(files)):
    #Now determine for each year
    print(files[file_i])

    lon, depth, layer_field, grid_x, v_vel, salt = ReadinData(files[file_i], depth_min_index, depth_max_index)

    #Determine the meridional transport
    transport = v_vel * layer_field * grid_x

    #Determine the section-averaged velocity (barotropic component)
    vel_barotropic = np.sum(transport) / np.sum(layer_field * grid_x)

    #Determine the overturning velocity (baroclinic component)
    vel_baroclinic = v_vel - vel_barotropic

    #Determine the zonal means (salinity anomaly w.r.t. 35 g/kg)
    salt_zonal = np.sum(salt * grid_x_norm, axis = 1) - 35.0
    transport_clin = np.sum(vel_baroclinic * layer_field * grid_x, axis = 1)

    #-----------------------------------------------------------------------------------------

    #Save the meridional baroclinic velocity (cm/s) and the freshwater
    #transport contribution (Sv) per depth level
    vel_all[file_i] = np.sum(vel_baroclinic * grid_x_norm, axis = 1) * 100.0
    vel_salt_all[file_i] = (-1.0 / 35.0) * transport_clin * salt_zonal / 10**6.0
    salt_all[file_i] = salt

#Reference layer thicknesses from one mid-section column (i = 123)
layer_norm = layer_field[:, 123]
layer_norm[-1] = layer_norm[-2]
vel_all = np.mean(vel_all, axis = 0)
vel_salt_all = np.mean(vel_salt_all, axis = 0)
vel_salt_all = vel_salt_all / layer_norm * 1000.0
salt_all = np.mean(salt_all, axis = 0)


#-----------------------------------------------------------------------------------------
#Get the water properties

#North Atlantic Deep Water (NADW) has negative meridional velocities
depth_index_NADW = np.where((depth >= 500) & (vel_all <= 0))[0][0]

#Antarctic Bottom Water (ABW) is directly below the NADW, get the first index
depth_index_ABW = np.where((depth >= 3000) & (vel_all >= 0))[0][0]

#The Antarctic Intermediate Water is between the NADW and 500 m
depth_index_AIW = np.where(depth >= 500)[0][0]


depth_top = np.zeros(len(depth))

for depth_i in range(1, len(depth)):
    depth_top[depth_i] = depth_top[depth_i - 1] + layer_norm[depth_i - 1]

depth_AIW = depth_top[depth_index_AIW]
depth_NADW = depth_top[depth_index_NADW]
depth_ABW = depth_top[depth_index_ABW]

#Westernmost/easternmost wet longitudes at each water-mass level
lon_AIW_index = np.where(salt_all[depth_index_AIW].mask == False)[0]
lon_NADW_index = np.where(salt_all[depth_index_NADW].mask == False)[0]
lon_ABW_index = np.where(salt_all[depth_index_ABW].mask == False)[0]
lon_AIW_1, lon_AIW_2 = lon[lon_AIW_index[0]], lon[lon_AIW_index[-1]]
lon_NADW_1, lon_NADW_2 = lon[lon_NADW_index[0]], lon[lon_NADW_index[-1]]
lon_ABW_1, lon_ABW_2 = lon[lon_ABW_index[0]], lon[lon_ABW_index[-1]]

#-----------------------------------------------------------------------------------------

#Compress the depth axis below depth_crop by factor_depth_crop for plotting
depth_crop = 1000
factor_depth_crop = 4
depth[depth > depth_crop] = ((depth[depth > depth_crop] - depth_crop) / factor_depth_crop) + depth_crop

if depth_AIW > depth_crop:
    depth_AIW = ((depth_AIW - depth_crop) / factor_depth_crop) + depth_crop
if depth_NADW > depth_crop:
    depth_NADW = ((depth_NADW - depth_crop) / factor_depth_crop) + depth_crop
if depth_ABW > depth_crop:
    depth_ABW = ((depth_ABW - depth_crop) / factor_depth_crop) + depth_crop
#-----------------------------------------------------------------------------------------

cNorm = colors.Normalize(vmin=-1, vmax= 1)
scalarMap = cm.ScalarMappable(norm=cNorm, cmap='RdBu_r')  #Using colormap
color_south = scalarMap.to_rgba(-0.5)
color_north = scalarMap.to_rgba(0.5)

fig, ax = subplots()

#Water-mass boundaries
ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k')
ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k')
ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k')
plot(vel_all, depth, '-k', linewidth = 2.0)

ax.set_xlim(-2, 2)
ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0)
ax.grid()

#Relabel the compressed part of the depth axis with true depths
#NOTE(review): set_yticklabels without a preceding set_yticks triggers a
#matplotlib warning on recent versions -- confirm/fix when upgrading
labels = ax.get_yticks()
for label_i in range(len(labels)):
    if labels[label_i] > depth_crop:
        #Rescale the labels back to physical depth
        labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop

labels = labels.astype(int)
ax.set_yticklabels(labels)

ax.fill_betweenx(depth, vel_all, where = vel_all >= 0.0, color = color_north, alpha = 0.50)
ax.fill_betweenx(depth, vel_all, where = vel_all <= 0.0, color = color_south, alpha = 0.50)

ax.set_xlabel('Meridional velocity (cm s$^{-1}$)')
ax.set_ylabel('Depth (m)')
ax.axvline(x = 0, linestyle = '--', color = 'k')

ax.text(1.9, 350, 'ASW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16)
ax.text(1.9, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16)
ax.text(1.9, 1350, 'NADW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16)
ax.text(1.9, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16)

ax.set_title('Meridional velocity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')')
#(the script continues past this chunk, presumably with show())
+#----------------------------------------------------------------------------------------- + +fig, ax = subplots() + +ax.fill_between([-60, 20], y1 = np.zeros(2) + depth[0], y2 = np.zeros(2) + 2*depth[-1], color = 'gray', alpha = 0.50) +ax.plot([lon_AIW_1, lon_AIW_2], [depth_AIW, depth_AIW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_NADW_1, lon_NADW_2], [depth_NADW, depth_NADW], linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot([lon_ABW_1, lon_ABW_2], [depth_ABW, depth_ABW], linestyle = '--', linewidth = 2.0, color = 'k') + +CS = contourf(lon, depth, salt_all, levels = np.arange(34, 36.01, 0.1), extend = 'both', cmap = 'BrBG_r') +cbar = colorbar(CS, ticks = np.arange(34, 36.01, 0.5)) +cbar.set_label('Salinity (g kg$^{-1}$)') + +ax.set_xlim(-60, 20) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.set_ylabel('Depth (m)') + +ax.set_xticks(np.arange(-60, 21, 10)) +ax.set_xticklabels(['60$^{\circ}$W', '50$^{\circ}$W', '40$^{\circ}$W', '30$^{\circ}$W', '20$^{\circ}$W', '10$^{\circ}$W','0$^{\circ}$', '10$^{\circ}$E', '20$^{\circ}$E']) + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + + +ax.text(-18, 350, 'ASW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 850, 'AIW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1350, 'NADW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) +ax.text(-18, 1900, 'ABW', verticalalignment='center', horizontalalignment='center', color = 'k', fontsize=16) + +ax.set_title('Salinity, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +#----------------------------------------------------------------------------------------- + +cNorm = 
colors.Normalize(vmin=34, vmax= 36) +scalarMap = cm.ScalarMappable(norm=cNorm, cmap='BrBG_r') #Using colormap +color_fresh = scalarMap.to_rgba(34.5) +color_salt = scalarMap.to_rgba(35.5) + +fig, ax = subplots() + +ax.axhline(y = depth_AIW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_NADW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.axhline(y = depth_ABW, linestyle = '--', linewidth = 2.0, color = 'k') +ax.plot(vel_salt_all, depth, '-k', linewidth = 2.0) + +ax.set_xlim(-1.5, 1.5) +ax.set_ylim(((5500 - depth_crop) / factor_depth_crop) + depth_crop, 0) +ax.grid() + +labels = ax.get_yticks() +for label_i in range(len(labels)): + if labels[label_i] > depth_crop: + #Rescale the xlabels + labels[label_i] = ((labels[label_i] - depth_crop) * factor_depth_crop) + depth_crop + +labels = labels.astype(int) +ax.set_yticklabels(labels) + +ax.set_xlabel(r'Freshwater transport (mSv m$^{-1}$)') +ax.set_ylabel('Depth (m)') +ax.axvline(x = 0, linestyle = '--', color = 'k') + +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all >= 0.0, color = color_fresh, alpha = 0.50) +ax.fill_betweenx(depth, vel_salt_all, where = vel_salt_all <= 0.0, color = color_salt, alpha = 0.50) + +ax.text(1.45, 350, 'ASW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 850, 'AIW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1350, 'NADW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) +ax.text(1.45, 1900, 'ABW', verticalalignment='center', horizontalalignment='right', color = 'k', fontsize=16) + +ax.set_title('Freshwater transport, LR-E3SM ('+str(year_start)+' - '+str(year_end)+')') + +show() diff --git a/ocean/FOV_E3SM/README.md b/ocean/FOV_E3SM/README.md new file mode 100644 index 00000000..e2d83199 --- /dev/null +++ b/ocean/FOV_E3SM/README.md @@ -0,0 +1,15 @@ +# E3SM_FOV + +René M. 
van Westen
+
+These directories contain Python (v3) scripts for plotting/analysing various E3SM model output.
+
+Python scripts can be found in the directory 'Program'.
+Model output can be found in the directory 'Data'.
+
+The processed model output is stored as NETCDF files, and using the relevant scripts one can regenerate all the figures.
+I provided a selection of the model output (interpolated onto the 0.5x0.5 rectangular grid), which has been converted to yearly-averaged data (due to storage limitations).
+Some scripts (e.g., FOV_index.py and AMOC_transport.py) use the yearly-averaged model output, but you can also use the already available time series.
+
+The Field_generation_*.py scripts can only be used on the perlmutter.nersc.gov machine, where you need the following conda environment:
+source /global/common/software/e3sm/anaconda_envs/load_latest_e3sm_unified_pm-cpu.sh