Commit 8efb5139 authored by thopri

fixed handling of 2D variables, e.g. SSH

parent 2e344420
......@@ -83,6 +83,7 @@ class Extract:
self.tmp_valid = None
self.data_ind = None
self.nan_ind = None
self.isslab = False
# TODO: Why are we deepcopying the coordinates???
......@@ -99,8 +100,11 @@ class Extract:
self.jpj, self.jpi = DC.lonlat[grd]['lon'].shape
self.jpk = DC.depths[grd]['bdy_z'].shape[0]
# Set some constants
try:
self.jpk = DC.depths[grd]['bdy_z'].shape[0]
except KeyError:
self.jpk = 1
# Set some constants
# Make function of dst grid resolution (used in 1-2-1 weighting)
# if the weighting can only find one additional point this implies an
......@@ -127,12 +131,9 @@ class Extract:
dst_lon = DC.bdy_lonlat[self.g_type]['lon']
dst_lat = DC.bdy_lonlat[self.g_type]['lat']
try:
dst_dep = DC.depths[self.g_type]['bdy_z']
except KeyError:
dst_dep = np.zeros([1])
self.isslab = len(dst_dep) == 1
dst_dep = DC.depths[self.g_type]['bdy_z']
if dst_dep.size == len(dst_dep):
self.isslab = True
dst_dep = np.ones([1, len(dst_lon)])
# ??? Should this be read from settings?
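The slab check above hinges on a NumPy shape property: a genuine (nz, nbdy) depth array has a total size larger than the length of its leading axis, while a degenerate 1-D array has size == len. A minimal sketch of how that check behaves, using made-up shapes rather than a real grid:

```python
import numpy as np

def is_slab(dst_dep):
    """Mirror of the check in the diff: True when the boundary depth array
    is effectively 1-D, i.e. the variable has no real vertical axis."""
    dst_dep = np.asarray(dst_dep)
    return dst_dep.size == len(dst_dep)

# A 3-D tracer carries depths of shape (nz, nbdy); an SSH-style field does not.
print(is_slab(np.ones((31, 100))))   # False -> full-depth extraction
print(is_slab(np.ones(100)))         # True  -> treated as a 2-D slab
```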
......@@ -406,10 +407,11 @@ class Extract:
self.dst_dep = dst_dep
self.num_bdy = num_bdy
self.id_121 = id_121
if not self.isslab:
self.bdy_z = DC.depths[self.g_type]['bdy_H']
else:
self.bdy_z = np.zeros([1])
self.bdy_z = DC.depths[self.g_type]['bdy_H']
# if not self.isslab:
# self.bdy_z = DC.depths[self.g_type]['bdy_H']
# else:
# self.bdy_z = np.zeros([1])
self.dst_z = dst_dep
self.sc_z_len = sc_z_len
......@@ -575,7 +577,10 @@ class Extract:
# Note using isnan/sum is relatively fast, but less than
# bottleneck external lib
self.logger.info('SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
sc_array[0][self.t_mask == 0] = np.NaN
if not self.isslab and not self.key_vec:
sc_array[0][self.t_mask == 0] = np.NaN
if self.isslab and not self.key_vec:
sc_array[0][self.t_mask[:,0:1,:,:] == 0] = np.NaN
self.logger.info( 'SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
if not np.isnan(np.sum(meta_data[vn]['sf'])):
sc_array[0] *= meta_data[vn]['sf']
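The mask split above works because slicing with 0:1 (rather than 0) preserves a singleton depth axis, so the surface slice of the 3-D mask lines up with a (time, 1, y, x) slab field such as SSH. A small sketch with invented shapes:

```python
import numpy as np

# Hypothetical shapes: the mask is (1, nz, ny, nx); a 2-D slab field is
# read with a singleton depth axis, (1, 1, ny, nx).
t_mask = np.random.randint(0, 2, size=(1, 31, 10, 10))
ssh = np.random.rand(1, 1, 10, 10)

# 0:1 keeps the depth dimension, so the surface mask has shape
# (1, 1, 10, 10) and matches the slab field element-wise.
ssh[t_mask[:, 0:1, :, :] == 0] = np.nan
print(np.isnan(ssh).sum(), 'points masked')
```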
......@@ -592,23 +597,39 @@ class Extract:
# Now collapse the extracted data to an array
# containing only nearest neighbours to dest bdy points
# Loop over the depth axis
for dep in range(sc_z_len):
if self.isslab == False:
for dep in range(sc_z_len):
tmp_arr = [None, None]
# Consider squeezing
tmp_arr[0] = sc_array[0][0, dep, :, :].flatten('F') # [:,:,dep]
if not self.key_vec:
sc_bdy[vn, dep, :, :] = self._flat_ref(tmp_arr[0], ind)
else:
tmp_arr[1] = sc_array[1][0,dep,:,:].flatten('F') #[:,:,dep]
# Include in the collapse the rotation from the
# grid to real zonal direction, ie ij -> e
sc_bdy[vn, dep, :] = (tmp_arr[0][ind[:]] * self.gcos -
tmp_arr[1][ind[:]] * self.gsin)
# Include... meridional direction, ie ij -> n
sc_bdy[vn+1, dep, :] = (tmp_arr[1][ind[:]] * self.gcos +
tmp_arr[0][ind[:]] * self.gsin)
if self.isslab == True:
tmp_arr = [None, None]
# Consider squeezing
tmp_arr[0] = sc_array[0][0,dep,:,:].flatten('F') #[:,:,dep]
tmp_arr[0] = sc_array[0][0, 0, :, :].flatten('F') # [:,:,dep]
if not self.key_vec:
sc_bdy[vn, dep, :, :] = self._flat_ref(tmp_arr[0], ind)
sc_bdy[vn, 0, :, :] = self._flat_ref(tmp_arr[0], ind)
else:
tmp_arr[1] = sc_array[1][0,dep,:,:].flatten('F') #[:,:,dep]
tmp_arr[1] = sc_array[1][0,0,:,:].flatten('F') #[:,:,dep]
# Include in the collapse the rotation from the
# grid to real zonal direction, ie ij -> e
sc_bdy[vn, dep, :] = (tmp_arr[0][ind[:]] * self.gcos -
tmp_arr[1][ind[:]] * self.gsin)
sc_bdy[vn, 0, :] = (tmp_arr[0][ind[:]] * self.gcos -
tmp_arr[1][ind[:]] * self.gsin)
# Include... meridional direction, ie ij -> n
sc_bdy[vn+1, dep, :] = (tmp_arr[1][ind[:]] * self.gcos +
tmp_arr[0][ind[:]] * self.gsin)
sc_bdy[vn+1, 0, :] = (tmp_arr[1][ind[:]] * self.gcos +
tmp_arr[0][ind[:]] * self.gsin)
# End depths loop
# End depths loop
self.logger.info(' END DEPTHS LOOP ')
# End Looping over vars
self.logger.info(' END VAR LOOP ')
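For vector pairs the collapse also rotates velocities from the model grid onto geographic axes; the two expressions are the standard 2-D rotation u_e = u*cos(a) - v*sin(a) and v_n = v*cos(a) + u*sin(a). A worked example with illustrative values for gcos and gsin:

```python
import numpy as np

# gcos/gsin mirror the names used in the diff; the angle here is made up
# and would normally come from the grid-angle calculation.
angle = np.deg2rad(15.0)
gcos, gsin = np.cos(angle), np.sin(angle)
u_grid, v_grid = 0.4, 0.1               # components on the model grid

u_east = u_grid * gcos - v_grid * gsin      # ij -> e (zonal)
v_north = v_grid * gcos + u_grid * gsin     # ij -> n (meridional)
print(u_east, v_north)
```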
......@@ -889,6 +910,8 @@ class Extract:
'_bdy'+self.g_type.upper()+ '_y'+str(year)+'m'+'%02d' % month+'.nc'
ncml_out = glob(self.settings['ncml_out']+'/*'+str(self.g_type.upper())+'.ncml')
if len(ncml_out) == 0:
raise RuntimeError('NCML out file for grid '+str(self.g_type.upper())+' missing, please add it to the NCML directory')
ncml_out = ncml_out[0]
ncgen.CreateBDYNetcdfFile(f_out, self.num_bdy,
......@@ -906,12 +929,14 @@ class Extract:
# for v in self.variables:
for v in self.var_nam:
#if self.settings['dyn2d']: # Calculate depth averaged velocity
# tile_dz = np.tile(self.bdy_dz, [len(self.time_counter), 1, 1, 1])
# tmp_var = np.reshape(self.d_bdy[v][year]['data'][:,:,:], tile_dz.shape)
# tmp_var = np.nansum(tmp_var * tile_dz, 2) /np.nansum(tile_dz, 2)
#else: # Replace NaNs with specified fill value
tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:,:,:]),
if self.isslab == True: # 2D slab variable: keep only the surface level
tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:, :, :]),
self.settings['fv'],
self.d_bdy[v][year]['data'][:, :, :])
tmp_var = tmp_var[:,0:1,:]
# Replace NaNs with specified fill value
else:
tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:,:,:]),
self.settings['fv'],
self.d_bdy[v][year]['data'][:,:,:])
......
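In the write-out step the slab branch first replaces NaNs with the configured fill value and then keeps only the surface level, again as a singleton axis. A sketch using invented data and an assumed fill value:

```python
import numpy as np

fv = 9.96921e36                         # hypothetical fill value (the 'fv' setting)
data = np.random.rand(12, 31, 100)      # (time, depth, nbdy), made-up shape
data[0, 0, 0] = np.nan

tmp_var = np.where(np.isnan(data), fv, data)   # NaNs -> fill value
tmp_var = tmp_var[:, 0:1, :]                   # slab: keep a single depth level
print(tmp_var.shape)                           # (12, 1, 100)
```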
......@@ -19,8 +19,8 @@ def write_data_to_file(filename, variable_name, data,ncml_out):
"""
ncid = Dataset(filename, 'a', clobber=False, format='NETCDF4')
count = data.shape
time = ncml_parse.gen_dims_NCML(ncml_out,'time_counter')
var_list = ncml_parse.gen_var_list_NCML(ncml_out,time)
time = ncml_parse.dst_dims(ncml_out,'time_counter')
var_list = ncml_parse.dst_var_list(ncml_out,time)
three_dim_variables = var_list['3D_vars'] #['votemper', 'vosaline', 'N1p', 'N3n', 'N5s','vobtcrtx','vozocrtx','vobtcrty','vomecrty']
two_dim_variables = var_list['2D_vars'] #['sossheig', 'iicethic', 'ileadfra', 'isnowthi']
......
......@@ -114,9 +114,11 @@ class Depth:
for p in list(self.zpoints.keys()):
self.zpoints[p] = self.zpoints[p].reshape(zshapes[p])
self.logger.debug( 'Done loop, zpoints: %s ', self.zpoints['t'].shape)
self.zpoints['z'] = self.zpoints['t'][0,:]
self.zpoints['wz'] = self.zpoints['wt'][0,:]
self.logger.debug('Done loop, zpoints: %s ', self.zpoints['t'].shape)
nc.close()
......
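The new 'z' and 'wz' entries are simply the surface (k = 0) level of the t- and w-grid depth arrays, which gives SSH-type variables a degenerate vertical axis. A minimal illustration with invented sizes:

```python
import numpy as np

nz, nbdy = 31, 100                                 # illustrative sizes only
zpoints = {
    't':  np.linspace(0.5, 5000.0, nz)[:, None] * np.ones((nz, nbdy)),
    'wt': np.linspace(0.0, 4900.0, nz)[:, None] * np.ones((nz, nbdy)),
}
zpoints['z'] = zpoints['t'][0, :]     # surface t-depths, shape (nbdy,)
zpoints['wz'] = zpoints['wt'][0, :]   # surface w-depths, shape (nbdy,)
print(zpoints['z'].shape)
```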
......@@ -8,7 +8,7 @@ import logging
logger = logging.getLogger(__name__)
def gen_dims_NCML(ncmlfile,orgName):
def dst_dims(ncmlfile,orgName):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
dimensions = ncml_dict['ns0:netcdf']['ns0:dimension']
if type(dimensions) is not list:
......@@ -21,7 +21,7 @@ def gen_dims_NCML(ncmlfile,orgName):
break
return dim_name
def gen_var_list_NCML(ncmlfile, time):
def dst_var_list(ncmlfile, time):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
variables = ncml_dict['ns0:netcdf']['ns0:variable']
var_3D = []
......@@ -36,7 +36,7 @@ def gen_var_list_NCML(ncmlfile, time):
}
return var_type
def gen_src_var_list_NCML(ncmlfile):
def src_var_list(ncmlfile):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
variables = ncml_dict['ns0:netcdf']['ns0:variable']
var_list = []
......@@ -44,7 +44,7 @@ def gen_src_var_list_NCML(ncmlfile):
var_list.append(variables[i]['@name'])
return var_list
def gen_var_NCML(ncmlfile, orgName):
def dst_var(ncmlfile, orgName):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
variables = ncml_dict['ns0:netcdf']['ns0:variable']
if type(variables) is not list:
......@@ -63,7 +63,7 @@ def gen_var_NCML(ncmlfile, orgName):
break
return var
def gen_attrib_NCML(ncmlfile,name):
def dst_glob_attrib(ncmlfile,name):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
nc_attrib = ncml_dict['ns0:netcdf']['ns0:attribute']
if type(nc_attrib) is not list:
......@@ -76,7 +76,7 @@ def gen_attrib_NCML(ncmlfile,name):
attrib_val = nc_attrib[i]['@value']
return attrib_val
def gen_var_attrib_NCML(ncmlfile, variable,name):
def dst_var_attrib(ncmlfile, variable,name):
ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
variables = ncml_dict['ns0:netcdf']['ns0:variable']
if type(variables) is not list:
......
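The renamed helpers (dst_dims, dst_var_list, src_var_list, dst_var, dst_glob_attrib, dst_var_attrib) all share the same parse pattern. A minimal sketch of that pattern follows; the helper name is hypothetical and the 'ns0:' keys assume xmltodict's default namespace prefixing shown in the diff:

```python
import xml.etree.ElementTree as ET
import xmltodict

def list_variable_names(ncmlfile):
    """Hypothetical helper showing the shared pattern: parse the NcML file,
    convert it to a dict and walk the <variable> entries."""
    ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
    variables = ncml_dict['ns0:netcdf']['ns0:variable']
    if not isinstance(variables, list):    # a single variable parses to a dict
        variables = [variables]
    return [v['@name'] for v in variables]
```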
......@@ -64,10 +64,4 @@
<attribute name="long_name" value="Temperature" />
<attribute name="grid" value="bdyT" />
</variable>
<variable name="votemper" orgName="votemper" shape="time_counter z yb xb" type="float">
<attribute name="units" value="C" />
<attribute name="short_name" value="votemper" />
<attribute name="long_name" value="Temperature" />
<attribute name="grid" value="bdyT" />
</variable>
</netcdf>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
<dimension name="y" orgName="y" />
<dimension name="x" orgName="x" />
<dimension name="z" orgName="z" />
<dimension name="yb" orgName="yb" />
<dimension name="xb" orgName="xb" />
<dimension name="time_counter" orgName="time_counter" isUnlimited="true" />
<attribute name="institution" value="National Oceanography Centre, Livepool, U.K." />
<variable name="bdy_msk" orgName="bdy_msk" shape="y x" type="float">
<attribute name="short_name" value="bdy_msk" />
<attribute name="units" value="unitless" />
<attribute name="long_name" value="Structured boundary mask" />
</variable>
<variable name="depthz" orgName="depthz" shape="z yb xb" type="float">
<attribute name="axis" value="Depth" />
<attribute name="short_name" value="depthz" />
<attribute name="units" value="m" />
<attribute name="long_name" value="Depth" />
</variable>
<variable name="nav_lat" orgName="nav_lat" shape="y x" type="float">
<attribute name="axis" value="Latitude" />
<attribute name="short_name" value="nav_lat" />
<attribute name="units" value="degrees_east" />
<attribute name="long_name" value="Latitude" />
</variable>
<variable name="nav_lon" orgName="nav_lon" shape="y x" type="float">
<attribute name="axis" value="Longitude" />
<attribute name="short_name" value="nav_lon" />
<attribute name="units" value="degrees_east" />
<attribute name="long_name" value="Longitude" />
</variable>
<variable name="nbidta" orgName="nbidta" shape="yb xb" type="int">
<attribute name="short_name" value="nbidta" />
<attribute name="units" value="unitless" />
<attribute name="long_name" value="Bdy i indices" />
</variable>
<variable name="nbjdta" orgName="nbjdta" shape="yb xb" type="int">
<attribute name="short_name" value="nbjdta" />
<attribute name="units" value="unitless" />
<attribute name="long_name" value="Bdy j indices" />
</variable>
<variable name="nbrdta" orgName="nbrdta" shape="yb xb" type="int">
<attribute name="short_name" value="nbrdta" />
<attribute name="units" value="unitless" />
<attribute name="long_name" value="Bdy discrete distance" />
</variable>
<variable name="time_counter" orgName="time_counter" shape="time_counter" type="float">
<attribute name="axis" value="T" />
<attribute name="standard_name" value="time" />
<attribute name="units" value="seconds since 1960-01-01 00:00:00" />
<attribute name="title" value="Time" />
<attribute name="long_name" value="Time axis" />
</variable>
<variable name="sossheig" orgName="sossheig" shape="time_counter yb xb" type="float">
<attribute name="units" value="m" />
<attribute name="short_name" value="sossheig" />
<attribute name="long_name" value="Sea Surface Height" />
<attribute name="grid" value="bdyT" />
</variable>
</netcdf>
\ No newline at end of file
......@@ -319,6 +319,11 @@ def process_bdy(setup_filepath=0, mask_gui=False):
logger.info('Generated BDY %s information', grd)
logger.info('Grid %s has shape %s', grd, bdy_ind[grd].bdy_i.shape)
for grd in ['z']:
bdy_ind[grd] = gen_grid.Boundary(bdy_msk, settings, 't')
logger.info('Generated BDY %s information', 't')
logger.info('Grid %s has shape %s', grd, bdy_ind['t'].bdy_i.shape)
# TODO: Write in option to separate out disconnected LBCs
# Write out grid information to coordinates.bdy.nc
......@@ -334,6 +339,9 @@ def process_bdy(setup_filepath=0, mask_gui=False):
for grd in ['t', 'u', 'v']:
nbdy[grd] = len(bdy_ind[grd].bdy_i[:, 0])
for grd in ['z']:
nbdy[grd] = len(bdy_ind['t'].bdy_i[:, 0])
# Gather grid information
# TODO: insert some logic here to account for 2D or 3D src_zgr
......@@ -355,13 +363,14 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# TODO: put conditional here as we may want to keep data on parent
# vertical grid
DstCoord.depths = {'t': {}, 'u': {}, 'v': {}}
DstCoord.depths = {'t': {}, 'u': {}, 'v': {}, 'z':{}}
for grd in ['t', 'u', 'v']:
for grd in ['t', 'u', 'v','z']:
DstCoord.depths[grd]['bdy_H'] = np.nanmax(z.zpoints['w'+grd], axis=0)
DstCoord.depths[grd]['bdy_dz'] = np.diff(z.zpoints['w'+grd], axis=0)
DstCoord.depths[grd]['bdy_z'] = z.zpoints[grd]
logger.info('Depths defined')
# Gather horizontal grid information
......@@ -390,7 +399,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
nc.close()
DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}}
DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}, 'z':{}}
nc = GetFile(settings['dst_hgr'])
......@@ -399,6 +408,10 @@ def process_bdy(setup_filepath=0, mask_gui=False):
for grd in ['t', 'u', 'v']:
DstCoord.lonlat[grd]['lon'] = nc['glam' + grd][0, :, :]
DstCoord.lonlat[grd]['lat'] = nc['gphi' + grd][0, :, :]
for grd in ['z']:
DstCoord.lonlat[grd]['lon'] = nc['glamt'][0, :, :]
DstCoord.lonlat[grd]['lat'] = nc['gphit'][0, :, :]
nc.close()
......@@ -406,13 +419,13 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# Identify lons/lats of the BDY points
DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {}}
DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {},'z':{}}
for grd in ['t', 'u', 'v']:
for grd in ['t', 'u', 'v', 'z']:
for l in ['lon', 'lat']:
DstCoord.bdy_lonlat[grd][l] = np.zeros(nbdy[grd])
for grd in ['t', 'u', 'v']:
for grd in ['t', 'u', 'v', 'z']:
for i in range(nbdy[grd]):
x = bdy_ind[grd].bdy_i[i, 1]
y = bdy_ind[grd].bdy_i[i, 0]
......@@ -431,6 +444,8 @@ def process_bdy(setup_filepath=0, mask_gui=False):
reader = factory.GetReader(settings['src_dir'],t_adj)
for grd in ['t', 'u', 'v']:
bdy_ind[grd].source_time = reader[grd]
for grd in ['z']:
bdy_ind[grd].source_time = reader['t']
unit_origin = '%d-01-01 00:00:00' %settings['base_year']
......@@ -511,16 +526,16 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# Define mapping of variables to grids with a dictionary
emap = {}
grd = [ 't', 'u', 'v']
grd = [ 't', 'u', 'v', 'z']
#pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
pair = [None, None, None]
pair = [None, None, None, None]
# TODO: The following is a temporary stop gap to assign variables for both CMEMS downloads
# and existing variable names. In future we need a slicker way of determining the variables to extract.
# Perhaps by scraping the .ncml file - this way biogeochemical tracers
# can be included in the ln_tra = .true. option without having to
# explicitly declare them.
var_list = ncml_parse.gen_src_var_list_NCML(settings['src_dir'])
var_list = ncml_parse.src_var_list(settings['src_dir'])
var_in = {}
for g in range(len(grd)):
var_in[grd[g]] = []
......@@ -540,7 +555,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
if ln_dyn2d:
if 'sossheig' in var_list:
var_in['t'].extend(['sossheig'])
var_in['z'].extend(['sossheig'])
if ln_ice:
if 'iicethic' and 'ileadfra' and 'isnowthi' in var_list:
......
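With the 'z' grid in place, sea surface height is routed to that surface-only grid instead of 't'. A short sketch of the resulting mapping (variable names are those in the diff; everything else is illustrative):

```python
# Variables are grouped per grid before extraction; SSH now goes to the
# new surface-only 'z' grid rather than the full 't' grid.
grd = ['t', 'u', 'v', 'z']
var_in = {g: [] for g in grd}

var_list = ['votemper', 'vosaline', 'vozocrtx', 'vomecrty', 'sossheig']
if 'sossheig' in var_list:
    var_in['z'].append('sossheig')
print(var_in['z'])   # ['sossheig']
```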