# ===================================================================
# The contents of this file are dedicated to the public domain.  To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================

'''
Created on Wed Sep 12 08:02:46 2012

The main application script for the NRCT. 

@author James Harle
@author John Kazimierz Farey
@author Srikanth Nagella
$Last commit on:$
'''

# pylint: disable=E1103
# pylint: disable=no-name-in-module

# External imports
import time
import logging
import numpy as np
from PyQt5.QtWidgets import QMessageBox

# Local imports
from pynemo import pynemo_settings_editor
from pynemo import nemo_bdy_ncgen as ncgen
from pynemo import nemo_bdy_ncpop as ncpop
from pynemo import nemo_bdy_source_coord as source_coord
from pynemo import nemo_bdy_dst_coord as dst_coord
from pynemo import nemo_bdy_setup as setup
from pynemo import nemo_bdy_gen_c as gen_grid
from pynemo import nemo_coord_gen_pop as coord
from pynemo import nemo_bdy_zgrv2 as zgrv
from pynemo import nemo_bdy_extr_tm3 as extract

from pynemo.reader.factory import GetFile
from pynemo.reader import factory
from pynemo.tide import nemo_bdy_tide3 as tide
from pynemo.tide import nemo_bdy_tide_ncgen
from pynemo.utils import Constants
from pynemo.gui.nemo_bdy_mask import Mask as Mask_File

class Grid(object):
    """ 
    A Grid object that stores bdy grid information
    """    
    def __init__(self):
        self.bdy_i       = None # bdy indices
        self.bdy_r       = None # associated rimwidth values
        self.grid_type   = None # this can be T/U/V
        self.fname_2     = None # 2nd file for vector rotation
        self.max_i       = None # length of i-axis in fname_2
        self.max_j       = None # length of j-axis in fname_2
        self.source_time = None # netcdftime information from parent files

logger = logging.getLogger(__name__)
logging.basicConfig(filename='nrct.log', level=logging.INFO)

def process_bdy(setup_filepath=0, mask_gui=False):
    """ 
    Main entry for processing BDY lateral boundary conditions.

    This is the main script that handles all the calls to generate open 
    boundary conditions for a given regional domain. Input options are handled 
    in a NEMO style namelist (namelist.bdy). There is an optional GUI allowing
    the user to create a mask that defines the extent of the regional model.

    Args:
        setup_filepath (str) : file path to find namelist.bdy
        mask_gui       (bool): whether use of the GUI is required

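    Example:
        A minimal sketch of a typical call, assuming a namelist.bdy file in
        the current working directory::

            process_bdy('namelist.bdy', mask_gui=False)
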
    """
    # Start Logger
    
    logger.info('Start NRCT Logging: '+time.asctime())
    logger.info('============================================')
    
    SourceCoord = source_coord.SourceCoord()
    DstCoord    = dst_coord.DstCoord()

    Setup = setup.Setup(setup_filepath) # default settings file
    settings = Setup.settings

    logger.info('Reading namelist settings completed')

    bdy_msk = _get_mask(Setup, mask_gui)
    DstCoord.bdy_msk = bdy_msk == 1
    
    logger.info('Reading mask completed')
    
    bdy_ind = {} # define a dictionary to hold the grid information
    
    for grd in ['t', 'u', 'v']:
        bdy_ind[grd] = gen_grid.Boundary(bdy_msk, settings, grd)
        logger.info('Generated BDY %s information', grd)
        logger.info('Grid %s has shape %s', grd, bdy_ind[grd].bdy_i.shape)

    # TODO: Write in option to separate out disconnected LBCs
    
    # Write out grid information to coordinates.bdy.nc

    co_set = coord.Coord(settings['dst_dir']+'/coordinates.bdy.nc', bdy_ind)
    co_set.populate(settings['dst_hgr'])
    logger.info('File: coordinates.bdy.nc generated and populated')

    # Identify number of boundary points
    
    nbdy = {}
    
    for grd in ['t', 'u', 'v']:
        nbdy[grd] = len(bdy_ind[grd].bdy_i[:, 0])

    # Gather grid information
    
    # TODO: insert some logic here to account for 2D or 3D src_zgr
    
    logger.info('Gathering grid information')
    nc = GetFile(settings['src_zgr'])
    SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
    nc.close()

    # Define z at t/u/v points

    z = zgrv.Depth(bdy_ind['t'].bdy_i,
                   bdy_ind['u'].bdy_i,
                   bdy_ind['v'].bdy_i, settings)

    # TODO: put conditional here as we may want to keep data on parent
    #       vertical grid
   
    DstCoord.depths = {'t': {}, 'u': {}, 'v': {}}

    for grd in ['t', 'u', 'v']:
        DstCoord.depths[grd]['bdy_H']  = np.nanmax(z.zpoints['w'+grd], axis=0)
        DstCoord.depths[grd]['bdy_dz'] = np.diff(z.zpoints['w'+grd], axis=0)
        DstCoord.depths[grd]['bdy_z']  = z.zpoints[grd]
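
    # A sketch of the expected contents for each grid (assuming the zpoints
    # arrays are dimensioned [depth, bdy_point]): 'bdy_H' holds the water
    # column depth at each bdy point, 'bdy_dz' the thickness between
    # successive w levels and 'bdy_z' the depth of each grid level.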

    logger.info('Depths defined')
    
    # Gather horizontal grid information

    nc = GetFile(settings['src_hgr'])
    SourceCoord.lon = nc['glamt'][:,:]
    SourceCoord.lat = nc['gphit'][:,:]
    
    try: # if they are masked arrays, convert them to normal arrays
        SourceCoord.lon = SourceCoord.lon.filled()
    except AttributeError:
        pass
    try:
        SourceCoord.lat = SourceCoord.lat.filled()
    except AttributeError:
        pass
        
    nc.close()

    DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}}

    nc = GetFile(settings['dst_hgr'])

    # Read and assign horizontal grid data
    
    for grd in ['t', 'u', 'v']:
        DstCoord.lonlat[grd]['lon'] = nc['glam' + grd][0, :, :]
        DstCoord.lonlat[grd]['lat'] = nc['gphi' + grd][0, :, :]
    
    nc.close()

    logger.info('Grid coordinates defined')
    
    # Identify lons/lats of the BDY points
    
    DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {}}
     
    for grd in ['t', 'u', 'v']:
        for l in ['lon', 'lat']:
            DstCoord.bdy_lonlat[grd][l] = np.zeros(nbdy[grd])

    for grd in ['t', 'u', 'v']:
        for i in range(nbdy[grd]):
            x = bdy_ind[grd].bdy_i[i, 1]
            y = bdy_ind[grd].bdy_i[i, 0]
            DstCoord.bdy_lonlat[grd]['lon'][i] =                              \
                                              DstCoord.lonlat[grd]['lon'][x, y]
            DstCoord.bdy_lonlat[grd]['lat'][i] =                              \
                                              DstCoord.lonlat[grd]['lat'][x, y]

        DstCoord.lonlat[grd]['lon'][DstCoord.lonlat[grd]['lon'] > 180] -= 360

    logger.info('BDY lons/lats identified from %s', settings['dst_hgr'])

    # Set up time information
    
    t_adj = settings['src_time_adj'] # any time adjustments?
    reader = factory.GetReader(settings['src_dir'],t_adj)
    for grd in ['t', 'u', 'v']:
        bdy_ind[grd].source_time = reader[grd]
 
    unit_origin = '%d-01-01 00:00:00' %settings['base_year']
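    # e.g. base_year = 1960 gives unit_origin = '1960-01-01 00:00:00'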

    # Extract source data on dst grid

    if settings['tide']:
        if settings['tide_model']=='tpxo':
            cons = tide.nemo_bdy_tpx7p2_rot(
                Setup, DstCoord, bdy_ind['t'], bdy_ind['u'], bdy_ind['v'],
                                                            settings['clname'])
        elif settings['tide_model']=='fes':
            logger.error('Tidal model: %s, not yet implemented',
                         settings['tide_model'])
            return
        else:
            logger.error('Tidal model: %s, not recognised', 
                         settings['tide_model'])
            return
            
        write_tidal_data(Setup, DstCoord, bdy_ind, settings['clname'], cons)

        logger.info('Tidal constituents written to file')
    
    # Set the year and month range
    
    yr_000 = settings['year_000']
    yr_end = settings['year_end']
    mn_000 = settings['month_000']
    mn_end = settings['month_end']
    
    if yr_000 > yr_end:
        logger.error('Please check the nn_year_000 and nn_year_end '
                     'values in input bdy file')
        return
    
    yrs = list(range(yr_000, yr_end+1))
    
    if yr_end - yr_000 >= 1:
        if len(range(mn_000, mn_end+1)) < 12:
            logger.info('Warning: All months will be extracted as the number '+
                        'of years is greater than 1')
        mns = list(range(1,13))
    else:
        mn_000 = settings['month_000']
        mn_end = settings['month_end']
        if mn_end > 12 or mn_000 < 1:
            logger.error('Please check the nn_month_000 and nn_month_end '
                         'values in input bdy file')
            return
        mns = list(range(mn_000, mn_end+1))
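
    # e.g. year_000 = year_end = 1979 with month_000 = 11 and month_end = 12
    #      gives yrs = [1979] and mns = [11, 12]; if year_end > year_000, all
    #      twelve months are extracted for every year in the range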
    
    # Enter the loop for each year and month extraction
    
    logger.info('Entering extraction loop')
    
    ln_dyn2d   = settings['dyn2d']            
    ln_dyn3d   = settings['dyn3d'] # are total or bc velocities required
    ln_tra     = settings['tra']          
    ln_ice     = settings['ice']   

    # Define mapping of variables to grids with a dictionary
    
    emap = {}
    grd  = [  't',  'u',  'v']
    pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
    
    # TODO: The following is a temporary stopgap to assign variables. In
    # future we need a slicker way of determining the variables to extract,
    # perhaps by scraping the .ncml file - this way biogeochemical tracers
    # can be included in the ln_tra = .true. option without having to
    # declare them explicitly.

    var_in = {}
    for g in range(len(grd)):
        var_in[grd[g]] = []
        
    if ln_tra:
        var_in['t'].extend(['votemper', 'vosaline'])
        
    if ln_dyn2d or ln_dyn3d:
        var_in['u'].extend(['vozocrtx', 'vomecrty'])
        var_in['v'].extend(['vozocrtx', 'vomecrty'])
    
    if ln_dyn2d:
        var_in['t'].extend(['sossheig'])
        
    if ln_ice:
        var_in['t'].extend(['ice1', 'ice2', 'ice3'])
    
    # As variables are associated with grd there must be a filename attached
    # to each variable
    
    for g in range(len(grd)):
        
        if len(var_in[grd[g]])>0:
            emap[grd[g]]= {'variables': var_in[grd[g]],
                           'pair'     : pair[g]} 
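
    # Example of the resulting mapping when both ln_tra and ln_dyn2d are set
    # in the namelist (illustrative only):
    #
    #   emap = {'t': {'variables': ['votemper', 'vosaline', 'sossheig'],
    #                 'pair': None},
    #           'u': {'variables': ['vozocrtx', 'vomecrty'], 'pair': 'uv'},
    #           'v': {'variables': ['vozocrtx', 'vomecrty'], 'pair': 'uv'}}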

    extract_obj = {}
    
    # Initialise the mapping indices for each grid 
    
    for key, val in list(emap.items()):
        
        extract_obj[key] = extract.Extract(Setup.settings, 
                                           SourceCoord, DstCoord,
                                           bdy_ind, val['variables'], 
                                           key, val['pair'])
    
    # TODO: Write the nearest neighbour parent grid point to each bdy point
    #       possibly to the coordinates.bdy.nc file to help with comparison
    #       plots later.
        
    for year in yrs:
        for month in mns:
            for key, val in list(emap.items()):
                
                # Extract the data for a given month and year
                
                extract_obj[key].extract_month(year, month)
                
                # Interpolate/stretch in time if time frequency is not a factor
                # of a month and/or parent:child calendars differ
                
                extract_obj[key].time_interp(year, month)
                
                # Finally write to file
                
                extract_obj[key].write_out(year, month, bdy_ind[key], 
                                           unit_origin)
                
    logger.info('End NRCT Logging: '+time.asctime())
    logger.info('==========================================')
                

def write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons):
    """ 
    Write the tidal harmonic data for each constituent out to NetCDF files.

    Args:
        setup_var     (obj) : Setup object holding the namelist settings
        dst_coord_var (obj) : DstCoord object with destination grid information
        grid          (dict): dictionary of Grid objects for the t/u/v grids
        tide_cons     (list): tidal constituents to write (keys of the clname
                              settings entry)
        cons          (dict): dictionary of cosine/sine harmonic data keyed
                              by 'cos'/'sin'
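
    Example:
        For constituent 'M2' on the u grid the output file would be named as
        below (assuming dst_dir='./' and fn='NNA' in the namelist; both
        values are illustrative)::

            ./NNA_bdytide_M2_grd_U.nc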
    """
    indx = 0
    
    # Mapping of variable names to grid types
    
    tmap = {}
    grd = ['t', 'u', 'v']
    var = ['z', 'u', 'v']
    des = ['tidal elevation components for:',
           'tidal east velocity components for:',
           'tidal north velocity components for:']
    
    for g in range(len(grd)):
        bdy_r = grid[grd[g]].bdy_r
        tmap[grd[g]]= {'nam': var[g], 'des': des[g], 
                       'ind': np.where(bdy_r == 0),
                       'nx' : len(grid[grd[g]].bdy_i[bdy_r == 0, 0])}
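
    # Illustrative example of one entry of the resulting mapping (the index
    # array and point count depend on the rim-zero bdy points of each grid):
    #
    #   tmap['u'] = {'nam': 'u',
    #                'des': 'tidal east velocity components for:',
    #                'ind': <indices where bdy_r == 0>,
    #                'nx' : <number of rim-zero u points>}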
        
    # Write constituents to file
    
    for tide_con in tide_cons:
        
        const_name = setup_var.settings['clname'][tide_con]
        const_name = const_name.replace("'", "").upper()

        for key,val in list(tmap.items()):
            
            fout_tide = setup_var.settings['dst_dir']+             \
                        setup_var.settings['fn']+                  \
                        '_bdytide_'+const_name+'_grd_'+            \
                        val['nam'].upper()+'.nc'
            
            nemo_bdy_tide_ncgen.CreateBDYTideNetcdfFile(fout_tide, 
                            val['nx'], 
                            dst_coord_var.lonlat['t']['lon'].shape[1],
                            dst_coord_var.lonlat['t']['lon'].shape[0], 
                            val['des']+tide_con, 
                            setup_var.settings['fv'], key.upper())
            
            ncpop.write_data_to_file(fout_tide, val['nam']+'1', 
                                     cons['cos'][val['nam']][indx])
            ncpop.write_data_to_file(fout_tide, val['nam']+'2', 
                                     cons['sin'][val['nam']][indx])
            ncpop.write_data_to_file(fout_tide, 'bdy_msk',
                                     dst_coord_var.bdy_msk)
            ncpop.write_data_to_file(fout_tide, 'nav_lon',
                                     dst_coord_var.lonlat['t']['lon'])
            ncpop.write_data_to_file(fout_tide, 'nav_lat',
                                     dst_coord_var.lonlat['t']['lat'])
            ncpop.write_data_to_file(fout_tide, 'nbidta',
                                     grid[key].bdy_i[val['ind'], 0]+1)
            ncpop.write_data_to_file(fout_tide, 'nbjdta',
                                     grid[key].bdy_i[val['ind'], 1]+1)
            ncpop.write_data_to_file(fout_tide, 'nbrdta',
                                     grid[key].bdy_r[val['ind']]+1)
        
        # Iterate over constituents
        
        indx += 1

def _get_mask(Setup, mask_gui):
    """ 
    Read mask information from file or open GUI.

    This method reads the mask information from the netcdf file or opens a gui
    to create a mask depending on the mask_gui input. The default mask data 
    uses the bathymetry and applies a 1pt halo.

    Args:
        Setup    (obj) : Setup object containing the bdy settings
        mask_gui (bool): whether use of the GUI is required

    Returns:
        numpy.array     : a mask array of the regional domain
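
    Example:
        A minimal sketch of the default border masking applied when no mask
        file is supplied (this mirrors the logic at the end of this
        function)::

            bdy_msk = np.ones((4, 5))
            tmp = np.ones(bdy_msk.shape, dtype=bool)
            tmp[1:-1, 1:-1] = False
            bdy_msk[tmp] = -1  # outermost ring of points flagged as -1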
    """
    # Initialise bdy_msk array
    
    bdy_msk = None
    
    if mask_gui: # Do we activate the GUI
        
        # TODO: I do not like the use of _ for a dummy variable - better way?
        
        _, mask = pynemo_settings_editor.open_settings_dialog(Setup)
        bdy_msk = mask.data
        Setup.refresh()
        logger.info('Using GUI defined mask')
    else: # Try to read mask from file
        try:
            if (Setup.bool_settings['mask_file'] and 
                Setup.settings['mask_file'] is not None):
                mask = Mask_File(Setup.settings['bathy'], 
                                 Setup.settings['mask_file'])
                bdy_msk = mask.data
                logger.info('Using input mask file')
            elif Setup.bool_settings['mask_file']:
                logger.error('Mask file is not given')
                return
            else: # No mask file specified then use default 1px halo mask
                logger.warning('Using default mask with bathymetry!!!!')
                mask = Mask_File(Setup.settings['bathy'])
                mask.apply_border_mask(Constants.DEFAULT_MASK_PIXELS)
                bdy_msk = mask.data
        except Exception:
            logger.exception('Error reading mask from file')
            return
    
    if np.amin(bdy_msk) == 0: # Mask is not set, so set border to 1px
        logger.warning('Setting the mask with a 1 grid point border')
        QMessageBox.warning(None,'NRCT', 'Mask is not set, setting a 1 grid '+
                                         'point border mask')
        if (bdy_msk is not None and 1 < bdy_msk.shape[0] and 
            1 < bdy_msk.shape[1]):
            tmp = np.ones(bdy_msk.shape, dtype=bool)
            tmp[1:-1, 1:-1] = False
            bdy_msk[tmp] = -1
            
    return bdy_msk