diff --git a/.gitignore b/.gitignore
index d4b7d99896683ab8a6d3531ac8c7e24e75a74d5f..b9e571dfeac4502371741f0d3898316f157bc7b9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,11 @@ __pycache__/
 *.so
 *.nc
 *.xml
+
+# CMEMS credential file
+CMEMS_cred.py
+motu_config.ini
+
 # Distribution / packaging
 .Python
 build/
diff --git a/inputs/CMEMS.ncml b/inputs/CMEMS.ncml
new file mode 100644
index 0000000000000000000000000000000000000000..68414a4e4adbd5ac6ebacf99100253f6801cf86c
--- /dev/null
+++ b/inputs/CMEMS.ncml
@@ -0,0 +1,28 @@
+<ns0:netcdf xmlns:ns0="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2" title="NEMO aggregation">
+  <ns0:aggregation type="union">
+    <ns0:netcdf>
+      <ns0:aggregation dimName="time" name="temperature" type="joinExisting">
+        <ns0:scan location="file://Users/thopri/Projects/PyNEMO/inputs/" regExp=".*T\.nc$" />
+      </ns0:aggregation>
+    </ns0:netcdf>
+    <ns0:netcdf>
+      <ns0:aggregation dimName="time" name="zonal_velocity" type="joinExisting">
+        <ns0:scan location="file://Users/thopri/Projects/PyNEMO/inputs/" regExp=".*U\.nc$" />
+      </ns0:aggregation>
+    </ns0:netcdf>
+    <ns0:netcdf>
+      <ns0:aggregation dimName="time" name="meridian_velocity" type="joinExisting">
+        <ns0:scan location="file://Users/thopri/Projects/PyNEMO/inputs/" regExp=".*V\.nc$" />
+      </ns0:aggregation>
+    </ns0:netcdf>
+    <ns0:netcdf>
+      <ns0:aggregation dimName="time" name="sea_surface_height" type="joinExisting">
+        <ns0:scan location="file://Users/thopri/Projects/PyNEMO/inputs/" regExp=".*T\.nc$" />
+      </ns0:aggregation>
+    </ns0:netcdf>
+  </ns0:aggregation>
+  <ns0:variable name="thetao" orgName="thetao" />
+  <ns0:variable name="uo" orgName="uo" />
+  <ns0:variable name="vo" orgName="vo" />
+  <ns0:variable name="zos" orgName="zos" />
+</ns0:netcdf>
diff --git a/inputs/namelist_cmems.bdy b/inputs/namelist_cmems.bdy
new file mode 100644
index 0000000000000000000000000000000000000000..20508ebdd773f2431dce73001b13c5bbf7d8a5a4
--- /dev/null
+++ b/inputs/namelist_cmems.bdy
@@ -0,0 +1,140 @@
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+!! NEMO/OPA  : namelist for BDY generation tool
+!!            
+!!             User inputs for generating open boundary conditions
+!!             employed by the BDY module in NEMO. Boundary data
+!!             can be set up for v3.2 NEMO and above.
+!!            
+!!             More info here.....
+!!            
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+!------------------------------------------------------------------------------
+!   vertical coordinate
+!------------------------------------------------------------------------------
+   ln_zco      = .false.   !  z-coordinate - full    steps   (T/F)  
+   ln_zps      = .true.    !  z-coordinate - partial steps   (T/F)
+   ln_sco      = .false.   !  s- or hybrid z-s-coordinate    (T/F)
+   rn_hmin     =   -10     !  min depth of the ocean (>0) or 
+                           !  min number of ocean level (<0)
+
+!------------------------------------------------------------------------------
+!   s-coordinate or hybrid z-s-coordinate
+!------------------------------------------------------------------------------
+   rn_sbot_min =   10.     !  minimum depth of s-bottom surface (>0) (m)
+   rn_sbot_max = 7000.     !  maximum depth of s-bottom surface 
+                           !  (= ocean depth) (>0) (m)
+   ln_s_sigma  = .false.   !  hybrid s-sigma coordinates
+   rn_hc       =  150.0    !  critical depth with s-sigma
+
+!------------------------------------------------------------------------------
+!  grid information 
+!------------------------------------------------------------------------------
+   sn_src_hgr = '/Users/thopri/Projects/PyNEMO/inputs/subset_coordinates.nc'
+   sn_src_zgr = '/Users/thopri/Projects/PyNEMO/inputs/subset_coordinates.nc'
+   sn_dst_hgr = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/mesh_hgr_zps.nc'
+   sn_dst_zgr = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/mesh_zgr_zps.nc'
+   sn_src_msk = '/Users/thopri/Projects/PyNEMO/inputs/subset_bathy.nc'
+   sn_bathy   = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/NNA_R12_bathy_meter_bench.nc'
+
+!------------------------------------------------------------------------------
+!  I/O 
+!------------------------------------------------------------------------------
+   sn_src_dir = '/Users/thopri/Projects/PyNEMO/inputs/CMEMS.ncml' ! src_files/'
+   sn_dst_dir = '/Users/thopri/Projects/PyNEMO/outputs'
+
+   sn_fn      = 'NNA_R12'             ! prefix for output files
+   nn_fv      = -1e20                 !  set fill value for output files
+   nn_src_time_adj = 0                ! src time adjustment
+   sn_dst_metainfo = 'CMEMS example'
+   
+!------------------------------------------------------------------------------
+!  CMEMS Data Source Configuration
+!------------------------------------------------------------------------------
+   ln_use_cmems             = .true.
+   ln_download_cmems        = .false.
+   sn_cmems_dir             = '/Users/thopri/Projects/PyNEMO/inputs/' ! where to download CMEMS input files (static and variable)
+   ln_download_static       = .false.
+   ln_subset_static         = .false.
+   nn_num_retry             = 2 ! how many times to retry CMEMS download after non critical errors?
+!------------------------------------------------------------------------------
+!  CMEMS MOTU Configuration (for Boundary Data)
+!------------------------------------------------------------------------------
+   sn_motu_server           = 'http://nrt.cmems-du.eu/motu-web/Motu'
+   sn_cmems_config_template = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config_template.ini'
+   sn_cmems_config          = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config.ini'
+   sn_cmems_model           = 'GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS'
+   sn_cmems_product         = 'global-analysis-forecast-phy-001-024'
+   sn_dl_prefix             = 'subset'
+!------------------------------------------------------------------------------
+!  CMEMS FTP Configuration (for Static Files)
+!------------------------------------------------------------------------------
+   sn_ftp_server            = 'nrt.cmems-du.eu'
+   sn_static_dir            = '/Core/GLOBAL_ANALYSIS_FORECAST_PHY_001_024/global-analysis-forecast-phy-001-024-statics'
+   sn_static_filenames      = 'GLO-MFC_001_024_coordinates.nc GLO-MFC_001_024_mask_bathy.nc GLO-MFC_001_024_mdt.nc'
+   sn_cdo_loc               = '/opt/local/bin/cdo' ! location of cdo executable can be found by running "where cdo"
+!------------------------------------------------------------------------------
+!  CMEMS Extent Configuration
+!------------------------------------------------------------------------------
+   nn_latitude_min          = 40
+   nn_latitude_max          = 66
+   nn_longitude_min         = -22
+   nn_longitude_max         = 16
+   nn_depth_min             = 0.493
+   nn_depth_max             = 5727.918000000001
+
+!------------------------------------------------------------------------------
+!  unstructured open boundaries                         
+!------------------------------------------------------------------------------
+    ln_coords_file = .true.               !  =T : produce bdy coordinates files
+    cn_coords_file = 'coordinates.bdy.nc' !  name of bdy coordinates files 
+                                          !  (if ln_coords_file=.TRUE.)
+    ln_mask_file   = .false.              !  =T : read mask from file
+    cn_mask_file   = '/Users/thopri/Projects/PyNEMO/inputs/subset_mask.nc'            !  name of mask file
+                                          !  (if ln_mask_file=.TRUE.)
+    ln_dyn2d       = .false.              !  boundary conditions for 
+                                          !  barotropic fields
+    ln_dyn3d       = .false.              !  boundary conditions for 
+                                          !  baroclinic velocities
+    ln_tra         = .true.               !  boundary conditions for T and S
+    ln_ice         = .false.              !  ice boundary condition   
+    nn_rimwidth    = 9                    !  width of the relaxation zone
+
+!------------------------------------------------------------------------------
+!  unstructured open boundaries tidal parameters                        
+!------------------------------------------------------------------------------
+    ln_tide        = .false.              !  =T : produce bdy tidal conditions
+    sn_tide_model  = 'FES'                !  Name of tidal model (FES|TPXO)
+    clname(1)      = 'M2'                 !  constituent name
+    clname(2)      = 'S2'         
+    clname(3)      = 'K2'        
+    ln_trans       = .true.               !  interpolate transport rather than
+                                          !  velocities
+!------------------------------------------------------------------------------
+!  Time information
+!------------------------------------------------------------------------------
+    nn_year_000     = 2017        !  year start
+    nn_year_end     = 2017        !  year end
+    nn_month_000    = 01          !  month start (default = 1 is years>1)
+    nn_month_end    = 03       !  month end (default = 12 is years>1)
+    sn_dst_calendar = 'gregorian' !  output calendar format
+    nn_base_year    = 1960        !  base year for time counter
+	sn_tide_grid   = './src_data/tide/grid_tpxo7.2.nc'
+	sn_tide_h      = './src_data/tide/h_tpxo7.2.nc'
+	sn_tide_u      = './src_data/tide/u_tpxo7.2.nc'
+	
+!------------------------------------------------------------------------------
+!  Additional parameters
+!------------------------------------------------------------------------------
+    nn_wei  = 1                   !  smoothing filter weights 
+    rn_r0   = 0.041666666         !  decorrelation distance use in gauss
+                                  !  smoothing onto dst points. Need to 
+                                  !  make this a funct. of dlon
+    sn_history  = 'CMEMS test case'
+                                  !  history for netcdf file
+    ln_nemo3p4  = .true.          !  else presume v3.2 or v3.3
+    nn_alpha    = 0               !  Euler rotation angle
+    nn_beta     = 0               !  Euler rotation angle
+    nn_gamma    = 0               !  Euler rotation angle
+	rn_mask_max_depth = 100.0     !  Maximum depth to be ignored for the mask
+	rn_mask_shelfbreak_dist = 20000.0 !  Distance from the shelf break
diff --git a/pynemo/config/motu_config_template.ini b/pynemo/config/motu_config_template.ini
new file mode 100644
index 0000000000000000000000000000000000000000..4c279d05316f25c18e52ba2f0ef5eb685ace0f0c
--- /dev/null
+++ b/pynemo/config/motu_config_template.ini
@@ -0,0 +1,36 @@
+[Main]
+# Motu credentials  
+user=J90TBS4Q1UCT4CM7  
+pwd=Z8UKFNXA5OIZRXCK
+
+motu=DSGJJGWODV2F8TFU  
+service_id=S7L40ACQHANTAC6Y   
+product_id=4LC8ALR9T96XN08U  
+date_min=M49OAWI14XESWY1K  
+date_max=DBT3J4GH2O19Q75P
+latitude_min=3M2FJJE5JW1EN4C1  
+latitude_max=OXI2PXSTJG5PV6OW  
+longitude_min=DWUJ65Y233FQFW3F  
+longitude_max=K0UQJJDJOKX14DPS    
+depth_min=FNO0GZ1INQDATAXA    
+depth_max=EI6GB1FHTMCIPOZC  
+# Empty or non set means all variables  
+# 1 or more variables separated by a coma and identified by their standard name  
+variable=4Y4LMQLAKP10YFUE 
+# Accept relative or absolute path. The dot character "." is the current folder  
+out_dir=QFCN2P56ZQSA7YNK  
+out_name=YSLTB459ZW0P84GE  
+
+# Logging
+# https://docs.python.org/3/library/logging.html#logging-levels  
+# log_level=X {CRITICAL:50, ERROR:40, WARNING:30, INFO:20, DEBUG:10, TRACE:0}   
+log_level=20   
+
+# block_size block used to download file (integer expressing bytes) default=65535
+# block_size=65535  
+socket_timeout=120000
+
+# Http proxy to connect to Motu server
+# proxy_server=proxy.domain.net:8080  
+# proxy_user=john  
+# proxy_pwd=secret  
diff --git a/pynemo/nemo_bdy_dl_cmems.py b/pynemo/nemo_bdy_dl_cmems.py
new file mode 100644
index 0000000000000000000000000000000000000000..96be72a52029f710d3a61aec281ac65e5eafe78a
--- /dev/null
+++ b/pynemo/nemo_bdy_dl_cmems.py
@@ -0,0 +1,346 @@
+# -*- coding: utf-8 -*-
+"""
+Set of functions to download CMEMS files using FTP (for static mask data) and MOTU (for subsetted variable data).
+
+"""
+# import modules
+from subprocess import Popen, PIPE
+import xml.etree.ElementTree as ET
+import logging
+import ftplib
+import re
+import pandas as pd
+from datetime import datetime
+import glob
+import os
+#local imports
+from pynemo.utils import cmems_errors as errors
+
+logger = logging.getLogger('CMEMS Downloader')
+
+'''
+This function checks to see if the MOTU client is installed on the PyNEMO python environment. If it is not installed
+error code 1 is returned . If it is installed the version number of the installed client is returned as a string
+'''
+def chk_motu():
+    stdout,stderr = Popen(['motuclient','--version'], stdout=PIPE, stderr=PIPE,universal_newlines=True).communicate()
+    stdout = stdout.strip()
+    stderr = stderr.strip()
+
+    if len(stderr) > 0:
+        return stderr
+    
+    if not 'motuclient-python' in stdout:
+        return 1
+    else:
+        idx = stdout.find('v')
+        return stdout[idx:]
+
+
+'''
+CMEMS holds mask data for the model grids as an FTP download only, i.e. it can't be used with the MOTU subsetter.
+This code logs on to the FTP server and downloads the requested files. The config bdy file needs to provide location and 
+filenames to download. These can be found using an FTP server or the CMEMS web portal. The credintials for the 
+FTP connection (and MOTU client) are stored in a Credintials files called CMEMS_cred.py located in the utils folder.
+If the download is successful a zero status code is returned. Other wise an error message is returned in the form of a string
+'''
+def get_static(args):
+    try:
+        from pynemo.utils import CMEMS_cred
+    except ImportError:
+        logger.error('Unable to import CMEMS credentials, see Readme for instructions on adding to PyNEMO')
+        return 'Unable to import credentials file'
+    try:
+        ftp = ftplib.FTP(host=args['ftp_server'], user=CMEMS_cred.user, passwd=CMEMS_cred.pwd)
+    except ftplib.error_temp:
+        return 'temporary error in FTP connection, please try running PyNEMO again........'
+    except ftplib.error_perm as err:
+        return err
+    except ftplib.error_reply as err:
+        return err
+    except ftplib.error_proto as err:
+        return err
+    # TODO: provide better returns for the various FTP errors
+    # TODO: add try excepts to handle issues with files being missing etc.
+    # TODO: Check there is enough space to download as well.....
+    # TODO: Handle timeouts etc as well......
+    ftp.cwd(args['static_dir'])
+    filenames = args['static_filenames'].split(' ')
+    for f in filenames:
+        try:
+            ftp.retrbinary("RETR " + f, open(args['cmems_dir']+f, 'wb').write)
+        except ftplib.error_temp:
+            return 'temporary error in FTP download, please try running PyNEMO again........'
+        except ftplib.error_perm as err:
+            return err
+        except ftplib.error_reply as err:
+            return err
+        except ftplib.error_proto as err:
+            return err
+    ftp.quit()
+
+    return 0
+
+'''
+The FTP download results in the whole product grid being downloaded, so this needs to be subset to match the data downloads
+This functions uses CDO library to subset the netcdf file. Therefore CDO should be installed on the operating system.
+For each of the defined static files this function subsets based on the defined extent in the settings bdy file, 
+if 'Abort' is in the string returned by CDO then this is returned as an error string. 
+If Abort is not in the returned string this indicates success, then a zero status code is returned.
+'''
+def subset_static(args):
+    logger.info('subsetting static files now......')
+    filenames = args['static_filenames'].split(' ')
+    for f in filenames:
+        v = f.split('_')
+        v = args['dl_prefix']+'_'+v[-1]
+        cdo = args['cdo_loc']
+        sellon = 'sellonlatbox,'+str(args['longitude_min'])+','+str(args['longitude_max'])+','\
+                 +str(args['latitude_min'])+','+str(args['latitude_max'])
+        src_file = args['cmems_dir']+f
+        dst_file = args['cmems_dir']+v
+        stdout, stderr = Popen([cdo, sellon, src_file, dst_file],stdout=PIPE, stderr=PIPE,universal_newlines=True).communicate()
+        stdout = stdout.strip()
+        stderr = stderr.strip()
+
+        # For some reason CDO seems to pipe output to stderr so check stderr for results and pass stdout
+        # if it has length greater than zero i.e. not an empty string
+        if 'Abort' in stderr:
+            return stderr
+        if len(stdout) > 0:
+            return stdout
+    return 0
+
+'''
+Request CMEMS data in monthly, weekly, or daily intervals. Depending on the character passed to the function 'F'
+the function will split the requests into monthly, weekly or daily intervals. The function splits the requested period into 
+the relevant intervals and passes the interval into the request_cmems function below. If request_cmems returns a zero then
+the next interval is downloaded. If there is an error then the string containing the error is returned. Part of request_cmems
+is that it returns a 1 if the interval is too large.
+'''
+def MWD_request_cmems(args,date_min,date_max,F):
+    if F == 'M':
+        month_start = pd.date_range(date_min, date_max,
+                                freq='MS').strftime("%Y-%m-%d").tolist()
+        month_end = pd.date_range(date_min, date_max,
+                              freq='M').strftime("%Y-%m-%d").tolist()
+        for m in range(len(month_end)):
+            mnth_dl = request_cmems(args, month_start[m], month_end[m])
+            if mnth_dl == 0:
+                logger.info('CMEMS month request ' + str((m + 1)) + 'of' + (str(len(month_end))) + ' successful')
+            if type(mnth_dl) == str:
+                logger.error(
+                    'CMEMS month request ' + str((m + 1)) + 'of' + str((len(month_end))) + ' unsuccessful: Error Msg below')
+                logger.error(mnth_dl)
+                return mnth_dl
+            if mnth_dl == 1:
+                return 1
+
+    if F == 'W':
+        week_start = pd.date_range(date_min, date_max,
+                                   freq='W').strftime("%Y-%m-%d").tolist()
+        week_end = []
+        for w in range(len(week_start)):
+            week_end.append((datetime.strptime(week_start[w], '%Y-%m-%d')
+                             + pd.Timedelta(days=6)).strftime('%Y-%m-%d'))
+        for w in range(len(week_end)):
+            wk_dl = request_cmems(args, week_start[w], week_end[w])
+            if wk_dl == 0:
+                logger.info('CMEMS week request ' + str((w + 1)) + 'of' + str((len(week_end))) + ' successful')
+            if type(wk_dl) == str:
+                logger.error(
+                    'CMEMS week request ' + str((w + 1)) + 'of' + str((len(week_end))) + ' unsuccessful: Error Msg below')
+                logger.error(wk_dl)
+                return wk_dl
+            if wk_dl == 1:
+                return 1
+
+    if F == 'D':
+        days = pd.date_range(date_min, date_max,
+                             freq='D').strftime("%Y-%m-%d").tolist()
+        for d in range(len(days)):
+            dy_dl = request_cmems(args, days[d], days[d])
+            if dy_dl == 0:
+                logger.info('CMEMS day request ' + str((d + 1)) + 'of' + str(len(days)) + ' successful')
+            if dy_dl == 1:
+                logger.error('CMEMS day request still too big, please make domain smaller, or use less variables')
+                return
+            if type(dy_dl) == str:
+                logger.error('CMEMS day request ' + str((d + 1)) + 'of' + (
+                        str(len(days))) + ' unsuccessful: Error Msg below')
+                logger.error(dy_dl)
+                return dy_dl
+
+    if F not in ('M', 'W', 'D'):
+        time_int_err = 'incorrect string used to define time download interval please use M, W or D'
+        logger.error(time_int_err)
+        return time_int_err
+
+    return 0
+
+'''
+Main request cmems download function. First tries to import CMEMS credentials as per FTP function. This function reads
+the defined NCML file from the bdy file. This gives the number of variables to populate the MOTU download config file.
+For each variable, the name and grid type are pulled from the NCML and populated into a python dictionary.
+
+For each item in the dictionary the relevent request is populated in the CMEMS config file. The first request has
+ a size flag applied and a xml file is downloaded containing details of the request. 
+ If the request is valid a field in the XML is set to OK and then the request is repeated with the size flag removed
+resulting in the download of the relevent netcdf file. The console information is parsed to check for errors 
+and for confirmation of the success of the download. If there are errors the error string is returned otherwise a
+success message is written to the log file. If the request is too big than a 1 error code is returned. 
+Otherwise if all requests are successful then a zero status code is returned.
+'''
+def request_cmems(args, date_min, date_max):
+    try:
+        from pynemo.utils import CMEMS_cred
+    except ImportError:
+        logger.error('Unable to import CMEMS credentials, see Readme for instructions on adding to PyNEMO')
+        return 'Unable to import credentials file'
+
+    xml = args['src_dir']
+    root = ET.parse(xml).getroot()
+    num_var = len(root[0])
+    logger.info('number of variables requested is '+str(num_var))
+    grids = {}
+    locs = {}
+
+    for n in range(num_var):
+        F = root[0][n][0][0].attrib
+        var_name = root[n+1].attrib['name']
+        Type = root[0][n][0].attrib
+        logger.info('Variable '+ str(n+1)+' is '+Type['name']+' (Variable name: '+var_name+')')
+        r = re.findall('([A-Z])', F['regExp'])
+        r = ''.join(r)
+        logger.info('It is on the '+str(r)+' grid')
+
+        if r in grids:
+            grids[r].append(var_name)
+        else:
+            grids[r] = [var_name]
+        if r in locs:
+            pass
+        else:
+            locs[r] = F['location'][6:]
+
+    for key in grids:
+        with open(args['cmems_config_template'], 'r') as file:
+            filedata = file.read()
+            
+            filedata = filedata.replace('J90TBS4Q1UCT4CM7', CMEMS_cred.user)
+            filedata = filedata.replace('Z8UKFNXA5OIZRXCK', CMEMS_cred.pwd)
+            filedata = filedata.replace('DSGJJGWODV2F8TFU', args['motu_server'])
+            filedata = filedata.replace('S7L40ACQHANTAC6Y', args['cmems_model'])
+            filedata = filedata.replace('4LC8ALR9T96XN08U', args['cmems_product'])
+            filedata = filedata.replace('M49OAWI14XESWY1K', date_min)
+            filedata = filedata.replace('DBT3J4GH2O19Q75P', date_max)
+            filedata = filedata.replace('3M2FJJE5JW1EN4C1', str(args['latitude_min']))
+            filedata = filedata.replace('OXI2PXSTJG5PV6OW', str(args['latitude_max']))
+            filedata = filedata.replace('DWUJ65Y233FQFW3F', str(args['longitude_min']))
+            filedata = filedata.replace('K0UQJJDJOKX14DPS', str(args['longitude_max']))
+            filedata = filedata.replace('FNO0GZ1INQDATAXA', str(args['depth_min']))
+            filedata = filedata.replace('EI6GB1FHTMCIPOZC', str(args['depth_max']))
+            filedata = filedata.replace('4Y4LMQLAKP10YFUE', ','.join(grids[key]))
+            filedata = filedata.replace('QFCN2P56ZQSA7YNK', locs[key])
+            filedata = filedata.replace('YSLTB459ZW0P84GE', args['dl_prefix']+'_'+str(date_min)+'_'+str(date_max)+'_'+str(key)+'.nc')
+    
+        with open(args['cmems_config'], 'w') as file:
+            file.write(filedata)
+
+        stdout,stderr = Popen(['motuclient', '--size','--config-file', args['cmems_config']], stdout=PIPE, stderr=PIPE, universal_newlines=True).communicate()
+        stdout = stdout.strip()
+        stderr = stderr.strip()
+        logger.info('checking size of request for variables '+' '.join(grids[key]))
+
+        if 'ERROR' in stdout:
+            idx = stdout.find('ERROR')
+            return stdout[idx-1:-1]
+
+        if 'Done' in stdout:
+            logger.info('download of request xml file for variable ' + ' '.join(grids[key]) + ' successful')
+
+        if len(stderr) > 0:
+            return stderr
+
+        xml = locs[key]+args['dl_prefix']+'_'+str(date_min)+'_'+str(date_max)+'_'+str(key)+ '.xml'
+        try:
+            root = ET.parse(xml).getroot()
+        except ET.ParseError:
+            return 'Parse Error in XML file, This generally occurs when CMEMS service is down and returns an unexpected XML.'
+
+        logger.info('size of request ' + root.attrib['size'])
+
+        if 'OK' in root.attrib['msg']:
+            logger.info('request valid, downloading now......')
+            stdout,stderr = Popen(['motuclient', '--config-file', args['cmems_config']], stdout=PIPE, stderr=PIPE, universal_newlines=True).communicate()
+            stdout = stdout.strip()
+            stderr = stderr.strip()
+
+            if 'ERROR' in stdout:
+                idx = stdout.find('ERROR')
+                return stdout[idx:-1]
+
+            if 'Done' in stdout:
+                logger.info('downloading of variables '+' '.join(grids[key])+' successful')
+
+            if len(stderr) > 0:
+                return stderr
+
+        elif 'too big' in root.attrib['msg']:
+            return 1
+        else:
+            return 'unable to determine if size request is valid (too big or not)'
+
+    return 0
+
+'''
+Function to check errors from both FTP or MOTU. This checks a python file containing dictionaries for different error types,
+(currently FTP and MOTU) with the error from the download. If the error code is present as a key, then the system will retry
+or exit. This depends which error dictionary the type is in. i.e. restart errors result in restart and critical errors result
+in sys exit. The number of restarts is defined in the BDY file and once expired results in sys exit. Finally if the error is 
+not in the type dictionaries the system will exit. Unlogged errors can be added so that a restart or exit occurs when they occur.
+'''
+def err_parse(error, err_type):
+
+    if err_type == 'FTP':
+        for key in errors.FTP_retry:
+            if key in error:
+                logger.info('retrying FTP download....')
+                return 0
+        for key in errors.FTP_critical:
+            if key in error:
+                logger.info('critical FTP error, stopping')
+                return 1
+        logger.critical('unlogged FTP error, stopping')
+        logger.critical(error)
+        return 2
+
+    if err_type == 'MOTU':
+        for key in errors.MOTU_retry:
+            if key in error:
+                logger.info('non critical error')
+                logger.info('restarting download....')
+                return 0
+        for key in errors.MOTU_critical:
+            if key in error:
+                logger.critical('critical error found: please see below')
+                logger.critical(error)
+                return 1
+        logger.critical('unlogged error: please see below')
+        logger.critical(error)
+        return 2
+
+'''
+Function to clean up the download process, this is called before every sys exit and at the end of the download function
+At the moment it only removes the xml size files that are no longer required. Other functionality maybe added in the future
+'''
+def clean_up(settings):
+    # remove size check XML files that are no longer needed.
+    try:
+        for f in glob.glob(settings['cmems_dir'] + "*.xml"):
+            os.remove(f)
+    except OSError:
+        logger.info('no xml files found to remove')
+        return
+    logger.info('removed size check xml files successfully')
+    return
diff --git a/pynemo/nemo_bdy_extr_tm3.py b/pynemo/nemo_bdy_extr_tm3.py
index 6bbb35a37ec1d528374d7db7346edcff293530c0..9c457b7e7c8bdcee9b261abde94214a6a8eec616 100644
--- a/pynemo/nemo_bdy_extr_tm3.py
+++ b/pynemo/nemo_bdy_extr_tm3.py
@@ -74,6 +74,14 @@ class Extract:
         self.g_type = grd
         self.settings = setup
         self.key_vec = False
+        self.t_mask = None
+        self.u_mask = None
+        self.v_mask = None
+        self.dist_wei = None
+        self.dist_fac = None
+        self.tmp_valid = None
+        self.data_ind = None
+        self.nan_ind = None
         
         # TODO: Why are we deepcopying the coordinates???
         
@@ -419,11 +427,15 @@ class Extract:
         """
         self.logger.info('extract_month function called')
         # Check year entry exists in d_bdy, if not create it.
+        #for v in range(self.nvar):
+        #    try:
+        #        self.d_bdy[self.var_nam[v]][year]
+        #    except KeyError:
+        #        self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
+
+        # flush previous months data......
         for v in range(self.nvar):
-            try:
-                self.d_bdy[self.var_nam[v]][year]
-            except KeyError:        
-                self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
+            self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
         
         i_run = np.arange(self.sc_ind['imin'], self.sc_ind['imax']) 
         j_run = np.arange(self.sc_ind['jmin'], self.sc_ind['jmax'])
@@ -468,13 +480,22 @@ class Extract:
 
         if self.first:
             nc_3 = GetFile(self.settings['src_msk'])
-            varid_3 = nc_3['tmask']
-            t_mask = varid_3[:1, :sc_z_len, j_run, i_run]
+            # TODO: sort generic mask variable name
+            try:
+                varid_3 = nc_3['tmask']
+                self.t_mask = varid_3[:1, :sc_z_len, j_run, i_run]
+            except:
+                varid_3 = nc_3['mask']
+                varid_3 = np.expand_dims(varid_3, axis=0)
+                self.t_mask = varid_3[:1, :sc_z_len, np.min(j_run):np.max(j_run) + 1, np.min(i_run):np.max(i_run) + 1]
+            # TODO: Sort out issue with j_run and i_run not broadcasting to varid_3
             if self.key_vec:
-                varid_3 = nc_3['umask']
-                u_mask = varid_3[:1, :sc_z_len, j_run, extended_i]
-                varid_3 = nc_3['vmask']
-                v_mask = varid_3[:1, :sc_z_len, extended_j, i_run]
+                #varid_3 = nc_3['umask']
+                varid_3 = nc_3['mask']
+                self.u_mask = varid_3[:1, :sc_z_len, j_run, extended_i]
+                #varid_3 = nc_3['vmask']
+                varid_3 = nc_3['mask']
+                self.v_mask = varid_3[:1, :sc_z_len, extended_j, i_run]
             nc_3.close()
 
         # Identify missing values and scale factors if defined
@@ -538,8 +559,8 @@ class Extract:
                 # Average vector vars onto T-grid
                 if self.key_vec:
                     # First make sure land points have a zero val
-                    sc_alt_arr[0] *= u_mask
-                    sc_alt_arr[1] *= v_mask
+                    sc_alt_arr[0] *= self.u_mask
+                    sc_alt_arr[1] *= self.v_mask
                     # Average from to T-grid assuming C-grid stagger
                     sc_array[0] = 0.5 * (sc_alt_arr[0][:,:,:,:-1] + 
                                          sc_alt_arr[0][:,:,:,1:])
@@ -551,7 +572,7 @@ class Extract:
                 # Note using isnan/sum is relatively fast, but less than 
                 # bottleneck external lib
                 self.logger.info('SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
-                sc_array[0][t_mask == 0] = np.NaN
+                sc_array[0][self.t_mask == 0] = np.NaN
                 self.logger.info( 'SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
                 if not np.isnan(np.sum(meta_data[vn]['sf'])):
                     sc_array[0] *= meta_data[vn]['sf']
@@ -559,7 +580,7 @@ class Extract:
                     sc_array[0] += meta_data[vn]['os']
 
                 if self.key_vec:
-                    sc_array[1][t_mask == 0] = np.NaN
+                    sc_array[1][self.t_mask == 0] = np.NaN
                     if not np.isnan(np.sum(meta_data[vn + 1]['sf'])):
                         sc_array[1] *= meta_data[vn + 1]['sf']
                     if not np.isnan(np.sum(meta_data[vn + 1]['os'])):
@@ -602,7 +623,7 @@ class Extract:
             # source data to dest bdy pts. Only need do once.
             if self.first:
                 # identify valid pts
-                data_ind = np.invert(np.isnan(sc_bdy[0,:,:,:]))
+                self.data_ind = np.invert(np.isnan(sc_bdy[0,:,:,:]))
                 # dist_tot is currently 2D so extend along depth
                 # axis to allow single array calc later, also remove
                 # any invalid pts using our eldritch data_ind
@@ -610,10 +631,10 @@ class Extract:
                 self.dist_tot = (np.repeat(self.dist_tot, sc_z_len).reshape(
                             self.dist_tot.shape[0],
                             self.dist_tot.shape[1], sc_z_len)).transpose(2,0,1)
-                self.dist_tot *= data_ind
+                self.dist_tot *= self.data_ind
                 self.logger.info('DIST TOT ZEROS %s', np.sum(self.dist_tot == 0))
 
-                self.logger.info('DIST IND ZEROS %s', np.sum(data_ind == 0))
+                self.logger.info('DIST IND ZEROS %s', np.sum(self.data_ind == 0))
 
                 # Identify problem pts due to grid discontinuities 
                 # using dists >  lat
@@ -625,22 +646,22 @@ class Extract:
 
                 # Calculate guassian weighting with correlation dist
                 r0 = self.settings['r0']
-                dist_wei = (1/(r0 * np.power(2 * np.pi, 0.5)))*(np.exp( -0.5 *np.power(self.dist_tot / r0, 2)))
+                self.dist_wei = (1/(r0 * np.power(2 * np.pi, 0.5)))*(np.exp( -0.5 *np.power(self.dist_tot / r0, 2)))
                 # Calculate sum of weightings
-                dist_fac = np.sum(dist_wei * data_ind, 2)
+                self.dist_fac = np.sum(self.dist_wei * self.data_ind, 2)
                 # identify loc where all sc pts are land
-                nan_ind = np.sum(data_ind, 2) == 0
-                self.logger.info('NAN IND : %s ', np.sum(nan_ind))
+                self.nan_ind = np.sum(self.data_ind, 2) == 0
+                self.logger.info('NAN IND : %s ', np.sum(self.nan_ind))
                 
                 # Calc max zlevel to which data available on sc grid
-                data_ind = np.sum(nan_ind == 0, 0) - 1
+                self.data_ind = np.sum(self.nan_ind == 0, 0) - 1
                 # set land val to level 1 otherwise indexing problems
                 # may occur- should not affect later results because
                 # land is masked in weightings array
-                data_ind[data_ind == -1] = 0
+                self.data_ind[self.data_ind == -1] = 0
                 # transform depth levels at each bdy pt to vector
                 # index that can be used to speed up calcs
-                data_ind += np.arange(0, sc_z_len * self.num_bdy, sc_z_len)
+                self.data_ind += np.arange(0, sc_z_len * self.num_bdy, sc_z_len)
 
                 # ? Attribute only used on first run so clear. 
                 del self.dist_tot
@@ -648,8 +669,8 @@ class Extract:
             # weighted averaged onto new horizontal grid
             for vn in range(self.nvar):
                 self.logger.info(' sc_bdy %s %s', np.nanmin(sc_bdy), np.nanmax(sc_bdy))
-                dst_bdy = (np.nansum(sc_bdy[vn,:,:,:] * dist_wei, 2) /
-                           dist_fac)
+                dst_bdy = (np.nansum(sc_bdy[vn,:,:,:] * self.dist_wei, 2) /
+                           self.dist_fac)
                 self.logger.info(' dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
                 # Quick check to see we have not got bad values
                 if np.sum(dst_bdy == np.inf) > 0:
@@ -658,8 +679,8 @@ class Extract:
                 # weight vector array and rotate onto dest grid
                 if self.key_vec:
                     # [:,:,:,vn+1]
-                    dst_bdy_2 = (np.nansum(sc_bdy[vn+1,:,:,:] * dist_wei, 2) /
-                                 dist_fac)
+                    dst_bdy_2 = (np.nansum(sc_bdy[vn+1,:,:,:] * self.dist_wei, 2) /
+                                 self.dist_fac)
                     self.logger.info('time to to rot and rep ')
                     self.logger.info('%s %s',  np.nanmin(dst_bdy), np.nanmax(dst_bdy))
                     self.logger.info( '%s en to %s %s' , self.rot_str,self.rot_dir, dst_bdy.shape)
@@ -668,18 +689,18 @@ class Extract:
                     self.logger.info('%s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
                 # Apply 1-2-1 filter along bdy pts using NN ind self.id_121
                 if self.first:
-                    tmp_valid = np.invert(np.isnan(
+                    self.tmp_valid = np.invert(np.isnan(
                                             dst_bdy.flatten('F')[self.id_121]))
                     # Finished first run operations
                     self.first = False
 
                 dst_bdy = (np.nansum(dst_bdy.flatten('F')[self.id_121] * 
                            self.tmp_filt, 2) / np.sum(self.tmp_filt *
-                           tmp_valid, 2))
+                           self.tmp_valid, 2))
                 # Set land pts to zero
 
-                self.logger.info(' pre dst_bdy[nan_ind] %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
-                dst_bdy[nan_ind] = 0
+                self.logger.info(' pre dst_bdy[self.nan_ind] %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                dst_bdy[self.nan_ind] = 0
                 self.logger.info(' post dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
                 # Remove any data on dst grid that is in land
                 dst_bdy[:,np.isnan(self.bdy_z)] = 0
@@ -690,7 +711,7 @@ class Extract:
                     # If all else fails fill down using deepest pt
                     dst_bdy = dst_bdy.flatten('F')
                     dst_bdy += ((dst_bdy == 0) *
-                                dst_bdy[data_ind].repeat(sc_z_len))
+                                dst_bdy[self.data_ind].repeat(sc_z_len))
                     # Weighted averaged on new vertical grid
                     dst_bdy = (dst_bdy[self.z_ind[:,0]] * self.z_dist[:,0] +
                                dst_bdy[self.z_ind[:,1]] * self.z_dist[:,1])
@@ -819,9 +840,9 @@ class Extract:
         # multiple of 86400 | data are annual means
         if del_t >= 86400.:
             for v in self.var_nam:    
-                intfn = interp1d(time_counter, self.d_bdy[v][1979]['data'][:,:,:], axis=0,
+                intfn = interp1d(time_counter, self.d_bdy[v][year]['data'][:,:,:], axis=0,
                                                                  bounds_error=True)
-                self.d_bdy[v][1979]['data'] = intfn(np.arange(time_000, time_end, 86400))
+                self.d_bdy[v][year]['data'] = intfn(np.arange(time_000, time_end, 86400))
         else:
             for v in self.var_nam: 
                 for t in range(dstep):
@@ -863,7 +884,7 @@ class Extract:
                                   unit_origin,
                                   self.settings['fv'],
                                   self.settings['dst_calendar'],
-                                  self.g_type.upper())
+                                  self.g_type.upper(),self.var_nam)
         
         self.logger.info('Writing out BDY data to: %s', f_out)
         
@@ -873,12 +894,12 @@ class Extract:
         for v in self.var_nam:
             if self.settings['dyn2d']: # Calculate depth averaged velocity
                 tile_dz = np.tile(self.bdy_dz, [len(self.time_counter), 1, 1, 1])
-                tmp_var = np.reshape(self.d_bdy[v][1979]['data'][:,:,:], tile_dz.shape)
+                tmp_var = np.reshape(self.d_bdy[v][year]['data'][:,:,:], tile_dz.shape)
                 tmp_var = np.nansum(tmp_var * tile_dz, 2) /np.nansum(tile_dz, 2)
             else: # Replace NaNs with specified fill value
-                tmp_var = np.where(np.isnan(self.d_bdy[v][1979]['data'][:,:,:]),
+                tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:,:,:]),
                                             self.settings['fv'], 
-                                            self.d_bdy[v][1979]['data'][:,:,:])
+                                            self.d_bdy[v][year]['data'][:,:,:])
                
             # Write variable to file
             
diff --git a/pynemo/nemo_bdy_ncgen.py b/pynemo/nemo_bdy_ncgen.py
index af21d1d32aac497737261c1cb9b079119270c0cc..96e1e95c9c66c43e8bb24e34b4ded005ade37d69 100644
--- a/pynemo/nemo_bdy_ncgen.py
+++ b/pynemo/nemo_bdy_ncgen.py
@@ -10,231 +10,458 @@ from netCDF4 import Dataset
 import datetime
 import logging
 
-def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd):
+def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd, var_nam):
     """ This method creates a template of bdy netcdf files. A common for
     T, I, U, V, E grid types.
     """
-
-    gridNames = ['T', 'I', 'U', 'V', 'E', 'Z'] # All possible grids
-
-    # Dimension Lengths
-    xb_len = N
-    yb_len = 1
-    x_len = I
-    y_len = J
-    depth_len = K
-
-    # Enter define mode
-    ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
-
-    #define dimensions
-    if grd in gridNames and grd != 'Z': # i.e grid NOT barotropic (Z) 
-        dimztID = ncid.createDimension('z', depth_len)
-    else:
-        logging.error('Grid tpye not known')
-    dimxbID = ncid.createDimension('xb', xb_len)
-    dimybID = ncid.createDimension('yb', yb_len)
-    dimxID = ncid.createDimension('x', x_len)
-    dimyID = ncid.createDimension('y', y_len)
-    dimtcID = ncid.createDimension('time_counter', None)
-
-    #define variable
-    vartcID = ncid.createVariable('time_counter', 'f4', ('time_counter', ))
-    varlonID = ncid.createVariable('nav_lon', 'f4', ('y', 'x', ))
-    varlatID = ncid.createVariable('nav_lat', 'f4', ('y', 'x', ))
-
-    if grd in ['E']:
-        varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
-        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
-        varN1pID = ncid.createVariable('N1p', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-        varN3nID = ncid.createVariable('N3n', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-        varN5sID = ncid.createVariable('N5s', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-    elif grd in ['T', 'I']:
-        varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
-        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
-        vartmpID = ncid.createVariable('votemper', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-        varsalID = ncid.createVariable('vosaline', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-        if grd == 'I':
-            varildID = ncid.createVariable('ileadfra', 'f4', ('time_counter', 'yb', 'xb',),
-                                           fill_value=fv)
-            variicID = ncid.createVariable('iicethic', 'f4', ('time_counter', 'yb', 'xb',),
-                                           fill_value=fv)
-            varisnID = ncid.createVariable('isnowthi', 'f4', ('time_counter', 'yb', 'xb',),
-                                           fill_value=fv)
-    elif grd == 'U':
-        varztID = ncid.createVariable('depthu', 'f4', ('z', 'yb', 'xb', ), fill_value=fv)
-        varbtuID = ncid.createVariable('vobtcrtx', 'f4', ('time_counter', 'yb', 'xb', ),
-                                       fill_value=fv)
-        vartouID = ncid.createVariable('vozocrtx', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
-                                       fill_value=fv)
-    elif grd == 'V':
-        varztID = ncid.createVariable('depthv', 'f4', ('z', 'yb', 'xb', ))
-        varbtvID = ncid.createVariable('vobtcrty', 'f4', ('time_counter', 'yb', 'xb', ),
-                                       fill_value=fv)
-        vartovID = ncid.createVariable('vomecrty', 'f4', ('time_counter', 'z', 'yb', 'xb',),
-                                       fill_value=fv)
-    elif grd == 'Z':
-        varsshID = ncid.createVariable('sossheig', 'f4', ('time_counter', 'yb', 'xb', ),
-                                       fill_value=fv)
-        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
-    else:
-        logging.error("Unknow Grid input")
-
-
-    varnbiID = ncid.createVariable('nbidta', 'i4', ('yb', 'xb', ))
-    varnbjID = ncid.createVariable('nbjdta', 'i4', ('yb', 'xb', ))
-    varnbrID = ncid.createVariable('nbrdta', 'i4', ('yb', 'xb', ))
-    #Global Attributes
-    ncid.file_name = filename
-    ncid.creation_date = str(datetime.datetime.now())
-    ncid.rim_width = rw
-    ncid.history = h
-    ncid.institution = 'National Oceanography Centre, Livepool, U.K.'
-
-    #Time axis attributes
-    vartcID.axis = 'T'
-    vartcID.standard_name = 'time'
-    vartcID.units = 'seconds since '+orig
-    vartcID.title = 'Time'
-    vartcID.long_name = 'Time axis'
-    vartcID.time_origin = orig
-    vartcID.calendar = calendar
-
-    #Longitude axis attributes
-    varlonID.axis = 'Longitude'
-    varlonID.short_name = 'nav_lon'
-    varlonID.units = 'degrees_east'
-    varlonID.long_name = 'Longitude'
-
-    #Latitude axis attributes
-    varlatID.axis = 'Latitude'
-    varlatID.short_name = 'nav_lat'
-    varlatID.units = 'degrees_east'
-    varlatID.long_name = 'Latitude'
-
-    #nbidta attributes
-    varnbiID.short_name = 'nbidta'
-    varnbiID.units = 'unitless'
-    varnbiID.long_name = 'Bdy i indices'
-
-    #nbjdta attributes
-    varnbjID.short_name = 'nbjdta'
-    varnbjID.units = 'unitless'
-    varnbjID.long_name = 'Bdy j indices'
-
-    #nbrdta attributes
-    varnbrID.short_name = 'nbrdta'
-    varnbrID.units = 'unitless'
-    varnbrID.long_name = 'Bdy discrete distance'
-    if grd == 'E':
-        varztID.axis = 'Depth'
-        varztID.short_name = 'deptht'
-        varztID.units = 'm'
-        varztID.long_name = 'Depth'
-
-        varmskID.short_name = 'bdy_msk'
-        varmskID.units = 'unitless'
-        varmskID.long_name = 'Structured boundary mask'
-
-        varN1pID.units = 'mmol/m^3'
-        varN1pID.short_name = 'N1p'
-        varN1pID.long_name = 'Phosphate'
-        varN1pID.grid = 'bdyT'
-
-        varN3nID.units = 'mmol/m^3'
-        varN3nID.short_name = 'N3n'
-        varN3nID.long_name = 'Nitrate'
-        varN3nID.grid = 'bdyT'
-
-        varN5sID.units = 'mmol/m^3'
-        varN5sID.short_name = 'N5s'
-        varN5sID.long_name = 'Silicate'
-        varN5sID.grid = 'bdyT'
-
-    if grd in ['T', 'I']:
-        varztID.axis = 'Depth'
-        varztID.short_name = 'deptht'
-        varztID.units = 'm'
-        varztID.long_name = 'Depth'
-
-        varmskID.short_name = 'bdy_msk'
-        varmskID.units = 'unitless'
-        varmskID.long_name = 'Structured boundary mask'
-
-        vartmpID.units = 'C'
-        vartmpID.short_name = 'votemper'
-        vartmpID.long_name = 'Temperature'
-        vartmpID.grid = 'bdyT'
-
-        varsalID.units = 'PSU'
-        varsalID.short_name = 'vosaline'
-        varsalID.long_name = 'Salinity'
-        varsalID.grid = 'bdyT'
-
-        if grd == 'I':
-            varildID.units = '%'
-            varildID.short_name = 'ildsconc'
-            varildID.long_name = 'Ice lead fraction'
-            varildID.grid = 'bdyT'
-
-            variicID.units = 'm'
-            variicID.short_name = 'iicethic'
-            variicID.long_name = 'Ice thickness'
-            variicID.grid = 'bdyT'
-
-            varisnID.units = 'm'
-            varisnID.short_name = 'isnowthi'
-            varisnID.long_name = 'Snow thickness'
-            varisnID.grid = 'bdyT'
-    elif grd == 'U':
-        varztID.axis = 'Depth'
-        varztID.short_name = 'depthu'
-        varztID.units = 'm'
-        varztID.long_name = 'Depth'
-
-        varbtuID.units = 'm/s'
-        varbtuID.short_name = 'vobtcrtx'
-        varbtuID.long_name = 'Thickness-weighted depth-averaged zonal Current'
-        varbtuID.grid = 'bdyU'
-
-        vartouID.units = 'm/s'
-        vartouID.short_name = 'vozocrtx'
-        vartouID.long_name = 'Zonal Current'
-        vartouID.grid = 'bdyU'
-
-    elif grd == 'V':
-        varztID.axis = 'Depth'
-        varztID.short_name = 'depthv'
-        varztID.units = 'm'
-        varztID.long_name = 'Depth'
-
-        varbtvID.units = 'm/s'
-        varbtvID.short_name = 'vobtcrty'
-        varbtvID.long_name = 'Thickness-weighted depth-averaged meridional Current'
-        varbtvID.grid = 'bdyV'
-
-        vartovID.units = 'm/s'
-        vartovID.short_name = 'vomecrty'
-        vartovID.long_name = 'Meridional Current'
-        vartovID.grid = 'bdyV'
-
-    elif grd == 'Z':
-        varsshID.units = 'm'
-        varsshID.short_name = 'sossheig'
-        varsshID.long_name = 'Sea Surface Height'
-        varsshID.grid = 'bdyT'
-
-        varmskID.short_name = 'bdy_msk'
-        varmskID.units = 'unitless'
-        varmskID.long_name = 'Structured boundary mask'
-
-    else:
-        logging.error('Unknown Grid')
-
-    ncid.close()
+    if var_nam == 'tide_data' or var_nam[0] == 'votemper' or var_nam[0] == 'vosaline':
+        logging.info('benchmark variables identified, using original variable names.......')
+        gridNames = ['T', 'I', 'U', 'V', 'E', 'Z'] # All possible grids
+
+        # Dimension Lengths
+        xb_len = N
+        yb_len = 1
+        x_len = I
+        y_len = J
+        depth_len = K
+
+        # Enter define mode
+        ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
+
+        #define dimensions
+        if grd in gridNames and grd != 'Z': # i.e grid NOT barotropic (Z)
+            dimztID = ncid.createDimension('z', depth_len)
+        else:
+            logging.error('Grid type not known')
+        dimxbID = ncid.createDimension('xb', xb_len)
+        dimybID = ncid.createDimension('yb', yb_len)
+        dimxID = ncid.createDimension('x', x_len)
+        dimyID = ncid.createDimension('y', y_len)
+        dimtcID = ncid.createDimension('time_counter', None)
+
+        #define variable
+        vartcID = ncid.createVariable('time_counter', 'f4', ('time_counter', ))
+        varlonID = ncid.createVariable('nav_lon', 'f4', ('y', 'x', ))
+        varlatID = ncid.createVariable('nav_lat', 'f4', ('y', 'x', ))
+
+        if grd in ['E']:
+            varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+            varN1pID = ncid.createVariable('N1p', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varN3nID = ncid.createVariable('N3n', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varN5sID = ncid.createVariable('N5s', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+        elif grd in ['T', 'I']:
+            varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+            # TODO: generic variable name assignment
+            #vartmpID = ncid.createVariable('thetao', 'f4', ('time_counter', 'z', 'yb', 'xb', ),fill_value=fv)
+            vartmpID = ncid.createVariable('votemper', 'f4', ('time_counter', 'z', 'yb', 'xb', ), fill_value=fv)
+            varsalID = ncid.createVariable('vosaline', 'f4', ('time_counter', 'z', 'yb', 'xb', ), fill_value=fv)
+            #varsalID = ncid.createVariable('so', 'f4', ('time_counter', 'z', 'yb', 'xb', ), fill_value=fv)
+            if grd == 'I':
+                varildID = ncid.createVariable('ileadfra', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+                variicID = ncid.createVariable('iicethic', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+                varisnID = ncid.createVariable('isnowthi', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+        elif grd == 'U':
+            varztID = ncid.createVariable('depthu', 'f4', ('z', 'yb', 'xb', ), fill_value=fv)
+            varbtuID = ncid.createVariable('vobtcrtx', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            vartouID = ncid.createVariable('vozocrtx', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+        elif grd == 'V':
+            varztID = ncid.createVariable('depthv', 'f4', ('z', 'yb', 'xb', ))
+            varbtvID = ncid.createVariable('vobtcrty', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            vartovID = ncid.createVariable('vomecrty', 'f4', ('time_counter', 'z', 'yb', 'xb',),
+                                           fill_value=fv)
+        elif grd == 'Z':
+            varsshID = ncid.createVariable('sossheig', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+        else:
+            logging.error("Unknow Grid input")
+
+
+        varnbiID = ncid.createVariable('nbidta', 'i4', ('yb', 'xb', ))
+        varnbjID = ncid.createVariable('nbjdta', 'i4', ('yb', 'xb', ))
+        varnbrID = ncid.createVariable('nbrdta', 'i4', ('yb', 'xb', ))
+        #Global Attributes
+        ncid.file_name = filename
+        ncid.creation_date = str(datetime.datetime.now())
+        ncid.rim_width = rw
+        ncid.history = h
+        ncid.institution = 'National Oceanography Centre, Liverpool, U.K.'
+
+        #Time axis attributes
+        vartcID.axis = 'T'
+        vartcID.standard_name = 'time'
+        vartcID.units = 'seconds since '+orig
+        vartcID.title = 'Time'
+        vartcID.long_name = 'Time axis'
+        vartcID.time_origin = orig
+        vartcID.calendar = calendar
+
+        #Longitude axis attributes
+        varlonID.axis = 'Longitude'
+        varlonID.short_name = 'nav_lon'
+        varlonID.units = 'degrees_east'
+        varlonID.long_name = 'Longitude'
+
+        #Latitude axis attributes
+        varlatID.axis = 'Latitude'
+        varlatID.short_name = 'nav_lat'
+        varlatID.units = 'degrees_north'
+        varlatID.long_name = 'Latitude'
+
+        #nbidta attributes
+        varnbiID.short_name = 'nbidta'
+        varnbiID.units = 'unitless'
+        varnbiID.long_name = 'Bdy i indices'
+
+        #nbjdta attributes
+        varnbjID.short_name = 'nbjdta'
+        varnbjID.units = 'unitless'
+        varnbjID.long_name = 'Bdy j indices'
+
+        #nbrdta attributes
+        varnbrID.short_name = 'nbrdta'
+        varnbrID.units = 'unitless'
+        varnbrID.long_name = 'Bdy discrete distance'
+        if grd == 'E':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'deptht'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+            varN1pID.units = 'mmol/m^3'
+            varN1pID.short_name = 'N1p'
+            varN1pID.long_name = 'Phosphate'
+            varN1pID.grid = 'bdyT'
+
+            varN3nID.units = 'mmol/m^3'
+            varN3nID.short_name = 'N3n'
+            varN3nID.long_name = 'Nitrate'
+            varN3nID.grid = 'bdyT'
+
+            varN5sID.units = 'mmol/m^3'
+            varN5sID.short_name = 'N5s'
+            varN5sID.long_name = 'Silicate'
+            varN5sID.grid = 'bdyT'
+
+        if grd in ['T', 'I']:
+            varztID.axis = 'Depth'
+            varztID.short_name = 'deptht'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+            vartmpID.units = 'C'
+            vartmpID.short_name = 'votemper'
+            vartmpID.long_name = 'Temperature'
+            vartmpID.grid = 'bdyT'
+
+            varsalID.units = 'PSU'
+            varsalID.short_name = 'vosaline'
+            varsalID.long_name = 'Salinity'
+            varsalID.grid = 'bdyT'
+
+            if grd == 'I':
+                varildID.units = '%'
+                varildID.short_name = 'ildsconc'
+                varildID.long_name = 'Ice lead fraction'
+                varildID.grid = 'bdyT'
+
+                variicID.units = 'm'
+                variicID.short_name = 'iicethic'
+                variicID.long_name = 'Ice thickness'
+                variicID.grid = 'bdyT'
+
+                varisnID.units = 'm'
+                varisnID.short_name = 'isnowthi'
+                varisnID.long_name = 'Snow thickness'
+                varisnID.grid = 'bdyT'
+        elif grd == 'U':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'depthu'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varbtuID.units = 'm/s'
+            varbtuID.short_name = 'vobtcrtx'
+            varbtuID.long_name = 'Thickness-weighted depth-averaged zonal Current'
+            varbtuID.grid = 'bdyU'
+
+            vartouID.units = 'm/s'
+            vartouID.short_name = 'vozocrtx'
+            vartouID.long_name = 'Zonal Current'
+            vartouID.grid = 'bdyU'
+
+        elif grd == 'V':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'depthv'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varbtvID.units = 'm/s'
+            varbtvID.short_name = 'vobtcrty'
+            varbtvID.long_name = 'Thickness-weighted depth-averaged meridional Current'
+            varbtvID.grid = 'bdyV'
+
+            vartovID.units = 'm/s'
+            vartovID.short_name = 'vomecrty'
+            vartovID.long_name = 'Meridional Current'
+            vartovID.grid = 'bdyV'
+
+        elif grd == 'Z':
+            varsshID.units = 'm'
+            varsshID.short_name = 'sossheig'
+            varsshID.long_name = 'Sea Surface Height'
+            varsshID.grid = 'bdyT'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+        else:
+            logging.error('Unknown Grid')
+
+        ncid.close()
+
+    if var_nam[0] == 'thetao' or var_nam[0] == 'so':
+        logging.info('CMEMS variables identified, using default CMEMS variables.....')
+        gridNames = ['T', 'I', 'U', 'V', 'E', 'Z'] # All possible grids
+
+        # Dimension Lengths
+        xb_len = N
+        yb_len = 1
+        x_len = I
+        y_len = J
+        depth_len = K
+
+        # Enter define mode
+        ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
+
+        #define dimensions
+        if grd in gridNames and grd != 'Z': # i.e grid NOT barotropic (Z)
+            dimztID = ncid.createDimension('z', depth_len)
+        else:
+            logging.error('Grid type not known')
+        dimxbID = ncid.createDimension('xb', xb_len)
+        dimybID = ncid.createDimension('yb', yb_len)
+        dimxID = ncid.createDimension('x', x_len)
+        dimyID = ncid.createDimension('y', y_len)
+        dimtcID = ncid.createDimension('time_counter', None)
+
+        #define variable
+        vartcID = ncid.createVariable('time_counter', 'f4', ('time_counter', ))
+        varlonID = ncid.createVariable('nav_lon', 'f4', ('y', 'x', ))
+        varlatID = ncid.createVariable('nav_lat', 'f4', ('y', 'x', ))
+
+        if grd in ['E']:
+            varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+            varN1pID = ncid.createVariable('N1p', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varN3nID = ncid.createVariable('N3n', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varN5sID = ncid.createVariable('N5s', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+        elif grd in ['T', 'I']:
+            varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+            # TODO: generic variable name assignment
+            vartmpID = ncid.createVariable('thetao', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                          fill_value=fv)
+            varsalID = ncid.createVariable('so', 'f4', ('time_counter', 'z', 'yb', 'xb', ), fill_value=fv)
+            if grd == 'I':
+                varildID = ncid.createVariable('ileadfra', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+                variicID = ncid.createVariable('iicethic', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+                varisnID = ncid.createVariable('isnowthi', 'f4', ('time_counter', 'yb', 'xb',),
+                                               fill_value=fv)
+        elif grd == 'U':
+            varztID = ncid.createVariable('depthu', 'f4', ('z', 'yb', 'xb', ), fill_value=fv)
+            varbtuID = ncid.createVariable('vobtcrtx', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            vartouID = ncid.createVariable('vozocrtx', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                           fill_value=fv)
+        elif grd == 'V':
+            varztID = ncid.createVariable('depthv', 'f4', ('z', 'yb', 'xb', ))
+            varbtvID = ncid.createVariable('vobtcrty', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            vartovID = ncid.createVariable('vomecrty', 'f4', ('time_counter', 'z', 'yb', 'xb',),
+                                           fill_value=fv)
+        elif grd == 'Z':
+            varsshID = ncid.createVariable('sossheig', 'f4', ('time_counter', 'yb', 'xb', ),
+                                           fill_value=fv)
+            varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+        else:
+            logging.error("Unknow Grid input")
+
+
+        varnbiID = ncid.createVariable('nbidta', 'i4', ('yb', 'xb', ))
+        varnbjID = ncid.createVariable('nbjdta', 'i4', ('yb', 'xb', ))
+        varnbrID = ncid.createVariable('nbrdta', 'i4', ('yb', 'xb', ))
+        #Global Attributes
+        ncid.file_name = filename
+        ncid.creation_date = str(datetime.datetime.now())
+        ncid.rim_width = rw
+        ncid.history = h
+        ncid.institution = 'National Oceanography Centre, Liverpool, U.K.'
+
+        #Time axis attributes
+        vartcID.axis = 'T'
+        vartcID.standard_name = 'time'
+        vartcID.units = 'seconds since '+orig
+        vartcID.title = 'Time'
+        vartcID.long_name = 'Time axis'
+        vartcID.time_origin = orig
+        vartcID.calendar = calendar
+
+        #Longitude axis attributes
+        varlonID.axis = 'Longitude'
+        varlonID.short_name = 'nav_lon'
+        varlonID.units = 'degrees_east'
+        varlonID.long_name = 'Longitude'
+
+        #Latitude axis attributes
+        varlatID.axis = 'Latitude'
+        varlatID.short_name = 'nav_lat'
+        varlatID.units = 'degrees_north'
+        varlatID.long_name = 'Latitude'
+
+        #nbidta attributes
+        varnbiID.short_name = 'nbidta'
+        varnbiID.units = 'unitless'
+        varnbiID.long_name = 'Bdy i indices'
+
+        #nbjdta attributes
+        varnbjID.short_name = 'nbjdta'
+        varnbjID.units = 'unitless'
+        varnbjID.long_name = 'Bdy j indices'
+
+        #nbrdta attributes
+        varnbrID.short_name = 'nbrdta'
+        varnbrID.units = 'unitless'
+        varnbrID.long_name = 'Bdy discrete distance'
+        if grd == 'E':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'deptht'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+            varN1pID.units = 'mmol/m^3'
+            varN1pID.short_name = 'N1p'
+            varN1pID.long_name = 'Phosphate'
+            varN1pID.grid = 'bdyT'
+
+            varN3nID.units = 'mmol/m^3'
+            varN3nID.short_name = 'N3n'
+            varN3nID.long_name = 'Nitrate'
+            varN3nID.grid = 'bdyT'
+
+            varN5sID.units = 'mmol/m^3'
+            varN5sID.short_name = 'N5s'
+            varN5sID.long_name = 'Silicate'
+            varN5sID.grid = 'bdyT'
+
+        if grd in ['T', 'I']:
+            varztID.axis = 'Depth'
+            varztID.short_name = 'deptht'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+            vartmpID.units = 'C'
+            vartmpID.short_name = 'votemper'
+            vartmpID.long_name = 'Temperature'
+            vartmpID.grid = 'bdyT'
+
+            varsalID.units = 'PSU'
+            varsalID.short_name = 'vosaline'
+            varsalID.long_name = 'Salinity'
+            varsalID.grid = 'bdyT'
+
+            if grd == 'I':
+                varildID.units = '%'
+                varildID.short_name = 'ildsconc'
+                varildID.long_name = 'Ice lead fraction'
+                varildID.grid = 'bdyT'
+
+                variicID.units = 'm'
+                variicID.short_name = 'iicethic'
+                variicID.long_name = 'Ice thickness'
+                variicID.grid = 'bdyT'
+
+                varisnID.units = 'm'
+                varisnID.short_name = 'isnowthi'
+                varisnID.long_name = 'Snow thickness'
+                varisnID.grid = 'bdyT'
+        elif grd == 'U':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'depthu'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varbtuID.units = 'm/s'
+            varbtuID.short_name = 'vobtcrtx'
+            varbtuID.long_name = 'Thickness-weighted depth-averaged zonal Current'
+            varbtuID.grid = 'bdyU'
+
+            vartouID.units = 'm/s'
+            vartouID.short_name = 'vozocrtx'
+            vartouID.long_name = 'Zonal Current'
+            vartouID.grid = 'bdyU'
+
+        elif grd == 'V':
+            varztID.axis = 'Depth'
+            varztID.short_name = 'depthv'
+            varztID.units = 'm'
+            varztID.long_name = 'Depth'
+
+            varbtvID.units = 'm/s'
+            varbtvID.short_name = 'vobtcrty'
+            varbtvID.long_name = 'Thickness-weighted depth-averaged meridional Current'
+            varbtvID.grid = 'bdyV'
+
+            vartovID.units = 'm/s'
+            vartovID.short_name = 'vomecrty'
+            vartovID.long_name = 'Meridional Current'
+            vartovID.grid = 'bdyV'
+
+        elif grd == 'Z':
+            varsshID.units = 'm'
+            varsshID.short_name = 'sossheig'
+            varsshID.long_name = 'Sea Surface Height'
+            varsshID.grid = 'bdyT'
+
+            varmskID.short_name = 'bdy_msk'
+            varmskID.units = 'unitless'
+            varmskID.long_name = 'Structured boundary mask'
+
+        else:
+            logging.error('Unknown Grid')
+
+        ncid.close()
 
diff --git a/pynemo/nemo_bdy_ncpop.py b/pynemo/nemo_bdy_ncpop.py
index 39d344a986239552f3581f674c0914e50f3f8717..45223ab93304e57e0a99f6dda0b55335b903714b 100644
--- a/pynemo/nemo_bdy_ncpop.py
+++ b/pynemo/nemo_bdy_ncpop.py
@@ -18,7 +18,7 @@ def write_data_to_file(filename, variable_name, data):
     ncid = Dataset(filename, 'a', clobber=False, format='NETCDF4')
     count = data.shape
 
-    three_dim_variables = ['votemper', 'vosaline', 'N1p', 'N3n', 'N5s']
+    three_dim_variables = ['votemper', 'vosaline', 'N1p', 'N3n', 'N5s','thetao','so']
     two_dim_variables = ['sossheig', 'vobtcrtx', 'vobtcrty', 'iicethic', 'ileadfra', 'isnowthi']
 
     if variable_name in three_dim_variables:
diff --git a/pynemo/profile.py b/pynemo/profile.py
index 8152cdc45da56dfc2e5d4e6ff8a1a62f0214c3ff..1dc8b28dec46f942fc8e9ccbf9355b1a0d498468 100644
--- a/pynemo/profile.py
+++ b/pynemo/profile.py
@@ -35,6 +35,8 @@ import time
 import logging
 import numpy as np
 from PyQt5.QtWidgets import QMessageBox
+from calendar import monthrange
+import sys
 
 #Local imports
 from pynemo import pynemo_settings_editor
@@ -54,6 +56,7 @@ from pynemo.tide import nemo_bdy_tide3 as tide
 from pynemo.tide import nemo_bdy_tide_ncgen
 from pynemo.utils import Constants
 from pynemo.gui.nemo_bdy_mask import Mask as Mask_File
+from pynemo import nemo_bdy_dl_cmems as dl_cmems
 
 class Grid(object):
     """ 
@@ -71,6 +74,213 @@ class Grid(object):
 logger = logging.getLogger(__name__)
 logging.basicConfig(filename='nrct.log', level=logging.INFO)
 
+# define a Handler which writes INFO messages or higher to the sys.stderr
+console = logging.StreamHandler()
+console.setLevel(logging.INFO)
+# set a format which is simpler for console use
+formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
+# tell the handler to use this format
+console.setFormatter(formatter)
+# add the handler to the root logger
+logging.getLogger('').addHandler(console)
+
+def download_cmems(setup_filepath=0):
+    '''
+    CMEMS download function.
+
+    This is the main script to download CMEMS data, it has been removed from core PyNEMO function
+    to handle issues with downloading better e.g. connection issues etc.
+
+    Input options are handled in the same NEMO style namelist that the main script uses.
+
+
+    :param setup_filepath:
+    :param mask_gui:
+    :return:
+    '''
+    logger.info('Start CMEMS download Logging: ' + time.asctime())
+    logger.info('============================================')
+
+    Setup = setup.Setup(setup_filepath)  # default settings file
+    settings = Setup.settings
+    if settings['download_static'] == True:
+        for re in range(settings['num_retry']):
+            logger.info('CMEMS Static data requested: downloading static data now.... (this may take awhile)')
+            static = dl_cmems.get_static(settings)
+            if static == 0:
+                logger.info('CMEMS static data downloaded')
+                break
+            if type(static) == str:
+                err_chk = dl_cmems.err_parse(static,'FTP')
+                if err_chk == 0:
+                    logger.info('retrying FTP download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+                    if re == (settings['num_retry']-1):
+                        logger.critical('reached retry limit defined in BDY file, exiting now')
+                        logger.critical(static)
+                        dl_cmems.clean_up(settings)
+                        sys.exit(static)
+                if err_chk == 1:
+                    dl_cmems.clean_up(settings)
+                    sys.exit(static)
+                if err_chk == 2:
+                    dl_cmems.clean_up(settings)
+                    sys.exit(static)
+        dl_cmems.clean_up(settings)
+    # subset downloaded static grid files to match downloaded CMEMS data
+    if settings['subset_static'] == True:
+        logger.info('CMEMS subset static data requested: subsetting now......')
+        subset_static = dl_cmems.subset_static(settings)
+        if subset_static == 0:
+            logger.info('CMEMS static data subset successfully')
+        if type(subset_static) == str:
+            logger.error(subset_static)
+            dl_cmems.clean_up(settings)
+            sys.exit(subset_static)
+        dl_cmems.clean_up(settings)
+
+    if settings['download_cmems'] == True:
+
+        logger.info('CMEMS Boundary data requested: starting download process....')
+
+        if settings['year_end'] - settings['year_000'] > 0:
+            date_min = str(settings['year_000']) + '-01-01'
+            date_max = str(settings['year_end']) + '-12-31'
+
+        elif settings['year_end'] - settings['year_000'] == 0:
+
+            days_mth = monthrange(settings['year_end'], settings['month_end'])
+            date_min = str(settings['year_000']) + '-' + str(settings['month_000']).zfill(2) + '-01'
+            date_max = str(settings['year_end']) + '-' + str(settings['month_end']).zfill(2) + '-' + str(
+                days_mth[1])
+
+        elif settings['year_end'] - settings['year_000'] < 0:
+            error_msg = 'end date before start date please amend bdy file'
+            logger.error(error_msg)
+            dl_cmems.clean_up(settings)
+            sys.exit(error_msg)
+        else:
+            logger.warning('unable to parse dates..... using demo date November 2017')
+            date_min = '2017-11-01'
+            date_max = '2017-11-30'
+        # check to see if MOTU client is installed
+        chk = dl_cmems.chk_motu()
+        if chk == 1:
+            error_msg = 'motuclient not installed, please install by running $ pip install motuclient'
+            logger.error(error_msg)
+            dl_cmems.clean_up(settings)
+            sys.exit(error_msg)
+        if type(chk) == str:
+            logger.info('version ' + chk + ' of motuclient is installed')
+        else:
+            error_msg = 'unable to parse MOTU check'
+            logger.error(error_msg)
+            dl_cmems.clean_up(settings)
+            sys.exit(error_msg)
+        # download request for CMEMS data, try whole time interval first.
+        for re in range(settings['num_retry']):
+            logger.info('starting CMEMS download now (this can take a while)...')
+            dl = dl_cmems.request_cmems(settings, date_min, date_max)
+            if dl == 0:
+                logger.info('CMEMS data downloaded successfully')
+                break
+            # a string return means MOTU has return an error
+            if type(dl) == str:
+            # check error message against logged errors
+                err_chk = dl_cmems.err_parse(dl,'MOTU')
+            # error is known and retry is likely to work
+                if err_chk == 0:
+                    logger.info('retrying CMEMS download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+                    if re == (settings['num_retry']-1):
+                        logger.critical('reached retry limit defined in BDY file, exiting now')
+                        logger.critical(dl)
+                        dl_cmems.clean_up(settings)
+                        sys.exit(dl)
+            # error is known and retry is likely to not work
+                if err_chk == 1:
+                    dl_cmems.clean_up(settings)
+                    sys.exit(dl)
+            # error is not logged, add to error file.
+                if err_chk == 2:
+                    dl_cmems.clean_up(settings)
+                    sys.exit(dl)
+        if dl == 1:
+            # if the request is too large try monthly intervals
+            for re in range(settings['num_retry']):
+                logger.warning('CMEMS request too large, try monthly downloads...(this may take awhile)')
+                mnth_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'M')
+                if mnth_dl == 0:
+                    logger.info('CMEMS monthly request successful')
+                    break
+                if type(mnth_dl) == str:
+                    err_chk = dl_cmems.err_parse(mnth_dl,'MOTU')
+                    if err_chk == 0:
+                        logger.info('retrying CMEMS download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+                        if re == (settings['num_retry']-1):
+                            logger.critical('reached retry limit defined in BDY file, exiting now')
+                            logger.critical(mnth_dl)
+                            dl_cmems.clean_up(settings)
+                            sys.exit(mnth_dl)
+                    if err_chk == 1:
+                        dl_cmems.clean_up(settings)
+                        sys.exit(mnth_dl)
+                    if err_chk == 2:
+                        dl_cmems.clean_up(settings)
+                        sys.exit(mnth_dl)
+                if mnth_dl == 1:
+                    # if the request is too large try weekly intervals
+                    for re in range(settings['num_retry']):
+                        logger.warning('CMEMS request still too large, trying weekly downloads...(this will take longer...)')
+                        wk_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'W')
+                        if wk_dl == 0:
+                            logger.info('CMEMS weekly request successful')
+                            break
+                        if type(wk_dl) == str:
+                            err_chk = dl_cmems.err_parse(wk_dl,'MOTU')
+                            if err_chk == 0:
+                                logger.info('retrying CMEMS download....retry number ' + str(re + 1) + ' of ' + str(settings['num_retry']))
+                                if re == (settings['num_retry'] - 1):
+                                    logger.critical('reached retry limit defined in BDY file, exiting now')
+                                    logger.critical(wk_dl)
+                                    dl_cmems.clean_up(settings)
+                                    sys.exit(wk_dl)
+                            if err_chk == 1:
+                                dl_cmems.clean_up(settings)
+                                sys.exit(wk_dl)
+                            if err_chk == 2:
+                                dl_cmems.clean_up(settings)
+                                sys.exit(wk_dl)
+                    if wk_dl == 1:
+                    # if the request is too large try daily intervals.
+                        for re in range(settings['num_retry']):
+                            logger.warning('CMEMS request STILL too large, trying daily downloads....(even longer.....)')
+                            dy_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'D')
+                            if dy_dl == 0:
+                                logger.info('CMEMS daily request successful')
+                                break
+                            # if the request is still too large then smaller domain is required.
+                            if type(dy_dl) == str:
+                                # perform error check for retry
+                                err_chk = dl_cmems.err_parse(dy_dl,'MOTU')
+                                if err_chk == 0:
+                                    logger.info('retrying CMEMS download....retry number ' + str(re + 1) + ' of ' + str(settings['num_retry']))
+                                    if re == (settings['num_retry'] - 1):
+                                        logger.critical('reached retry limit defined in BDY file, exiting now')
+                                        logger.critical(dy_dl)
+                                        dl_cmems.clean_up(settings)
+                                        sys.exit(dy_dl)
+                                if err_chk == 1:
+                                    dl_cmems.clean_up(settings)
+                                    sys.exit(dy_dl)
+                                if err_chk == 2:
+                                    dl_cmems.clean_up(settings)
+                                    sys.exit(dy_dl)
+# end of messy if statements to split requests into months, weeks and days as needed.
+        dl_cmems.clean_up(settings)
+
+    logger.info('End CMEMS download: ' + time.asctime())
+    logger.info('==========================================')
+
+
 def process_bdy(setup_filepath=0, mask_gui=False):
     """ 
     Main entry for processing BDY lateral boundary conditions.
@@ -131,7 +341,10 @@ def process_bdy(setup_filepath=0, mask_gui=False):
     
     logger.info('Gathering grid information')
     nc = GetFile(settings['src_zgr'])
-    SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
+    try:
+        SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
+    except:
+        SourceCoord.zt = np.squeeze(nc['depth'][:])
     nc.close()
 
     # Define z at t/u/v points
@@ -153,10 +366,19 @@ def process_bdy(setup_filepath=0, mask_gui=False):
     logger.info('Depths defined')
     
     # Gather vorizontal grid information
-
+    # TODO: Sort generic grid variables (define in bdy file?)
     nc = GetFile(settings['src_hgr'])
-    SourceCoord.lon = nc['glamt'][:,:]
-    SourceCoord.lat = nc['gphit'][:,:]
+
+    try:
+        SourceCoord.lon = nc['glamt'][:,:]
+        SourceCoord.lat = nc['gphit'][:,:]
+    except:
+        SourceCoord.lon = nc['longitude'][:]
+        SourceCoord.lat = nc['latitude'][:]
+        # expand lat and lon 1D arrays into 2D array matching nav_lat nav_lon
+        SourceCoord.lon = np.tile(SourceCoord.lon, (np.shape(SourceCoord.lat)[0], 1))
+        SourceCoord.lat = np.tile(SourceCoord.lat, (np.shape(SourceCoord.lon)[1], 1))
+        SourceCoord.lat = np.rot90(SourceCoord.lat)
     
     try: # if they are masked array convert them to normal arrays
         SourceCoord.lon = SourceCoord.lon.filled()
@@ -276,8 +498,8 @@ def process_bdy(setup_filepath=0, mask_gui=False):
     grd  = [  't',  'u',  'v']
     pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
     
-    # TODO: The following is a temporary stop gap to assign variables. In 
-    # future we need a slicker way of determining the variables to extract. 
+    # TODO: The following is a temporary stop gap to assign variables for both CMEMS downloads
+    #  and existing variable names. In future we need a slicker way of determining the variables to extract.
     # Perhaps by scraping the .ncml file - this way biogeochemical tracers
     # can be included in the ln_tra = .true. option without having to
     # explicitly declaring them.
@@ -285,19 +507,51 @@ def process_bdy(setup_filepath=0, mask_gui=False):
     var_in = {}
     for g in range(len(grd)):
         var_in[grd[g]] = []
-        
-    if ln_tra:
-        var_in['t'].extend(['votemper', 'vosaline'])
-        
-    if ln_dyn2d or ln_dyn3d:
-        var_in['u'].extend(['vozocrtx', 'vomecrty'])
-        var_in['v'].extend(['vozocrtx', 'vomecrty'])
-    
-    if ln_dyn2d:
-        var_in['t'].extend(['sossheig'])
-        
-    if ln_ice:
-        var_in['t'].extend(['ice1', 'ice2', 'ice3'])
+    if 'use_cmems' in settings:
+        if settings['use_cmems'] == True:
+            logger.info('using CMEMS variable names......')
+            if ln_tra:
+                var_in['t'].extend(['thetao'])  # ,'so'])
+
+            if ln_dyn2d or ln_dyn3d:
+                var_in['u'].extend(['uo'])
+                var_in['v'].extend(['vo'])
+
+            if ln_dyn2d:
+                var_in['t'].extend(['zos'])
+
+            if ln_ice:
+                var_in['t'].extend(['siconc', 'sithick'])
+
+        if settings['use_cmems'] == False:
+            logger.info('using existing PyNEMO variable names.....')
+            if ln_tra:
+                var_in['t'].extend(['votemper', 'vosaline'])
+
+            if ln_dyn2d or ln_dyn3d:
+                var_in['u'].extend(['vozocrtx', 'vomecrty'])
+                var_in['v'].extend(['vozocrtx', 'vomecrty'])
+
+            if ln_dyn2d:
+                var_in['t'].extend(['sossheig'])
+
+            if ln_ice:
+                var_in['t'].extend(['ice1', 'ice2', 'ice3'])
+
+    if 'use_cmems' not in settings:
+        logger.info('using existing PyNEMO variable names.....')
+        if ln_tra:
+            var_in['t'].extend(['votemper', 'vosaline'])
+
+        if ln_dyn2d or ln_dyn3d:
+            var_in['u'].extend(['vozocrtx', 'vomecrty'])
+            var_in['v'].extend(['vozocrtx', 'vomecrty'])
+
+        if ln_dyn2d:
+            var_in['t'].extend(['sossheig'])
+
+        if ln_ice:
+            var_in['t'].extend(['ice1', 'ice2', 'ice3'])
     
     # As variables are associated with grd there must be a filename attached
     # to each variable
diff --git a/pynemo/pynemo_exe.py b/pynemo/pynemo_exe.py
index 558207594752e7ad52ad627146f30df54e4e2675..3f4e2a888671fe4e0a9db417ef6b55e4d016fbdc 100644
--- a/pynemo/pynemo_exe.py
+++ b/pynemo/pynemo_exe.py
@@ -18,22 +18,29 @@ def main():
     setup_file = ''
     mask_gui = False
     try:
-        opts, dummy_args = getopt.getopt(sys.argv[1:], "hs:g", ["help","setup=","mask_gui"])
+        opts, dummy_args = getopt.getopt(sys.argv[1:], "h:s:d:g", ["help", "setup=", "download_cmems=", "mask_gui"])
     except getopt.GetoptError:
-        print("usage: pynemo -g -s <namelist.bdy> ")
+        print("usage: pynemo -g -s -d <namelist.bdy> ")
         sys.exit(2)
 
     for opt, arg in opts:
         if opt == "-h":
-            print("usage: pynemo [-g] -s <namelist.bdy> ")
+            print("usage: pynemo [-g] -s -d <namelist.bdy> ")
             print("       -g (optional) will open settings editor before extracting the data")
             print("       -s <bdy filename> file to use")
+            print("       -d (optional) will download CMEMS data using provided bdy file")
             sys.exit()
         elif opt in ("-s", "--setup"):
             setup_file = arg
-        elif opt in("-g", "--mask_gui"):
+        elif opt in ("-g", "--mask_gui"):
             mask_gui = True
-
+        elif opt in ("-d", "--download_cmems"):
+            setup_file = arg
+            t0 = time.time()
+            profile.download_cmems(setup_file)
+            t1 = time.time()
+            print("CMEMS download time: %s" % (t1 - t0))
+            sys.exit(0)
     if setup_file == "":
         print("usage: pynemo [-g] -s <namelist.bdy> ")
         sys.exit(2)
diff --git a/pynemo/reader/ncml.py b/pynemo/reader/ncml.py
index 9d23e85666187cef594d15ebe0abaf38548a4cab..6a7b323ead1ee2264ba178ea42e90f8150361627 100644
--- a/pynemo/reader/ncml.py
+++ b/pynemo/reader/ncml.py
@@ -39,7 +39,19 @@ try:
 except ImportError:
     print('Warning: Please make sure pyjnius is installed and jvm.dll/libjvm.so/libjvm.dylib is in the path')
 
-time_counter_const = "time_counter"
+import sys
+from pynemo import nemo_bdy_setup as setup
+Setup = setup.Setup(sys.argv[2]) # default settings file
+settings = Setup.settings
+if 'use_cmems' in settings:
+    if settings['use_cmems'] == True:
+        time_counter_const = "time"
+    if settings['use_cmems'] == False:
+        time_counter_const = "time_counter"
+if 'use_cmems' not in settings:
+    time_counter_const = "time_counter"
+del settings, Setup
+
 class Reader(object):
     """ This class is the high level of object for the NCML reader, from here using grid type
     will return the grid data
diff --git a/pynemo/utils/cmems_errors.py b/pynemo/utils/cmems_errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..89a2cca43e810ad51099a93efb52a4cb5b21c430
--- /dev/null
+++ b/pynemo/utils/cmems_errors.py
@@ -0,0 +1,17 @@
+'''
+This file contains all the logged errors from MOTU client, these are added over time. The MOTU client is prone to errors
+that only may need a restart to complete. To add an error the key needs to be the error code returned by MOTU,
+this is ususally a number or set of numbers. The dictionary entry is an explantion of the error. Only the key is checked
+so make sure it is written correctly.
+'''
+
+# errors that are worth retrying download, e,g, error in netcdfwriter finish
+MOTU_retry = {'004-27': 'Error in NetcdfWriter finish',
+              'Errno 60': '[Errno 60] Operation timed out',
+              'Excp 11': 'Execution failed: [Excp 11] "Dataset retrival incomplete.'
+              }
+# errors that are not worth retrying e.g. cmems network is down
+MOTU_critical = {'Errno 50': 'Network Down'}
+# FTP specific errors
+FTP_retry = {'999': 'add ftp retry errors here' }
+FTP_critical = {'999': 'add ftp critical errors here' }
\ No newline at end of file
diff --git a/pynemo_37.yml b/pynemo_37.yml
index 8d904c7310bb43a1ee4c931944efca0cee71f371..8c728403364e79419d2652ffcc22777b375dc8cb 100644
--- a/pynemo_37.yml
+++ b/pynemo_37.yml
@@ -8,6 +8,7 @@ dependencies:
   - scipy=1.2.1
   - python=3.7.6
   - pip=20.0.2
+  - pandas=1.0.1
   - pip:
     - idna==2.9
     - lxml==4.5.0
diff --git a/test_scripts/bdy_coords_plot.py b/test_scripts/bdy_coords_plot.py
index 9647cd5c41716e30eddb69b7774809abd7717e68..e7c2d09a6896a2f4ce2e57545eb938db53d6b2c5 100755
--- a/test_scripts/bdy_coords_plot.py
+++ b/test_scripts/bdy_coords_plot.py
@@ -15,9 +15,9 @@ from mpl_toolkits.basemap import Basemap
 
 from pynemo.tests import bdy_coords as bdc
 
-bdc.process_bdy('/Users/thopri/Projects/PyNEMO/inputs/namelist_remote.bdy',False)
+bdc.process_bdy('/Users/thopri/Projects/PyNEMO/inputs/namelist_cmems.bdy',False)
 
-rootgrp = Dataset('/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y1979m11.nc', "r", format="NETCDF4")
+rootgrp = Dataset('/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y2017m11.nc', "r", format="NETCDF4")
 bdy_msk = np.squeeze(rootgrp.variables['bdy_msk'][:]) - 1
 bdy_lon = np.squeeze(rootgrp.variables['nav_lon'][:])
 bdy_lat = np.squeeze(rootgrp.variables['nav_lat'][:])
diff --git a/test_scripts/bdy_var_plot.py b/test_scripts/bdy_var_plot.py
index cfc47308ab09ac84b0e1adb4238fc78ebaa2ac01..763688c73db1f5fe52acdac2b8a4f1f2f4b0714e 100644
--- a/test_scripts/bdy_var_plot.py
+++ b/test_scripts/bdy_var_plot.py
@@ -12,11 +12,14 @@ from matplotlib.patches import Polygon
 def nemo_bdy_order(fname):
     """
     Determine the ordering and breaks in BDY files to aid plotting.
+
     This function takes the i/j coordinates from BDY files and orders them sequentially
     making it easier to visualise sections along the open boundary. Breaks in the open
     boundary are also determined (i.e. where the distance between two points > 2**0.5)
+
     Args:
         fname     (str) : filename of BDY file
+
     Returns:
         bdy_ind   (dict): re-ordered indices
         bdy_dst   (dict): distance (in model coords) between points
@@ -123,11 +126,14 @@ def nemo_bdy_order(fname):
 def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
     """
     Determine the ordering and breaks in BDY files to aid plotting.
+
     This function takes the i/j coordinates from BDY files and orders them sequentially
     making it easier to visualise sections along the open boundary. Breaks in the open
     boundary are also determined (i.e. where the distance between two points > 2**0.5)
+
     Args:
         fname     (str) : filename of BDY file
+
     Returns:
         bdy_ind   (dict): re-ordered indices
         bdy_dst   (dict): distance (in model coords) between points
@@ -252,8 +258,9 @@ def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
 
     return f
 
-fname = '/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y1979m11.nc'
+fname = '/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y2017m11.nc'
+print(fname)
 ind, dst, brk = nemo_bdy_order(fname)
-f = plot_bdy(fname, ind, dst, brk, 'votemper', 0, 0)
+f = plot_bdy(fname, ind, dst, brk, 'thetao', 0, 0)
 
 plt.show()
\ No newline at end of file
diff --git a/test_scripts/test_cmems.py b/test_scripts/test_cmems.py
new file mode 100644
index 0000000000000000000000000000000000000000..75362698206ddca2e0a665f6f6b16ea425c3ea53
--- /dev/null
+++ b/test_scripts/test_cmems.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Oct 31 15:49:52 2019
+
+Test script to test download CMEMS data function in PyNEMO. To be incorporated
+into profile.py
+
+@author: thopri
+"""
+from pynemo import nemo_bdy_dl_cmems as dl_cmems
+from pynemo import nemo_bdy_setup as setup
+from calendar import monthrange
+import sys
+import time
+import threading
+
+Setup = setup.Setup('/Users/thopri/Projects/PyNEMO/inputs/namelist_cmems.bdy') # default settings file
+settings = Setup.settings
+
+if settings['use_cmems'] == True:
+
+    if settings['year_end'] - settings['year_000'] > 0:
+        date_min = str(settings['year_000'])+'-01-01'
+        date_max = str(settings['year_end'])+'-12-31'
+
+    days_mth = monthrange(settings['year_end'],settings['month_end'])
+
+    date_min = str(settings['year_000'])+'-'+str(settings['month_000'])+'-01' # NOTE(review): unconditionally overwrites the multi-year date_min set above - confirm intent
+
+    date_max = str(settings['year_end'])+'-'+str(settings['month_end'])+'-'+str(days_mth[1]) # NOTE(review): likewise overwrites the multi-year date_max
+
+    cmes_config= {
+           'ini_config_template'    : settings['cmes_config_template'],
+           'user'                   : settings['cmes_usr'],
+           'pwd'                    : settings['cmes_pwd'],
+           'motu_server'            : settings['motu_server'],
+           'service_id'             : settings['cmes_model'],
+           'product_id'             : settings['cmes_product'],
+           'date_min'               : date_min,
+           'date_max'               : date_max,
+           'latitude_min'           : settings['latitude_min'],
+           'latitude_max'           : settings['latitude_max'],
+           'longitude_min'          : settings['longitude_min'],
+           'longitude_max'          : settings['longitude_max'],
+           'depth_min'              : settings['depth_min'],
+           'depth_max'              : settings['depth_max'],
+           'variable'               : settings['cmes_variable'],
+           'src_dir'                : settings['src_dir'],
+           'out_name'               : settings['cmes_output'],
+           'config_out'             : settings['cmes_config']
+           }
+
+    chk = dl_cmems.chk_motu()
+
+    dl = dl_cmems.request_CMEMS(cmes_config)
+
+class progress_bar_loading(threading.Thread):
+
+    def run(self):
+            global stop
+            global kill
+            print 'Loading....  ',
+            sys.stdout.flush()
+            i = 0
+            while not stop:
+                    if (i%4) == 0:
+                        sys.stdout.write('\b/')
+                    elif (i%4) == 1:
+                        sys.stdout.write('\b-')
+                    elif (i%4) == 2:
+                        sys.stdout.write('\b\\')
+                    elif (i%4) == 3:
+                        sys.stdout.write('\b|')
+
+                    sys.stdout.flush()
+                    time.sleep(0.2)
+                    i+=1
+
+            if kill == True:
+                print '\b\b\b\b ABORT!',
+            else:
+                print '\b\b done!',
+
+
+kill = False
+stop = False
+p = progress_bar_loading()
+p.start()
+
+try:
+    #anything you want to run.
+    time.sleep(1)
+    stop = True
+except (KeyboardInterrupt, EOFError):
+         kill = True
+         stop = True
\ No newline at end of file