diff --git a/pynemo/__init__.py b/pynemo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/pynemo/gui/__init__.py b/pynemo/gui/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/pynemo/gui/all_mask.png b/pynemo/gui/all_mask.png
new file mode 100644
index 0000000000000000000000000000000000000000..f275614383ea5ae9e32375a91fab8c5be6eb2d0e
Binary files /dev/null and b/pynemo/gui/all_mask.png differ
diff --git a/pynemo/gui/base.ncml b/pynemo/gui/base.ncml
new file mode 100644
index 0000000000000000000000000000000000000000..bb0b521ff901beae074a57124e28c04cbff2ed17
--- /dev/null
+++ b/pynemo/gui/base.ncml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2" title="NEMO aggregation">
+  <aggregation type="union" >
+  </aggregation>
+</netcdf>
\ No newline at end of file
diff --git a/pynemo/gui/border.png b/pynemo/gui/border.png
new file mode 100644
index 0000000000000000000000000000000000000000..e8dc7be47122768f10827215032556d53dd33c2f
Binary files /dev/null and b/pynemo/gui/border.png differ
diff --git a/pynemo/gui/freehand.png b/pynemo/gui/freehand.png
new file mode 100644
index 0000000000000000000000000000000000000000..5c1a538fece22b0210df261562476fd7561339bd
Binary files /dev/null and b/pynemo/gui/freehand.png differ
diff --git a/pynemo/gui/max_depth.png b/pynemo/gui/max_depth.png
new file mode 100644
index 0000000000000000000000000000000000000000..689eb9245449d606207621cd45ad9c8d31c7a004
Binary files /dev/null and b/pynemo/gui/max_depth.png differ
diff --git a/pynemo/gui/minus.png b/pynemo/gui/minus.png
new file mode 100644
index 0000000000000000000000000000000000000000..78012835f039da983f464acc9ac22bdd55e6b2c1
Binary files /dev/null and b/pynemo/gui/minus.png differ
diff --git a/pynemo/gui/mynormalize.py b/pynemo/gui/mynormalize.py
new file mode 100644
index 0000000000000000000000000000000000000000..00b75a2793f86050f2190740ff542f07dc4f890d
--- /dev/null
+++ b/pynemo/gui/mynormalize.py
@@ -0,0 +1,192 @@
+# The Normalize class is largely based on code provided by Sarah Graves.
+
+import numpy as np
+import numpy.ma as ma
+
+import matplotlib.cbook as cbook
+from matplotlib.colors import Normalize
+
+
+class MyNormalize(Normalize):
+    '''
+    A Normalize class for imshow that allows different stretching functions
+    for astronomical images.
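+
+    Example (a sketch; the data and limits are hypothetical):
+
+        norm = MyNormalize(stretch='log', vmin=0.1, vmax=100., vmid=0.05)
+        ax.imshow(data, norm=norm)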
+    '''
+
+    def __init__(self, stretch='linear', exponent=5, vmid=None, vmin=None,
+                 vmax=None, clip=False):
+        '''
+        Initialize a MyNormalize instance.
+
+        Optional Keyword Arguments:
+
+            *vmin*: [ None | float ]
+                Minimum pixel value to use for the scaling.
+
+            *vmax*: [ None | float ]
+                Maximum pixel value to use for the scaling.
+
+            *stretch*: [ 'linear' | 'log' | 'sqrt' | 'arcsinh' | 'power' ]
+                The stretch function to use (default is 'linear').
+
+            *vmid*: [ None | float ]
+                Mid-pixel value used for the log and arcsinh stretches. If
+                set to None, a default value is picked.
+
+            *exponent*: [ float ]
+                if self.stretch is set to 'power', this is the exponent to use.
+
+            *clip*: [ True | False ]
+                If clip is True and the given value falls outside the range,
+                the returned value will be 0 or 1, whichever is closer.
+        '''
+
+        if vmax < vmin:
+            raise Exception("vmax should be larger than vmin")
+
+        # Call original initalization routine
+        Normalize.__init__(self, vmin=vmin, vmax=vmax, clip=clip)
+
+        # Save parameters
+        self.stretch = stretch
+        self.exponent = exponent
+
+        if stretch == 'power' and self.exponent is None:
+            raise Exception("For stretch=='power', an exponent should be specified")
+
+        if vmid is None:
+            if stretch == 'log':
+                if vmin > 0:
+                    self.midpoint = vmax / vmin
+                else:
+                    raise Exception("When using a log stretch, if vmin < 0, then vmid has to be specified")
+            elif stretch == 'arcsinh':
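+                # default softening parameter for the arcsinh stretch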
+                self.midpoint = -1. / 30.
+            else:
+                self.midpoint = None
+        else:
+            if stretch == 'log':
+                if vmin < vmid:
+                    raise Exception("When using a log stretch, vmin should be larger than vmid")
+                self.midpoint = (vmax - vmid) / (vmin - vmid)
+            elif stretch == 'arcsinh':
+                self.midpoint = (vmid - vmin) / (vmax - vmin)
+            else:
+                self.midpoint = None
+
+    def __call__(self, value, clip=None):
+
+        #read in parameters
+        method = self.stretch
+        exponent = self.exponent
+        midpoint = self.midpoint
+
+        # ORIGINAL MATPLOTLIB CODE
+
+        if clip is None:
+            clip = self.clip
+
+        if cbook.iterable(value):
+            vtype = 'array'
+            val = ma.asarray(value).astype(np.float)
+        else:
+            vtype = 'scalar'
+            val = ma.array([value]).astype(np.float)
+
+        self.autoscale_None(val)
+        vmin, vmax = self.vmin, self.vmax
+        if vmin > vmax:
+            raise ValueError("minvalue must be less than or equal to maxvalue")
+        elif vmin == vmax:
+            return 0.0 * val
+        else:
+            if clip:
+                mask = ma.getmask(val)
+                val = ma.array(np.clip(val.filled(vmax), vmin, vmax),
+                                mask=mask)
+            result = (val - vmin) * (1.0 / (vmax - vmin))
+
+            # CUSTOM APLPY CODE
+
+            # Keep track of negative values
+            negative = result < 0.
+
+            if self.stretch == 'linear':
+
+                pass
+
+            elif self.stretch == 'log':
+
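+                # maps r in [0, 1] to log10(r*(m - 1) + 1)/log10(m) for midpoint m,
+                # so that r = 0 -> 0 and r = 1 -> 1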
+                result = ma.log10(result * (self.midpoint - 1.) + 1.) \
+                       / ma.log10(self.midpoint)
+
+            elif self.stretch == 'sqrt':
+
+                result = ma.sqrt(result)
+
+            elif self.stretch == 'arcsinh':
+
+                result = ma.arcsinh(result / self.midpoint) \
+                       / ma.arcsinh(1. / self.midpoint)
+
+            elif self.stretch == 'power':
+
+                result = ma.power(result, exponent)
+
+            else:
+
+                raise Exception("Unknown stretch in APLpyNormalize: %s" %
+                                self.stretch)
+
+            # Now set previously negative values to -inf, as these are
+            # different from true NaN values in the FITS image
+            result[negative] = -np.inf
+
+        if vtype == 'scalar':
+            result = result[0]
+
+        return result
+
+    def inverse(self, value):
+
+        # ORIGINAL MATPLOTLIB CODE
+
+        if not self.scaled():
+            raise ValueError("Not invertible until scaled")
+
+        vmin, vmax = self.vmin, self.vmax
+
+        # CUSTOM APLPY CODE
+
+        if cbook.iterable(value):
+            val = ma.asarray(value)
+        else:
+            val = value
+
+        if self.stretch == 'linear':
+
+            pass
+
+        elif self.stretch == 'log':
+
+            val = (ma.power(10., val * ma.log10(self.midpoint)) - 1.) / (self.midpoint - 1.)
+
+        elif self.stretch == 'sqrt':
+
+            val = val * val
+
+        elif self.stretch == 'arcsinh':
+
+            val = self.midpoint * \
+                  ma.sinh(val * ma.arcsinh(1. / self.midpoint))
+
+        elif self.stretch == 'power':
+
+            val = ma.power(val, (1. / self.exponent))
+
+        else:
+
+            raise Exception("Unknown stretch in APLpyNormalize: %s" %
+                            self.stretch)
+
+        return vmin + val * (vmax - vmin)
diff --git a/pynemo/gui/nemo-icon.png b/pynemo/gui/nemo-icon.png
new file mode 100644
index 0000000000000000000000000000000000000000..ee840df48e56cda8ecd56029a942bca7456fe738
Binary files /dev/null and b/pynemo/gui/nemo-icon.png differ
diff --git a/pynemo/gui/nemo_bdy_input_window.py b/pynemo/gui/nemo_bdy_input_window.py
new file mode 100644
index 0000000000000000000000000000000000000000..886206835450cb5809963e3643659fe44bcda822
--- /dev/null
+++ b/pynemo/gui/nemo_bdy_input_window.py
@@ -0,0 +1,57 @@
+'''
+Created on 21 Jan 2015
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+# pylint: disable=E1002
+from PyQt4 import QtGui
+from .nemo_bdy_namelist_edit import NameListEditor
+from .nemo_bdy_mask_gui import MatplotlibWidget
+from PyQt4.QtGui import QSizePolicy
+from PyQt4.Qt import Qt
+
+class InputWindow(QtGui.QDialog):
+    '''
+    Input Window for editing pyNEMO settings
+    '''
+
+    def __init__(self, setup):
+        '''
+        Initialises the UI components
+        '''
+        super(InputWindow, self).__init__()
+        #initialise NameListEditor
+        self.nl_editor = NameListEditor(setup)
+
+        #initialise MatplotlibWidget
+        self.mpl_widget = MatplotlibWidget()
+
+        #connect namelistedit to matplotlibwidget
+        self.nl_editor.bathymetry_update.connect(self.mpl_widget.set_bathymetry_file)
+        self.nl_editor.mask_update.connect(self.mpl_widget.save_mask_file)
+        self.nl_editor.mask_settings_update.connect(self.mpl_widget.set_mask_settings)
+
+        if setup.bool_settings['mask_file']:
+            try: #try to load with both bathymetry and mask file
+                self.mpl_widget.set_bathymetry_file(setup.settings['bathy'], setup.settings['mask_file'])
+            except (KeyError, IOError, RuntimeError): #if the mask file is not readable, open with bathymetry only
+                self.mpl_widget.set_bathymetry_file(setup.settings['bathy'], None)
+        else:
+            self.mpl_widget.set_bathymetry_file(setup.settings['bathy'],None)
+
+        self.mpl_widget.set_mask_settings(float(setup.settings['mask_max_depth']), float(setup.settings['mask_shelfbreak_dist']))
+
+        splitter = QtGui.QSplitter(Qt.Horizontal)
+        splitter.addWidget(self.nl_editor)
+        splitter.addWidget(self.mpl_widget)
+
+        hbox = QtGui.QHBoxLayout()
+        hbox.addWidget(splitter)
+        self.setLayout(hbox)
+        #set the Dialog title
+        self.setWindowTitle("PyNEMO Settings Editor")
+        QtGui.QApplication.setStyle(QtGui.QStyleFactory.create('Cleanlooks'))        
+        #show the window
+        self.show()
diff --git a/pynemo/gui/nemo_bdy_mask.py b/pynemo/gui/nemo_bdy_mask.py
new file mode 100644
index 0000000000000000000000000000000000000000..02b58c5346785324fb6129fbb355bec24728b02d
--- /dev/null
+++ b/pynemo/gui/nemo_bdy_mask.py
@@ -0,0 +1,244 @@
+'''
+Mask Class to hold the mask information and operation on mask
+
+@author: Mr. Srikanth Nagella
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import numpy as np
+from netCDF4 import Dataset
+import logging
+from scipy import ndimage
+import matplotlib.pyplot as plt
+from pynemo.utils import gcoms_break_depth
+from PyQt4.QtGui import QMessageBox
+from PyQt4 import QtCore
+
+class Mask(object):
+    """This is a Mask holder. which reads from a netCDF bathymetry file and
+    stores it in 'data' member variable"""
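+    # Example (a sketch; the file names are hypothetical):
+    #     mask = Mask('bathy_meter.nc')
+    #     mask.apply_border_mask(10)  # mask a 10-pixel border
+    #     mask.save_mask('mask.nc')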
+
+    min_depth = 200.0
+    shelfbreak_dist = 200.0
+    mask_type = 0
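+    # convention for self.data: 0 = land, 1 = points inside the mask,
+    # -1 = ocean points outside the mask (see reset_mask and add_mask)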
+
+    
+    def __init__(self, bathymetry_file=None, mask_file=None, min_depth = 200.0, shelfbreak_dist = 200.0):
+        """Initialises the Mask data"""
+        self.data = None
+        self.bathy_data = None
+        self.mask_file = None
+        self.logger = logging.getLogger(__name__)
+        self.set_mask_file(mask_file)
+        self.set_bathymetry_file(bathymetry_file)
+        self.min_depth = min_depth
+        self.shelfbreak_dist = shelfbreak_dist
+
+
+    def set_mask_file(self, mask_file):
+        """
+	Reads the mask data from the mask file
+	Assumes the mask file is 2D
+	"""
+        self.mask_file = mask_file
+        #if mask file is not set then reset the data
+        if self.mask_file is None:
+            self.data = None
+            return
+
+        try:
+            mask_nc = Dataset(str(self.mask_file), mode="r")
+            data = mask_nc.variables['mask']
+            self.data = data[:,:]
+        except KeyError:
+            self.logger.error('Mask file does not have a mask variable')
+            raise
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open mask file '+self.mask_file)
+            self.data = None
+            raise
+
+    def set_bathymetry_file(self, bathy_file):
+        """ This reads the bathymetry file and sets the land to 0 and ocean to 1 """
+        if bathy_file is None:
+            return
+
+        try:
+            self.bathymetry_file = str(bathy_file)
+            #open the bathymetry file
+            self.bathy_nc = Dataset(self.bathymetry_file)
+            self.lon = np.asarray(self.bathy_nc.variables['nav_lon'])
+            self.lat = np.asarray(self.bathy_nc.variables['nav_lat'])
+            self.bathy_data = self.bathy_nc.variables['Bathymetry'][:,:]
+            try: #check if units exists otherwise unknown. TODO
+                self.data_units = self.bathy_nc.variables['Bathymetry'].units
+            except AttributeError:
+                self.data_units = "unknown"
+            if self.data is None:
+                self.data = self.bathy_nc.variables['Bathymetry']
+                self.data = np.asarray(self.data[:, :])
+                self.data = np.around((self.data + .5).clip(0, 1))
+                #apply default 1px border
+                self.apply_border_mask(1)            
+        except KeyError:
+            self.logger.error('Bathymetry file does not have Bathymetry variable')
+            raise
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open bathymetry file '+self.bathymetry_file)
+            raise
+
+
+    def save_mask(self, mask_file):
+        """Reads the mask data from the mask file"""
+        if mask_file == None:
+            mask_file = self.mask_file
+
+        try:
+            self.logger.info('Writing mask data to %s' % mask_file)            
+            msgbox = QMessageBox()
+            msgbox.setWindowTitle("Saving....")
+            msgbox.setText("Writing mask data to file, please wait...")
+            msgbox.setWindowModality(QtCore.Qt.NonModal)
+            msgbox.show()           
+            mask_nc = Dataset(str(mask_file), mode="w")
+            mask_nc.createDimension('y', size=len(self.bathy_nc.dimensions['y']))
+            mask_nc.createDimension('x', size=len(self.bathy_nc.dimensions['x']))
+            nav_lat = mask_nc.createVariable('nav_lat', 'f4', ('y','x',))
+            nav_lon = mask_nc.createVariable('nav_lon', 'f4', ('y','x',))
+            mask_var = mask_nc.createVariable('mask', 'f4', ('y','x',))            
+            mask_var[...] = self.data
+            nav_lat[...] = self.lat
+            nav_lon[...] = self.lon
+            mask_nc.close() #flush the data to disk
+            msgbox.close()
+        except (IOError, RuntimeError):
+            QMessageBox.information(None,"pyNEMO","Failed to write the mask file, please check the permissions")            
+            self.logger.info('Cannot open mask file for writing '+mask_file)
+            raise
+
+    def apply_border_mask(self, pixels):
+        """ pixels is number of pixels in the border that need applying mask"""
+        if self.data is not None and pixels < self.data.shape[0] and pixels < self.data.shape[1]:
+            if pixels != 0:
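+                # build a boolean ring: True on a border of width `pixels`, False inside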
+                tmp = np.ones(self.data.shape, dtype=bool)
+                tmp[pixels:-pixels, pixels:-pixels] = False
+            else:
+                tmp = np.zeros(self.data.shape, dtype=bool)
+            self.reset_mask()
+            self.data = self.data * -1
+            self.data[tmp] = -1
+            self.select_the_largest_region()
+
+    def add_mask(self, index, roi):
+        """ Adds the masks for the given index values depending on the type of mask selected"""
+        out_index = None
+        if self.mask_type is None or self.mask_type == 0:
+            out_index = index
+        elif self.mask_type == 1: #maximum depth
+            out_index = self._get_bathy_depth_index(index,self.min_depth)            
+            out_index = self.remove_small_regions(out_index)
+            out_index = self.fill_small_regions(out_index)
+        elif self.mask_type == 2: # shelf break
+            #dummy, shelf_break = gcoms_break_depth.gcoms_break_depth(self.bathy_data[index])
+            #out_index = self._get_bathy_depth_index(index, shelf_break)
+            out_index = gcoms_break_depth.polcoms_select_domain(self.bathy_data, self.lat,
+                                                                self.lon, roi, self.shelfbreak_dist)
+            out_index = np.logical_and(index, out_index)
+            out_index = self.remove_small_regions(out_index)   
+            #out_index = self.fill_small_regions(out_index)                    
+        #if index is not empty        
+        if out_index is not None:                       
+            tmp = self.data[out_index]
+            tmp[tmp == -1] = 1
+            self.data[out_index] = tmp  
+        self.select_the_largest_region()         
+                
+        
+    def _get_bathy_depth_index(self, index, depth):
+        """ returns the indices from the input field `index` which have bathymetry depth greater
+	    than the input field `depth`
+	"""
+        output_index = self.bathy_data < depth
+        output_index = np.logical_and(index,output_index)
+        return output_index
+    
+    def remove_mask(self,index,roi):
+        """ Removes the mask for the given index values depending on the type of mask selected """
+        out_index = None
+        if self.mask_type is None or self.mask_type == 0:
+            out_index = index
+        elif self.mask_type == 1: #maximum depth
+            out_index = self._get_bathy_depth_index(index,self.min_depth)
+            out_index = self.remove_small_regions(out_index)
+            out_index = self.fill_small_regions(out_index)            
+        elif self.mask_type == 2: # shelf break
+#            dummy, shelf_break = gcoms_break_depth.gcoms_break_depth(self.bathy_data[index])
+#            out_index = self._get_bathy_depth_index(index, shelf_break)
+            out_index = gcoms_break_depth.polcoms_select_domain(self.bathy_data, self.lat,
+                                                                self.lon, roi, self.shelfbreak_dist)
+            out_index = np.logical_and(index, out_index)
+            out_index = self.remove_small_regions(out_index)  
+            #out_index = self.fill_small_regions(out_index)  
+        #if index is not empty
+        if out_index is not None:
+            tmp = self.data[out_index]
+            tmp[tmp == 1] = -1
+            self.data[out_index] = tmp
+        self.select_the_largest_region()
+    
+    def set_minimum_depth_mask(self, depth):
+        self.min_depth = depth
+
+    def set_mask_type(self, mask_type):
+        """ Sets the mask type """
+        self.mask_type = mask_type
+        
+    def remove_small_regions(self, index):
+        """ Removes the small regions in the selection area and takes only the largest area
+            for mask"""
+        #prepare the regions
+        mask_data = np.zeros(self.data.shape)
+        mask_data[index] = self.data[index]        
+        #connected components
+        label_mask, num_labels = ndimage.label(mask_data)
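+        # pixel count of each labelled region (sum of an array of ones per label)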
+        region_sizes = ndimage.sum(np.ones(self.data.shape), label_mask, range(1, num_labels+1))
+        max_area_mask = None
+        if region_sizes.size != 0:
+            max_area_index = np.argmax(region_sizes)+1
+            max_area_mask = label_mask == max_area_index
+        return max_area_mask
+    
+    def fill_small_regions(self, index):
+        """ This method fills the small regions of the selection area and fills them up """
+        #prepare the region with mask and land as 0, ocean as 1
+        mask_data = np.zeros(self.data.shape)
+        mask_data[index] = 1
+        #remove the small unmask holes
+        mask_withoutholes = ndimage.binary_fill_holes(mask_data)
+        return np.where(mask_withoutholes==1)
+        
+    def reset_mask(self):
+        """ This method resets the data back to no mask with land fill """
+        self.data = np.around((self.bathy_data + .5).clip(0, 1))*-1
+        
+    def select_the_largest_region(self):
+        """ This method tidies up the mask by selecting the largest masked region. this is to avoid two disconnected masked regions """
+        mask_data = np.zeros(self.data.shape)
+        mask_data[:,:] = self.data[:,:]
+        mask_data[mask_data == -1] = 0
+        #connected components
+        label_mask, num_labels = ndimage.label(mask_data)
+        if num_labels == 0: #if mask is empty/clear
+            return
+        region_sizes = ndimage.sum(np.ones(self.data.shape), label_mask, range(1, num_labels+1))
+        max_area_mask = None
+        if region_sizes.size != 0:
+            max_area_index = np.argmax(region_sizes)+1
+            max_area_mask = label_mask == max_area_index
+        self.data = np.around((self.bathy_data + .5).clip(0, 1))
+        self.data[self.data == 1] = -1
+        self.data[max_area_mask] = self.data[max_area_mask] * -1
+    
+    def apply_mediterrian_mask(self):
+        """ This is mediterrian mask specific for the test bathymetry file """
+        tmp = self.data[0:59, 280:350]
+        tmp[tmp == 1] = -1
+        self.data[0:59, 280:350] = tmp
diff --git a/pynemo/gui/nemo_bdy_mask_gui.py b/pynemo/gui/nemo_bdy_mask_gui.py
new file mode 100644
index 0000000000000000000000000000000000000000..203fc939491048412da053caca019c17e3be46b0
--- /dev/null
+++ b/pynemo/gui/nemo_bdy_mask_gui.py
@@ -0,0 +1,332 @@
+'''
+Created on 12 Jan 2015
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+from mpl_toolkits.basemap import Basemap, cm
+import numpy as np
+from .selection_editor import PolygonEditor, BoxEditor
+import os.path
+from PyQt4.QtCore import pyqtSignal, pyqtSlot, Qt
+from .nemo_bdy_mask import Mask
+import logging
+from PyQt4.QtGui import QSizePolicy
+from matplotlib.colors import Normalize
+
+mask_alpha = 0.3
+
+from PyQt4 import QtGui
+import matplotlib.pyplot as plt
+from matplotlib.figure import Figure
+from matplotlib.path import Path
+from matplotlib.transforms import Bbox
+from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
+from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
+# pylint: disable=E1002
+class MatplotlibWidget(QtGui.QWidget):
+    """This class is a QWidget for pyNEMO mask plot"""
+    min_depth = 200.0
+    shelfbreak_dist = 200.0
+    mask_type = 0
+    def __init__(self, parent=None, mask=None, min_depth = 200.0, shelfbreak_dist = 200.0,*args, **kwargs):
+        """ Initialises the mask, matplot and the navigation toolbar """
+        super(MatplotlibWidget, self).__init__(parent)
+        #QtGui.QWidget.__init__(self, parent)
+        self.figure = Figure(*args, **kwargs)
+        self.canvas = FigureCanvas(self.figure)
+        self.mask = mask
+        self.min_depth = min_depth
+        self.shelfbreak_dist = shelfbreak_dist
+        if self.mask is not None:
+            self.mask.min_depth = min_depth
+            self.mask.shelfbreak_dist = shelfbreak_dist
+        self.toolbar = NemoNavigationToolbar(self.canvas, self)
+        self.toolbar.locLabel.setMinimumWidth(100)
+        self.toolbar.locLabel.setMaximumWidth(170)
+        self.toolbar.locLabel.setSizePolicy(QSizePolicy.Fixed,QSizePolicy.Fixed)
+        self.toolbar.locLabel.setAlignment(Qt.AlignLeft|Qt.AlignTop)
+        self.toolbar.drawing_tool.connect(self.drawing_tool_callback)
+        self.axes = self.figure.add_subplot(111)
+        self.cbar = None
+        layout = QtGui.QVBoxLayout()
+        layout.addWidget(self.toolbar)
+        layout.addWidget(self.canvas)
+        self.setLayout(layout)
+        self._drawing_tool = None
+        self._drawing_tool_name = None
+        self.create_basemap()
+
+    @pyqtSlot(str)
+    def drawing_tool_callback(self, toolname):
+        """ callback for the drawing tool when the signal of change of drawing tool is
+        received"""
+        if self._drawing_tool_name is not None and toolname == "": #if tool is disabled
+            self._drawing_tool.disable()
+            self._drawing_tool_name = None
+            self._drawing_tool = None
+            self.canvas.draw()
+        else:
+            self._drawing_tool_name = toolname
+            if self._drawing_tool_name == "freehand": #if freehand tool is enabled
+                self._drawing_tool = PolygonEditor(self.axes, self.canvas)
+                self.canvas.draw()
+            elif self._drawing_tool_name == "rectangle": #if rectange tool is enabled
+                self._drawing_tool = BoxEditor(self.axes, self.canvas)
+                self._drawing_tool.enable()
+                self.canvas.draw()
+
+    def create_basemap(self):
+        """ Draws the basemap and contour with mask information"""
+        if self.mask is None:
+            return
+
+        x = np.arange(0, self.mask.lon.shape[0])
+        y = np.arange(0, self.mask.lon.shape[1])
+        x_vals, y_vals = np.meshgrid(y, x)
+        Z = self.mask.bathy_data[...].astype(np.float64)
+        #Z[Z==0] = np.nan
+        Z = np.ma.masked_where(Z==0, Z)
+        cmap = plt.get_cmap('GnBu')
+        cmap.set_bad('0.0')
+        cmap.set_under('black',1.0)
+        cmap.set_over('black',1.0)
+        transcmap = plt.get_cmap('autumn')
+        transcmap.set_bad(alpha=0.5)
+        masklayer = np.ma.masked_where(self.mask.data==-1,self.mask.data)
+        extent = (0, self.mask.lon.shape[1],0, self.mask.lon.shape[0])
+        cax = self.axes.imshow(Z, cmap = cmap, origin="lower",extent=extent,aspect='auto')
+        self.axes.imshow(masklayer, cmap=transcmap,alpha=0.3,origin="lower",extent=extent,aspect='auto')
+
+        zmin = np.amin(Z)
+        zmax = np.amax(Z)
+        if self.cbar is not None:
+            self.cbar.remove()
+        self.cbar = self.figure.colorbar(cax,ticks=np.linspace(zmin,zmax,10),orientation='horizontal')
+        self.cbar.set_label("Bathymetry (units=%s)"%self.mask.data_units)
+        self.canvas.draw()
+
+    
+    def reset_mask(self):
+        if self.mask is None:
+            return             
+        self.mask.reset_mask()
+        self.axes.clear()
+        self.create_basemap()
+        
+    def add_mask(self):
+        """ adds the selected region in the drawing tool to the mask """
+        if self._drawing_tool_name != "" and self.mask != None:
+            if self._drawing_tool.polygon != None:
+                x = np.arange(0, self.mask.lon.shape[0])
+                y = np.arange(0, self.mask.lon.shape[1])
+                x_vals, y_vals = np.meshgrid(y, x)
+                grid = zip(x_vals.ravel(), y_vals.ravel())
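+                # (x, y) pairs for every grid point, tested against the polygon below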
+                
+                self._drawing_tool.polygon.set_linewidth(1.0)
+                p_path = Path(self._drawing_tool.polygon.xy)
+                index = p_path.contains_points(grid)
+                index = index.reshape(self.mask.lon.shape)
+                xmin, ymin = np.min(self._drawing_tool.polygon.xy, axis=0)
+                xmax, ymax = np.max(self._drawing_tool.polygon.xy, axis=0)
+                self.mask.add_mask(index,[xmin,xmax,ymin,ymax])
+                self._drawing_tool.reset()
+                self.axes.clear()
+                self.create_basemap()
+
+    def remove_mask(self):
+        """ removes the selected region in the drawing tool from the mask """
+        if self._drawing_tool_name != "" and self.mask != None:
+            if self._drawing_tool.polygon != None:
+                x = np.arange(0, self.mask.lon.shape[0])
+                y = np.arange(0, self.mask.lon.shape[1])
+                x_vals, y_vals = np.meshgrid(y, x)
+                grid = zip(x_vals.ravel(), y_vals.ravel()) #check for the index
+
+                self._drawing_tool.polygon.set_linewidth(1.0)
+                p_path = Path(self._drawing_tool.polygon.xy)
+                index = p_path.contains_points(grid)
+                index = index.reshape(self.mask.lon.shape)
+                xmin, ymin = np.min(self._drawing_tool.polygon.xy, axis=0)
+                xmax, ymax = np.max(self._drawing_tool.polygon.xy, axis=0)                
+                self.mask.remove_mask(index,[xmin,xmax,ymin,ymax])
+                self._drawing_tool.reset()
+                self.axes.clear()
+                self.create_basemap()
+
+    def apply_border_mask(self):
+        """ This applies an mask of given number of pixels at the border of the mask"""
+        pixels, ok_btn_pressed = QtGui.QInputDialog.getText(self, 'Mask: Border Input',
+                                                            'Enter number of pixel of border \
+                                                             to be added to mask:')
+        if ok_btn_pressed:
+            self.mask.apply_border_mask(int(pixels))
+            self.axes.clear()
+            self.create_basemap()
+
+    def set_mask_type(self, mask_type):
+        """ Sets the mask type """
+        self.mask_type = mask_type
+        self.mask.mask_type = mask_type
+        
+    @pyqtSlot(str, str)
+    def set_bathymetry_file(self, bathymetry_filename, mask_file):
+        """ Set the bathymetry file """
+        try:
+            self.mask = Mask(bathymetry_filename, mask_file, self.min_depth, self.shelfbreak_dist)
+            self.mask.mask_type = self.mask_type
+            self.create_basemap()
+        except RuntimeError:
+            pass # couldn't set the new file name
+        
+    @pyqtSlot(str)
+    def save_mask_file(self, mask_file):
+        """ Save the mask data to mask_file """
+        if self.mask is not None:
+            self.mask.save_mask(mask_file)
+            
+    @pyqtSlot(float, float)
+    def set_mask_settings(self, min_depth, shelfbreak_dist):
+        """ Mask settings update """
+        self.min_depth = min_depth
+        self.shelfbreak_dist = shelfbreak_dist
+        if self.mask is not None:
+            self.mask.min_depth = min_depth
+            self.mask.shelfbreak_dist = shelfbreak_dist
+        
+class NemoNavigationToolbar(NavigationToolbar):
+    """ This is custom toolbar for the nemo which includes additional buttons
+    for drawing tool and (add,remove) for mask in addtion to default NavigationToolbar
+    provided by matplotlib """
+
+    drawing_tool = pyqtSignal(str) #signal for the drawing tool changed
+    def __init__(self, canvas, parent):
+        """ Initialises the toolbar """
+        self.toolitems = (('Home', 'Reset original view', 'home', 'home'),\
+                          ('Back', 'Back to previous view', 'back', 'back'),\
+                          ('Forward', 'Forward to next view', 'forward', 'forward'),\
+                          (None, None, None, None),\
+                          ('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'),\
+                          ('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'),\
+                          ('Reset', 'Reset the mask', 'reset','reset'),\
+                          (None, None, None, None),\
+                          ('Freehand', 'Freehand drawing', 'freehand', 'freehand'),\
+                          ('Rectangle', 'Rectangle drawing', 'rectangle', 'rectangle'),\
+                          ('Border', 'Border selection', 'border', 'border'),\
+                          ('plus', 'Add mask', 'add_mask', 'add_mask'),\
+                          ('minus', 'Remove mask', 'remove_mask', 'remove_mask'),\
+                          (None, None, None, None),\
+                          ('Normal','Normal Mask','normal_mask','normal_mask'),\
+                          ('MaxDepth', 'Max Depth Mask', 'max_depth_mask', 'max_depth_mask'),\
+                          ('ShelfBreak','Shelf Break Mask','shelf_break_mask','shelf_break_mask'),\
+                          (None, None, None, None)\
+                          )
+        NavigationToolbar.__init__(self, canvas, parent)
+        self._actions['reset'].setIcon(set_icon('reset.png'))
+        self._actions['freehand'].setCheckable(True)
+        self._actions['freehand'].setIcon(set_icon('freehand.png'))
+        self._actions['rectangle'].setCheckable(True)
+        self._actions['rectangle'].setIcon(set_icon('rectangle.png'))
+        self._actions['border'].setIcon(set_icon('border.png'))
+        self._actions['add_mask'].setIcon(set_icon('plus.png'))
+        self._actions['remove_mask'].setIcon(set_icon('minus.png'))
+        self._actions['normal_mask'].setIcon(set_icon('all_mask.png'))
+        self._actions['normal_mask'].setCheckable(True)
+        self._actions['max_depth_mask'].setIcon(set_icon('max_depth.png'))
+        self._actions['max_depth_mask'].setCheckable(True)
+        self._actions['shelf_break_mask'].setIcon(set_icon('shelf_break.png'))
+        self._actions['shelf_break_mask'].setCheckable(True)
+        self.update_height_mask(0)
+        
+    def reset(self, *dummy):
+        """ Callback for reset button clicked"""
+        self.parent.reset_mask()
+
+    def freehand(self, *dummy):
+        """ callback for freehand button clicked """
+        if self._actions['freehand'].isChecked():
+            if self._active == "PAN":
+                self.pan()
+            elif self._active == "ZOOM":
+                self.zoom()
+            elif self._actions['rectangle'].isChecked():
+                self._actions['rectangle'].setChecked(False)
+                self.drawing_tool.emit("") # clear the rectangle selector
+            self._active = None
+            self.drawing_tool.emit('freehand')
+            self._update_buttons_checked()
+        else:
+            self.drawing_tool.emit("")
+
+    def rectangle(self, *dummy):
+        """ callback for rectangel button clicked """
+        if self._actions['rectangle'].isChecked() == True:
+            if self._active == "PAN":
+                self.pan()
+            elif self._active == "ZOOM":
+                self.zoom()
+            elif self._actions['freehand'].isChecked():
+                self._actions['freehand'].setChecked(False)
+                self.drawing_tool.emit("") # clear the freehand selector
+            self._active = None
+            self.drawing_tool.emit('rectangle')
+            self._update_buttons_checked()
+        else:
+            self.drawing_tool.emit("")
+
+    def border(self, *dummy):
+        """ callback for border button clicked """
+        self.parent.apply_border_mask()
+
+    def add_mask(self, *dummy):
+        """ callback for add mask button clicked """
+        self.parent.add_mask()
+
+    def remove_mask(self, *dummy):
+        """ callback for remove mask button clicked """
+        self.parent.remove_mask()
+
+    def get_active_button(self):
+        """ returns the current active button between freehand and rectangle"""
+        if self._actions['rectangle'].isChecked():
+            return 'rectangle'
+        elif self._actions['freehand'].isChecked():
+            return 'freehand'
+        return None
+    
+    def normal_mask(self, *dummy):
+        """ enable the normal mask button """
+        self.update_height_mask(0)
+    
+    def max_depth_mask(self, *dummy):
+        """ enables the minimum height mask """
+        self.update_height_mask(1)
+    
+    def shelf_break_mask(self, *dummy):
+        """ enables the shelf break mask button """
+        self.update_height_mask(2)
+    
+    def update_height_mask(self, btn_id):
+        """ update the height mask buttons in the interface """
+        self._actions['normal_mask'].setChecked(False)
+        self._actions['max_depth_mask'].setChecked(False)
+        self._actions['shelf_break_mask'].setChecked(False)
+        try:
+            self.parent.set_mask_type(btn_id)
+        except AttributeError:
+            pass
+        if btn_id == 0:
+            self._actions['normal_mask'].setChecked(True)
+        elif btn_id == 1:
+            self._actions['max_depth_mask'].setChecked(True)
+        elif btn_id == 2:
+            self._actions['shelf_break_mask'].setChecked(True)
+
+def set_icon(name):
+    """ Creates an icon based on the file found in the module directory with input name"""
+    return QtGui.QIcon(os.path.join(os.path.dirname(__file__), name))
+
diff --git a/pynemo/gui/nemo_bdy_mask_gui.py.interactive_cbar b/pynemo/gui/nemo_bdy_mask_gui.py.interactive_cbar
new file mode 100644
index 0000000000000000000000000000000000000000..834d6018b2b7fdcf1bd5cb8aaf2cdaf2f2dd8b08
--- /dev/null
+++ b/pynemo/gui/nemo_bdy_mask_gui.py.interactive_cbar
@@ -0,0 +1,407 @@
+'''
+Created on 12 Jan 2015
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+from mpl_toolkits.basemap import Basemap, cm
+import numpy as np
+from .selection_editor import PolygonEditor, BoxEditor
+import os.path
+from PyQt4.QtCore import pyqtSignal, pyqtSlot, Qt
+from .nemo_bdy_mask import Mask
+import logging
+from PyQt4.QtGui import QSizePolicy
+
+mask_alpha = 0.3
+
+from PyQt4 import QtGui
+import matplotlib.pyplot as plt
+from matplotlib.figure import Figure
+from matplotlib.path import Path
+from matplotlib.transforms import Bbox
+from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
+from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
+from . import mynormalize
+# pylint: disable=E1002
+class DraggableColorbar(object):
+    def __init__(self, cbar, mappable):
+        self.cbar = cbar
+        self.mappable = mappable
+        self.press = None
+        self.cycle = sorted([i for i in dir(plt.cm) if hasattr(getattr(plt.cm,i),'N')])
+        self.index = self.cycle.index(cbar.get_cmap().name)
+
+    def connect(self):
+        """connect to all the events we need"""
+        self.cidpress = self.cbar.patch.figure.canvas.mpl_connect(
+            'button_press_event', self.on_press)
+        self.cidrelease = self.cbar.patch.figure.canvas.mpl_connect(
+            'button_release_event', self.on_release)
+        self.cidmotion = self.cbar.patch.figure.canvas.mpl_connect(
+            'motion_notify_event', self.on_motion)
+        self.keypress = self.cbar.patch.figure.canvas.mpl_connect(
+            'key_press_event', self.key_press)
+
+    def on_press(self, event):
+        """on button press we will see if the mouse is over us and store some data"""
+        if event.inaxes != self.cbar.ax: return
+        self.press = event.x, event.y
+
+    def key_press(self, event):
+        if event.key=='c':
+            self.index += 1
+        elif event.key=='C':
+            self.index -= 1
+        if self.index<0:
+            self.index = len(self.cycle)
+        elif self.index>=len(self.cycle):
+            self.index = 0
+        cmap = self.cycle[self.index]
+        self.cbar.set_cmap(cmap)
+        self.cbar.draw_all()
+        self.mappable.set_cmap(cmap)
+        self.mappable.get_axes().set_title(cmap)
+        self.cbar.patch.figure.canvas.draw()
+
+    def on_motion(self, event):
+        'on motion we will move the rect if the mouse is over us'
+        if self.press is None: return
+        if event.inaxes != self.cbar.ax: return
+        xprev, yprev = self.press
+        dx = event.x - xprev
+        dy = event.y - yprev
+        self.press = event.x,event.y
+        scale = self.cbar.norm.vmax - self.cbar.norm.vmin
+        perc = 0.03
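+        # left drag shifts both colour limits; right drag stretches/contracts them,
+        # each by 3% of the current range per motion event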
+        if event.button==1:
+            self.cbar.norm.vmin -= (perc*scale)*np.sign(dy)
+            self.cbar.norm.vmax -= (perc*scale)*np.sign(dy)
+        elif event.button==3:
+            self.cbar.norm.vmin -= (perc*scale)*np.sign(dy)
+            self.cbar.norm.vmax += (perc*scale)*np.sign(dy)
+        self.cbar.draw_all()
+        self.mappable.set_norm(self.cbar.norm)
+        self.cbar.patch.figure.canvas.draw()
+
+
+    def on_release(self, event):
+        """on release we reset the press data"""
+        self.press = None
+        self.mappable.set_norm(self.cbar.norm)
+        self.cbar.patch.figure.canvas.draw()
+
+    def disconnect(self):
+        """disconnect all the stored connection ids"""
+        self.cbar.patch.figure.canvas.mpl_disconnect(self.cidpress)
+        self.cbar.patch.figure.canvas.mpl_disconnect(self.cidrelease)
+        self.cbar.patch.figure.canvas.mpl_disconnect(self.cidmotion)
+
+class MatplotlibWidget(QtGui.QWidget):
+    """This class is a QWidget for pyNEMO mask plot"""
+    min_depth = 200.0
+    shelfbreak_dist = 200.0
+    mask_type = 0
+    def __init__(self, parent=None, mask=None, min_depth = 200.0, shelfbreak_dist = 200.0,*args, **kwargs):
+        """ Initialises the mask, matplot and the navigation toolbar """
+        super(MatplotlibWidget, self).__init__(parent)
+        #QtGui.QWidget.__init__(self, parent)
+        self.figure = Figure(*args, **kwargs)
+        self.canvas = FigureCanvas(self.figure)
+        self.mask = mask
+        self.min_depth = min_depth
+        self.shelfbreak_dist = shelfbreak_dist
+        if self.mask is not None:
+            self.mask.min_depth = min_depth
+            self.mask.shelfbreak_dist = shelfbreak_dist
+        self.toolbar = NemoNavigationToolbar(self.canvas, self)
+        self.toolbar.locLabel.setMinimumWidth(100)
+        self.toolbar.locLabel.setMaximumWidth(170)
+        self.toolbar.locLabel.setSizePolicy(QSizePolicy.Fixed,QSizePolicy.Fixed)
+        self.toolbar.locLabel.setAlignment(Qt.AlignLeft|Qt.AlignTop)
+        self.toolbar.drawing_tool.connect(self.drawing_tool_callback)
+        self.axes = self.figure.add_subplot(111)
+        self.cbar = None
+        layout = QtGui.QVBoxLayout()
+        layout.addWidget(self.toolbar)
+        layout.addWidget(self.canvas)
+        self.setLayout(layout)
+        self._drawing_tool = None
+        self._drawing_tool_name = None
+        self.create_basemap()
+
+    @pyqtSlot(str)
+    def drawing_tool_callback(self, toolname):
+        """ callback for the drawing tool when the signal of change of drawing tool is
+        received"""
+        if self._drawing_tool_name is not None and toolname == "": #if tool is disabled
+            self._drawing_tool.disable()
+            self._drawing_tool_name = None
+            self._drawing_tool = None
+            self.canvas.draw()
+        else:
+            self._drawing_tool_name = toolname
+            if self._drawing_tool_name == "freehand": #if freehand tool is enabled
+                self._drawing_tool = PolygonEditor(self.axes, self.canvas)
+                self.canvas.draw()
+            elif self._drawing_tool_name == "rectangle": #if rectange tool is enabled
+                self._drawing_tool = BoxEditor(self.axes, self.canvas)
+                self._drawing_tool.enable()
+                self.canvas.draw()
+
+    def create_basemap(self):
+        """ Draws the basemap and contour with mask information"""
+        if self.mask is None:
+            return
+
+        x = np.arange(0, self.mask.lon.shape[0])
+        y = np.arange(0, self.mask.lon.shape[1])
+        x_vals, y_vals = np.meshgrid(y, x)
+        Z = self.mask.bathy_data[...].astype(np.float64)
+        #Z[Z==0] = np.nan
+        Z = np.ma.masked_where(Z==0, Z)
+        cmap = plt.get_cmap('GnBu')
+        cmap.set_bad('0.0')
+        cmap.set_under('black',1.0)
+        cmap.set_over('black',1.0)
+        transcmap = plt.get_cmap('autumn')
+        transcmap.set_bad(alpha=0.5)
+        masklayer = np.ma.masked_where(self.mask.data==-1,self.mask.data)
+        cax = self.axes.pcolormesh(x_vals, y_vals, Z, cmap=cmap)
+        self.axes.contourf(x_vals, y_vals, masklayer, [-2, -1, 0, 1, 2], cmap=transcmap,\
+                           alpha=mask_alpha)
+
+        zmin = np.amin(Z)
+        zmax = np.amax(Z)
+        if self.cbar is None:
+            self.cbar = self.figure.colorbar(cax,ticks=np.linspace(zmin,zmax,10),orientation='horizontal')
+            self.cbar.set_norm(mynormalize.MyNormalize(vmin=zmin,vmax=zmax,stretch='linear'))
+            self.cbar.set_label("Bathymetry (units=%s)"%self.mask.data_units)
+            self.cbar = DraggableColorbar(self.cbar,cax)
+            self.cbar.connect()
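+            # the colorbar is now interactive: dragging rescales it and 'c'/'C' cycle colormaps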
+        self.canvas.draw()
+
+    
+    def reset_mask(self):
+        if self.mask is None:
+            return             
+        self.mask.reset_mask()
+        self.axes.clear()
+        self.create_basemap()
+        
+    def add_mask(self):
+        """ adds the selected region in the drawing tool to the mask """
+        if self._drawing_tool_name != "" and self.mask != None:
+            if self._drawing_tool.polygon != None:
+                x = np.arange(0, self.mask.lon.shape[0])
+                y = np.arange(0, self.mask.lon.shape[1])
+                x_vals, y_vals = np.meshgrid(y, x)
+                grid = zip(x_vals.ravel(), y_vals.ravel())
+
+                self._drawing_tool.polygon.set_linewidth(1.0)
+                p_path = Path(self._drawing_tool.polygon.xy)
+                index = p_path.contains_points(grid)
+                index = index.reshape(self.mask.lon.shape)
+                xmin, ymin = np.min(self._drawing_tool.polygon.xy, axis=0)
+                xmax, ymax = np.max(self._drawing_tool.polygon.xy, axis=0)
+                self.mask.add_mask(index,[xmin,xmax,ymin,ymax])
+                self._drawing_tool.reset()
+                self.axes.clear()
+                self.create_basemap()
+
+    def remove_mask(self):
+        """ removes the selected region in the drawing tool from the mask """
+        if self._drawing_tool_name != "" and self.mask != None:
+            if self._drawing_tool.polygon != None:
+                x = np.arange(0, self.mask.lon.shape[0])
+                y = np.arange(0, self.mask.lon.shape[1])
+                x_vals, y_vals = np.meshgrid(y, x)
+                grid = zip(x_vals.ravel(), y_vals.ravel()) #check for the index
+
+                self._drawing_tool.polygon.set_linewidth(1.0)
+                p_path = Path(self._drawing_tool.polygon.xy)
+                index = p_path.contains_points(grid)
+                index = index.reshape(self.mask.lon.shape)
+                xmin, ymin = np.min(self._drawing_tool.polygon.xy, axis=0)
+                xmax, ymax = np.max(self._drawing_tool.polygon.xy, axis=0)                
+                self.mask.remove_mask(index,[xmin,xmax,ymin,ymax])
+                self._drawing_tool.reset()
+                self.axes.clear()
+                self.create_basemap()
+
+    def apply_border_mask(self):
+        """ This applies an mask of given number of pixels at the border of the mask"""
+        pixels, ok_btn_pressed = QtGui.QInputDialog.getText(self, 'Mask: Border Input',
+                                                            'Enter number of pixel of border \
+                                                             to be added to mask:')
+        if ok_btn_pressed:
+            self.mask.apply_border_mask(int(pixels))
+            self.axes.clear()
+            self.create_basemap()
+
+    def set_mask_type(self, mask_type):
+        """ Sets the mask type """
+        self.mask_type = mask_type
+        self.mask.mask_type = mask_type
+        
+    @pyqtSlot(str, str)
+    def set_bathymetry_file(self, bathymetry_filename, mask_file):
+        """ Set the bathymetry file """
+        try:
+            self.mask = Mask(bathymetry_filename, mask_file, self.min_depth, self.shelfbreak_dist)
+            self.mask.mask_type = self.mask_type
+            self.create_basemap()
+        except RuntimeError:
+            pass # couldn't set the new file name
+        
+    @pyqtSlot(str)
+    def save_mask_file(self, mask_file):
+        """ Save the mask data to mask_file """
+        if self.mask is not None:
+            self.mask.save_mask(mask_file)
+            
+    @pyqtSlot(float, float)
+    def set_mask_settings(self, min_depth, shelfbreak_dist):
+        """ Mask settings update """
+        self.min_depth = min_depth
+        self.shelfbreak_dist = shelfbreak_dist
+        if self.mask is not None:
+            self.mask.min_depth = min_depth
+            self.mask.shelfbreak_dist = shelfbreak_dist
+        
+class NemoNavigationToolbar(NavigationToolbar):
+    """ This is custom toolbar for the nemo which includes additional buttons
+    for drawing tool and (add,remove) for mask in addtion to default NavigationToolbar
+    provided by matplotlib """
+
+    drawing_tool = pyqtSignal(str) #signal for the drawing tool changed
+    def __init__(self, canvas, parent):
+        """ Initialises the toolbar """
+        self.toolitems = (('Home', 'Reset original view', 'home', 'home'),\
+                          ('Back', 'Back to previous view', 'back', 'back'),\
+                          ('Forward', 'Forward to next view', 'forward', 'forward'),\
+                          (None, None, None, None),\
+                          ('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'),\
+                          ('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'),\
+                          ('Reset', 'Reset the mask', 'reset','reset'),\
+                          (None, None, None, None),\
+                          ('Freehand', 'Freehand drawing', 'freehand', 'freehand'),\
+                          ('Rectangle', 'Rectangle drawing', 'rectangle', 'rectangle'),\
+                          ('Border', 'Border selection', 'border', 'border'),\
+                          ('plus', 'Add mask', 'add_mask', 'add_mask'),\
+                          ('minus', 'Remove mask', 'remove_mask', 'remove_mask'),\
+                          (None, None, None, None),\
+                          ('Normal','Normal Mask','normal_mask','normal_mask'),\
+                          ('MaxDepth', 'Max Depth Mask', 'max_depth_mask', 'max_depth_mask'),\
+                          ('ShelfBreak','Shelf Break Mask','shelf_break_mask','shelf_break_mask'),\
+                          (None, None, None, None)\
+                          )
+        NavigationToolbar.__init__(self, canvas, parent)
+        self._actions['reset'].setIcon(set_icon('reset.png'))
+        self._actions['freehand'].setCheckable(True)
+        self._actions['freehand'].setIcon(set_icon('freehand.png'))
+        self._actions['rectangle'].setCheckable(True)
+        self._actions['rectangle'].setIcon(set_icon('rectangle.png'))
+        self._actions['border'].setIcon(set_icon('border.png'))
+        self._actions['add_mask'].setIcon(set_icon('plus.png'))
+        self._actions['remove_mask'].setIcon(set_icon('minus.png'))
+        self._actions['normal_mask'].setIcon(set_icon('all_mask.png'))
+        self._actions['normal_mask'].setCheckable(True)
+        self._actions['max_depth_mask'].setIcon(set_icon('max_depth.png'))
+        self._actions['max_depth_mask'].setCheckable(True)
+        self._actions['shelf_break_mask'].setIcon(set_icon('shelf_break.png'))
+        self._actions['shelf_break_mask'].setCheckable(True)
+        self.update_height_mask(0)
+        
+    def reset(self, *dummy):
+        """ Callback for reset button clicked"""
+        self.parent.reset_mask()
+
+    def freehand(self, *dummy):
+        """ callback for freehand button clicked """
+        if self._actions['freehand'].isChecked():
+            if self._active == "PAN":
+                self.pan()
+            elif self._active == "ZOOM":
+                self.zoom()
+            elif self._actions['rectangle'].isChecked():
+                self._actions['rectangle'].setChecked(False)
+                self.drawing_tool.emit("") # clear the rectangle selector
+            self._active = None
+            self.drawing_tool.emit('freehand')
+            self._update_buttons_checked()
+        else:
+            self.drawing_tool.emit("")
+
+    def rectangle(self, *dummy):
+        """ callback for rectangel button clicked """
+        if self._actions['rectangle'].isChecked() == True:
+            if self._active == "PAN":
+                self.pan()
+            elif self._active == "ZOOM":
+                self.zoom()
+            elif self._actions['freehand'].isChecked():
+                self._actions['freehand'].setChecked(False)
+                self.drawing_tool.emit("") # clear the freehand selector
+            self._active = None
+            self.drawing_tool.emit('rectangle')
+            self._update_buttons_checked()
+        else:
+            self.drawing_tool.emit("")
+
+    def border(self, *dummy):
+        """ callback for border button clicked """
+        self.parent.apply_border_mask()
+
+    def add_mask(self, *dummy):
+        """ callback for add mask button clicked """
+        self.parent.add_mask()
+
+    def remove_mask(self, *dummy):
+        """ callback for remove mask button clicked """
+        self.parent.remove_mask()
+
+    def get_active_button(self):
+        """ returns the current active button between freehand and rectangle"""
+        if self._actions['rectangle'].isChecked():
+            return 'rectangle'
+        elif self._actions['freehand'].isChecked():
+            return 'freehand'
+        return None
+    
+    def normal_mask(self, *dummy):
+        """ enable the normal mask button """
+        self.update_height_mask(0)
+    
+    def max_depth_mask(self, *dummy):
+        """ enables the minimum height mask """
+        self.update_height_mask(1)
+    
+    def shelf_break_mask(self, *dummy):
+        """ enables the shelf break mask button """
+        self.update_height_mask(2)
+    
+    def update_height_mask(self, btn_id):
+        """ update the height mask buttons in the interface """
+        self._actions['normal_mask'].setChecked(False)
+        self._actions['max_depth_mask'].setChecked(False)
+        self._actions['shelf_break_mask'].setChecked(False)
+        try:
+            self.parent.set_mask_type(btn_id)
+        except AttributeError:
+            pass
+        if btn_id == 0:
+            self._actions['normal_mask'].setChecked(True)
+        elif btn_id == 1:
+            self._actions['max_depth_mask'].setChecked(True)
+        elif btn_id == 2:
+            self._actions['shelf_break_mask'].setChecked(True)
+
+def set_icon(name):
+    """ Creates an icon based on the file found in the module directory with input name"""
+    return QtGui.QIcon(os.path.join(os.path.dirname(__file__), name))
+
diff --git a/pynemo/gui/nemo_bdy_namelist_edit.py b/pynemo/gui/nemo_bdy_namelist_edit.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f027e31e2e7415db338748cb3535c94bd7588bd
--- /dev/null
+++ b/pynemo/gui/nemo_bdy_namelist_edit.py
@@ -0,0 +1,164 @@
+'''
+Editor for namelist.bdy file
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+# pylint: disable=E1002
+from PyQt4 import QtGui, QtCore
+from PyQt4.QtCore import pyqtSignal, Qt, QRect, QPoint
+
+import ast
+from PyQt4.QtGui import QMessageBox, QRegion, QIcon, QToolTip, QCursor
+
+class NameListEditor(QtGui.QWidget):
+    '''
+    This class creates a gui for the Namelist file options
+    '''
+    new_settings = {} #temporary variable to store the settings as they are changed in the GUI
+    bathymetry_update = pyqtSignal(str,str) #fires when there are changes to the settings
+    mask_update = pyqtSignal(str) #fires when the mask data needs to be saved
+    mask_settings_update = pyqtSignal(float, float) #fires when there is mask settings update
+    def __init__(self, setup):
+        '''
+        Constructor for setting up the gui using the settings
+        '''
+        super(NameListEditor, self).__init__()
+        self.settings = setup.settings
+        self.bool_settings = setup.bool_settings
+        self.setup = setup
+        self.init_ui()
+
+    def init_ui(self):
+        '''
+        Initialises the UI components of the GUI
+        '''
+        client = QtGui.QWidget(self)
+        # create the grid layout
+        grid = QtGui.QGridLayout()
+
+        # Loop through the settings and create widgets for each setting
+        index = 0
+        for setting in self.settings:
+            # initialises setting Widget
+            label = QtGui.QLabel(setting)
+            qlabel = QtGui.QPushButton("")
+            qlabel.setIcon(self.style().standardIcon(QtGui.QStyle.SP_MessageBoxQuestion))
+            if type(self.settings[setting]).__name__ in ['str', 'float', 'double',
+                                                         'int', 'time', 'dict']:
+                text = QtGui.QLineEdit(self)
+                text.setText(str(self.settings[setting]))
+                text.textChanged.connect(lambda value=setting,\
+                                         var_name=setting: self.label_changed(value, var_name))
+                if setting in self.bool_settings:
+                    chkbox = QtGui.QCheckBox(self)
+                    chkbox.setChecked(self.bool_settings[setting])
+                    chkbox.stateChanged.connect(lambda value=setting,\
+                                                var_name=setting:\
+                                                self.state_changed(value, var_name))
+                    grid.addWidget(chkbox, index, 0)
+
+            elif type(self.settings[setting]).__name__ == 'bool':
+                text = QtGui.QComboBox(self)
+                text.insertItem(0, 'True')
+                text.insertItem(1, 'False')
+                if self.settings[setting]:
+                    text.setCurrentIndex(0)
+                else:
+                    text.setCurrentIndex(1)
+                text.currentIndexChanged.connect(lambda value=setting,\
+                                                 var_name=setting:\
+                                                 self.combo_index_changed(value, var_name))
+
+            grid.addWidget(label, index, 1)
+            grid.addWidget(text, index, 2)
+            qlabel.clicked.connect(lambda widget=qlabel,\
+                                   str_val=self.setup.variable_info.get(setting, ''):\
+                                   QToolTip.showText(QCursor.pos(),str_val))
+            grid.addWidget(qlabel,index, 3)
+            if setting in self.setup.variable_info:
+                qlabel.setToolTip(self.setup.variable_info[setting])
+            index = index+1
+
+        client.setLayout(grid)
+        #scrollbars
+        scroll_area = QtGui.QScrollArea(self)
+        #scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
+        scroll_area.setWidget(client)
+
+        #save cancel buttons
+        btn_widget = QtGui.QWidget(self)
+        hbox_layout = QtGui.QHBoxLayout()
+        btn_save = QtGui.QPushButton('Save')
+        btn_save.clicked.connect(self._btn_save_callback)
+        self.btn_cancel = QtGui.QPushButton('Close')
+        self.btn_cancel.clicked.connect(self._btn_cancel_callback)
+        hbox_layout.addWidget(btn_save)
+        hbox_layout.addWidget(self.btn_cancel)
+        btn_widget.setLayout(hbox_layout)
+
+        box_layout = QtGui.QVBoxLayout()
+        box_layout.addWidget(scroll_area)
+        box_layout.addWidget(btn_widget)
+        btn_widget.setMaximumWidth(400)
+        scroll_area.setMaximumWidth(400)
+        self.setLayout(box_layout)
+        #show the window
+        self.show()
+
+    def label_changed(self, value, name):
+        """ callback when the text is changed in the text box"""
+        self.new_settings[name] = unicode(value).encode('utf_8')
+
+    def combo_index_changed(self, value, name):
+        """ callback when the True/False drop down for the settings which has boolean value
+        is changed"""
+        if value == 0:
+            self.new_settings[name] = True
+        else:
+            self.new_settings[name] = False
+
+    def state_changed(self, state, name):
+        """ callback when the check box  state is changed. This updates the bool_setting """
+        if state == QtCore.Qt.Checked:
+            self.bool_settings[name] = True
+        else:
+            self.bool_settings[name] = False
+
+    def _btn_save_callback(self):
+        """ callback when the save button is clicked. Takes the settings values from
+        the GUI and writes them back to file."""
+        #copy the modified values to settings and call the setup save
+        for setting in self.new_settings:
+            if (type(self.settings[setting]).__name__ == 'dict') and \
+                (type(self.new_settings[setting]).__name__ != 'dict'):
+                #dict-valued settings arrive from the GUI as strings, e.g. "{'key': 1}"
+                self.new_settings[setting] = ast.literal_eval(self.new_settings[setting])
+            self.settings[setting] = self.new_settings[setting]
+
+        self.setup.settings = self.settings
+        try:
+            self.setup.write() #write settings back to file
+            QMessageBox.information(self,"pyNEMO","Settings saved to file")
+        except Exception:
+            QMessageBox.information(self,"pyNEMO", "Error while saving the settings file, please check the permissions")
+       
+        try:
+            #only emit the saving of the mask file if the mask file name is set and the boolean value is set
+            if self.settings['mask_file'] is not None and self.bool_settings['mask_file']:
+                self.mask_update.emit(self.settings['mask_file'])
+        except KeyError:
+            QMessageBox.information(self,"pyNEMO","Set mask_file key in the setting .bdy file")            
+            
+        try:
+            self.mask_settings_update.emit(float(self.settings['mask_max_depth']), float(self.settings['mask_shelfbreak_dist']))
+        except KeyError:
+            print 'Set the mask setting mask_max_depth and mask_shelfbreak_dist'
+            
+        if self.bool_settings['mask_file']:
+            self.bathymetry_update.emit(self.settings['bathy'],self.settings['mask_file'])
+
+    def _btn_cancel_callback(self):
+        """ callback when cancel button is clicked """
+        self.close()
+
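+# Illustrative wiring (a sketch, not part of this module): a parent window
+# would typically construct the editor from a parsed .bdy setup object and
+# subscribe to its signals; the handler names here are hypothetical:
+#     editor = NameListEditor(setup)
+#     editor.mask_update.connect(on_save_mask)
+#     editor.bathymetry_update.connect(on_reload_bathymetry)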
diff --git a/pynemo/gui/nemo_ncml_generator.py b/pynemo/gui/nemo_ncml_generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..4037367a85a87220f3a4a1409f960b9f85440e52
--- /dev/null
+++ b/pynemo/gui/nemo_ncml_generator.py
@@ -0,0 +1,368 @@
+'''
+Created on 6 Aug 2015
+
+@author: Shirley Crompton, UK Science and Technology Facilities Council
+'''
+import logging
+import os
+import xml.etree.ElementTree as ET
+from PyQt4 import QtGui
+from PyQt4 import QtCore
+from PyQt4.QtCore import pyqtSlot
+import nemo_ncml_tab_widget
+from thredds_crawler.crawl import Crawl
+
+class Ncml_generator(QtGui.QDialog):
+    '''
+    Gui editor to capture user input for the purpose of generating NCML representation of pynemo source datasets.
+    '''
+
+    def __init__(self, basefile):
+        '''
+        Initialises the UI components
+        '''
+        super(Ncml_generator, self).__init__()     # no params yet; could later allow the user to predefine an input ncml for editing
+        #Logging for class
+        self.logger = logging.getLogger(__name__)   #logger config'ed in pynemo_exe.py
+        
+        if not basefile:
+            testpath, file_name = os.path.split(__file__)
+            self.baseFile = os.path.join(testpath,'base.ncml')
+        else:
+            self.baseFile = basefile
+            print 'ncml baseFile : ', str(self.baseFile)
+        
+        self.filename = None # store the output file pointer      
+        self.initUI()
+        
+        
+    def initUI(self):
+        QtGui.QToolTip.setFont(QtGui.QFont('SansSerif', 11))
+        '''
+        vbox is the top container
+        '''
+        vbox = QtGui.QVBoxLayout(self)
+        vbox.setSpacing(10)
+        vbox.setContentsMargins(10, 10, 5, 5)
+        
+        '''
+        top panel for output file
+        '''
+        top_outfile_label = QtGui.QLabel(unicode('Output filename').encode('utf-8'))
+        self.top_outfile_name = QtGui.QLineEdit()    #location is pre-defined
+        self.top_outfile_name.setToolTip(unicode('Define output file').encode('utf-8'))
+        self.top_outfile_name.returnPressed.connect(self.get_fname_input)
+        
+        top_outfile_button = QtGui.QPushButton(unicode('Select file').encode('utf-8'))
+        top_outfile_button.clicked.connect(self.get_fname)
+        
+        top_grpBox = QtGui.QGroupBox(unicode('Define output file').encode('utf-8'), None)
+        top_grid = QtGui.QGridLayout(top_grpBox)
+        top_grid.setVerticalSpacing(5)
+        top_grid.setHorizontalSpacing(10)
+        top_grid.addWidget(top_outfile_label, 1, 0)
+        top_grid.addWidget(self.top_outfile_name, 1, 1)
+        top_grid.addWidget(top_outfile_button, 1,2, QtCore.Qt.AlignRight)
+        
+        '''
+        middle panel for tab folder
+        '''
+        self.tabWidget = QtGui.QTabWidget()
+        self.tracer_tab = nemo_ncml_tab_widget.Ncml_tab(unicode("Tracer").encode('utf-8'))
+        self.tracer_tab.setEnabled(False)
+        self.dynamic_tab = nemo_ncml_tab_widget.Ncml_tab(unicode("Dynamics").encode('utf-8'))
+        self.dynamic_tab.setEnabled(False)
+        self.ice_tab = nemo_ncml_tab_widget.Ncml_tab(unicode("Ice").encode('utf-8'))
+        self.ice_tab.setEnabled(False)
+        self.ecosys_tab = nemo_ncml_tab_widget.Ncml_tab(unicode("Ecosystem").encode('utf-8'))
+        self.ecosys_tab.setEnabled(False)
+        self.grid_tab = nemo_ncml_tab_widget.Ncml_tab(unicode("Grid").encode('utf-8'))
+        self.grid_tab.setEnabled(False)
+                
+        self.tabWidget.addTab(self.tracer_tab, unicode("Tracer").encode('utf-8'))
+        self.tabWidget.addTab(self.dynamic_tab, unicode("Dynamics").encode('utf-8'))
+        self.tabWidget.addTab(self.ice_tab, unicode("Ice").encode('utf-8'))
+        self.tabWidget.addTab(self.ecosys_tab, unicode("Ecosystem").encode('utf-8')) # should be disabled
+        self.tabWidget.addTab(self.grid_tab, unicode("Grid").encode('utf-8')) # should be disabled
+        self.tabWidget.setMovable(False)
+#        if self.tabWidget.widget(self.tabWidget.currentIndex()).isEnabled() is True:
+        
+#       self.connect(self.tabWidget, SIGNAL('currentChanged(int)'),self.enable_btn_update)
+        self.tabWidget.currentChanged.connect(lambda: self.enable_btn_update(enable_btn))
+        '''
+        button bar
+        '''
+        go_btn = QtGui.QPushButton(unicode('Generate').encode('utf-8'))
+        go_btn.setToolTip(unicode('Add all variable definitions before generating NcML file.').encode('utf-8'))
+        cancel_btn = QtGui.QPushButton(unicode('Cancel').encode('utf-8'))
+        enable_btn = QtGui.QPushButton(unicode('Enable Tab').encode('utf-8'))
+        #layout button bar        
+        btn_hBox = QtGui.QHBoxLayout(None)
+        btn_hBox.setMargin(5)
+        btn_hBox.setSpacing(10)
+        btn_hBox.setAlignment(QtCore.Qt.AlignRight)
+        btn_hBox.addWidget(enable_btn)
+        btn_hBox.addWidget(cancel_btn)
+        btn_hBox.addWidget(go_btn)
+        
+        go_btn.clicked.connect(self.generate)
+        cancel_btn.clicked.connect(self.close)
+        enable_btn.clicked.connect(lambda: self.enable_tab(enable_btn))
+#       enable_btn.clicked.connect(self.enable_tab)
+        
+        '''
+        Assemble the top layout container
+        '''
+        vbox.addWidget(top_grpBox)
+        vbox.addWidget(self.tabWidget)
+        vbox.addLayout(btn_hBox)
+        
+        #self.setLayout(grp_box)        
+        self.setWindowIcon(QtGui.QIcon(os.path.join(os.path.dirname(__file__), 'nemo_icon.png')))    #look up the icon relative to this module
+        self.setWindowTitle(unicode("PyNEMO NcML Generator").encode('utf-8'))
+        self.resize(650,300)
+        
+        #move the default focus off the output file QLineEdit so that pressing Enter there does not trigger the widget in focus
+        self.tabWidget.setFocus()
+        #show the window
+        self.show()
+        
+    
+    '''
+    file picker call back for output file input field
+    '''
+    @pyqtSlot()
+    def get_fname(self):
+        # getSaveFileName opens a file picker dialog; if the user selects a
+        # file its path is returned, otherwise (i.e. the user cancels the
+        # operation) None is returned
+        fname = QtGui.QFileDialog.getSaveFileName(self, 'Select output file', '', selectedFilter='*.ncml')
+        if fname:
+            self.filename = fname #returns a QString
+            self.top_outfile_name.setText(str(fname))
+            #print 'the output file is set to : ' + self.filename
+    '''
+    output file text box call back handler
+    '''
+    @pyqtSlot()
+    def get_fname_input(self):
+        self.filename = self.top_outfile_name.text()
+        #print 'the output file is manually set to : ' + self.filename
+    '''
+    call back to update the Enable/Disable Tab button label when the current tab changes
+    '''
+    @pyqtSlot()
+    def enable_btn_update(self, enable_btn):
+        if self.tabWidget.widget(self.tabWidget.currentIndex()).isEnabled():
+            enable_btn.setText(unicode('Disable Tab').encode('utf-8'))
+        else:
+            enable_btn.setText(unicode('Enable Tab').encode('utf-8'))
+    '''
+    call back to toggle the enabled state of the current tab
+    '''
+    @pyqtSlot()
+    def enable_tab(self,enable_btn):
+        #toggle the current tab's enabled state and update the button label
+        if self.tabWidget.widget(self.tabWidget.currentIndex()).isEnabled():
+            self.tabWidget.widget(self.tabWidget.currentIndex()).setEnabled(False)
+            enable_btn.setText(unicode('Enable Tab').encode('utf-8'))
+        else:
+            self.tabWidget.widget(self.tabWidget.currentIndex()).setEnabled(True)
+            enable_btn.setText(unicode('Disable Tab').encode('utf-8'))
+
+    '''
+    call back to handle the generate button pressed
+    '''
+    @pyqtSlot()
+    def generate(self):
+        #validate output file
+        if self.filename is None or self.filename == "":
+            if self.top_outfile_name.text() is None or self.top_outfile_name.text() == "":
+                QtGui.QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('No output file specified!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                return
+            else:
+                self.filename = self.top_outfile_name.text()
+            
+        if not os.path.exists(os.path.dirname(str(self.filename))):
+            QtGui.QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('Invalid output directory!  Cannot generate file!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+            #print 'invalid target directory!  Cannot generate.'
+            return
+            
+                
+        #validate if all the variables are defined, use the mandatory src field as a proxy
+        # also need to check that the tab is active
+
+        tabsList = []
+        if self.tracer_tab.isEnabled():
+            if self.tracer_tab.votemper.src != ""  and \
+               self.tracer_tab.vosaline.src != "" :
+                tabsList.extend([self.tracer_tab.votemper, self.tracer_tab.vosaline])
+            else:
+                QtGui.QMessageBox.information(self, unicode('Something is wrong').encode('utf-8'), unicode('Not all the variables under the tracer tab have been defined!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+
+
+        if self.ice_tab.isEnabled():
+            if self.ice_tab.ileadfra.src != ""  and \
+               self.ice_tab.iicethic.src != ""  and \
+               self.ice_tab.isnowthi.src != "" :
+                tabsList.extend([self.ice_tab.iicethic, self.ice_tab.ileadfra, self.ice_tab.isnowthi])
+            else:
+                QtGui.QMessageBox.information(self, unicode('Something is wrong').encode('utf-8'), unicode('Not all the variables under the ice tab have been defined!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+
+        if self.dynamic_tab.isEnabled():
+            if self.dynamic_tab.vozocrtx.src != ""  and \
+               self.dynamic_tab.vomecrty.src != ""  and \
+               self.dynamic_tab.sossheig.src != "" :
+                tabsList.extend([self.dynamic_tab.vozocrtx, self.dynamic_tab.vomecrty, self.dynamic_tab.sossheig])
+            else:
+                QtGui.QMessageBox.information(self, unicode('Something is wrong').encode('utf-8'), unicode('Not all the variables under the dynamics tab have been defined!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+
+        if self.grid_tab.isEnabled():
+            if self.grid_tab.gdept.src != ""    and \
+               self.grid_tab.gdepw.src != ""    and \
+               self.grid_tab.mbathy.src != ""   and \
+               self.grid_tab.e3t.src != ""      and \
+               self.grid_tab.e3u.src != ""      and \
+               self.grid_tab.e3v.src != "" :
+                tabsList.extend([self.grid_tab.gdept, self.grid_tab.gdepw, self.grid_tab.mbathy, self.grid_tab.e3t, self.grid_tab.e3u, self.grid_tab.e3v])
+            else:
+                QtGui.QMessageBox.information(self, unicode('Something is wrong').encode('utf-8'), unicode('Not all the variables under the grid tab have been defined!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+
+        self.generateNcML(tabsList) #go ahead and do it
+
+        QtGui.QMessageBox.information(self, unicode('Success.').encode('utf-8'), unicode('NcML file generated.').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+
+    '''
+    Function to generate the NcML text and write it to the user-defined output file
+    '''
+    def generateNcML(self, tabsList):
+        #first open the default base file
+        ns = '{http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2}'
+        self.tree = self._parseNcml()
+        self.root = self.tree.getroot()
+        #create a netcdf element for each tab variable
+        for tab in tabsList:
+            netcdfE = ET.Element(ns+unicode('netcdf').encode('utf-8')) #src directory is converted to the correct format when added
+            if str(tab.src).startswith("http:") or str(tab.src).startswith("https:"):
+                #Its url so use thredds crawler to get the urls
+                urls = self.url_trawler(tab.src,str(tab.regex))
+                aggE = ET.Element(ns+unicode('aggregation').encode('utf-8'), name=unicode(str(tab.name)).encode('utf-8'), type=unicode('joinExisting').encode('utf-8'), dimName=unicode('time_counter').encode('utf-8')) #tab.name already encoded                
+                for nc_url in urls:
+                    tcNetcdf = ET.Element(ns+unicode('netcdf').encode('utf-8'), location=unicode(str(nc_url)).encode('utf-8'))
+                    aggE.append(tcNetcdf)
+                netcdfE.append(aggE)
+            else:
+                scanE = ET.Element(ns+unicode('scan').encode('utf-8'), location=unicode(str(tab.src)).encode('utf-8'), regExp=unicode(str(tab.regex)).encode('utf-8'))
+                if tab.subdirs:
+                    scanE.set(unicode('subdirs').encode('utf-8'), unicode('true').encode('utf-8'))
+                aggE = ET.Element(ns+unicode('aggregation').encode('utf-8'), name=unicode(str(tab.name)).encode('utf-8'), type=unicode('joinExisting').encode('utf-8'), dimName=unicode('time_counter').encode('utf-8')) #tab.name already encoded
+                aggE.append(scanE)
+                netcdfE.append(aggE)
+            self.root[0].append(netcdfE)    #add the new netcdf element to the top aggregation 
+            
+            #deal with variable name change via a lookup table of canonical NEMO variable names
+            if tab.old_name is not None and tab.old_name != "":
+                vname = unicode('variable').encode('utf-8')
+                canonical_names = {'temperature'          : 'votemper',
+                                   'salinity'             : 'vosaline',
+                                   'ice_thickness'        : 'iicethic',
+                                   'leads_fraction'       : 'ileadfra',
+                                   'snow_thickness'       : 'isnowthi',
+                                   'zonal_velocity'       : 'vozocrtx',
+                                   'meridian_velocity'    : 'vomecrty',
+                                   'sea_surface_height'   : 'sossheig',
+                                   'depth_at_t_points'    : 'gdept',
+                                   'depth_at_w_points'    : 'gdepw',
+                                   'number_of_wet_levels' : 'mbathy',
+                                   'vertical_scale_factors_at_t_points' : 'e3t',
+                                   'vertical_scale_factors_at_u_points' : 'e3u',
+                                   'vertical_scale_factors_at_v_points' : 'e3v'}
+                new_name = canonical_names.get(str(tab.name))
+                #only rename when the source variable name differs from the canonical NEMO name
+                if new_name is not None and str(tab.old_name) != new_name:
+                    v = ET.Element(ns+vname, name=new_name, orgName=str(tab.old_name))
+                    self.root.append(v)
+                   
+        #write ncml to file
+        try:
+            self.indent(self.root, 0)   #24Aug15 format the xml for pretty printing
+            self.tree.write(self.filename, encoding='utf-8')
+        except IOError as e:
+            self.logger.error("I/O error({0}): {1}".format(e.errno, e.strerror))
+        except:
+            self.logger.error('Error generating ncml file')
+            raise
+             
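+    # For reference, each local-directory variable contributes NcML of roughly
+    # this shape (the values below are illustrative, not from a real run):
+    #   <netcdf>
+    #     <aggregation name="temperature" type="joinExisting" dimName="time_counter">
+    #       <scan location="/data/tracer" regExp=".*T\.nc$" subdirs="true"/>
+    #     </aggregation>
+    #   </netcdf>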
+    '''
+    Function to retrieve the NcML file template
+    '''
+    def _parseNcml(self):
+        try:
+            parser = ET.XMLParser(encoding="utf-8")
+            tree = ET.parse(self.baseFile, parser=parser)
+            return tree
+        except ET.ParseError as v:
+            row, column = v.position
+            print "error on row", row, "column", column, ":", v
+                
+    '''
+    Function to format xml.  Based on code provided by http://effbot.org/zone/element-lib
+    '''
+    def indent(self, elem, level=0):
+        i = "\n" + level*"  "
+        if len(elem):
+            if not elem.text or not elem.text.strip():
+                elem.text = i + "  "
+            if not elem.tail or not elem.tail.strip():
+                elem.tail = i
+            for elem in elem:
+                self.indent(elem, level+1)
+            if not elem.tail or not elem.tail.strip():
+                elem.tail = i
+        else:
+            if level and (not elem.tail or not elem.tail.strip()):
+                elem.tail = i     
+    
+    """
+        This method trawls through the url with a given expression and returns the
+        list of urls that match the expression
+    """
+    def url_trawler(self, url, expr):
+        if url.endswith(".xml"):
+            c = Crawl(url, select=[expr])
+        elif url.endswith("/"): # we'll try and add catalog.xml as the user may have just provided a directory
+            c = Crawl(url+"catalog.xml", select=[expr])
+        else:                   # we'll try and add catalog.xml as the user may have just provided a directory
+            c = Crawl(url+"/catalog.xml", select=[expr])
+        urls = [s.get("url") for d in c.datasets for s in d.services if s.get("service").lower()=="opendap"]
+        return urls
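+
+# Illustrative call (a sketch; the catalog URL and regex are hypothetical):
+#     urls = generator.url_trawler('http://example.org/thredds/catalog.xml', '.*\.nc$')
+# returns the OPeNDAP access URLs of every crawled dataset matching the regex.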
diff --git a/pynemo/gui/nemo_ncml_tab_widget.py b/pynemo/gui/nemo_ncml_tab_widget.py
new file mode 100644
index 0000000000000000000000000000000000000000..5860aff9dd0e133781acdfb3af483c8ea6a7fc5f
--- /dev/null
+++ b/pynemo/gui/nemo_ncml_tab_widget.py
@@ -0,0 +1,542 @@
+'''
+Created on 2 Jul 2015
+
+@author: Shirley Crompton, UK Science and Technology Facilities Council
+'''
+import logging
+import os
+from PyQt4 import QtGui
+from PyQt4 import QtCore
+from PyQt4.QtCore import pyqtSlot
+
+class Ncml_tab(QtGui.QWidget):
+    '''
+    tab contents to define child aggregation
+    '''
+    def __init__(self, tabName):
+        '''
+        Initialises the UI components
+        '''
+        super(Ncml_tab, self).__init__() 
+        self.logger = logging.getLogger(__name__)
+        self.var = tabName  # tabName is used to determine the set of netcdf variables to process
+        #print self.var
+        #self.value_dict = {} # to capture the user inputs
+    
+        # no params yet; could later allow the user to predefine an input ncml for editing
+        self.initUI()
+        
+        
+    def initUI(self):  
+        QtGui.QToolTip.setFont(QtGui.QFont('SansSerif', 11))
+        self.varStackedWidget = QtGui.QStackedWidget()                   
+        #variable chooser combobox
+        combo_vars = []
+        if(self.var == unicode("Tracer").encode('utf-8')):
+            combo_vars = [unicode('temperature').encode('utf-8'),unicode('salinity').encode('utf-8')] #votemper, vosaline
+            self.votemper = ncml_variable(unicode('temperature').encode('utf-8'),'votemper')
+            self.vosaline = ncml_variable(unicode('salinity').encode('utf-8'),'vosaline')
+            self.varStackedWidget.addWidget(self._addStackWidget("votemper"))
+            self.varStackedWidget.addWidget(self._addStackWidget("vosaline"))
+            #debug
+#            print 'Tracer has ' + str(self.varStackedWidget.count())
+        elif(self.var == unicode("Ice").encode('utf-8')):
+            combo_vars = [unicode('ice thickness').encode('utf-8'),unicode('leads fraction').encode('utf-8'),unicode('snow thickness').encode('utf-8')] #'iicethic,ileadfra,isnowthi
+            self.iicethic = ncml_variable(unicode('ice_thickness').encode('utf-8'),'iicethic')
+            self.ileadfra = ncml_variable(unicode('leads_fraction').encode('utf-8'),'ileadfra')
+            self.isnowthi = ncml_variable(unicode('snow_thickness').encode('utf-8'),'isnowthi')
+            self.varStackedWidget.addWidget(self._addStackWidget("iicethic"))
+            self.varStackedWidget.addWidget(self._addStackWidget("ileadfra"))
+            self.varStackedWidget.addWidget(self._addStackWidget("isnowthi"))
+#            print 'Ice has ' + str(self.varStackedWidget.count())
+        elif(self.var == unicode("Dynamics").encode('utf-8')):
+            combo_vars = [unicode('zonal velocity').encode('utf-8'), unicode('meridian velocity').encode('utf-8'), unicode('sea surface height').encode('utf-8')] #vozocrtx, vomecrty, sossheig
+            self.vozocrtx = ncml_variable(unicode('zonal_velocity').encode('utf-8'),'vozocrtx')
+            self.vomecrty = ncml_variable(unicode('meridian_velocity').encode('utf-8'),'vomecrty')
+            self.sossheig = ncml_variable(unicode('sea_surface_height').encode('utf-8'),'sossheig')
+            self.varStackedWidget.addWidget(self._addStackWidget("vozocrtx"))
+            self.varStackedWidget.addWidget(self._addStackWidget("vomecrty"))
+            self.varStackedWidget.addWidget(self._addStackWidget("sossheig"))
+#            print 'Dynamics has ' + str(self.varStackedWidget.count())
+        elif(self.var == unicode("Grid").encode('utf-8')):
+            combo_vars = [unicode('depth at T points').encode('utf-8'), 
+                          unicode('depth at W points').encode('utf-8'), 
+                          unicode('number of wet levels').encode('utf-8'), 
+                          unicode('vertical scale factor at T points').encode('utf-8'), 
+                          unicode('vertical scale factor at U points').encode('utf-8'), 
+                          unicode('vertical scale factor at V points').encode('utf-8')] #gdept,gdepw,mbathy
+            self.gdept = ncml_variable(unicode('depth_at_t_points').encode('utf-8'),'gdept')
+            self.gdepw = ncml_variable(unicode('depth_at_w_points').encode('utf-8'),'gdepw')
+            self.mbathy = ncml_variable(unicode('number_of_wet_levels').encode('utf-8'),'mbathy')
+            self.e3t = ncml_variable(unicode('vertical_scale_factors_at_t_points').encode('utf-8'),'e3t')
+            self.e3u = ncml_variable(unicode('vertical_scale_factors_at_u_points').encode('utf-8'),'e3u')
+            self.e3v = ncml_variable(unicode('vertical_scale_factors_at_v_points').encode('utf-8'),'e3v')
+            self.varStackedWidget.addWidget(self._addStackWidget("gdept"))
+            self.varStackedWidget.addWidget(self._addStackWidget("gdepw"))
+            self.varStackedWidget.addWidget(self._addStackWidget("mbathy"))
+            self.varStackedWidget.addWidget(self._addStackWidget("e3t"))
+            self.varStackedWidget.addWidget(self._addStackWidget("e3u"))
+            self.varStackedWidget.addWidget(self._addStackWidget("e3v"))
+#            print 'Grid has ' + str(self.varStackedWidget.count())
+        elif(self.var == "Ecosysem"):
+            vars = ['nitrate','silicate'] #nitrate, silicate
+        self.varStackedWidget.setCurrentIndex(0)  #we rely on the stacked tab index to be the same as the combo box 
+        #combo box     
+        self.var_combo = QtGui.QComboBox()
+        self.var_combo.addItems(combo_vars)
+        self.var_combo.setEditable(False)
+        self.var_combo.setCurrentIndex(0)
+        #the value, if not saved, is cached during the session; we can wait until the add button is pressed
+        self.var_combo.currentIndexChanged.connect(lambda var_name = self.var : self.src_combo_changed(var_name))
+        self.var_combo.currentIndexChanged.connect(self.setWidgetStack)
+        #label
+        var_label = QtGui.QLabel(unicode('Variable').encode('utf-8'))
+        #set layout
+        stacked_hBox = QtGui.QHBoxLayout()
+        stacked_hBox.setMargin(5)
+        stacked_hBox.setSpacing(50) # spacing between items
+        stacked_hBox.setAlignment(QtCore.Qt.AlignLeft)
+        stacked_hBox.addWidget(var_label)
+        stacked_hBox.addWidget(self.var_combo)
+        #
+        vBoxLayout = QtGui.QVBoxLayout()        
+        vBoxLayout.addLayout(stacked_hBox)
+        vBoxLayout.addWidget(self.varStackedWidget)
+        #
+        grp_box = QtGui.QGroupBox(None)
+        grp_box.setLayout(vBoxLayout)
+                        
+        '''
+        :TODO Need to add the override time gui widgets
+        '''
+        
+        
+        '''
+        button bar
+        '''
+        # reset button
+        reset_btn = QtGui.QPushButton(unicode('Reset').encode('utf-8'))
+        reset_btn.setToolTip(unicode('Reset fields to previously saved values').encode('utf-8'))
+        add_btn = QtGui.QPushButton(unicode('Add').encode('utf-8')) 
+        add_btn.setDefault(True)
+        add_btn.setToolTip(unicode('Add the current definition to the NcML').encode('utf-8'))
+        #connect up with events
+        reset_btn.clicked.connect(self.reset_tab)
+        add_btn.clicked.connect(self.add_tab)
+       
+        btn_hBox = QtGui.QHBoxLayout(None)
+        btn_hBox.setMargin(5)
+        btn_hBox.setSpacing(10)
+        btn_hBox.setAlignment(QtCore.Qt.AlignCenter)
+        btn_hBox.addWidget(reset_btn)
+        btn_hBox.addWidget(add_btn)
+        
+        #build the contents         
+        vbox = QtGui.QVBoxLayout(self)
+        vbox.setSpacing(10)
+        vbox.setContentsMargins(10, 10, 5, 5)
+        vbox.addWidget(grp_box)
+        vbox.addLayout(btn_hBox)
+    '''
+    create the stacked widget for each nemo variable
+    '''    
+    def _addStackWidget(self, old_name=""):
+        self.varWidget = QtGui.QWidget()
+        #self.varWidget.setObjectName(objName)
+        varLayout = QtGui.QGridLayout()
+        varLayout.setSpacing(20)
+               
+        #labels
+        src_label = QtGui.QLabel(unicode('Source directory*').encode('utf-8'))  
+        cbox_label = QtGui.QLabel(unicode('Includes subdirs').encode('utf-8'))
+        regex_label = QtGui.QLabel(unicode('Regular expression').encode('utf-8'))
+        old_name_label = QtGui.QLabel(unicode('Existing variable name*').encode('utf-8'))        
+        #input textboxs
+        self.varWidget.src_tedit = QtGui.QLineEdit()       # input widgets need to be attached to the stacked widget itself 
+        self.varWidget.src_tedit.setToolTip(unicode('either remote OPeNDAP server or local file absolute path').encode('utf-8'))
+        self.varWidget.src_tedit.returnPressed.connect(self.src_tedit_edited)
+        
+        
+        self.varWidget.cbox = QtGui.QCheckBox()
+        self.varWidget.cbox.setCheckable(True)
+        self.varWidget.cbox.setChecked(False)
+        self.varWidget.cbox.setToolTip(unicode('includes subdirectories').encode('utf-8'))
+        self.varWidget.regex_tedit = QtGui.QLineEdit()
+        self.varWidget.regex_tedit.setToolTip(unicode('see http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/ncml/AnnotatedSchema4.html#regexp').encode('utf-8'))                
+        self.varWidget.old_name_tedit = QtGui.QLineEdit()
+        self.varWidget.old_name_tedit.setToolTip(unicode('variable name in data file').encode('utf-8'))
+        self.varWidget.old_name_tedit.setText(old_name)
+        
+        varLayout.addWidget(src_label, 1, 0, 1, 1)
+        varLayout.addWidget(self.varWidget.src_tedit, 1, 1, 1, 3)
+        varLayout.addWidget(cbox_label, 2, 0, 1, 1)
+        varLayout.addWidget(self.varWidget.cbox, 2, 1, 1, 1)        
+        varLayout.addWidget(regex_label, 2, 2, 1, 1)
+        varLayout.addWidget(self.varWidget.regex_tedit, 2, 3, 1, 1)
+        varLayout.addWidget(old_name_label, 3, 0, 1, 1)
+        varLayout.addWidget(self.varWidget.old_name_tedit, 3, 1, 1, 3)
+        
+        self.varWidget.setLayout(varLayout)
+        return self.varWidget
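+    # Note: the returned widget carries the user-input fields (src_tedit, cbox,
+    # regex_tedit, old_name_tedit) as attributes; add_tab and reset_tab read
+    # them back via varStackedWidget.currentWidget().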
+    '''
+    synchronise stack widget display with combo box value changed callback
+    '''
+    @pyqtSlot()
+    def setWidgetStack(self):
+        self.varStackedWidget.setCurrentIndex(self.var_combo.currentIndex())
+    '''
+    variable combo box value changed callback
+    '''
+    @pyqtSlot()
+    def src_combo_changed(self, var_name):  
+        #not sure why the current text is prefixed by the index : eg 0temperature      
+#        print 'src_combo_value_changed to : ' + str(var_name) +  unicode(str(self.var_combo.currentText())).encode('utf_8')
+        pass
+        
+        
+    @pyqtSlot()
+    def src_tedit_edited(self):
+        src_tedit_input = self.varStackedWidget.currentWidget().src_tedit.text()
+#        print 'src_edit text edited : ', src_tedit_input
+        #validate the input now
+        if not str(src_tedit_input).startswith('http'):
+            if not os.path.isabs(src_tedit_input): #assumes local file
+                QtGui.QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('source directory must be an absolute path!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                self.varStackedWidget.currentWidget().src_tedit.clear()
+                return #stop here; the path cannot exist if it is not absolute
+            if not os.path.exists(src_tedit_input):
+                QtGui.QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('source directory does not exist!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                self.varStackedWidget.currentWidget().src_tedit.clear()
+    '''
+    reset button pushed callback.  The widgets are reset to default values
+    '''
+    @pyqtSlot()  
+    def reset_tab(self):
+        #current screen values are not saved until the add button is pressed;
+        #reset only resets the screen values, not the cached values
+        attr_map = {'temperature'         : 'votemper',
+                    'salinity'            : 'vosaline',
+                    'ice thickness'       : 'iicethic',
+                    'leads fraction'      : 'ileadfra',
+                    'snow thickness'      : 'isnowthi',
+                    'zonal velocity'      : 'vozocrtx',
+                    'meridian velocity'   : 'vomecrty',
+                    'sea surface height'  : 'sossheig',
+                    'depth at T points'   : 'gdept',
+                    'depth at W points'   : 'gdepw',
+                    'number of wet levels': 'mbathy',
+                    'vertical scale factor at T points' : 'e3t',
+                    'vertical scale factor at U points' : 'e3u',
+                    'vertical scale factor at V points' : 'e3v'}
+        current = str(self.var_combo.currentText())
+        if current in attr_map:
+            self.resetValues(getattr(self, attr_map[current]))
+            
+    '''
+    reset the stacked widget values
+    '''    
+    def resetValues(self, currentValues = None):
+        # 'in resetValues ....'
+        if currentValues is None:
+            #self.var_combo.setCurrentIndex(0)    #we don't reset this, as this is the key
+            self.varStackedWidget.currentWidget().src_tedit.clear()
+            self.varStackedWidget.currentWidget().regex_tedit.clear()
+            self.varStackedWidget.currentWidget().old_name_tedit.clear()
+            self.varStackedWidget.currentWidget().cbox.setChecked(False)
+        else:
+            #print 'name : ' + currentValues.name + ', src: ' + currentValues.src + ', regex: ' +  currentValues.regex + ', old_name: ' + currentValues.old_name
+            #self.var_combo.setCurrentIndex(0)
+            self.varStackedWidget.currentWidget().src_tedit.setText(currentValues.src)
+            self.varStackedWidget.currentWidget().regex_tedit.setText(currentValues.regex)
+            self.varStackedWidget.currentWidget().old_name_tedit.setText(currentValues.old_name)
+            self.varStackedWidget.currentWidget().cbox.setChecked(currentValues.subdirs)
+        
+    '''
+    add button pushed call back
+    '''
+    @pyqtSlot()  
+    def add_tab(self): 
+        #first validate the src tab is not null
+        if(self.varStackedWidget.currentWidget().src_tedit.text() is None or self.varStackedWidget.currentWidget().src_tedit.text() == '' or               
+           self.varStackedWidget.currentWidget().old_name_tedit.text() is None or self.varStackedWidget.currentWidget().old_name_tedit.text() == ''):
+                QtGui.QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('source directory and existing variable name cannot be blank!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+        else:
+            '''        
+            if not str(target).startsWith(unicode('http').encode('utf-8')):
+                if os.path.exists(os.path.normpath(target)) == False:
+                    QMessageBox.critical(self, unicode('Something is wrong').encode('utf-8'), unicode('source directory does not exist!').encode('utf-8'), QMessageBox.Ok, QMessageBox.Ok)
+                    return #breakout now 
+            '''
+            #print type(self.var) = str   
+            # to speed up data entry: on first entry of the src directory, populate the other
+            # variables' src fields as they are most likely to be in the same directory.
+
+
+            if(self.var == unicode("Tracer").encode('utf-8')):                
+                if (self.var_combo.currentText() == unicode("temperature").encode('utf-8')):            
+                    if(self._sameValues(self.votemper)):
+                        QtGui.QMessageBox.information(self, 'For information', 'No changes have been made!', QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:   
+                        self.votemper.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.votemper.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        self.votemper.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.votemper.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.votemper.regex = ''    #blank it over    
+                else: # can only be salinity
+                    if(self._sameValues(self.vosaline)):
+                        QtGui.QMessageBox.information(self, 'For information', 'No changes have been made!', QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.vosaline.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.vosaline.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        self.vosaline.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.vosaline.regex = self.varStackedWidget.currentWidget().regex_tedit.text()                            
+                        else:
+                            self.vosaline.regex = ''
+                if self.votemper.src == '':
+                    self.votemper.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(0).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.vosaline.src == '':
+                    self.vosaline.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(1).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+            elif(self.var == unicode('Ice').encode('utf-8')): #iicethic,ileadfra,isnowthi
+                if (self.var_combo.currentText() == unicode("ice thickness").encode('utf-8')):            
+                    if(self._sameValues(self.iicethic)):
+                        QtGui.QMessageBox.information(self, 'For information', 'No changes have been made!', QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.iicethic.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.iicethic.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.iicethic.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.iicethic.regex = self.varStackedWidget.currentWidget().regex_tedit.text() 
+                        else:
+                            self.iicethic.regex = ''                       
+                elif(self.var_combo.currentText() == unicode("leads fraction").encode('utf-8')): 
+                    if(self._sameValues(self.ileadfra)):
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.ileadfra.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.ileadfra.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.ileadfra.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.ileadfra.regex = self.varStackedWidget.currentWidget().regex_tedit.text()                            
+                        else:
+                            self.ileadfra.regex = ''
+                else:
+                    if(self._sameValues(self.isnowthi)): #snow thickness
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.isnowthi.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.isnowthi.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.isnowthi.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.isnowthi.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.isnowthi.regex = ''
+                if self.iicethic.src == '':
+                    self.iicethic.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(0).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.ileadfra.src == '':
+                    self.ileadfra.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(1).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.isnowthi.src == '':
+                    self.isnowthi.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(2).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+            elif(self.var == unicode("Dynamics").encode('utf-8')):
+                if (self.var_combo.currentText() == unicode("zonal velocity").encode('utf-8')):            
+                    if(self._sameValues(self.vozocrtx)):
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.vozocrtx.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.vozocrtx.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.vozocrtx.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.vozocrtx.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.vozocrtx.regex = ''
+                elif(self.var_combo.currentText() == unicode('meridian velocity').encode('utf-8')): 
+                    if(self._sameValues(self.vomecrty)):
+                        QtGui.QMessageBox.information(self, 'For information', 'No changes have been made!', QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else: 
+                        self.vomecrty.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.vomecrty.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.vomecrty.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.vomecrty.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.vomecrty.regex = ''
+                elif(self.var_combo.currentText() == unicode('sea surface height').encode('utf-8')):      
+                    if(self._sameValues(self.sossheig)): #sea surface height
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.sossheig.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.sossheig.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.sossheig.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.sossheig.regex = self.varStackedWidget.currentWidget().regex_tedit.text()  
+                        else:
+                            self.sossheig.regex = ''
+                if self.vozocrtx.src == '':
+                    self.vozocrtx.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(0).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.vomecrty.src == '':
+                    self.vomecrty.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(1).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.sossheig.src == '':
+                    self.sossheig.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(2).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+            elif(self.var == unicode("Grid").encode('utf-8')):
+                if (self.var_combo.currentText() == unicode("depth at T points").encode('utf-8')):            
+                    if(self._sameValues(self.gdept)):
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.gdept.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.gdept.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.gdept.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.gdept.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.gdept.regex = ''
+                elif(self.var_combo.currentText() == unicode('depth at W points').encode('utf-8')): 
+                    if(self._sameValues(self.gdepw)):
+                        QtGui.QMessageBox.information(self, 'For information', 'No changes have been made!', QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else: 
+                        self.gdepw.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.gdepw.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.gdepw.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.gdepw.regex = self.varStackedWidget.currentWidget().regex_tedit.text()
+                        else:
+                            self.gdepw.regex = ''
+                elif(self.var_combo.currentText() == unicode('number of wet levels').encode('utf-8')):      
+                    if(self._sameValues(self.mbathy)): #number of wet levels
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.mbathy.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.mbathy.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.mbathy.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.mbathy.regex = self.varStackedWidget.currentWidget().regex_tedit.text()  
+                        else:
+                            self.mbathy.regex = ''
+                elif(self.var_combo.currentText() == unicode('vertical scale factor at T points').encode('utf-8')):      
+                    if(self._sameValues(self.e3t)): #vertical scale factor at T points
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.e3t.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.e3t.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.e3t.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.e3t.regex = self.varStackedWidget.currentWidget().regex_tedit.text()  
+                        else:
+                            self.e3t.regex = ''
+                elif(self.var_combo.currentText() == unicode('vertical scale factor at U points').encode('utf-8')):      
+                    if(self._sameValues(self.e3u)): #vertical scale factor at U points
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.e3u.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.e3u.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.e3u.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.e3u.regex = self.varStackedWidget.currentWidget().regex_tedit.text()  
+                        else:
+                            self.e3u.regex = ''
+                elif(self.var_combo.currentText() == unicode('vertical scale factor at V points').encode('utf-8')):      
+                    if(self._sameValues(self.e3v)): #vertical scale factor at V points
+                        QtGui.QMessageBox.information(self, unicode('For information').encode('utf-8'), unicode('No changes have been made!').encode('utf-8'), QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
+                    else:
+                        self.e3v.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                        self.e3v.subdirs = self.varStackedWidget.currentWidget().cbox.isChecked()
+                        self.e3v.old_name = self.varStackedWidget.currentWidget().old_name_tedit.text()
+                        if(self.varStackedWidget.currentWidget().regex_tedit.text() is not None and self.varStackedWidget.currentWidget().regex_tedit.text() != ''):
+                            self.e3v.regex = self.varStackedWidget.currentWidget().regex_tedit.text()  
+                        else:
+                            self.e3v.regex = ''
+                if self.gdept.src == '':
+                    self.gdept.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(0).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.gdepw.src == '':
+                    self.gdepw.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(1).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.mbathy.src == '':
+                    self.mbathy.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(2).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.e3t.src == '':
+                    self.e3t.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(3).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.e3u.src == '':
+                    self.e3u.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(4).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+                if self.e3v.src == '':
+                    self.e3v.src = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+                    self.varStackedWidget.widget(5).src_tedit.setText(self.varStackedWidget.currentWidget().src_tedit.text())
+        
+    def _convertSrc(self, thepath):
+        '''
+        Convert the target folder into the NcML-required format.
+        '''
+        fpath = str(thepath.trimmed()) # trim whitespace at both ends of the QString, then cast to str
+        # make sure that it is an absolute path, prefixed with file:/ and using / as the file separator
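+        # Illustrative conversions (hypothetical inputs):
+        #   'data/grids'       -> 'file:/<absolute path>/data/grids'
+        #   'file:/C:\\temp'   -> 'file:/C:/temp'
+        #   'http://host/dods' -> left unchanged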
+        if fpath.startswith('http:/'):
+            target = fpath # do nothing 
+        elif fpath.startswith('file:/'):
+            temp = os.path.normpath(fpath[6:])
+            target = unicode('file:/' + str(os.path.abspath(temp)).replace("\\", "/")).encode('utf-8')
+        else: # a local path not prefixed by file:/ - still resolve to an absolute path
+            target = unicode('file:/' + str(os.path.abspath(fpath)).replace("\\", "/")).encode('utf-8')
+        
+ #       if not str(target).endswith('/'):
+ #           target = target + '/'
+            
+        return target    
+    
+    def _sameValues(self, ncml_var):
+        '''
+        Compare the GUI cached values with the stored values.
+        '''
+        target = self._convertSrc(self.varStackedWidget.currentWidget().src_tedit.text())
+        
+        return (target == ncml_var.src and
+                self.varStackedWidget.currentWidget().old_name_tedit.text() is not None and
+                self.varStackedWidget.currentWidget().old_name_tedit.text() == ncml_var.old_name and
+                self.varStackedWidget.currentWidget().regex_tedit.text() is not None and
+                self.varStackedWidget.currentWidget().regex_tedit.text() == ncml_var.regex and
+                self.varStackedWidget.currentWidget().cbox.isChecked() == ncml_var.subdirs)
+
+class ncml_variable(object):
+    '''
+    convenient class to hold the values for a ncml variable
+    '''
+    def __init__(self, varName, old_name=''):
+        #print 'created ncml_variable object : ' + varName
+        self.name = varName
+        self.src = ''
+        self.regex = ''
+        self.old_name = old_name
+        self.subdirs = False
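+
+    # Example (illustrative): v = ncml_variable('votemper', old_name='temp')
+    # leaves v.src and v.regex empty and v.subdirs False until the GUI fills them in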
diff --git a/pynemo/gui/plus.png b/pynemo/gui/plus.png
new file mode 100644
index 0000000000000000000000000000000000000000..bfcf4cf1b211f54108e115763d434eaef0c77295
Binary files /dev/null and b/pynemo/gui/plus.png differ
diff --git a/pynemo/gui/rectangle.png b/pynemo/gui/rectangle.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5ee7a4b38a54e70fdce4cf465ccf5f058fbeb9e
Binary files /dev/null and b/pynemo/gui/rectangle.png differ
diff --git a/pynemo/gui/reset.png b/pynemo/gui/reset.png
new file mode 100644
index 0000000000000000000000000000000000000000..5e25a07c0f2ec05e482d194f014a6d5fa1c40227
Binary files /dev/null and b/pynemo/gui/reset.png differ
diff --git a/pynemo/gui/selection_editor.py b/pynemo/gui/selection_editor.py
new file mode 100644
index 0000000000000000000000000000000000000000..527193eb34b1a3f7bb23ce8fff511d1150eabc4e
--- /dev/null
+++ b/pynemo/gui/selection_editor.py
@@ -0,0 +1,293 @@
+'''
+This code is based on the matplotlib polygon interaction example.
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import numpy as np
+from matplotlib.lines import Line2D
+from matplotlib.patches import Polygon
+from matplotlib.artist import Artist
+from matplotlib.mlab import dist_point_to_segment
+from matplotlib.widgets import RectangleSelector
+
+
+polygon_alpha = 0.2
+class PolygonEditor(object):
+    '''
+    This edits the polygons drawn on the map
+    '''
+
+    show_verts = True
+    epsilon = 3 # max distance (in data coordinates) for a click to pick a vertex
+    def __init__(self, axis, canvas):
+        '''
+        initialises the editable polygon object
+        '''
+        self.axis = axis
+        self.polygon = None
+        self.line = None
+        self.xy_values = np.array([])
+        self._ind = None
+        self.background = None #background copying
+
+        self._callback_ids = list()
+        self._callback_ids.append(canvas.mpl_connect('draw_event',
+                                                     self.draw_callback))
+        self._callback_ids.append(canvas.mpl_connect('button_press_event',
+                                                     self.button_press_callback))
+        self._callback_ids.append(canvas.mpl_connect('button_release_event',
+                                                     self.button_release_callback))
+        self._callback_ids.append(canvas.mpl_connect('motion_notify_event',
+                                                     self.motion_notify_callback))
+        self.canvas = canvas
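+
+    # Typical usage (sketch): editor = PolygonEditor(axes, figure.canvas); each
+    # map click then calls editor.add_point(x, y) to grow the selection polygon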
+
+    def add_point(self, xval, yval):
+        """ adds an new point to the selection list and redraws the selection tool"""
+        if self.xy_values.shape[0] == 0:
+            self.xy_values = np.array([(xval, yval), ])
+        else:
+            self.xy_values = np.concatenate((self.xy_values, [[xval, yval], ]), axis=0)
+        self.refresh()
+
+    def refresh(self):
+        """ This method looks at the list of points available and depending on the number of
+        points in the list creates a point or line or a polygon and draws them"""
+        if self.xy_values.shape[0] == 0: # No points available
+            self.reset_line()
+            self.reset_polygon()
+        elif self.xy_values.shape[0] <= 2: # point or line for 1 or 2 points
+            xval, yval = zip(*self.xy_values)
+            if self.line is None:
+                self.line = Line2D(xval, yval, marker='o', markerfacecolor='r', animated=True)
+                self.axis.add_line(self.line)
+            else:
+                self.line.set_data(zip(*self.xy_values))
+            self.reset_polygon()
+        else: # more than 2 points if polygon is not created then creates one and draws
+            if self.polygon is None:
+                self.polygon = Polygon(self.xy_values, animated=True, alpha=polygon_alpha)
+                self.polygon.add_callback(self.polygon_changed)
+                self.axis.add_patch(self.polygon)
+            else:
+                self.polygon.xy = self.xy_values
+            self.line.set_data(zip(*self.xy_values))
+        self.draw_callback(None)
+        self.canvas.draw()
+
+    def reset_line(self):
+        """ resets the line i.e removes the line from the axes and resets to None """
+        if self.line != None:
+            self.line.remove()
+            self.line = None
+
+    def reset_polygon(self):
+        """ resets the polygon ie. removes the polygon from the axis and reset to None """
+        if self.polygon != None:
+            self.polygon.remove()
+            self.polygon = None
+
+    def draw_line(self):
+        """ draws the line if available """
+        if self.line is not None:
+            self.axis.draw_artist(self.line)
+
+    def draw_polygon(self):
+        """ draws polygon if available"""
+        if self.polygon is not None:
+            self.axis.draw_artist(self.polygon)
+
+    def disable(self):
+        """ disables the events and the selection """
+        self.set_visibility(False)
+        for callback_id in self._callback_ids:
+            self.canvas.mpl_disconnect(callback_id)
+        self.canvas = None
+
+    def enable(self):
+        """ enables the selection """
+        self.set_visibility(True)
+
+    def set_visibility(self, status):
+        """ sets the visibility of the selection object """
+        if self.polygon is not None:
+            self.polygon.set_visible(status)
+        if self.line is not None:
+            self.line.set_visible(status)
+        self.canvas.blit(self.axis.bbox)
+
+    def draw_callback(self, dummy_event):
+        """ this method draws the selection object """
+        self.background = self.canvas.copy_from_bbox(self.axis.bbox)
+        self.draw_polygon()
+        self.draw_line()
+        self.canvas.blit(self.axis.bbox)
+
+    def polygon_changed(self, poly):
+        """ redraws the polygon """
+        vis = self.line.get_visible()
+        Artist.update_from(self.line, poly)
+        self.line.set_visible(vis)
+
+    def get_index_under_point(self, event):
+        """ gets the index of the point under the event (mouse click) """
+        if self.xy_values.shape[0] == 0:
+            return None
+        xy_values = self.xy_values
+        xt_values, yt_values = xy_values[:, 0], xy_values[:, 1]
+        dist = np.sqrt((xt_values-event.xdata)**2 + (yt_values-event.ydata)**2)
+        indseq = np.nonzero(np.equal(dist, np.amin(dist)))[0]
+        ind = indseq[0]
+        if dist[ind] >= self.epsilon:
+            ind = None
+        return ind
+
+    def button_press_callback(self, event):
+        """ callback to mouse press event """
+        if not self.show_verts:
+            return
+        if event.inaxes is None:
+            return
+        if event.button != 1:
+            return
+        self._ind = self.get_index_under_point(event)
+        if self._ind is None:
+            self.insert_datapoint(event)
+
+    def button_release_callback(self, event):
+        """ callback to mouse release event """
+        if not self.show_verts:
+            return
+        if event.button == 2:
+            self.insert_datapoint(event)
+            return
+        if event.button == 3:
+            self.delete_datapoint(event)
+            return
+        if event.button != 1:
+            return
+        self._ind = None
+
+    def insert_datapoint(self, event):
+        """ inserts a new data point between the segment that is closest in polygon """
+        if self.xy_values.shape[0] <= 2:
+            self.add_point(event.xdata, event.ydata)
+        else:
+            event_point = event.xdata, event.ydata
+            prev_d = dist_point_to_segment(event_point, self.xy_values[0], self.xy_values[-1])
+            prev_i = len(self.xy_values)
+            for i in range(len(self.xy_values)-1):
+                seg_start = self.xy_values[i]
+                seg_end = self.xy_values[i+1]
+                dist_p_s = dist_point_to_segment(event_point, seg_start, seg_end)
+                if dist_p_s < prev_d:
+                    prev_i = i
+                    prev_d = dist_p_s
+            self.xy_values = np.array(list(self.xy_values[:prev_i+1]) +
+                                      [(event.xdata, event.ydata)] +
+                                      list(self.xy_values[prev_i+1:]))
+            self.refresh()
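+
+    # e.g. with vertices [(0, 0), (1, 0), (1, 1), (0, 1)], a middle-click at
+    # (0.5, -0.1) is nearest the (0, 0)-(1, 0) edge, so the new vertex is
+    # inserted between those two points (illustrative values)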
+
+    def delete_datapoint(self, event):
+        """ deletes the data point under the point in event """
+        ind = self.get_index_under_point(event)
+        if ind is not None:
+            self.xy_values = np.array([tup for i, tup in enumerate(self.xy_values) if i != ind])
+            self.refresh()
+        self.canvas.draw()
+
+    def motion_notify_callback(self, event):
+        """ callback for the mouse motion with button press.
+        this is to move the edge points of the polygon"""
+        if not self.show_verts:
+            return
+        if self._ind is None:
+            return
+        if event.inaxes is None:
+            return
+        if event.button != 1:
+            return
+        xval, yval = event.xdata, event.ydata
+
+        self.xy_values[self._ind] = xval, yval
+        self.refresh()
+
+        self.canvas.restore_region(self.background)
+        self.axis.draw_artist(self.polygon)
+        self.axis.draw_artist(self.line)
+        self.canvas.blit(self.axis.bbox)
+
+    def reset(self):
+        """ resets the points in the selection deleting the line and polygon"""
+        self.xy_values = np.array([])
+        self.reset_line()
+        self.reset_polygon()
+
+class BoxEditor(object):
+    """ Box editor is to select area using rubber band sort of drawing rectangle.
+    it uses matplotlib RectangleSelector under the hood """
+    polygon = None
+    def __init__(self, axes, canvas):
+        """ initialises class and creates a rectangle selector """
+        self.axes = axes
+        self.canvas = canvas
+        self.rectangle_selector = RectangleSelector(axes, self.line_select_callback, drawtype='box',
+                                                    useblit=True, button=[1,],
+                                                    minspanx=5, minspany=5,
+                                                    spancoords='pixels')
+
+    def line_select_callback(self, eclick, erelease):
+        """ callback to the rectangleselector """
+        x1_val, y1_val = eclick.xdata, eclick.ydata
+        x2_val, y2_val = erelease.xdata, erelease.ydata
+        xy_values = np.array([[x1_val, y1_val, ],
+                              [x1_val, y2_val, ],
+                              [x2_val, y2_val, ],
+                              [x2_val, y1_val, ], ])
+        self.reset_polygon()
+        self.polygon = Polygon(xy_values, animated=False, alpha=polygon_alpha)
+        self.axes.add_patch(self.polygon)
+        self.canvas.draw()
+
+    def enable(self):
+        """ enable the box selector """
+        self.rectangle_selector.set_active(True)
+
+    def disable(self):
+        """ disables or removes the box selector """
+        self.reset_polygon()
+        self.rectangle_selector.set_active(False)
+        self.canvas.draw()
+
+    def reset_polygon(self):
+        """ resets rectangle polygon """
+        if self.polygon is not None:
+            self.polygon.remove()
+            self.polygon = None
+
+    def reset(self):
+        """ reset the Box selector """
+        self.reset_polygon()
+
+# if __name__ == '__main__':
+#     import matplotlib.pyplot as plt
+#     from matplotlib.patches import Polygon
+#
+#     theta = np.arange(0, 2*np.pi, 0.3)
+#     r = 1.5
+#
+#     xs = r*np.cos(theta)
+#     ys = r*np.sin(theta)
+#
+# #    poly = Polygon(list(zip(xs,ys)), animated=True)
+#     poly = Polygon(list([(0,0)]), animated=True)
+#
+#     fig, ax = plt.subplots()
+#     ax.add_patch(poly)
+#     p = PolygonEditor(ax,poly)
+#
+#     ax.set_title('Click and drag a point to move it')
+#     ax.set_xlim((-2,2))
+#     ax.set_ylim((-2,2))
+#     plt.show()
diff --git a/pynemo/gui/shelf_break.png b/pynemo/gui/shelf_break.png
new file mode 100644
index 0000000000000000000000000000000000000000..40263862b740ecd6b3521ee9eb31d36b6e7b7c0a
Binary files /dev/null and b/pynemo/gui/shelf_break.png differ
diff --git a/pynemo/nemo_bdy_dst_coord.py b/pynemo/nemo_bdy_dst_coord.py
new file mode 100644
index 0000000000000000000000000000000000000000..03591124e4864b1d32830323968a6eda277a44a2
--- /dev/null
+++ b/pynemo/nemo_bdy_dst_coord.py
@@ -0,0 +1,8 @@
+
+class DstCoord:
+    """
+    This object is initialised almost empty; data are bound to it externally.
+    Equivalent to the Matlab dst_coord.
+    """
+    def __init__(self):
+        self.bdy_i = None
diff --git a/pynemo/nemo_bdy_extr_tm3.py b/pynemo/nemo_bdy_extr_tm3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3711cfc4677caca31f2a0dd31ae8f61036ce3e3c
--- /dev/null
+++ b/pynemo/nemo_bdy_extr_tm3.py
@@ -0,0 +1,921 @@
+# ===================================================================
+# The contents of this file are dedicated to the public domain.  To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+'''
+Created on Wed Sep 12 08:02:46 2012
+
+This module extracts the data from the source grid and performs
+the interpolation onto the destination grid.
+
+@author James Harle
+@author John Kazimierz Farey
+@author: Mr. Srikanth Nagella
+$Last commit on:$
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+
+# External Imports
+import copy
+import logging
+import numpy as np
+import scipy.spatial as sp
+from calendar import monthrange, isleap
+from scipy.interpolate import interp1d
+from netcdftime import datetime, utime
+from pynemo import nemo_bdy_ncgen as ncgen
+from pynemo import nemo_bdy_ncpop as ncpop
+
+# Local Imports
+import nemo_bdy_grid_angle as ga
+from pynemo.reader.factory import GetFile
+from pynemo.utils.nemo_bdy_lib import rot_rep, sub2ind
+
+#TODO: Convert the 'F' ordering to 'C' to improve efficiency 
+class Extract:
+
+    def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
+        """ 
+        Initialises the Extract object.
+
+        Parent grid to child grid weights are defined along with rotation
+        weightings for vector quantities.
+
+        Args:
+            setup           (list) : settings for bdy
+            SourceCoord     (obj)  : source grid information
+            DstCoord        (obj)  : destination grid information
+            Grid            (dict) : containing grid type 't', 'u', 'v'
+                                     and source time
+            var_nam         (list) : netcdf file variable names (str)
+            grd             (str)  : grid type 't', 'u' or 'v'
+            pair            (str)  : vector pair identifier (e.g. 'uv')
+        
+        Returns:
+        """
+
+        self.logger = logging.getLogger(__name__)
+        self.g_type = grd
+        self.settings = setup
+        self.key_vec = False
+        
+        # TODO: Why are we deepcopying the coordinates???
+        
+        SC = copy.deepcopy(SourceCoord)
+        DC = copy.deepcopy(DstCoord)
+        bdy_r = copy.deepcopy(Grid[grd].bdy_r)
+        
+        # Extract time and variable information
+        
+        sc_time = Grid[grd].source_time
+        self.var_nam = var_nam
+        self.logger.info('variables to extract: %s', var_nam)
+        sc_z = SC.zt[:]
+        self.logger.info('source depth levels: %s', sc_z)
+        sc_z_len = len(sc_z)
+        
+        
+        self.jpj, self.jpi = DC.lonlat[grd]['lon'].shape
+        self.jpk = DC.depths[grd]['bdy_z'].shape[0]
+        # Set some constants
+        
+        # Make function of dst grid resolution (used in 1-2-1 weighting)
+        # if the weighting can only find one additional point this implies an
+        # end point, so fill the third point with itself so as not to bias too much
+        
+        fr = 0.1
+
+        # Set up any rotation that is required
+        
+        if pair == 'uv':
+            if grd == 'u':
+                self.rot_dir = 'i'
+                self.key_vec = True
+                self.fnames_2 = Grid['v'].source_time
+            elif grd == 'v':
+                self.rot_dir = 'j'
+                self.key_vec = True
+                self.fnames_2 = Grid['v'].source_time
+            else:
+                raise ValueError('Invalid rotation grid type: %s' %grd)
+
+        
+        
+        dst_lon = DC.bdy_lonlat[self.g_type]['lon']
+        dst_lat = DC.bdy_lonlat[self.g_type]['lat']
+        try:
+            dst_dep = DC.depths[self.g_type]['bdy_z']
+        except KeyError:
+            dst_dep = np.zeros([1])
+        self.isslab = len(dst_dep) == 1
+        if dst_dep.size == len(dst_dep):
+            dst_dep = np.ones([1, len(dst_lon)])
+
+        # ??? Should this be read from settings?
+        wei_121 = np.array([0.5, 0.25, 0.25])
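+        # i.e. each boundary point is later smoothed as 0.5*itself plus 0.25*each
+        # of its two nearest neighbours along the boundary (1-2-1 filter)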
+
+        SC.lon = SC.lon.squeeze()
+        SC.lat = SC.lat.squeeze()
+
+        
+
+        # Check that we're only dealing with one pair of vectors
+
+        num_bdy = len(dst_lon)
+        self.nvar = len(self.var_nam)
+        
+        
+        if self.key_vec:
+            self.nvar = self.nvar / 2
+            if self.nvar != 1:
+                self.logger.error('Code not written yet to handle more than '
+                                  'one pair of rotated vectors')
+
+        self.logger.info('Extract __init__: variables to process')
+        self.logger.info('nvar: %s', self.nvar)
+        self.logger.info('key vec: %s', self.key_vec)
+
+        # Find subset of source data set required to produce bdy points
+
+        ind_e  = SC.lon < np.amax(dst_lon); ind_w  = SC.lon > np.amin(dst_lon)
+        ind_ew = np.logical_and(ind_e, ind_w)
+        ind_s  = SC.lat > np.amin(dst_lat); ind_n  = SC.lat < np.amax(dst_lat)
+        ind_sn = np.logical_and(ind_s, ind_n)
+
+        ind    = np.where(np.logical_and(ind_ew, ind_sn) != 0)
+        ind_s  = np.argsort(ind[1])
+
+        sub_j  = ind[0][ind_s]
+        sub_i  = ind[1][ind_s]
+        
+        # Find I/J range
+        
+        imin = np.maximum(np.amin(sub_i) - 2, 0)
+        imax = np.minimum(np.amax(sub_i) + 2, len(SC.lon[0, :]) - 1) + 1
+        jmin = np.maximum(np.amin(sub_j) - 2, 0)
+        jmax = np.minimum(np.amax(sub_j) + 2, len(SC.lon[:, 0]) - 1) + 1
+
+        # Summarise subset region
+        
+        self.logger.info('Extract __init__: subset region limits')
+        self.logger.info(' \n imin: %d\n imax: %d\n jmin: %d\n jmax: %d\n', 
+                          imin, imax, jmin, jmax)
+        
+        # Reduce the source coordinates to the sub region identified
+        
+        SC.lon = SC.lon[jmin:jmax, imin:imax]
+        SC.lat = SC.lat[jmin:jmax, imin:imax]
+
+        # Initialise gsin* and gcos* for rotation of vectors
+        
+        if self.key_vec:
+             
+            bdy_ind = Grid[grd].bdy_i
+            
+            maxI = DC.lonlat['t']['lon'].shape[1]
+            maxJ = DC.lonlat['t']['lon'].shape[0]
+            dst_gcos = np.ones([maxJ, maxI])
+            dst_gsin = np.zeros([maxJ, maxI])
+            
+            # TODO: allow B-Grid Extraction
+            
+            # Extract the source rotation angles on the T-Points as the C-Grid 
+            # U/V points naturally average onto these
+            
+            src_ga = ga.GridAngle(self.settings['src_hgr'], imin,
+                                                         imax, jmin, jmax, 't')
+            
+            # Extract the rotation angles for the bdy velocities points
+            
+            dst_ga = ga.GridAngle(self.settings['dst_hgr'], 1,
+                                                   maxI, 1, maxJ, grd)
+
+            self.gcos = src_ga.cosval
+            self.gsin = src_ga.sinval
+            dst_gcos[1:, 1:] = dst_ga.cosval
+            dst_gsin[1:, 1:] = dst_ga.sinval
+            
+            # Retain only boundary points rotation information
+            
+            tmp_gcos = np.zeros((1, bdy_ind.shape[0]))
+            tmp_gsin = np.zeros((1, bdy_ind.shape[0]))
+            
+            # TODO: can this be converted to an ind op rather than a loop?
+            
+            for p in range(bdy_ind.shape[0]):
+                tmp_gcos[:, p] = dst_gcos[bdy_ind[p, 1], bdy_ind[p, 0]]
+                tmp_gsin[:, p] = dst_gsin[bdy_ind[p, 1], bdy_ind[p, 0]]
+
+            self.dst_gcos = np.tile(tmp_gcos, (sc_z_len,1))
+            self.dst_gsin = np.tile(tmp_gsin, (sc_z_len,1))
+            
+            self.logger.info('dst_gcos shape: %s', self.dst_gcos.shape)
+
+
+        # Determine size of source data subset
+        dst_len_z = len(dst_dep[:, 0])
+
+        source_dims = SC.lon.shape
+
+        # Find nearest neighbour on the source grid to each dst bdy point
+        # ANN (approximate nearest neighbour) query substitute
+        source_tree = None
+        try:
+            source_tree = sp.cKDTree(zip(SC.lon.ravel(order='F'),
+                                     SC.lat.ravel(order='F')), balanced_tree=False,compact_nodes=False)
+        except TypeError: #added this fix to make it compatible with scipy 0.16.0
+            source_tree = sp.cKDTree(zip(SC.lon.ravel(order='F'),
+                                     SC.lat.ravel(order='F')))            
+        dst_pts = zip(dst_lon[:].ravel(order='F'), dst_lat[:].ravel(order='F'))
+        nn_dist, nn_id = source_tree.query(dst_pts, k=1)
+
+        # Find surrounding points
+        j_sp, i_sp = np.unravel_index(nn_id, source_dims, order='F')
+        j_sp = np.vstack((j_sp, j_sp + 1, j_sp - 1))
+        j_sp = np.vstack((j_sp, j_sp, j_sp))
+        i_sp = np.vstack((i_sp, i_sp, i_sp))
+        i_sp = np.vstack((i_sp, i_sp + 1, i_sp - 1))
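+        # j_sp/i_sp now hold, for each nearest neighbour, the full 3x3 stencil:
+        # all nine (j+dj, i+di) combinations with dj, di in {0, +1, -1}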
+
+        # Index out of bounds error check not implemented
+
+        # Determine 9 nearest neighbours based on distance
+        ind = sub2ind(source_dims, i_sp, j_sp)
+        ind_rv = np.ravel(ind, order='F')
+        sc_lon_rv = np.ravel(SC.lon, order='F')
+        sc_lat_rv = np.ravel(SC.lat, order='F')
+        sc_lon_ind = sc_lon_rv[ind_rv]
+
+        diff_lon = sc_lon_ind - np.repeat(dst_lon, 9).T
+        diff_lon = diff_lon.reshape(ind.shape, order='F')
+        out = np.abs(diff_lon) > 180
+        diff_lon[out] = -np.sign(diff_lon[out]) * (360 - np.abs(diff_lon[out]))
+
+        dst_lat_rep = np.repeat(dst_lat.T, 9)
+        diff_lon_rv = np.ravel(diff_lon, order='F')
+        dist_merid = diff_lon_rv * np.cos(dst_lat_rep * np.pi / 180)
+        dist_zonal = sc_lat_rv[ind_rv] - dst_lat_rep
+
+        dist_tot = np.power((np.power(dist_merid, 2) +
+                             np.power(dist_zonal, 2)), 0.5)
+        dist_tot = dist_tot.reshape(ind.shape, order='F').T
+        # Get sort inds, and sort
+        dist_ind = np.argsort(dist_tot, axis=1, kind='mergesort')
+        dist_tot = dist_tot[np.arange(dist_tot.shape[0])[:, None], dist_ind]
+
+        # Shuffle ind to reflect ascending dist of source and dst points
+        ind = ind.T
+        for p in range(ind.shape[0]):
+            ind[p, :] = ind[p, dist_ind[p, :]]
+
+        if self.key_vec:
+            self.gcos = self.gcos.flatten(1)[ind].reshape(ind.shape, order='F')
+            self.gsin = self.gsin.flatten(1)[ind].reshape(ind.shape, order='F')
+
+        sc_ind = {}
+        sc_ind['ind'] = ind
+        sc_ind['imin'], sc_ind['imax'] = imin, imax
+        sc_ind['jmin'], sc_ind['jmax'] = jmin, jmax
+
+        # Fig not implemented
+        #Sri TODO::: key_vec compare to assign gcos and gsin
+        # Determine 1-2-1 filter indices
+        id_121 = np.zeros((num_bdy, 3), dtype=np.int64)
+        for r in range(int(np.amax(bdy_r))+1):         
+            r_id = bdy_r != r
+            rr_id = bdy_r == r
+            tmp_lon = dst_lon.copy()
+            tmp_lon[r_id] = -9999
+            tmp_lat = dst_lat.copy()
+            tmp_lat[r_id] = -9999
+            source_tree = None
+            try:
+                source_tree = sp.cKDTree(zip(tmp_lon.ravel(order='F'),
+                                         tmp_lat.ravel(order='F')), balanced_tree=False,compact_nodes=False)
+            except TypeError: #fix for scipy 0.16.0
+                source_tree = sp.cKDTree(zip(tmp_lon.ravel(order='F'),
+                                         tmp_lat.ravel(order='F')))
+                
+            dst_pts = zip(dst_lon[rr_id].ravel(order='F'),
+                          dst_lat[rr_id].ravel(order='F'))
+            junk, an_id = source_tree.query(dst_pts, k=3,
+                                            distance_upper_bound=fr)
+            id_121[rr_id, :] = an_id
+#            id_121[id_121 == len(dst_lon)] = 0
+
+        reptile = np.tile(id_121[:, 0], 3).reshape(id_121.shape, order='F')
+        tmp_reptile = reptile * (id_121 == len(dst_lon))
+        id_121[id_121 == len(dst_lon)] = 0
+        tmp_reptile[tmp_reptile == len(dst_lon)] = 0
+        id_121 = id_121+tmp_reptile
+#        id_121 = id_121 + reptile * (id_121 == len(dst_lon))
+
+        rep_dims = (id_121.shape[0], id_121.shape[1], sc_z_len)
+        # These tran/tiles work like matlab. Tested with same Data.
+        id_121 = id_121.repeat(sc_z_len).reshape(rep_dims).transpose(2, 0, 1)
+        reptile = np.arange(sc_z_len).repeat(num_bdy).reshape(sc_z_len, 
+                                                              num_bdy)
+        reptile = reptile.repeat(3).reshape(num_bdy, 3, sc_z_len, 
+                                            order='F').transpose(2, 0, 1)
+
+        id_121 = sub2ind((sc_z_len, num_bdy), id_121, reptile)
+
+        tmp_filt = wei_121.repeat(num_bdy).reshape(num_bdy, len(wei_121),
+                                                   order='F')
+        tmp_filt = tmp_filt.repeat(sc_z_len).reshape(num_bdy, len(wei_121),
+                                                     sc_z_len).transpose(2, 0, 1)
+
+        # Fig not implemented
+
+        if self.isslab != 1: # TODO or no vertical interpolation required
+            
+            # Determine vertical weights for the linear interpolation 
+            # onto Dst grid
+            # Allocate vertical index array
+            dst_dep_rv = dst_dep.ravel(order='F')
+            z_ind = np.zeros((num_bdy * dst_len_z, 2), dtype=np.int64)
+            source_tree = None
+            try:
+                source_tree = sp.cKDTree(zip(sc_z.ravel(order='F')), balanced_tree=False,compact_nodes=False)
+            except TypeError: #fix for scipy 0.16.0
+                source_tree = sp.cKDTree(zip(sc_z.ravel(order='F')))
+
+            junk, nn_id = source_tree.query(zip(dst_dep_rv), k=1)
+
+            # WORKAROUND: the tree query returns out of range val when
+            # dst_dep point is NaN, causing ref problems later.
+            nn_id[nn_id == sc_z_len] = sc_z_len-1
+            
+            # Find next adjacent point in the vertical
+            z_ind[:, 0] = nn_id
+            z_ind[sc_z[nn_id] > dst_dep_rv[:], 1] = nn_id[sc_z[nn_id] >
+                                                          dst_dep_rv[:]] - 1
+            z_ind[sc_z[nn_id] <= dst_dep_rv[:], 1] = nn_id[sc_z[nn_id] <=
+                                                           dst_dep_rv[:]] + 1
+            # Adjust out of range values
+            z_ind[z_ind == -1] = 0
+            z_ind[z_ind == sc_z_len] = sc_z_len - 1
+
+            # Create weightings array
+            z_dist = np.abs(sc_z[z_ind] - dst_dep.T.repeat(2).reshape(len(dst_dep_rv), 2))
+            rat = np.sum(z_dist, axis=1)
+            z_dist = 1 - (z_dist / rat.repeat(2).reshape(len(rat), 2))
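+            # e.g. a dst depth of 15 m between source levels at 10 m and 20 m gives
+            # weights (0.5, 0.5); at 12.5 m they become (0.75, 0.25) - linear in the
+            # distance to each bounding level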
+
+            # Update z_ind for the dst array dims and vector indexing
+            # Replicating this part of matlab is difficult without causing
+            # a Memory Error. This workaround may be +/- brilliant
+            # In theory it maximises memory efficiency
+            z_ind[:, :] += (np.arange(0, (num_bdy) * sc_z_len, sc_z_len)
+                           [np.arange(num_bdy).repeat(2*dst_len_z)].reshape(z_ind.shape))
+        else:
+            z_ind = np.zeros([1,1])
+            z_dist = np.zeros([1,1])
+        # End self.isslab
+        
+        # Set instance attributes
+        self.first = True
+        self.nav_lon = DC.lonlat[grd]['lon']
+        self.nav_lat = DC.lonlat[grd]['lat']
+        self.z_ind = z_ind
+        self.z_dist = z_dist
+        self.sc_ind = sc_ind
+        self.dst_dep = dst_dep
+        self.num_bdy = num_bdy
+        self.id_121 = id_121
+        if not self.isslab:
+            self.bdy_z = DC.depths[self.g_type]['bdy_H']
+        else:
+            self.bdy_z = np.zeros([1])
+            
+        self.dst_z = dst_dep
+        self.sc_z_len = sc_z_len
+        self.sc_time = sc_time
+        self.tmp_filt = tmp_filt
+        self.dist_tot = dist_tot
+
+        self.d_bdy = {}
+        for v in range(self.nvar):
+            self.d_bdy[self.var_nam[v]] = {}
+       
+    def extract_month(self, year, month):
+        """Extracts monthly data and interpolates onto the destination grid
+        
+        Keyword arguments:
+        year -- year of data to be extracted
+        month -- month of the year to be extracted
+        """
+        self.logger.info('extract_month function called')
+        # Check year entry exists in d_bdy, if not create it.
+        for v in range(self.nvar):
+            try:
+                self.d_bdy[self.var_nam[v]][year]
+            except KeyError:        
+                self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
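+        # Resulting structure (illustrative, 'votemper' as an example name):
+        #   self.d_bdy['votemper'][1979] = {'data': <nt x nz x nbdy array>, 'date': ...}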
+        
+        i_run = np.arange(self.sc_ind['imin'], self.sc_ind['imax']) 
+        j_run = np.arange(self.sc_ind['jmin'], self.sc_ind['jmax'])
+        extended_i = np.arange(self.sc_ind['imin'] - 1, self.sc_ind['imax'])
+        extended_j = np.arange(self.sc_ind['jmin'] - 1, self.sc_ind['jmax'])
+        ind = self.sc_ind['ind']
+        sc_time = self.sc_time
+        sc_z_len = self.sc_z_len
+        self.logger.debug('sc_time attributes: %s', dir(sc_time))
+        # define src/dst cals
+        sf, ed = self.cal_trans(sc_time.calendar, #sc_time[0].calendar 
+                                self.settings['dst_calendar'], year, month)
+        DstCal = utime('seconds since %d-1-1' %year, 
+                       self.settings['dst_calendar'])
+        dst_start = DstCal.date2num(datetime(year, month, 1))
+        dst_end = DstCal.date2num(datetime(year, month, ed, 23, 59, 59))
+
+        self.S_cal = utime(sc_time.units, sc_time.calendar)#sc_time[0].units,sc_time[0].calendar)
+
+        self.D_cal = utime('seconds since %d-1-1' %self.settings['base_year'], 
+                           self.settings['dst_calendar'])
+
+        src_date_seconds = np.zeros(len(sc_time.time_counter))
+        for index in range(len(sc_time.time_counter)):
+            tmp_date = self.S_cal.num2date(sc_time.time_counter[index])
+            src_date_seconds[index] = DstCal.date2num(tmp_date) * sf
+
+        # Get first and last date within range, init to cover entire range
+        first_date = 0
+        last_date = len(sc_time.time_counter) - 1 
+        rev_seq = range(len(sc_time.time_counter))
+        rev_seq.reverse()
+        for date in rev_seq:
+            if src_date_seconds[date] < dst_start:
+                first_date = date
+                break
+        for date in range(len(sc_time.time_counter)):
+            if src_date_seconds[date] > dst_end:
+                last_date = date
+                break
+
+        self.logger.info('first/last dates: %s %s', first_date, last_date)
+
+        if self.first:
+            nc_3 = GetFile(self.settings['src_msk'])
+            varid_3 = nc_3['tmask']
+            t_mask = varid_3[:1, :sc_z_len, j_run, i_run]
+            if self.key_vec:
+                varid_3 = nc_3['umask']
+                u_mask = varid_3[:1, :sc_z_len, j_run, extended_i]
+                varid_3 = nc_3['vmask']
+                v_mask = varid_3[:1, :sc_z_len, extended_j, i_run]
+            nc_3.close()
+
+        # Identify missing values and scale factors if defined
+        meta_data = []
+        meta_range = self.nvar
+        if self.key_vec:
+            meta_range += 1 
+        for v in range(meta_range):
+            meta_data.append({})
+            for x in 'mv', 'sf', 'os', 'fv':
+                meta_data[v][x] = np.ones((self.nvar, 1)) * np.NaN
+
+        self.logger.info('var_nam: %s', self.var_nam)
+        for v in range(self.nvar):
+#            meta_data[v] = self._get_meta_data(sc_time[first_date].file_name, 
+#                                               self.var_nam[v], meta_data[v])
+            meta_data[v] = sc_time.get_meta_data(self.var_nam[v], meta_data[v])
+
+
+        if self.key_vec:
+            n = self.nvar
+            self.logger.info('%s %s %s', n, self.var_nam[n], meta_data[n])
+#            meta_data[n] = self.fnames_2[first_date].get_meta_data(self.var_nam[n], meta_data[n])
+            meta_data[n] = self.fnames_2.get_meta_data(self.var_nam[n], meta_data[n])
+
+        # Loop over identified files
+        for f in range(first_date, last_date + 1):
+            sc_array = [None, None]
+            sc_alt_arr = [None, None]
+            #self.logger.info('opening nc file: %s', sc_time[f].file_name)            
+            # Counters not implemented
+
+            sc_bdy = np.zeros((len(self.var_nam), sc_z_len, ind.shape[0], 
+                              ind.shape[1]))
+
+            # Loop over time entries from file f
+            for vn in range(self.nvar):
+                # Extract sub-region of data
+                self.logger.info('var_nam = %s',self.var_nam[vn])
+                varid = sc_time[self.var_nam[vn]]
+                # If extracting vector quantities open second var
+                if self.key_vec:
+                    varid_2 = self.fnames_2[self.var_nam[vn+1]]#nc_2.variables[self.var_nam[vn + 1]]
+
+                # Extract 3D scalar variables
+                if not self.isslab and not self.key_vec:
+                    self.logger.info(' 3D source array ')
+                    sc_array[0] = varid[f:f+1 , :sc_z_len, j_run, i_run]
+                # Extract 3D vector variables
+                elif self.key_vec:
+                    # For u vels take i-1
+                    sc_alt_arr[0] = varid[f:f+1, :sc_z_len, j_run, extended_i]
+                    # For v vels take j-1
+                    sc_alt_arr[1] = varid_2[f:f+1, :sc_z_len, extended_j, i_run]
+                # Extract 2D scalar vars
+                else:
+                    self.logger.info(' 2D source array ')
+                    sc_array[0] = varid[f:f+1, j_run, i_run].reshape([1,1,j_run.size,i_run.size])
+
+                # Average vector vars onto T-grid
+                if self.key_vec:
+                    # First make sure land points have a zero val
+                    sc_alt_arr[0] *= u_mask
+                    sc_alt_arr[1] *= v_mask
+                    # Average onto the T-grid assuming a C-grid stagger
+                    sc_array[0] = 0.5 * (sc_alt_arr[0][:,:,:,:-1] + 
+                                         sc_alt_arr[0][:,:,:,1:])
+                    sc_array[1] = 0.5 * (sc_alt_arr[1][:,:,:-1,:] +
+                                         sc_alt_arr[1][:,:,1:,:])
+
+                # Set land points to NaN and adjust with any scaling
+                # Factor offset
+                # Note: using isnan/sum is relatively fast, though slower than
+                # the external bottleneck library
+                self.logger.info('SC ARRAY MIN MAX (pre-mask): %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
+                sc_array[0][t_mask == 0] = np.NaN
+                self.logger.info('SC ARRAY MIN MAX (post-mask): %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
+                if not np.isnan(np.sum(meta_data[vn]['sf'])):
+                    sc_array[0] *= meta_data[vn]['sf']
+                if not np.isnan(np.sum(meta_data[vn]['os'])):
+                    sc_array[0] += meta_data[vn]['os']
+
+                if self.key_vec:
+                    sc_array[1][t_mask == 0] = np.NaN
+                    if not np.isnan(np.sum(meta_data[vn + 1]['sf'])):
+                        sc_array[1] *= meta_data[vn + 1]['sf']
+                    if not np.isnan(np.sum(meta_data[vn + 1]['os'])):
+                        sc_array[1] += meta_data[vn + 1]['os']
+
+                # Now collapse the extracted data to an array 
+                # containing only nearest neighbours to dest bdy points
+                # Loop over the depth axis
+                for dep in range(sc_z_len):
+                    tmp_arr = [None, None]
+                    # Consider squeezing
+                    tmp_arr[0] = sc_array[0][0,dep,:,:].flatten(1) #[:,:,dep]
+                    if not self.key_vec:
+                        sc_bdy[vn, dep, :, :] = self._flat_ref(tmp_arr[0], ind)
+                    else:
+                        tmp_arr[1] = sc_array[1][0,dep,:,:].flatten(1) #[:,:,dep]
+                        # Include in the collapse the rotation from the
+                        # grid to real zonal direction, ie ij -> e
+                        sc_bdy[vn, dep, :] = (tmp_arr[0][ind[:]] * self.gcos -
+                                              tmp_arr[1][ind[:]] * self.gsin)
+                        # Include... meridinal direction, ie ij -> n
+                        sc_bdy[vn+1, dep, :] = (tmp_arr[1][ind[:]] * self.gcos +
+                                                tmp_arr[0][ind[:]] * self.gsin)
+
+                # End depths loop
+                self.logger.info(' END DEPTHS LOOP ')
+            # End Looping over vars
+            self.logger.info(' END VAR LOOP ')
+            # ! Skip sc_bdy permutation
+
+            x = sc_array[0]
+            y = np.isnan(x)
+            z = np.invert(np.isnan(x))
+            x[y] = 0
+            self.logger.info('nans: %s', np.sum(y[:]))
+            #x = x[np.invert(y)]
+            self.logger.info('%s %s %s %s', x.shape, np.sum(x[z], dtype=np.float64), np.amin(x), np.amax(x))
+
+            # Calculate weightings to be used in interpolation from
+            # source data to dest bdy pts. Only need do once.
+            if self.first:
+                # identify valid pts
+                data_ind = np.invert(np.isnan(sc_bdy[0,:,:,:]))
+                # dist_tot is currently 2D so extend along depth
+                # axis to allow single array calc later, also remove
+                # any invalid pts using our eldritch data_ind
+                self.logger.info('DIST TOT ZEROS BEFORE %s', np.sum(self.dist_tot == 0))
+                self.dist_tot = (np.repeat(self.dist_tot, sc_z_len).reshape(
+                            self.dist_tot.shape[0],
+                            self.dist_tot.shape[1], sc_z_len)).transpose(2,0,1)
+                self.dist_tot *= data_ind
+                self.logger.info('DIST TOT ZEROS %s', np.sum(self.dist_tot == 0))
+
+                self.logger.info('DIST IND ZEROS %s', np.sum(data_ind == 0))
+
+                # Identify problem pts due to grid discontinuities
+                # using dists > 4 deg of lon/lat
+                over_dist = np.sum(self.dist_tot[:] > 4)
+                if over_dist > 0:
+                    raise RuntimeError('Distance between source location and new '
+                                       'boundary points is greater than 4 degrees '
+                                       'of lon/lat')
+
+                # Calculate Gaussian weighting with correlation dist
+                r0 = self.settings['r0']
+                dist_wei = (1/(r0 * np.power(2 * np.pi, 0.5)))*(np.exp( -0.5 *np.power(self.dist_tot / r0, 2)))
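+                # i.e. a Gaussian kernel w = exp(-0.5*(d/r0)**2) / (r0*sqrt(2*pi)),
+                # with correlation distance r0 taken from the settings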
+                # Calculate sum of weightings
+                dist_fac = np.sum(dist_wei * data_ind, 2)
+                # identify loc where all sc pts are land
+                nan_ind = np.sum(data_ind, 2) == 0
+                self.logger.info('NAN IND : %s ', np.sum(nan_ind))
+                
+                # Calc max zlevel to which data available on sc grid
+                data_ind = np.sum(nan_ind == 0, 0) - 1
+                # set land val to level 1 otherwise indexing problems
+                # may occur- should not affect later results because
+                # land is masked in weightings array
+                data_ind[data_ind == -1] = 0
+                # transform depth levels at each bdy pt to vector
+                # index that can be used to speed up calcs
+                data_ind += np.arange(0, sc_z_len * self.num_bdy, sc_z_len)
+
+                # ? Attribute only used on first run so clear. 
+                del self.dist_tot
+
+            # weighted averaged onto new horizontal grid
+            for vn in range(self.nvar):
+                self.logger.info(' sc_bdy %s %s', np.nanmin(sc_bdy), np.nanmax(sc_bdy))
+                dst_bdy = (np.nansum(sc_bdy[vn,:,:,:] * dist_wei, 2) /
+                           dist_fac)
+                self.logger.info(' dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                # Quick check to see we have not got bad values
+                if np.sum(dst_bdy == np.inf) > 0:
+                    raise RuntimeError('Bad values found after weighted averaging')
+                # weight vector array and rotate onto dest grid
+                if self.key_vec:
+                    # [:,:,:,vn+1]
+                    dst_bdy_2 = (np.nansum(sc_bdy[vn+1,:,:,:] * dist_wei, 2) /
+                                 dist_fac)
+                    self.logger.info('time to rotate and replicate')
+                    self.logger.info('%s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                    self.logger.info('%s en to %s %s', self.rot_str, self.rot_dir, dst_bdy.shape)
+                    dst_bdy = rot_rep(dst_bdy, dst_bdy_2, self.rot_str,
+                                      'en to %s' %self.rot_dir, self.dst_gcos, self.dst_gsin)
+                    self.logger.info('%s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                # Apply 1-2-1 filter along bdy pts using NN ind self.id_121
+                if self.first:
+                    tmp_valid = np.invert(np.isnan(
+                                            dst_bdy.flatten(1)[self.id_121]))
+                    # Finished first run operations
+                    self.first = False
+
+                dst_bdy = (np.nansum(dst_bdy.flatten(1)[self.id_121] * 
+                           self.tmp_filt, 2) / np.sum(self.tmp_filt *
+                           tmp_valid, 2))
+                # Set land pts to zero
+
+                self.logger.info(' pre dst_bdy[nan_ind] %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                dst_bdy[nan_ind] = 0
+                self.logger.info(' post dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+                # Remove any data on dst grid that is in land
+                dst_bdy[:,np.isnan(self.bdy_z)] = 0
+                self.logger.info(' 3 dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+
+                # If we have depth dimension
+                if not self.isslab:
+                    # If all else fails fill down using deepest pt
+                    dst_bdy = dst_bdy.flatten(1)
+                    dst_bdy += ((dst_bdy == 0) *
+                                dst_bdy[data_ind].repeat(sc_z_len))
+                    # Weighted averaged on new vertical grid
+                    dst_bdy = (dst_bdy[self.z_ind[:,0]] * self.z_dist[:,0] +
+                               dst_bdy[self.z_ind[:,1]] * self.z_dist[:,1])
+                    data_out = dst_bdy.reshape(self.dst_dep.shape, order='F')
+
+                    # If z-level, replace data below the sea bed !!! make a stat
+                    # of this as it could be problematic
+                    ind_z = self.bdy_z.repeat(len(self.dst_dep))
+                    ind_z = ind_z.reshape(len(self.dst_dep),
+                                          len(self.bdy_z), order='F')
+                    ind_z -= self.dst_dep
+                    ind_z = ind_z < 0
+
+                    data_out[ind_z] = np.NaN
+                else:
+                    data_out = dst_bdy
+                    data_out[np.isnan(self.bdy_z)] = np.NaN
+                entry = self.d_bdy[self.var_nam[vn]][year]
+                if entry['data'] is None:
+                    # Create entry with singleton 3rd dimension
+                    entry['data'] = np.array([data_out])
+                else:
+                    entry['data'] = np.concatenate((entry['data'],
+                                                   np.array([data_out])))
+                entry['date'] = sc_time.time_counter[f] #count skipped
+        
+        # Need stats on fill pts in z and horiz + missing pts...
+    # end month
+#end year
+# End great loop of crawling chaos
+
+
+    # Allows reference of two equal sized but misshapen arrays
+    # equivalent to Matlab alpha(beta(:)) 
+    def _flat_ref(self, alpha, beta):
+        """Extract input index elements from array and order them in Fotran array
+        and returns the new array
+        
+        Keywork arguments:
+        alpha -- input array
+        beta -- index array 
+        """
+        return alpha.flatten(1)[beta.flatten(1)].reshape(beta.shape, order='F')
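+
+    # e.g. alpha = np.array([[1, 2], [3, 4]]) flattens in Fortran order to
+    # [1, 3, 2, 4], so beta = np.array([[0, 2]]) returns np.array([[1, 2]])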
+
+    # Convert numeric date from source to dest
+ #   def convert_date(self, date):
+ #       val = self.S_cal.num2date(date)
+ #       return self.D_cal.date2num(val)
+
+
+    def cal_trans(self, source, dest, year, month):
+        """Translate between calendars and return scale factor and number of days in month
+        
+        Keyword arguments:
+        source -- source calendar
+        dest -- destination calendar
+        year -- input year
+        month -- input month  
+        """
+        vals = {'gregorian': 365. + isleap(year), 'noleap': 365., '360_day': 360.}
+        if source not in vals:
+            raise ValueError('Unknown source calendar type: %s' %source)
+        # Get month length
+        if dest == '360_day':
+            ed = 30
+        else:
+            ed = monthrange(year, month)[1]
+        # Calculate scale factor
+        sf = vals[source] / vals[dest]
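+        # e.g. cal_trans('gregorian', '360_day', 2000, 2) gives sf = 366./360.
+        # (2000 is a leap year) and ed = 30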
+        
+        return sf, ed
+
+    # BEWARE FORTRAN V C ordering
+    # Replicates and tiles an array
+    #def _trig_reptile(self, trig, size):
+    #    trig = np.transpose(trig, (2, 1, 0)) # Matlab 2 0 1
+    #    return np.tile(trig, (1, size, 1)) # Matlab size 1 1
+
+
+
+
+    def time_interp(self, year, month):
+        """
+        Perform a time interpolation of the BDY data.
+    
+        This method performs a time interpolation if required. This is necessary 
+        if the time frequency is not a factor of monthly output or the input and
+        output calendars differ. CF-compliant calendar options accepted: gregorian
+        | standard, proleptic_gregorian, noleap | 365_day, 360_day or julian.
+    
+        Args:
+    
+        Returns:
+            
+        """
+        # Extract time information 
+        
+        nt           = len(self.sc_time.time_counter)
+        time_counter = np.zeros([nt])
+        tmp_cal      = utime('seconds since %d-1-1' %year,
+                             self.settings['dst_calendar'].lower())
+        
+        for t in range(nt):
+            time_counter[t] = tmp_cal.date2num(self.sc_time.date_counter[t])
+        
+        date_000 = datetime(year, month, 1, 12, 0, 0)
+        if month < 12:
+            date_end = datetime(year, month+1, 1, 12, 0, 0)
+        else:
+            date_end = datetime(year+1, 1, 1, 12, 0, 0)
+        time_000 = tmp_cal.date2num(date_000)
+        time_end = tmp_cal.date2num(date_end)
+            
+        # Take the difference of the first two time entries to get delta t
+        
+        del_t = time_counter[1] - time_counter[0]
+        dstep = 86400 / np.int(del_t)
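+        # e.g. 6-hourly input (del_t = 21600 s) gives dstep = 4, so the sub-daily
+        # branch below interpolates four interleaved daily series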
+      
+        # TODO: put in a test to check all deltaT are the same otherwise throw 
+        # an exception
+    
+        # If time freq. is 86400s or greater
+        # TODO put in an error handler for the unlikely event of frequency not a
+        # multiple of 86400 | data are annual means
+        if del_t >= 86400.:
+            for v in self.var_nam:    
+                self.logger.debug('%d time entries for %s', len(time_counter), v)
+                intfn = interp1d(time_counter, self.d_bdy[v][1979]['data'][:,:,:], axis=0,
+                                                                 bounds_error=True)
+                self.d_bdy[v][1979]['data'] = intfn(np.arange(time_000, time_end, 86400))
+        else:
+            for v in self.var_nam: 
+                for t in range(dstep):
+                    intfn = interp1d(time_counter[t::dstep], 
+                       self.d_bdy[v].data[t::dstep,:,:], axis=0, bounds_error=True)
+                    self.d_bdy[v].data[t::dstep,:,:] = intfn(np.arange(time_000, 
+                                                                  time_end, 86400)) 
+        self.time_counter = time_counter
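+
+    # Minimal sketch (illustrative values) of the daily resampling performed
+    # above for the del_t >= 86400 branch:
+    #   tc = np.array([0., 432000., 864000.])     # 5-daily instants in seconds
+    #   fn = interp1d(tc, np.array([1., 2., 3.]), axis=0, bounds_error=True)
+    #   fn(np.arange(0., 864000., 86400.))        # -> [1.0, 1.2, ..., 2.8]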
+    
+    def write_out(self, year, month, ind, unit_origin):
+        """ 
+        Writes monthly BDY data to a netCDF file.
+    
+        This method writes out all available variables for a given grid along with
+        any associated metadata. Currently data are only written out as monthly
+        files.
+    
+        Args:
+            year         (int) : year to write out
+            month        (int) : month to write out
+            ind          (dict): dictionary holding grid information
+            unit_origin  (str) : time reference '%d-01-01 00:00:00' %year_000
+            
+        Returns:
+        """
+    
+        # Define output filename    
+        
+        self.logger.info('Defining output file for grid %s, month: %d, year: %d', 
+                    self.g_type.upper(), month, year)
+       
+        f_out = self.settings['dst_dir']+self.settings['fn']+ \
+                '_bdy'+self.g_type.upper()+ '_y'+str(year)+'m'+'%02d' % month+'.nc'
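+        # e.g. '<dst_dir><fn>_bdyT_y1979m01.nc' for the T grid, Jan 1979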
+                            
+        ncgen.CreateBDYNetcdfFile(f_out, self.num_bdy,
+                                  self.jpi, self.jpj, self.jpk,
+                                  self.settings['rimwidth'],
+                                  self.settings['dst_metainfo'],
+                                  unit_origin,
+                                  self.settings['fv'],
+                                  self.settings['dst_calendar'],
+                                  self.g_type.upper())
+        
+        self.logger.info('Writing out BDY data to: %s', f_out)
+        
+        # Loop over variables in extracted object
+            
+#        for v in self.variables:
+        for v in self.var_nam:
+            self.logger.debug('settings: %s', self.settings)
+            if self.settings['dyn2d']: # Calculate depth averaged velocity
+                tile_dz = np.tile(self.bdy_dz, [len(self.time_counter), 1, 1, 1])
+                tmp_var = np.reshape(self.d_bdy[v][1979]['data'][:,:,:], tile_dz.shape)
+                tmp_var = np.nansum(tmp_var * tile_dz, 2) /np.nansum(tile_dz, 2)
+            else: # Replace NaNs with specified fill value
+                tmp_var = np.where(np.isnan(self.d_bdy[v][1979]['data'][:,:,:]),
+                                            self.settings['fv'], 
+                                            self.d_bdy[v][1979]['data'][:,:,:])
+               
+            # Write variable to file
+            
+            ncpop.write_data_to_file(f_out, v, tmp_var)
+    
+        # Write remaining data to file (indices are in Python notation
+        # therefore we must add 1 to i,j and r)
+        self.logger.debug('bdy_z shape: %s; tmp_var shape: %s',
+                          self.bdy_z.shape, tmp_var.shape)
+        ncpop.write_data_to_file(f_out, 'nav_lon', self.nav_lon)
+        ncpop.write_data_to_file(f_out, 'nav_lat', self.nav_lat)
+        ncpop.write_data_to_file(f_out, 'depth'+self.g_type, self.dst_dep)
+        ncpop.write_data_to_file(f_out, 'nbidta', ind.bdy_i[:, 0] + 1)
+        ncpop.write_data_to_file(f_out, 'nbjdta', ind.bdy_i[:, 1] + 1)
+        ncpop.write_data_to_file(f_out, 'nbrdta', ind.bdy_r[:   ] + 1)
+        ncpop.write_data_to_file(f_out, 'time_counter', self.time_counter)
+
+
diff --git a/pynemo/nemo_bdy_gen_c.py b/pynemo/nemo_bdy_gen_c.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7a84d0163977ca9f8f07edf85c820daa9039ed3
--- /dev/null
+++ b/pynemo/nemo_bdy_gen_c.py
@@ -0,0 +1,230 @@
+'''
+Generates the NEMO boundary: indices for t, u and v points, plus the rim gradient.
+The variable names have been renamed for consistency with Python standards and
+generalised, e.g. bdy_i is used instead of bdy_t.
+Ported from Matlab code by James Harle
+@author: John Kazimierz Farey
+@author: Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+#External Imports
+import numpy as np
+import logging
+
+#Local Imports
+from utils.nemo_bdy_lib import sub2ind
+
+class Boundary:
+    # Bearings for overlays
+    _NORTH = [1,-1,1,-1,2,None,1,-1]
+    _SOUTH = [1,-1,1,-1,None,-2,1,-1]
+    _EAST = [1,-1,1,-1,1,-1,2,None]
+    _WEST = [1,-1,1,-1,1,-1,None,-2]
+    
+    def __init__(self, boundary_mask, settings, grid):
+        """Generates the indices for NEMO Boundary and returns a Grid object with indices 
+        
+        Keyword arguments:
+        boundary_mask -- boundary mask
+        settings -- dictionary of setting values
+        grid -- type of the grid 't', 'u', 'v'
+        Attributes:
+        bdy_i -- index
+        bdy_r -- r index 
+        """
+        self.logger = logging.getLogger(__name__)
+        bdy_msk = boundary_mask
+        self.settings = settings
+        rw = self.settings['rimwidth']
+        rm = rw - 1
+        self.grid_type = grid.lower()
+        # Throw an error for a wrong grid input type
+        if grid not in ['t', 'u', 'v', 'f']:
+            self.logger.error('Grid type not correctly specified: %s', grid)
+            raise ValueError("""%s is an invalid grid type;
+                                must be 't', 'u', 'v' or 'f'""" %grid)
+
+        # Configure grid type
+        if grid != 't':
+            # We need to copy this before changing, because the original will be 
+            # needed in calculating later grid boundary types
+            bdy_msk = boundary_mask.copy()
+            grid_ind = np.zeros(bdy_msk.shape, dtype=np.bool, order='C')
+            #NEMO works with a staggered 'C' grid: we need to create masks at the staggered points
+            for fval in [-1, 0]: #-1 mask value, 0 Land, 1 Water/Ocean
+                if grid == 'u':
+                    grid_ind[:, :-1] = np.logical_and(bdy_msk[:, :-1] == 1,
+                                                      bdy_msk[:, 1:] == fval)
+                    bdy_msk[grid_ind] = fval
+                elif grid == 'v':
+                    grid_ind[:-1, :] = np.logical_and(bdy_msk[:-1, :] == 1,
+                                                      bdy_msk[1:, :] == fval)
+                    bdy_msk[grid_ind] = fval
+                elif grid == 'f':
+                    grid_ind[:-1, :-1] = np.logical_and(bdy_msk[:-1,:-1] == 1,
+                                                        bdy_msk[1:, 1:] == fval)
+                    grid_ind[:-1, :] = np.logical_or(np.logical_and(bdy_msk[:-1, :] == 1,
+                                                                    bdy_msk[1:, :] == fval), 
+                                                     grid_ind[:-1, :] == 1)
+
+                    grid_ind[:, :-1] = np.logical_or(np.logical_and(bdy_msk[:, :-1] == 1,
+                                                                    bdy_msk[:, 1:] == fval), 
+                                                     grid_ind[:, :-1] == 1)
+                    bdy_msk[grid_ind] = fval
+
+        # Create padded array for overlays
+        msk = np.pad(bdy_msk,((1,1),(1,1)), 'constant', constant_values=(-1))
+        # create index arrays of I and J coords
+        igrid, jgrid = np.meshgrid(np.arange(bdy_msk.shape[1]), np.arange(bdy_msk.shape[0]))
+
+        SBi, SBj = self._find_bdy(igrid, jgrid, msk, self._SOUTH)
+        NBi, NBj = self._find_bdy(igrid, jgrid, msk, self._NORTH)
+        EBi, EBj = self._find_bdy(igrid, jgrid, msk, self._EAST)
+        WBi, WBj = self._find_bdy(igrid, jgrid, msk, self._WEST)
+
+        #create a 2D array index for the points that are on border
+        tij = np.column_stack((np.concatenate((SBi, NBi, WBi, EBi)),np.concatenate((SBj, NBj, WBj, EBj))))
+        bdy_i = np.tile(tij, (rw, 1, 1))
+        
+        bdy_i = np.transpose(bdy_i, (1, 2, 0))
+        bdy_r = np.tile(np.arange(0, rw), (bdy_i.shape[0], 1))
+
+        # Add points for the relaxation zone over the rim width: step into the
+        # domain from each boundary and select the points. S heads north (0,+1),
+        # N heads south (0,-1), W heads east (+1,0), E heads west (-1,0)
+        temp = np.column_stack((np.concatenate((SBi*0, NBi*0, WBi*0+1, EBi*0-1)),
+                                 np.concatenate((SBj*0+1, NBj*0-1, WBj*0, EBj*0))))
+        for i in range(rm):
+            bdy_i[:, :, i+1] = bdy_i[:, :, i] + temp
+
+        bdy_i = np.transpose(bdy_i, (1, 2, 0))
+        bdy_i = np.reshape(bdy_i, 
+                 (bdy_i.shape[0],bdy_i.shape[1]*bdy_i.shape[2]))
+        bdy_r = bdy_r.flatten(1)
+
+        ##   Remove duplicate and open sea points  ##
+        
+        bdy_i, bdy_r = self._remove_duplicate_points(bdy_i, bdy_r)
+        bdy_i, bdy_r, nonmask_index = self._remove_landpoints_open_ocean(bdy_msk, bdy_i, bdy_r)
+                
+        ###   Fill in any gradients between the relaxation zone and the internal domain
+        ###   bdy_msk matches the Matlab incarnation; r_msk is pythonic
+        r_msk = bdy_msk.copy()
+        r_msk[r_msk == 1] = rw
+        r_msk = np.float16(r_msk)
+        r_msk[r_msk < 1] = np.NaN
+        r_msk[bdy_i[:,1],bdy_i[:,0]] = np.float16(bdy_r)
+        
+        r_msk_orig = r_msk.copy()
+        r_msk_ref = r_msk[1:-1, 1:-1]
+
+        self.logger.debug('Start r_msk bearings loop')
+        # Remove sharp gradients in the rim values by smoothing them out
+        for i in range(rw-1):
+            # Check each bearing
+            for b in [self._SOUTH, self._NORTH, self._WEST, self._EAST]:
+                r_msk,r_msk_ref = self._fill(r_msk, r_msk_ref, b)
+        self.logger.debug('done loop')
+    
+        # update bdy_i and bdy_r
+        new_ind = np.abs(r_msk - r_msk_orig) >  0
+        # The transposing gets around the Fortran v C ordering thing.
+        bdy_i_tmp = np.array([igrid.T[new_ind.T], jgrid.T[new_ind.T]])
+        bdy_r_tmp = r_msk.T[new_ind.T]
+        bdy_i = np.vstack((bdy_i_tmp.T, bdy_i))
+        
+        uniqind = self._unique_rows(bdy_i)
+        bdy_i = bdy_i[uniqind, :]
+        bdy_r = np.hstack((bdy_r_tmp, bdy_r))
+        bdy_r = bdy_r[uniqind]
+        
+        # sort by rimwidth
+        igrid = np.argsort(bdy_r, kind='mergesort')
+        bdy_r = bdy_r[igrid]
+        bdy_i = bdy_i[igrid, :]
+
+        self.bdy_i = bdy_i
+        self.bdy_r = bdy_r
+
+        self.logger.debug( 'Final bdy_i: %s', self.bdy_i.shape)
+
+
+
+    def _remove_duplicate_points(self, bdy_i, bdy_r):
+        """ Removes the duplicate points in the bdy_i and return the bdy_i and bdy_r
+        bdy_i -- bdy indexes
+        bdy_r -- bdy rim values 
+        """
+        bdy_i2 = np.transpose(bdy_i, (1, 0))
+        uniqind = self._unique_rows(bdy_i2)
+
+        bdy_i = bdy_i2[uniqind]
+        bdy_r = bdy_r[uniqind]
+        return bdy_i, bdy_r
+    
+    def _remove_landpoints_open_ocean(self, mask, bdy_i, bdy_r):
+        """ Removes the land points and open ocean points """        
+        unmask_index = mask[bdy_i[:,1],bdy_i[:,0]] != 0 
+        bdy_i = bdy_i[unmask_index, :]
+        bdy_r = bdy_r[unmask_index]
+        return bdy_i, bdy_r, unmask_index        
+        
+    def _find_bdy(self, I, J, mask, brg):
+        """Finds the border indexes by checking the change from ocean to land.
+        Returns the i and j index array where the shift happens.
+        
+        Keyword arguments:
+        I -- I x direction indexes
+        J -- J y direction indexes
+        mask -- mask data
+        brg -- mask index range
+        """
+        # subtract matrices to find boundaries, set to True
+        m1 = mask[brg[0]:brg[1], brg[2]:brg[3]]
+        m2 = mask[brg[4]:brg[5], brg[6]:brg[7]]
+        overlay = np.subtract(m1,m2)
+        # Create boolean array of bdy points in overlay
+        bool_arr = overlay==2
+        # index I or J to find bdies
+        bdy_I = I[bool_arr]
+        bdy_J = J[bool_arr]
+        
+        return bdy_I, bdy_J
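+
+    # e.g. with the -1 padding/mask values: a water point (1) whose
+    # bearing-shifted neighbour is masked (-1) gives overlay == 1 - (-1) == 2
+    # and is flagged as a boundary point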
+
+    def _fill(self, mask, ref, brg):
+        """  """
+        tmp = mask[brg[4]:brg[5], brg[6]:brg[7]]
+        ind = (ref - tmp) > 1
+        ref[ind] = tmp[ind] + 1
+        mask[brg[0]:brg[1], brg[2]:brg[3]] = ref
+
+        return mask, ref
+
+
+    
+    def _unique_rows(self, t):
+        """ Returns the indices of unique rows in the input 2D array
+        t -- input 2D array
+        """
+        sh = np.shape(t)
+        if (len(sh) > 2) or (sh[0] == 0) or (sh[1] == 0):
+            print 'Warning: Shape of expected 2D array:', sh
+        tlist = t.tolist()
+        sortt = []
+        indx = zip(*sorted([(val, i) for i, val in enumerate(tlist)]))[1]
+        indx = np.array(indx)
+        for i in indx:
+            sortt.append(tlist[i])
+        del tlist
+        # mark each row that equals the row before it in sorted order
+        # (index -1 wraps to the last row; the all-identical case is fixed below)
+        for i, x in enumerate(sortt):
+            if x == sortt[i-1]:
+                indx[i] = -1
+        # if all the rows are identical, keep the first as the unique row
+        if sortt[0] == sortt[-1]:
+            indx[0] = 0
+
+        return indx[indx != -1]
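+
+    # e.g. _unique_rows(np.array([[0, 1], [0, 1], [2, 3]])) returns
+    # array([0, 2]): the indices of one copy of each distinct row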
+
diff --git a/pynemo/nemo_bdy_grid_angle.py b/pynemo/nemo_bdy_grid_angle.py
new file mode 100644
index 0000000000000000000000000000000000000000..92eec78d4c41c51fa72cfd4d3ce4c836efc1bbb9
--- /dev/null
+++ b/pynemo/nemo_bdy_grid_angle.py
@@ -0,0 +1,149 @@
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+# Calculates Grid Angles                                              #
+#                                                                     #
+# Written by John Kazimierz Farey, Sep 2012                           #
+# Port of Matlab code of James Harle                                  #
+#                                                                     #
+# I have substituted the nemo_phycst for numpy inbuilts               #
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+
+# coord_fname: nemo coordinate file
+# i: model zonal indices
+# j: model meridional indices
+# cd_type: define the nature of pt2d grid points
+
+import numpy as np
+from reader.factory import GetFile
+import logging
+#     pylint: disable=E1101
+
+class GridAngle:
+    
+    # I and J offsets for different grid types
+    CASES = {'t': [0, 0, 0, -1], 'u': [0, 0, 0,-1],
+                  'v': [0, 0, -1, 0], 'f': [0, 1, 0, 0]}
+    MAP = {'t': 'v', 'u': 'f', 'v': 'f', 'f': 'u'}
+
+    def __init__(self, coord_fname, imin, imax, jmin, jmax, cd_type):
+        # set case and check validity
+        self.CD_T = cd_type.lower()
+        self.logger = logging.getLogger(__name__)
+        if self.CD_T not in ['t', 'u', 'v', 'f']:
+            raise ValueError('Unknown grid type %s' %cd_type)
+        self.M_T = self.MAP[self.CD_T]
+        
+        self.logger.debug('Grid Angle: %s', self.CD_T)
+
+        # open coord file 
+        self.nc = GetFile(coord_fname)
+        
+        # set constants
+        self.IMIN, self.IMAX = imin, imax
+        self.JMIN, self.JMAX = jmin, jmax
+        ndim = len(self.nc['glamt'].dimensions)
+        if ndim == 4:
+            self.DIM_STR = 0, 0
+        elif ndim == 3:
+            self.DIM_STR = 0
+        else:
+            self.DIM_STR = None
+
+        # Get North pole direction and modulus for cd_type
+        np_x, np_y, np_n = self._get_north_dir()
+
+        # Get i or j MAP segment Direction around cd_type
+        sd_x, sd_y, sd_n = self._get_seg_dir(np_n)
+
+        # Get cosine and sine
+        self.sinval, self.cosval = self._get_sin_cos(np_x, np_y, np_n, sd_x,
+                                                     sd_y, sd_n)
+
+        self.nc.close()
+
+# # # # # # # # # # # # # 
+# # Functions # # # # # #
+# # # # # # # # # # # # #
+
+    def _get_sin_cos(self, nx, ny, nn, sx, sy, sn):
+        # Geographic mesh
+        i, j, ii, jj = self.CASES[self.CD_T]
+        var_one = self._get_lam_phi(map=True, i=i, j=j, single=True)
+        var_two = self._get_lam_phi(map=True, i=ii, j=jj, single=True)
+
+        ind = (np.abs(var_one - var_two) % 360) < 1.e-8
+        
+        # Cosine and sine using scalar/vector products
+        if self.CD_T == 'v':
+            sin_val = (nx * sx + ny * sy) / sn
+            cos_val = -(nx * sy - ny * sx) / sn
+        else:
+            sin_val = (nx * sy - ny * sx) / sn
+            cos_val = (nx * sx + ny * sy) / sn
+
+        sin_val[ind] = 0  
+        cos_val[ind] = 1
+
+        return sin_val, cos_val 
+        
+    # Finds North pole direction and modulus of some point
+    def _get_north_dir(self):
+        zlam, zphi = self._get_lam_phi()
+        z_x_np = self._trig_eq(-2, 'cos', zlam, zphi)
+        z_y_np = self._trig_eq(-2, 'sin', zlam, zphi)
+        z_n_np = np.power(z_x_np,2) + np.power(z_y_np,2)
+
+        return z_x_np, z_y_np, z_n_np
+
+    # Find segmentation direction of some point
+    def _get_seg_dir(self, north_n):
+        i, j, ii, jj = self.CASES[self.CD_T]
+        zlam, zphi = self._get_lam_phi(map=True, i=i, j=j)
+        z_lan, z_phh = self._get_lam_phi(map=True, i=ii, j=jj)
+
+        z_x_sd = (self._trig_eq(2, 'cos', zlam, zphi) -
+                   self._trig_eq(2, 'cos', z_lan, z_phh))
+        z_y_sd = (self._trig_eq(2, 'sin', zlam, zphi) - 
+                   self._trig_eq(2, 'sin', z_lan, z_phh)) # N
+
+        z_n_sd = np.sqrt(north_n * (np.power(z_x_sd, 2) + np.power(z_y_sd, 2)))
+        z_n_sd[z_n_sd < 1.e-14] = 1.e-14
+
+        return z_x_sd, z_y_sd, z_n_sd
+
+    # Returns lam/phi in (offset) i/j range for init grid type
+    # Data must be converted to float64 to prevent loss of precision in later results
+    def _get_lam_phi(self, map=False, i=0, j=0, single=False):
+        d = self.DIM_STR
+        i, ii = self.IMIN + i, self.IMAX + i
+        j, jj = self.JMIN + j, self.JMAX + j
+        if j < 0:
+            jj -= j
+            j = 0
+        if i < 0:
+            ii -= i
+            i = 0
+             
+        if map:
+            case = self.M_T
+        else:
+            case = self.CD_T
+        zlam = np.float64(self.nc['glam' + case][d, j:jj, i:ii]) #.variables['glam' + case][d, j:jj, i:ii])
+        if single:
+            return zlam
+        zphi = np.float64(self.nc['gphi' + case][d, j:jj, i:ii])#.variables['gphi' + case][d, j:jj, i:ii])
+       
+        return zlam, zphi 
+
+
+    # Returns long winded equation of two vars; some lam and phi
+    def _trig_eq(self, x, eq, z_one, z_two):
+        if eq == 'cos':
+            z_one = np.cos(np.radians(z_one))
+        elif eq == 'sin':
+            z_one = np.sin(np.radians(z_one))
+        else:
+            raise ValueError('eq must be "cos" or "sin"')
+
+        z_two = np.tan(np.pi / 4 - np.radians(z_two) / 2)
+        
+        return x * z_one * z_two
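+
+# Note: tan(pi/4 - phi/2) is the radial factor of a north-polar stereographic
+# projection, so _trig_eq(x, 'cos'/'sin', lam, phi) returns x times the
+# stereographic x or y coordinate of the point (lam, phi)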
diff --git a/pynemo/nemo_bdy_ice.py b/pynemo/nemo_bdy_ice.py
new file mode 100644
index 0000000000000000000000000000000000000000..53580abc49124caac0aec1b2ad18e343c7d57f22
--- /dev/null
+++ b/pynemo/nemo_bdy_ice.py
@@ -0,0 +1,8 @@
+#
+# Currently empty but gets attributes assigned
+#
+
+class BoundaryIce:
+
+    def __init__(self):
+        pass
diff --git a/pynemo/nemo_bdy_ncgen.py b/pynemo/nemo_bdy_ncgen.py
new file mode 100644
index 0000000000000000000000000000000000000000..af21d1d32aac497737261c1cb9b079119270c0cc
--- /dev/null
+++ b/pynemo/nemo_bdy_ncgen.py
@@ -0,0 +1,240 @@
+'''
+Creates Nemo Bdy netCDF file ready for population
+
+Written by John Kazimierz Farey, started August 30, 2012
+Port of Matlab code of James Harle
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+import datetime
+import logging
+
+def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd):
+    """ This method creates a template of bdy netcdf files. A common for
+    T, I, U, V, E grid types.
+    """
+
+    gridNames = ['T', 'I', 'U', 'V', 'E', 'Z'] # All possible grids
+
+    # Dimension Lengths
+    xb_len = N
+    yb_len = 1
+    x_len = I
+    y_len = J
+    depth_len = K
+
+    # Enter define mode
+    ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
+
+    #define dimensions
+    if grd in gridNames and grd != 'Z': # i.e. grid NOT barotropic (Z)
+        dimztID = ncid.createDimension('z', depth_len)
+    else:
+        logging.error('Grid type not known')
+    dimxbID = ncid.createDimension('xb', xb_len)
+    dimybID = ncid.createDimension('yb', yb_len)
+    dimxID = ncid.createDimension('x', x_len)
+    dimyID = ncid.createDimension('y', y_len)
+    dimtcID = ncid.createDimension('time_counter', None)
+
+    #define variable
+    vartcID = ncid.createVariable('time_counter', 'f4', ('time_counter', ))
+    varlonID = ncid.createVariable('nav_lon', 'f4', ('y', 'x', ))
+    varlatID = ncid.createVariable('nav_lat', 'f4', ('y', 'x', ))
+
+    if grd in ['E']:
+        varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+        varN1pID = ncid.createVariable('N1p', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+        varN3nID = ncid.createVariable('N3n', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+        varN5sID = ncid.createVariable('N5s', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+    elif grd in ['T', 'I']:
+        varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
+        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+        vartmpID = ncid.createVariable('votemper', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+        varsalID = ncid.createVariable('vosaline', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+        if grd == 'I':
+            varildID = ncid.createVariable('ileadfra', 'f4', ('time_counter', 'yb', 'xb',),
+                                           fill_value=fv)
+            variicID = ncid.createVariable('iicethic', 'f4', ('time_counter', 'yb', 'xb',),
+                                           fill_value=fv)
+            varisnID = ncid.createVariable('isnowthi', 'f4', ('time_counter', 'yb', 'xb',),
+                                           fill_value=fv)
+    elif grd == 'U':
+        varztID = ncid.createVariable('depthu', 'f4', ('z', 'yb', 'xb', ), fill_value=fv)
+        varbtuID = ncid.createVariable('vobtcrtx', 'f4', ('time_counter', 'yb', 'xb', ),
+                                       fill_value=fv)
+        vartouID = ncid.createVariable('vozocrtx', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+                                       fill_value=fv)
+    elif grd == 'V':
+        varztID = ncid.createVariable('depthv', 'f4', ('z', 'yb', 'xb', ))
+        varbtvID = ncid.createVariable('vobtcrty', 'f4', ('time_counter', 'yb', 'xb', ),
+                                       fill_value=fv)
+        vartovID = ncid.createVariable('vomecrty', 'f4', ('time_counter', 'z', 'yb', 'xb',),
+                                       fill_value=fv)
+    elif grd == 'Z':
+        varsshID = ncid.createVariable('sossheig', 'f4', ('time_counter', 'yb', 'xb', ),
+                                       fill_value=fv)
+        varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+    else:
+        logging.error("Unknow Grid input")
+
+
+    varnbiID = ncid.createVariable('nbidta', 'i4', ('yb', 'xb', ))
+    varnbjID = ncid.createVariable('nbjdta', 'i4', ('yb', 'xb', ))
+    varnbrID = ncid.createVariable('nbrdta', 'i4', ('yb', 'xb', ))
+    #Global Attributes
+    ncid.file_name = filename
+    ncid.creation_date = str(datetime.datetime.now())
+    ncid.rim_width = rw
+    ncid.history = h
+    ncid.institution = 'National Oceanography Centre, Liverpool, U.K.'
+
+    #Time axis attributes
+    vartcID.axis = 'T'
+    vartcID.standard_name = 'time'
+    vartcID.units = 'seconds since '+orig
+    vartcID.title = 'Time'
+    vartcID.long_name = 'Time axis'
+    vartcID.time_origin = orig
+    vartcID.calendar = calendar
+
+    #Longitude axis attributes
+    varlonID.axis = 'Longitude'
+    varlonID.short_name = 'nav_lon'
+    varlonID.units = 'degrees_east'
+    varlonID.long_name = 'Longitude'
+
+    #Latitude axis attributes
+    varlatID.axis = 'Latitude'
+    varlatID.short_name = 'nav_lat'
+    varlatID.units = 'degrees_north'
+    varlatID.long_name = 'Latitude'
+
+    #nbidta attributes
+    varnbiID.short_name = 'nbidta'
+    varnbiID.units = 'unitless'
+    varnbiID.long_name = 'Bdy i indices'
+
+    #nbjdta attributes
+    varnbjID.short_name = 'nbjdta'
+    varnbjID.units = 'unitless'
+    varnbjID.long_name = 'Bdy j indices'
+
+    #nbrdta attributes
+    varnbrID.short_name = 'nbrdta'
+    varnbrID.units = 'unitless'
+    varnbrID.long_name = 'Bdy discrete distance'
+    if grd == 'E':
+        varztID.axis = 'Depth'
+        varztID.short_name = 'deptht'
+        varztID.units = 'm'
+        varztID.long_name = 'Depth'
+
+        varmskID.short_name = 'bdy_msk'
+        varmskID.units = 'unitless'
+        varmskID.long_name = 'Structured boundary mask'
+
+        varN1pID.units = 'mmol/m^3'
+        varN1pID.short_name = 'N1p'
+        varN1pID.long_name = 'Phosphate'
+        varN1pID.grid = 'bdyT'
+
+        varN3nID.units = 'mmol/m^3'
+        varN3nID.short_name = 'N3n'
+        varN3nID.long_name = 'Nitrate'
+        varN3nID.grid = 'bdyT'
+
+        varN5sID.units = 'mmol/m^3'
+        varN5sID.short_name = 'N5s'
+        varN5sID.long_name = 'Silicate'
+        varN5sID.grid = 'bdyT'
+
+    if grd in ['T', 'I']:
+        varztID.axis = 'Depth'
+        varztID.short_name = 'deptht'
+        varztID.units = 'm'
+        varztID.long_name = 'Depth'
+
+        varmskID.short_name = 'bdy_msk'
+        varmskID.units = 'unitless'
+        varmskID.long_name = 'Structured boundary mask'
+
+        vartmpID.units = 'C'
+        vartmpID.short_name = 'votemper'
+        vartmpID.long_name = 'Temperature'
+        vartmpID.grid = 'bdyT'
+
+        varsalID.units = 'PSU'
+        varsalID.short_name = 'vosaline'
+        varsalID.long_name = 'Salinity'
+        varsalID.grid = 'bdyT'
+
+        if grd == 'I':
+            varildID.units = '%'
+            varildID.short_name = 'ildsconc'
+            varildID.long_name = 'Ice lead fraction'
+            varildID.grid = 'bdyT'
+
+            variicID.units = 'm'
+            variicID.short_name = 'iicethic'
+            variicID.long_name = 'Ice thickness'
+            variicID.grid = 'bdyT'
+
+            varisnID.units = 'm'
+            varisnID.short_name = 'isnowthi'
+            varisnID.long_name = 'Snow thickness'
+            varisnID.grid = 'bdyT'
+    elif grd == 'U':
+        varztID.axis = 'Depth'
+        varztID.short_name = 'depthu'
+        varztID.units = 'm'
+        varztID.long_name = 'Depth'
+
+        varbtuID.units = 'm/s'
+        varbtuID.short_name = 'vobtcrtx'
+        varbtuID.long_name = 'Thickness-weighted depth-averaged zonal Current'
+        varbtuID.grid = 'bdyU'
+
+        vartouID.units = 'm/s'
+        vartouID.short_name = 'vozocrtx'
+        vartouID.long_name = 'Zonal Current'
+        vartouID.grid = 'bdyU'
+
+    elif grd == 'V':
+        varztID.axis = 'Depth'
+        varztID.short_name = 'depthv'
+        varztID.units = 'm'
+        varztID.long_name = 'Depth'
+
+        varbtvID.units = 'm/s'
+        varbtvID.short_name = 'vobtcrty'
+        varbtvID.long_name = 'Thickness-weighted depth-averaged meridional Current'
+        varbtvID.grid = 'bdyV'
+
+        vartovID.units = 'm/s'
+        vartovID.short_name = 'vomecrty'
+        vartovID.long_name = 'Meridional Current'
+        vartovID.grid = 'bdyV'
+
+    elif grd == 'Z':
+        varsshID.units = 'm'
+        varsshID.short_name = 'sossheig'
+        varsshID.long_name = 'Sea Surface Height'
+        varsshID.grid = 'bdyT'
+
+        varmskID.short_name = 'bdy_msk'
+        varmskID.units = 'unitless'
+        varmskID.long_name = 'Structured boundary mask'
+
+    else:
+        logging.error('Unknown Grid')
+
+    ncid.close()
+
diff --git a/pynemo/nemo_bdy_ncpop.py b/pynemo/nemo_bdy_ncpop.py
new file mode 100644
index 0000000000000000000000000000000000000000..898bb66c49d204a7c6ed01bfa42565365c9a226d
--- /dev/null
+++ b/pynemo/nemo_bdy_ncpop.py
@@ -0,0 +1,44 @@
+'''
+Created on 3 Oct 2014
+
+@author: Mr. Srikanth Nagella
+Netcdf writer for the bdy output
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+import numpy as np
+def write_data_to_file(filename, variable_name, data):
+    """ Writes the data to the netcdf templete file.
+    Keyword arguments:
+    filename -- output filename
+    variable_name -- variable name into which the data is written to.
+    data -- data that will be written to variable in netcdf.
+    """
+    ncid = Dataset(filename, 'a', clobber=False, format='NETCDF4')
+    count = data.shape
+
+    three_dim_variables = ['votemper', 'vosaline', 'N1p', 'N3n', 'N5s']
+    two_dim_variables = ['sossheig', 'vobtcrtx', 'vobtcrty', 'iicethic', 'ileadfra', 'isnowthi']
+
+    if variable_name in three_dim_variables:
+        if len(count) == 3:
+            count += (1L, )
+        ncid.variables[variable_name][:, :, :, :] = np.reshape(data, count)[:, :, :, :]
+    elif variable_name in two_dim_variables:
+        if len(count) == 2:
+            count += (1L, )
+        elif len(count) == 1:
+            count += (1L, 1L, )
+        ncid.variables[variable_name][:, :, :] = np.reshape(data, count)[:, :, :]
+    elif variable_name == 'time_counter':
+        ncid.variables[variable_name][:] = data[:]
+    else:
+        if len(count) == 1:
+            ncid.variables[variable_name][:] = data[:]
+        elif len(count) == 2:
+            ncid.variables[variable_name][:, :] = data[:, :]
+        elif len(count) == 3:
+            ncid.variables[variable_name][:, :, :] = data[:, :, :]
+
+    ncid.close()
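+
+# Example (illustrative filename): 'votemper' is a three_dim_variable, so a 3D
+# array gains a trailing singleton dimension before being written into the 4D
+# (time_counter, z, yb, xb) template variable:
+#   write_data_to_file('out_bdyT_y1979m01.nc', 'votemper', temper_array)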
diff --git a/pynemo/nemo_bdy_scr_coord.py b/pynemo/nemo_bdy_scr_coord.py
new file mode 100644
index 0000000000000000000000000000000000000000..79ad341a5725b33487f0e08bd3910d0390596451
--- /dev/null
+++ b/pynemo/nemo_bdy_scr_coord.py
@@ -0,0 +1,10 @@
+
+# This object is initially empty but has data bound to it
+# Equivalent to matlab scr_coord
+
+
+class ScrCoord:
+
+    def __init__(self):
+        pass
+
diff --git a/pynemo/nemo_bdy_setup.py b/pynemo/nemo_bdy_setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..60c7e45b96299873eac4d3282339306d3827c2eb
--- /dev/null
+++ b/pynemo/nemo_bdy_setup.py
@@ -0,0 +1,280 @@
+# ===================================================================
+# The contents of this file are dedicated to the public domain.  To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+'''
+Created on Wed Sep 12 08:02:46 2012
+
+Parses a file to find out which nemo boundary settings to use 
+
+@author John Kazimierz Farey
+@author James Harle
+$Last commit on:$
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+
+#External imports
+from collections import OrderedDict
+import os
+import logging
+
+class Setup(object):
+    '''
+    This class holds the settings information.
+    Invoke with a text file location; class init reads and deciphers the variables.
+
+    The attribute <settings> is a dict holding all the vars.
+    '''
+    def __init__(self, setfile):
+        """ Constructor, reads the settings file and sets the dictionary with setting name/key and
+        it's value """
+        #Logging for class
+        self.logger = logging.getLogger(__name__)
+        self.filename = setfile
+        if not setfile: # debug
+            self.filename = '../data/namelist.bdy'
+        self._load_settings()
+        self.variable_info = self.variable_info_reader(self.filename+'.info')
+
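+    # Typical use (path illustrative):
+    #   stp = Setup('namelist.bdy')
+    #   rw = stp.settings['rimwidth']
+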
+    def refresh(self):
+        """ Re loads the settings from file """
+        self._load_settings()
+        
+    def _load_settings(self):
+        """ Loads the settings from file """
+        try:
+            namelist = open(self.filename, 'r')
+        except:
+            self.logger.error("Cannot open the file:"+self.filename)
+            raise
+        data = namelist.readlines()
+        # Dictionary of all the vars in the file and a separate settings dict for boolean values
+        self.settings, self.bool_settings = _assign(_trim(data))
+        namelist.close()
+                
+    def _get_var_name_value(self, line):
+        """ splits the line into key value pair. """
+        key_value = line.split("=", 2)
+        name_prefix = key_value[0][0:2].lower()
+        name = key_value[0][3:].lower().strip() # 3 -> 0 to keep type info
+        value_str = key_value[1].strip()
+        index = '-1'
+        value = ''
+        if name_prefix == 'ln':
+            if value_str.find('true') != -1:
+                value = True
+            elif value_str.find('false') != -1:
+                value = False
+            else:
+                raise ValueError('Cannot assign %s to %s, must be boolean' %(value_str, name))
+
+        elif name_prefix == 'rn' or name_prefix == 'nn':
+            if value_str.find('.') > -1 or value_str.find('e') > -1:
+                try:
+                    value = float(value_str)
+                except ValueError:
+                    self.logger.error('Cannot assign %s to %s, must be float' %(value_str, name))
+                    raise
+            else:
+                try:
+                    value = int(value_str)
+                except ValueError:
+                    self.logger.error('Cannot assign %s to %s, must be integer' %(value_str, name))
+                    raise
+        elif name_prefix == 'cn' or name_prefix == 'sn':
+            value = value_str.strip("'")
+        elif name_prefix == 'cl':
+            name = key_value[0].split('(')
+            index = name[1].split(')')
+            name = name[0].strip()
+            index = index[0].strip()
+            value = value_str
+        else:
+            raise ValueError('%s data type is ambiguous' %key_value)
+        return name, index, value
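+
+    # Illustrative namelist lines and the (name, index, value) they parse to:
+    #   ln_tra     = .true.      -> ('tra', '-1', True)
+    #   rn_hc      = 150.0       -> ('hc', '-1', 150.0)
+    #   cn_dst_dir = '/out/'     -> ('dst_dir', '-1', '/out/')
+    #   clname(1)  = 'N1p'       -> ('clname', '1', "'N1p'")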
+
+    def write(self):
+        '''
+        This method writes the variable data back into the file
+        '''
+        try:
+            namelist = open(self.filename, 'r')
+        except:
+            self.logger.error("Cannot open the file:"+self.filename+" to write ")
+            raise
+        data = namelist.readlines()
+
+        for name in self.settings:
+            values = self.settings[name]
+            if type(values).__name__ != 'dict':
+                values = {'-1': values}
+            for index, value in values.iteritems():
+                count = -1
+                for line in data:
+#                    print line
+                    count = count + 1
+                    #find the variable
+                    line_without_comments = strip_comments(line)
+                    if line_without_comments == '':
+                        continue
+                    #print line_without_comments
+                    data_name, data_index, data_value = self._get_var_name_value(line_without_comments)
+
+                    if data_name == name:
+                        #found the variable line
+                        if data_index == index:
+                            if type(data_value).__name__ == 'bool' \
+                                and \
+                               type(value).__name__ != 'bool':
+                                data[count] = _replace_var_value(line, data_value,\
+                                                                 self.bool_settings[name])
+                                continue
+
+                            if data_value == value:
+                                break
+                            else:
+                                data[count] = _replace_var_value(line, data_value, value)
+                                break
+        namelist.close()
+        namelist = open(self.filename, 'w')
+        namelist.truncate()
+        namelist.writelines(data)
+        namelist.close()
+
+    def variable_info_reader(self, filename):
+        """ This method reads the variable description data from 'variable.info' file in the pynemo installation path
+        if it can't find the file with the same name as input bdy file with extension .info
+        Keyword arguments:
+        filename -- filename of the variables information
+        returns a dictionary with variable name and its description
+        """
+        variable_info = {}               
+        if filename is None or not os.path.exists(filename):
+            #Read the default file
+            file_path, dummy = os.path.split(__file__)
+            filename = os.path.join(file_path,'variable.info')
+        try:
+            namelist = open(filename, 'r')
+            data = namelist.readlines()
+            for line in data:
+                name = _get_var_name(line)
+                value = line.split("=", 1)[1]
+                variable_info[name[0]] = value.strip()                           
+        except IOError:
+            self.logger.error("Cannot open the  variable file:"+filename)
+        return variable_info
+        
+            
+def _trim(data):
+    """ Trims the sets of lines removing empty lines/whitespaces and removing comments
+    which start with ! """
+    newdata = []
+    while data:
+        line = data.pop(0)
+        line = line.rsplit('!')[0].strip()
+        if line != '':
+            newdata.append(line)
+    return newdata
+
+def _get_val(vars_dictionary, bool_vars_dictionary, line):
+    """ traverses input string and appends the setting name and its value to dictionary
+    of settings and also if the setting name holds a boolean value then to the dictionary
+    of boolean variables. checks the type and raises error for ambiguous values"""
+
+    logger = logging.getLogger(__name__)
+    name_prefix = line[0][0:2].lower()
+    name = line[0][3:].lower().strip() # 3 -> 0 to keep type info
+    value = line[1].strip()
+
+    if name_prefix == 'ln':
+        if value.find('true') != -1:
+            if name not in vars_dictionary:
+                vars_dictionary[name] = True
+            bool_vars_dictionary[name] = True
+        elif value.find('false') != -1:
+            if name not in vars_dictionary:
+                vars_dictionary[name] = False
+            bool_vars_dictionary[name] = False
+        else:
+            raise ValueError('Cannot assign %s to %s, must be boolean' %(value, name))
+
+    elif name_prefix == 'rn' or name_prefix == 'nn':
+        if value.find('.') > -1 or value.find('e') > -1:
+            try:
+                vars_dictionary[name] = float(value)
+            except ValueError:
+                logger.error('Cannot assign %s to %s, must be float' %(value, name))
+                raise
+        else:
+            try:
+                vars_dictionary[name] = int(value)
+            except ValueError:
+                logger.error('Cannot assign %s to %s, must be integer' %(value, name))
+                raise
+    elif name_prefix == 'cn' or name_prefix == 'sn':
+        vars_dictionary[name] = value.strip("'")
+    elif name_prefix == 'cl':
+        name = line[0].split('(')
+        index = name[1].split(')')
+        if name[0].strip() not in vars_dictionary.keys():
+            vars_dictionary[name[0].strip()] = {}
+        vars_dictionary[name[0].strip()][index[0].strip()] = value.strip()
+    else:
+        raise ValueError('%s data type is ambiguous' %line)
+
+def _replace_var_value(original_line, value, new_value):
+    """ replaces the variable name value with new_value in the original_line"""
+    if type(value).__name__ == 'bool':
+        value = str(value).lower()
+        new_value = str(new_value).lower()
+    elif type(value).__name__ == 'str': #an empty string needs to be replaced with ''
+        if value == '':
+            value = '\'\''
+            new_value = '\''+new_value+'\''
+    return original_line.replace(str(value), str(new_value), 1)
+
+def _get_var_name(line):
+    """ parses the line to find the name and if it is part of the array '()'
+    then returns name of variable and index of the array. if variable is not
+    array then only variable name and -1 in index"""
+    name_value = line.split("=", 1)
+    name_prefix = name_value[0][0:2].lower()
+    name = name_value[0][3:].lower().strip() # 3 -> 0 to keep type info
+    if name_prefix in ['ln', 'rn', 'nn', 'cn', 'sn']:
+        return name, -1
+    elif name_prefix == 'cl':
+        name = name_value[0].split('(')
+        index = name[1].split(')')
+        return name[0], index[0]
+
+# Returns a tidy dictionary of var names and values
+def _assign(data):
+    """ return a dictionary of variables and also special dictionary for boolean variable """
+    vars_dictionary = OrderedDict({})
+    bool_vars_dictionary = OrderedDict({})
+    for line in data:
+        keyvalue = line.split('=', 1)
+        _get_val(vars_dictionary, bool_vars_dictionary, keyvalue)
+    return vars_dictionary, bool_vars_dictionary
+
+def strip_comments(line):
+    """ strips the comments in the line. removes text after ! """
+    line = line.rsplit('!')[0].strip()
+    return line
diff --git a/pynemo/nemo_bdy_source_coord.py b/pynemo/nemo_bdy_source_coord.py
new file mode 100644
index 0000000000000000000000000000000000000000..23aa4c41cd7ceacb9ad9ef1065571676cd6d4627
--- /dev/null
+++ b/pynemo/nemo_bdy_source_coord.py
@@ -0,0 +1,11 @@
+
+# This object is initially empty but has data bound to it
+# Equivalent to matlab src_coord
+
+
+class SourceCoord:
+
+    def __init__(self):
+        """ This for source coordinates object attributes initialisation """
+        self.bdy_i = None
+
diff --git a/pynemo/nemo_bdy_src_time.py b/pynemo/nemo_bdy_src_time.py
new file mode 100644
index 0000000000000000000000000000000000000000..b272181b6265b4d9453cda09462820d73159c535
--- /dev/null
+++ b/pynemo/nemo_bdy_src_time.py
@@ -0,0 +1,60 @@
+##################################################
+# Written by John Kazimierz Farey, Sep 2012      #
+# Port of Matlab code of James Harle             #
+# #                                            # #
+# Init with source directory for netcdf files    #
+# Method to generate time/file list information  #
+# for a particular grid                          #
+##################################################
+
+from os import listdir
+
+from netCDF4 import Dataset, netcdftime
+import logging
+
+class SourceTime:
+
+    def __init__(self, src_dir):
+        self.src_dir = src_dir
+        self.logger = logging.getLogger(__name__)
+    # returns a list of all the relevant netcdf files
+    def _get_dir_list(self, grid):
+        fend = 'd05%s.nc' %grid.upper()
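+        # e.g. grid 't' selects files ending in 'd05T.nc'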
+        dir_list = listdir(self.src_dir)
+        for i in range(len(dir_list)):
+            if dir_list[i][-7:] != fend:
+                dir_list[i] = ''
+            else:
+                dir_list[i] = self.src_dir + dir_list[i]
+
+        dir_list.sort()
+            
+        return filter(None, dir_list)
+    
+    # Returns list of dicts of date/time info
+    # I assume there is only one date per file
+    # Each date is datetime instance. to get day etc use x.day
+    # They should be in order
+    # Matlab var dir_list is incorporated into src_time    
+    def get_source_time(self, grid, t_adjust):
+        dir_list = self._get_dir_list(grid)
+        src_time = []
+        for f in range(len(dir_list)):
+            self.logger.debug('get_source_time: %s', dir_list[f])
+            nc = Dataset(dir_list[f], 'r')
+            varid = nc.variables['time_counter']
+            f_time = {}
+            f_time['fname'] = dir_list[f]
+
+            # First 2 values are in unicode. Pray.
+            f_time['units'] = varid.units
+            f_time['calendar'] = varid.calendar
+            raw_date = varid[0] + t_adjust
+            f_time['date'] = netcdftime.num2date(raw_date, f_time['units'], 
+                                                 f_time['calendar'])
+
+            src_time.append(f_time)
+        
+        return src_time
+    
+
diff --git a/pynemo/nemo_bdy_zgrv2.py b/pynemo/nemo_bdy_zgrv2.py
new file mode 100644
index 0000000000000000000000000000000000000000..e91fb8ca9d7120b85dafc6c006d0ce81cbc7100f
--- /dev/null
+++ b/pynemo/nemo_bdy_zgrv2.py
@@ -0,0 +1,122 @@
+##############################################
+# Generates Depth information                #
+# #                                        # #
+# Written by John Kazimierz Farey, Sep 2012  #
+# Port of Matlab code of James Harle         #
+##############################################
+"""
+# NOTES:
+# I have skipped error check code
+
+# Generates depth points for t, u and v in one loop iteration
+
+Initialise with bdy t, u and v grid attributes (Grid.bdy_i)
+and settings dictionary 
+"""
+
+from reader.factory import GetFile
+import numpy as np
+import logging
+
+from utils.nemo_bdy_lib import sub2ind
+from utils.e3_to_depth import e3_to_depth
+#     pylint: disable=E1101
+# Query name
+class Depth:
+
+    def __init__(self, bdy_t, bdy_u, bdy_v, settings):
+        self.logger = logging.getLogger(__name__) 
+        self.logger.debug( 'init Depth' )
+        hc = settings['hc'] 
+        nc = GetFile(settings['dst_zgr'])#Dataset(settings['dst_zgr'], 'r')
+        mbathy = nc['mbathy'][:,:,:].squeeze() #nc.variables['mbathy'][:,:,:].squeeze()
+        # numpy requires float dtype to use NaNs
+        mbathy = np.float16(mbathy)
+        mbathy[mbathy == 0] = np.NaN 
+        nz = len(nc['nav_lev'][:])#.variables['nav_lev'][:])
+
+        # Set up arrays
+        t_nbdy = len(bdy_t[:,0])
+        u_nbdy = len(bdy_u[:,0])
+        v_nbdy = len(bdy_v[:,0])
+        zp = ['t', 'wt', 'u', 'wu', 'v', 'wv']
+        self.zpoints = {}
+        for z in zp:
+            if 't' in z:
+                nbdy = t_nbdy
+            elif 'u' in z:
+                nbdy = u_nbdy
+            elif 'v' in z:
+                nbdy = v_nbdy
+            self.zpoints[z] = np.zeros((nz, nbdy))
+
+        # Check inputs
+        # FIX ME? Errors for wrong obj arg len. probably better to work around
+        if settings['sco']:
+            # hc = ... FIX ME??
+            # Depth of water column at t-point
+            hbatt = nc['hbatt'][:,:,:]#nc.variables['hbatt'][:,:,:]
+            # Replace land with NaN   
+            hbatt[mbathy == 0] = np.NaN
+
+        # find bdy indices from subscripts
+        t_ind = sub2ind(mbathy.shape, bdy_t[:,0], bdy_t[:,1])
+        
+        u_ind = sub2ind(mbathy.shape, bdy_u[:,0], bdy_u[:,1])
+        u_ind2 = sub2ind(mbathy.shape, bdy_u[:,0] + 1, bdy_u[:,1])
+
+        v_ind = sub2ind(mbathy.shape, bdy_v[:,0], bdy_v[:,1])
+        v_ind2 = sub2ind(mbathy.shape, bdy_v[:,0], bdy_v[:,1] + 1) 
+   
+        # This is very slow
+        self.logger.debug( 'starting nc reads loop' )
+        for k in range(nz):
+            if settings['sco']:
+                # sigma coeffs at t-point (1->0 indexed)
+                gsigt = nc['gsigt'][0,k,:,:]#nc.variables['gsigt'][0,k,:,:]
+                # sigma coeffs at w-point
+                gsigw = nc['gsigw'][0,k,:,:]#nc.variables['gsigw'][0,k,:,:]
+
+                # NOTE:  check size of gsigt SKIPPED
+
+                wrk1 = (hbatt - hc) * gsigt[:,:] + (hc * (k + 0.5) / (nz - 1))
+                wrk2 = (hbatt - hc) * gsigw[:,:] + (hc * (k + 0.5) / (nz - 1))
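+                # i.e. stretched s-coordinate depths: a part controlled by the
+                # critical depth hc plus a terrain-following part scaled by
+                # (hbatt - hc), using the sigma coefficients read above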
+            else:
+                # jelt: replace 'load gdep[wt]' with 'load e3[tw]' and compute gdep[tw]
+                #wrk1 = nc['gdept'][0,k,:,:]#nc.variables['gdept'][0,k,:,:]
+                #wrk2 = nc['gdepw'][0,k,:,:]#nc.variables['gdepw'][0,k,:,:]
+                wrk1, wrk2 = e3_to_depth(nc['e3t'][0,k,:,:], nc['e3w'][0,k,:,:], nz)
+
+            # Replace deep levels that are not used with NaN
+            wrk2[mbathy + 1 < k + 1] = np.NaN
+            wrk1[mbathy < k + 1] = np.NaN
+
+            # Set u and v grid point depths
+            zshapes = {}
+            for p in self.zpoints.keys():
+                zshapes[p] = self.zpoints[p].shape
+            wshapes = []
+            wshapes.append(wrk1.shape)
+            wshapes.append(wrk2.shape)
+            wrk1, wrk2 = wrk1.flatten(1), wrk2.flatten(1)
+
+            self.zpoints['t'][k,:]  = wrk1[t_ind]
+            self.zpoints['wt'][k,:] = wrk2[t_ind]
+            
+            self.zpoints['u'][k,:]  = 0.5 * (wrk1[u_ind] + wrk1[u_ind2])
+            self.zpoints['wu'][k,:] = 0.5 * (wrk2[u_ind] + wrk2[u_ind2])
+            
+            self.zpoints['v'][k,:]  = 0.5 * (wrk1[v_ind] + wrk1[v_ind2])
+            self.zpoints['wv'][k,:] = 0.5 * (wrk2[v_ind] + wrk2[v_ind2])
+
+            for p in self.zpoints.keys():
+                self.zpoints[p] = self.zpoints[p].reshape(zshapes[p])
+            
+        self.logger.debug( 'Done loop, zpoints: %s ', self.zpoints['t'].shape)
+                                
+
+        nc.close()
+
+            
+
+
diff --git a/pynemo/nemo_bogadons_gauntlet.py b/pynemo/nemo_bogadons_gauntlet.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0dda057d34f7641ee4bfe45c956317909aa74a6
--- /dev/null
+++ b/pynemo/nemo_bogadons_gauntlet.py
@@ -0,0 +1,148 @@
+#
+# The loop from nemo_bdy_extr_tm3
+#
+# 
+#
+
+from calendar import monthrange
+from datetime import datetime
+
+import numpy as np
+from netCDF4 import Dataset
+# 'utime' is used below; it is assumed to come from the netcdftime package
+from netcdftime import utime
+
+
+class Enter:
+
+    def __init__(self, settings, sc_time, dir_list, dst_cal_type, year, month):
+
+        var_nam = ['votemper', 'vosaline']
+        sc_fields = source_fields
+
+        #self.setup = settings # dict
+        # define src/dst cals
+        sf, ed = self.cal_trans(sc_time, dst_cal_type, year, month) # FIX ME: first arg should be the source calendar
+        # W
+        DstCal = utime('seconds since %d-1-1' %year, dst_cal_type)
+        dst_start = DstCal.date2num(datetime(year, month, 1))
+        dst_end = DstCal.date2num(datetime(year, month, ed, 23, 59, 59))
+
+        self.S_cal = utime(sc_time[0]['units'], sc_time[0]['calendar'])
+        self.D_cal = utime('seconds since %d-1-1' %settings['year_000'], 
+                           settings['dst_calendar'])
+
+        for date in sc_time:
+            date['date_num'] = DstCal.date2num(date['date']) * sf
+
+        # Get first and last date within range
+        first_date, last_date = None, None
+        rev_seq = range(len(sc_time))
+        rev_seq.reverse()
+        # Multiple values.. might be broken.. 
+        for date in rev_seq:
+            if sc_time[date]['date_num'] < dst_start:
+                first_date = date #number..
+                break
+        for date in range(len(sc_time)):
+            if sc_time[date]['date_num'] > dst_end:
+                last_date = date
+                break
+
+        for date in range(first_date, last_date + 1):
+            nc = Dataset(sc_time[date]['fname'], 'r') # sc_time entries are dicts; open by file name
+            if key_vec:
+                pass
+                #nc_2 = Dataset
+                # FIX ME
+
+            # We shouldn't have to worry about counters
+            sc_bdy = np.zeros((nvar, sc_z_len, source_ind['ind'].shape[0],
+                               source_ind['ind'].shape[1]))
+            ind_vec = {}
+            # distinctive variable name since it gets lost in the huge loop
+            for shoggoth in range(nvar):
+                varid = nc.variables[var_nam[shoggoth]]
+                i, ii = source_ind['imin'], source_ind['imax']
+                j, jj = source_ind['jmin'], source_ind['jmax']
+                sc_arrays = []
+                col_sc_arrays = []
+                if key_vec:
+                    varid_2 = nc_2.variables[var_nam[shoggoth + 1]]
+                if not isslab and not key_vec:
+                    # NOTE: 0 v 1 indexing may be problematic
+                    sc_arrays.append(varid[i-1:ii, j-1:jj, :sc_z_len, :1])
+                elif key_vec:
+                    sc_arrays.append(varid[i-2:ii, j-1:jj, :sc_z_len, :1])
+                    sc_arrays.append(varid_2[i-1:ii, j-2:jj, :sc_z_len, :1])
+                    for x in 0,1:
+                        # tidy up src array - replace missing val
+                        for y in 'mv', 'fv':
+                            if not np.isnan(sc_fields[y][x]):
+                                ind_vec[y] = sc_arrays[x] == sc_fields[y][x]
+                                sc_arrays[x][ind_vec[y]] = 0
+                            else:
+                                sc_arrays[x][np.isnan(scarr)] = 0
+                        # Adjust for scaling or offsets
+                        if not np.isnan(sc_fields['sf'][x]):
+                            sc_arrays[x] *= sc_fields['sf'][x]
+                        if not np.isnan(sc_fields['os'][x]):
+                            sc_arrays[x] += sc_fields['os'][x]
+                
+                        # Colocate velocity points on the T grid prior to rotation
+                        # FIX ME: ':-None' is not a valid slice; the no-trim case
+                        # needs an explicit slice(None) (or equivalent) here
+                        axis = [1, None]
+                        col = 0.5 * (sc_arrays[x][:-axis[0],:-axis[1],:] + 
+                                     sc_arrays[x][axis[0]:,axis[1]:,:])
+                        col[col == 0] = np.NaN
+                        col_sc_arrays.append(col)
+                        axis.reverse()
+                
+                # This is a slab
+                else:
+                    sc_arrays.append(varid[i-1:ii, j-1:jj, :1])
+                    #query var names
+                    if msk and first and shoggoth==0:
+                        # Open another magic file and do stuff
+                        nc3 = Dataset(source_mask, 'r')
+                        varid_3 = nc3.variables['tmaskutil']
+                        msk_array = varid_3[i-1:ii, j-1:jj, :1]
+                    if msk: #must be true for msk array ??...
+                        sc_arrays[0][msk_array == 0] = np.NaN
+
+        
+            # Finished reading Source data
+
+            #for depth_val in range(sc_z_len):
+            #    tmp_arrays = []
+            #    if not key_vec:
+            #        tmp_arrays.append(sc_arrays[0][:,:depth_val]
+
+
+    def _fv_mv_to_zero(self, scarr, indvec, sc_fields, pos):
+        
+        for x in 'mv', 'fv':
+            if not np.isnan(sc_fields[x][pos]):
+                indvec[x] = scarr == sc_fields[x][pos]
+                scarr[indvec[x]] = 0
+            else:
+                scarr[np.isnan(scarr)] = 0
+        return scarr, indvec
+
+    # Convert numeric date from source to dest
+    def convert_date(self, date):
+        
+        val = self.S_cal.num2date(date)
+        return self.D_cal.date2num(val)
+
+    def cal_trans(self, source, dest, year, month):
+        vals = {'gregorian': [monthrange(year, month)[1], 31], 'noleap': 
+                [365., 31],'360_day': [360., 30]}
+        if source not in vals.keys():
+            raise ValueError('Unknown calendar type: %s' %source)
+        
+        sf = vals[source][0] / vals[dest][0]
+        
+        return sf, vals[dest][1]
+
+
+
+
+
diff --git a/pynemo/nemo_coord_gen_pop.py b/pynemo/nemo_coord_gen_pop.py
new file mode 100644
index 0000000000000000000000000000000000000000..65d7aa00b4a5b26e95d8991e312bb0f746b506d1
--- /dev/null
+++ b/pynemo/nemo_coord_gen_pop.py
@@ -0,0 +1,150 @@
+########################################################
+# Creates Nemo bdy indices for t, u, v points          #
+##                                                    ##
+# Written by John Kazimierz Farey, started 30 Aug 2012 #
+# Port of Matlab code by James Harle                   #
+########################################################
+
+'''
+This module combines matlab coord gen and pop. 
+Initialise with netcdf file name and dictionary containing 
+all bdy grids (objects)
+
+'''
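+# A minimal usage sketch (names are illustrative; bdy_grids is the dictionary
+# of Boundary objects built elsewhere in pynemo):
+#
+#     co = Coord('coordinates.bdy.nc', bdy_grids)
+#     co.populate('domain_hgr.nc')  # horizontal grid source file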
+
+from datetime import datetime
+
+from netCDF4 import Dataset
+import numpy as np
+import logging
+
+class Coord:
+
+    _grid = ['t','u','v']
+    
+    # Init with nc fname and dictionary of bdy inds
+    def __init__(self, fname, bdy_ind):
+        self.bdy_ind = bdy_ind
+        self.logger = logging.getLogger(__name__)
+        self.logger.debug( fname )
+        if not fname:
+            raise ValueError('A netCDF file name is required to create the coordinates file') # TODO: review error handling
+         
+        # Enter define mode
+        self.ncid = Dataset(fname, 'w', clobber=True, format='NETCDF4')
+        
+        # Define Dimensions
+        self.dim_id = self._create_dims()
+        
+        # Create tidy dictionaries to hold all our pretty variables
+        self.var_nb_ij_id = self._build_dict(['i', 'j'], 
+                                 ['nb', 'i4', 'unitless', 0])
+        self.var_nb_r_id = self._build_dict(['r'], 
+                                 ['nb', 'i4', 'unitless', 0]) 
+        self.var_g_lamphi_id = self._build_dict(['lam', 'phi'],
+                                 ['g', 'f4', 'degrees_east', 'longitude'])
+        self.var_e_12_id = self._build_dict(['1', '2'], 
+                                 ['e', 'f4', 'metres', 'scale factor'])
+
+        # Assign Global Attributes
+        self.ncid.file_name = fname
+        self.ncid.creation_date = str(datetime.now())
+        self.ncid.institution = 'National Oceanography Centre, Liverpool, U.K.'
+     
+        # Leave Define Mode
+
+# # # # # # # # #
+# # Functions # #
+# # # # # # # # # 
+        
+    def closeme(self):
+        self.ncid.close()
+        
+    # Creates dims and returns a dictionary of them
+    def _create_dims(self):
+        ret = {'xb':{}}
+        ret['xb']['t'] = self.ncid.createDimension('xbT', 
+                                                   len(self.bdy_ind['t'].bdy_i))
+        ret['xb']['u'] = self.ncid.createDimension('xbU', 
+                                                   len(self.bdy_ind['u'].bdy_i))
+        ret['xb']['v'] = self.ncid.createDimension('xbV', 
+                                                   len(self.bdy_ind['v'].bdy_i))
+        ret['yb'] = self.ncid.createDimension('yb', 1)
+   
+        return ret
+    
+    # Sets up a grid dictionary
+    def _build_dict(self, dim, units):
+        ret = {}
+        for g in self._grid:
+            ret[g] = {}
+            for d in dim:
+                ret[g][d] = self._add_vars(d, g, units)
+
+        return ret
+
+    # creates a var w/ attributes
+    def _add_vars(self, dim, grd, unt):
+        dat = unt[2]
+        lname = unt[3]
+        if dim == 'phi':
+            dat = 'degrees_north'
+            lname = 'latitude'
+        elif lname == 0:
+            lname = 'Bdy %s indices'%dim
+        lname = lname + ' (%s)'%grd.upper()
+        var = self.ncid.createVariable('%s%s%s'%(unt[0],dim,grd), 
+                                       unt[1], ('yb', 'xb'+ grd.upper()))
+        var.short_name = '%s%s%s'%(unt[0],dim,grd)
+        var.units = dat
+        var.long_name = lname 
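+        # For example, the ['i', 'j'] / 'nb' combination defined in __init__
+        # yields variables named nbit, nbjt, nbiu, nbju, nbiv and nbjv,
+        # following the NEMO boundary naming convention.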
+        
+        return var
+
+    def populate(self, ncfname):
+        self.set_lenvar(self.var_nb_ij_id)
+        self.set_lenvar(self.var_nb_r_id)
+
+        ncid2 = Dataset(ncfname, 'r')
+        self.set_lenvar(self.var_g_lamphi_id, ncid2, 'g')
+        self.set_lenvar(self.var_e_12_id, ncid2, 'e')
+        ncid2.close()
+        
+        self.closeme()
+
+    # Sets the data of each variable in the dictionary provided.
+    # Specifying nc and unt pulls the data from a secondary file;
+    # otherwise it is pulled from the class's bdy_ind dictionary.
+    def set_lenvar(self, vardic, nc=None, unt=None):
+        for ind in vardic:
+            x = 0
+            data = None
+            for dim in vardic[ind]:
+                if nc is not None:
+                    data = nc.variables['%s%s%s'%(unt, dim, ind)][:]
+                    self.logger.debug('%s %s %s %s %s %s', ind, self.bdy_ind[ind].bdy_i[:,1], data.shape, dim, unt, ind)
+                    data = data.squeeze()
+                    self.logger.debug('%s %s %s', ind, self.bdy_ind[ind].bdy_i[:,1], data.shape)
+                    data = data[(self.bdy_ind[ind].bdy_i[:,1]),
+                                (self.bdy_ind[ind].bdy_i[:,0])]
+                elif len(vardic[ind]) == 1:
+                    data = self.bdy_ind[ind].bdy_r[:]
+                else:
+                    data = self.bdy_ind[ind].bdy_i[:,x]
+                    x = 1
+
+                # add 1 to all indices as they're going to be used in
+                # a Fortran (1-based indexing) environment
+                data = data + 1
+                
+
+                vardic[ind][dim][:] = data
+ 
+
+
diff --git a/pynemo/profile.py b/pynemo/profile.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc8a3468d9908c21de4de1ac749efa75f07bb23c
--- /dev/null
+++ b/pynemo/profile.py
@@ -0,0 +1,475 @@
+# ===================================================================
+# The contents of this file are dedicated to the public domain.  To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+'''
+Created on Wed Sep 12 08:02:46 2012
+
+The main application script for the NRCT. 
+
+@author James Harle
+@author John Kazimierz Farey
+@author Srikanth Nagella
+$Last commit on:$
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+
+#External imports
+import time
+import logging
+import numpy as np
+from PyQt4.QtGui import QMessageBox
+
+#Local imports
+from pynemo import pynemo_settings_editor
+from pynemo import nemo_bdy_ncgen as ncgen
+from pynemo import nemo_bdy_ncpop as ncpop
+from pynemo import nemo_bdy_source_coord as source_coord
+from pynemo import nemo_bdy_dst_coord as dst_coord
+from pynemo import nemo_bdy_setup as setup
+from pynemo import nemo_bdy_gen_c as gen_grid
+from pynemo import nemo_coord_gen_pop as coord
+from pynemo import nemo_bdy_zgrv2 as zgrv
+from pynemo import nemo_bdy_extr_tm3 as extract
+
+from pynemo.reader.factory import GetFile
+from pynemo.reader import factory
+from pynemo.tide import nemo_bdy_tide3 as tide
+from pynemo.tide import nemo_bdy_tide_ncgen
+from pynemo.utils import Constants
+from pynemo.gui.nemo_bdy_mask import Mask as Mask_File
+
+class Grid(object):
+    """ 
+    A Grid object that stores bdy grid information
+    """    
+    def __init__(self):
+        self.bdy_i       = None # bdy indices
+        self.bdy_r       = None # associated rimwidth values
+        self.grid_type   = None # this can be T/U/V
+        self.fname_2     = None # 2nd file for vector rotation
+        self.max_i       = None # length of i-axis in fname_2
+        self.max_j       = None # length of j-axis in fname_2
+        self.source_time = None # netcdftime information from parent files
+
+logger = logging.getLogger(__name__)
+logging.basicConfig(filename='nrct.log', level=logging.INFO)
+
+def process_bdy(setup_filepath=0, mask_gui=False):
+    """ 
+    Main entry for processing BDY lateral boundary conditions.
+
+    This is the main script that handles all the calls to generate open 
+    boundary conditions for a given regional domain. Input options are handled 
+    in a NEMO style namelist (namelist.bdy). There is an optional GUI allowing
+    the user to create a mask that defines the extent of the regional model.
+
+    Args:
+        setup_filepath (str) : file path to find namelist.bdy
+        mask_gui       (bool): whether use of the GUI is required
+
+    """
+    # Start Logger
+    
+    logger.info('Start NRCT Logging: '+time.asctime())
+    logger.info('============================================')
+    
+    SourceCoord = source_coord.SourceCoord()
+    DstCoord    = dst_coord.DstCoord()
+
+    Setup = setup.Setup(setup_filepath) # default settings file
+    settings = Setup.settings
+
+    logger.info('Reading grid completed')
+
+    bdy_msk = _get_mask(Setup, mask_gui)
+    DstCoord.bdy_msk = bdy_msk == 1
+    
+    logger.info('Reading mask completed')
+    
+    bdy_ind = {} # define a dictionary to hold the grid information
+    
+    for grd in ['t', 'u', 'v']:
+        bdy_ind[grd] = gen_grid.Boundary(bdy_msk, settings, grd)
+        logger.info('Generated BDY %s information', grd)
+        logger.info('Grid %s has shape %s', grd, bdy_ind[grd].bdy_i.shape)
+
+    # TODO: Write in option to separate out disconnected LBCs
+    
+    # Write out grid information to coordinates.bdy.nc
+
+    co_set = coord.Coord(settings['dst_dir']+'/coordinates.bdy.nc', bdy_ind)
+    co_set.populate(settings['dst_hgr'])
+    logger.info('File: coordinates.bdy.nc generated and populated')
+
+    # Identify number of boundary points
+    
+    nbdy = {}
+    
+    for grd in ['t', 'u', 'v']:
+        nbdy[grd] = len(bdy_ind[grd].bdy_i[:, 0])
+
+    # Gather grid information
+    
+    # TODO: insert some logic here to account for 2D or 3D src_zgr
+    
+    logger.info('Gathering grid information')
+    nc = GetFile(settings['src_zgr'])
+    SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
+    logger.info('Source depth levels shape: %s', SourceCoord.zt.shape)
+    nc.close()
+
+    # Define z at t/u/v points
+
+    z = zgrv.Depth(bdy_ind['t'].bdy_i,
+                   bdy_ind['u'].bdy_i,
+                   bdy_ind['v'].bdy_i, settings)
+
+    # TODO: put conditional here as we may want to keep data on parent
+    #       vertical grid
+   
+    DstCoord.depths = {'t': {}, 'u': {}, 'v': {}}
+
+    for grd in ['t', 'u', 'v']:
+        DstCoord.depths[grd]['bdy_H']  = np.nanmax(z.zpoints['w'+grd], axis=0)
+        DstCoord.depths[grd]['bdy_dz'] = np.diff(z.zpoints['w'+grd], axis=0)
+        DstCoord.depths[grd]['bdy_z']  = z.zpoints[grd]
+
+    logger.info('Depths defined')
+    
+    # Gather horizontal grid information
+
+    nc = GetFile(settings['src_hgr'])
+    SourceCoord.lon = nc['glamt'][:,:]
+    SourceCoord.lat = nc['gphit'][:,:]
+    
+    try: # if they are masked arrays convert them to normal arrays
+        SourceCoord.lon = SourceCoord.lon.filled()
+    except AttributeError:
+        pass
+    try:
+        SourceCoord.lat = SourceCoord.lat.filled()
+    except AttributeError:
+        pass
+        
+    nc.close()
+
+    DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}}
+
+    nc = GetFile(settings['dst_hgr'])
+
+    # Read and assign horizontal grid data
+    
+    for grd in ['t', 'u', 'v']:
+        DstCoord.lonlat[grd]['lon'] = nc['glam' + grd][0, :, :]
+        DstCoord.lonlat[grd]['lat'] = nc['gphi' + grd][0, :, :]
+    
+    nc.close()
+
+    logger.info('Grid coordinates defined')
+    
+    # Identify lons/lats of the BDY points
+    
+    DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {}}
+     
+    for grd in ['t', 'u', 'v']:
+        for l in ['lon', 'lat']:
+            DstCoord.bdy_lonlat[grd][l] = np.zeros(nbdy[grd])
+
+    for grd in ['t', 'u', 'v']:
+        for i in range(nbdy[grd]):
+            x = bdy_ind[grd].bdy_i[i, 1]
+            y = bdy_ind[grd].bdy_i[i, 0]
+            DstCoord.bdy_lonlat[grd]['lon'][i] =                              \
+                                              DstCoord.lonlat[grd]['lon'][x, y]
+            DstCoord.bdy_lonlat[grd]['lat'][i] =                              \
+                                              DstCoord.lonlat[grd]['lat'][x, y]
+
+        DstCoord.lonlat[grd]['lon'][DstCoord.lonlat[grd]['lon'] > 180] -= 360
+
+    logger.info('BDY lons/lats identified from %s', settings['dst_hgr'])
+
+    # Set up time information
+    
+    t_adj = settings['src_time_adj'] # any time adjustments?
+    reader = factory.GetReader(settings['src_dir'],t_adj)
+    for grd in ['t', 'u', 'v']:
+        bdy_ind[grd].source_time = reader[grd]
+ 
+    unit_origin = '%d-01-01 00:00:00' %settings['base_year']
+
+    # Extract source data on dst grid
+
+    if settings['tide']:
+        if settings['tide_model']=='tpxo':
+            cons = tide.nemo_bdy_tpx7p2_rot(
+                Setup, DstCoord, bdy_ind['t'], bdy_ind['u'], bdy_ind['v'],
+                                                            settings['clname'])
+        elif settings['tide_model']=='fes':
+            logger.error('Tidal model: %s, not yet implemented', 
+                         settings['tide_model'])
+            return
+        else:
+            logger.error('Tidal model: %s, not recognised', 
+                         settings['tide_model'])
+            return
+            
+        write_tidal_data(Setup, DstCoord, bdy_ind, settings['clname'], cons)
+
+        logger.info('Tidal constituents written to file')
+    
+    # Set the year and month range
+    
+    yr_000 = settings['year_000']
+    yr_end = settings['year_end']
+    mn_000 = settings['month_000']
+    mn_end = settings['month_end']
+    
+    if yr_000 > yr_end:
+        logger.error('Please check the nn_year_000 and nn_year_end '+
+                     'values in input bdy file')
+        return
+    
+    yrs = range(yr_000, yr_end+1)
+    
+    if yr_end - yr_000 >= 1:
+        if len(range(mn_000, mn_end+1)) < 12:
+            logger.info('Warning: All months will be extracted as the number '+
+                        'of years is greater than 1')
+        mns = range(1,13)
+    else:
+        mn_000 = settings['month_000']
+        mn_end = settings['month_end']
+        if mn_end > 12 or mn_000 < 1:
+            logger.error('Please check the nn_month_000 and nn_month_end '+
+                         'values in input bdy file')
+            return
+        mns = range(mn_000, mn_end+1)
+    
+    # Enter the loop for each year and month extraction
+    
+    logger.info('Entering extraction loop')
+    
+    ln_dyn2d   = settings['dyn2d']            
+    ln_dyn3d   = settings['dyn3d'] # are total or bc velocities required
+    ln_tra     = settings['tra']          
+    ln_ice     = settings['ice']   
+
+    # Define mapping of variables to grids with a dictionary
+    
+    emap = {}
+    grd  = [  't',  'u',  'v']
+    pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
+    
+    # TODO: The following is a temporary stopgap to assign variables. In
+    # future we need a slicker way of determining the variables to extract,
+    # perhaps by scraping the .ncml file - this way biogeochemical tracers
+    # can be included in the ln_tra = .true. option without having to
+    # declare them explicitly.
+
+    var_in = {}
+    for g in range(len(grd)):
+        var_in[grd[g]] = []
+        
+    if ln_tra:
+        var_in['t'].extend(['votemper', 'vosaline'])
+        
+    if ln_dyn2d or ln_dyn3d:
+        var_in['u'].extend(['vozocrtx', 'vomecrty'])
+        var_in['v'].extend(['vozocrtx', 'vomecrty'])
+    
+    if ln_dyn2d:
+        var_in['t'].extend(['sossheig'])
+        
+    if ln_ice:
+        var_in['t'].extend(['ice1', 'ice2', 'ice3'])
+    
+    # As variables are associated with grd there must be a filename attached
+    # to each variable
+    
+    for g in range(len(grd)):
+        
+        if len(var_in[grd[g]])>0:
+            emap[grd[g]]= {'variables': var_in[grd[g]],
+                           'pair'     : pair[g]} 
+
+    extract_obj = {}
+    
+    # Initialise the mapping indices for each grid 
+    
+    for key, val in emap.items():
+        
+        extract_obj[key] = extract.Extract(Setup.settings, 
+                                           SourceCoord, DstCoord,
+                                           bdy_ind, val['variables'], 
+                                           key, val['pair'])
+    
+    # TODO: Write the nearest neighbour parent grid point to each bdy point
+    #       possibly to the coordinates.bdy.nc file to help with comparison
+    #       plots later.
+        
+    for year in yrs:
+        for month in mns:
+            for key, val in emap.items():
+                
+                # Extract the data for a given month and year
+                
+                extract_obj[key].extract_month(year, month)
+                
+                # Interpolate/stretch in time if the time frequency is not a
+                # factor of a month and/or parent:child calendars differ
+                
+                extract_obj[key].time_interp(year, month)
+                
+                # Finally write to file
+                
+                extract_obj[key].write_out(year, month, bdy_ind[key], 
+                                           unit_origin)
+                
+    logger.info('End NRCT Logging: '+time.asctime())
+    logger.info('==========================================')
+                
+
+def write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons):
+    """ 
+    This method writes the tidal data to netcdf file.
+
+    Args:
+        setup_var     (obj) : Setup object holding the namelist settings
+        dst_coord_var (obj) : DstCoord object with destination grid info
+        grid          (dict): dictionary of Grid objects for the t/u/v grids
+        tide_cons     (list): list of tidal constituents to write out
+        cons          (dict): cos/sin components for each constituent
+    """
+    indx = 0
+    
+    # Mapping of variable names to grid types
+    
+    tmap = {}
+    grd = ['t', 'u', 'v']
+    var = ['z', 'u', 'v']
+    des = ['tidal elevation components for:',
+           'tidal east velocity components for:',
+           'tidal north velocity components for:']
+    
+    for g in range(len(grd)):
+        bdy_r = grid[grd[g]].bdy_r
+        tmap[grd[g]]= {'nam': var[g], 'des': des[g], 
+                       'ind': np.where(bdy_r == 0),
+                       'nx' : len(grid[grd[g]].bdy_i[bdy_r == 0, 0])}
+        
+    # Write constituents to file
+    
+    for tide_con in tide_cons:
+        
+        const_name = setup_var.settings['clname'][tide_con]
+        const_name = const_name.replace("'", "").upper()
+
+        for key,val in tmap.items():
+            
+            fout_tide = setup_var.settings['dst_dir']+             \
+                        setup_var.settings['fn']+                  \
+                        '_bdytide_'+const_name+'_grd_'+            \
+                        val['nam'].upper()+'.nc'
+            
+            nemo_bdy_tide_ncgen.CreateBDYTideNetcdfFile(fout_tide, 
+                            val['nx'], 
+                            dst_coord_var.lonlat['t']['lon'].shape[1],
+                            dst_coord_var.lonlat['t']['lon'].shape[0], 
+                            val['des']+tide_con, 
+                            setup_var.settings['fv'], key.upper())
+            
+            ncpop.write_data_to_file(fout_tide, val['nam']+'1', 
+                                     cons['cos'][val['nam']][indx])
+            ncpop.write_data_to_file(fout_tide, val['nam']+'2', 
+                                     cons['sin'][val['nam']][indx])
+            ncpop.write_data_to_file(fout_tide, 'bdy_msk',
+                                     dst_coord_var.bdy_msk)
+            ncpop.write_data_to_file(fout_tide, 'nav_lon',
+                                     dst_coord_var.lonlat['t']['lon'])
+            ncpop.write_data_to_file(fout_tide, 'nav_lat',
+                                     dst_coord_var.lonlat['t']['lat'])
+            ncpop.write_data_to_file(fout_tide, 'nbidta',
+                                     grid[key].bdy_i[val['ind'], 0]+1)
+            ncpop.write_data_to_file(fout_tide, 'nbjdta',
+                                     grid[key].bdy_i[val['ind'], 1]+1)
+            ncpop.write_data_to_file(fout_tide, 'nbrdta',
+                                     grid[key].bdy_r[val['ind']]+1)
+        
+        # Iterate over constituents
+        
+        indx += 1
+
+def _get_mask(Setup, mask_gui):
+    """ 
+    Read mask information from file or open GUI.
+
+    This method reads the mask information from the netcdf file or opens a gui
+    to create a mask depending on the mask_gui input. The default mask data 
+    uses the bathymetry and applies a 1pt halo.
+
+    Args:
+        Setup    (obj) : settings for bdy
+        mask_gui (bool): whether use of the GUI is required
+
+    Returns:
+        numpy.array     : a mask array of the regional domain
+    """
+    # Initialise bdy_msk array
+    
+    bdy_msk = None
+    
+    if mask_gui: # Do we activate the GUI
+        
+        # TODO: I do not like the use of _ for a dummy variable - better way?
+        
+        _, mask = pynemo_settings_editor.open_settings_dialog(Setup)
+        bdy_msk = mask.data
+        Setup.refresh()
+        logger.info('Using GUI defined mask')
+    else: # Try and read the mask from file
+        try:
+            if (Setup.bool_settings['mask_file'] and 
+                Setup.settings['mask_file'] is not None):
+                mask = Mask_File(Setup.settings['bathy'], 
+                                 Setup.settings['mask_file'])
+                bdy_msk = mask.data
+                logger.info('Using input mask file')
+            elif Setup.bool_settings['mask_file']:
+                logger.error('Mask file is not given')
+                return
+            else: # No mask file specified then use default 1px halo mask
+                logger.warning('Using default mask with bathymetry!')
+                mask = Mask_File(Setup.settings['bathy'])
+                mask.apply_border_mask(Constants.DEFAULT_MASK_PIXELS)
+                bdy_msk = mask.data
+        except:
+            logger.error('Failed to determine the mask')
+            return
+    
+    if np.amin(bdy_msk) == 0: # Mask is not set, so set border to 1px
+        logger.warning('Setting the mask with a 1 grid point border')
+        QMessageBox.warning(None,'NRCT', 'Mask is not set, setting a 1 grid '+
+                                         'point border mask')
+        if (bdy_msk is not None and 1 < bdy_msk.shape[0] and 
+            1 < bdy_msk.shape[1]):
+            tmp = np.ones(bdy_msk.shape, dtype=bool)
+            tmp[1:-1, 1:-1] = False
+            bdy_msk[tmp] = -1
+            
+    return bdy_msk
diff --git a/pynemo/pynemo_exe.py b/pynemo/pynemo_exe.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcceac09a27f1caa53b5fe7d3fe0163aaf79370a
--- /dev/null
+++ b/pynemo/pynemo_exe.py
@@ -0,0 +1,49 @@
+'''
+Entry for the project
+
+@author: Mr. Srikanth Nagella
+'''
+
+import sys, getopt
+import profile
+import logging
+
+# Logging set to info
+logging.basicConfig(level=logging.INFO)
+import time
+def main():
+    """ Main function which checks the command line parameters and
+        passes them to the profile module for processing """
+
+    setup_file = ''
+    mask_gui = False
+    try:
+        opts, dummy_args = getopt.getopt(sys.argv[1:], "hs:g", ["help","setup=","mask_gui"])
+    except getopt.GetoptError:
+        print "usage: pynemo -g -s <namelist.bdy> "
+        sys.exit(2)
+
+    for opt, arg in opts:
+        if opt == "-h":
+            print "usage: pynemo [-g] -s <namelist.bdy> "
+            print "       -g (optional) will open settings editor before extracting the data"
+            print "       -s <bdy filename> file to use"
+            sys.exit()
+        elif opt in ("-s", "--setup"):
+            setup_file = arg
+        elif opt in("-g", "--mask_gui"):
+            mask_gui = True
+
+    if setup_file == "":
+        print "usage: pynemo [-g] -s <namelist.bdy> "
+        sys.exit(2)
+
+    #Logger
+    #logger = logging.getLogger(__name__)
+    t0 = time.time()
+    profile.process_bdy(setup_file, mask_gui)
+    t1 = time.time()
+    print "Execution Time: %s" % (t1-t0)
+    
+if __name__ == "__main__":
+    main()
diff --git a/pynemo/pynemo_ncml_generator.py b/pynemo/pynemo_ncml_generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1295b164ea1b467eda540c500a8da9038bb87ba
--- /dev/null
+++ b/pynemo/pynemo_ncml_generator.py
@@ -0,0 +1,24 @@
+'''
+Created on 2 Jul 2015
+
+The main application object for hosting the pynemo ncml editor.
+Used for development purposes to display the ncml editor dialog.
+
+@author: Shirley Crompton, UK Science and Technology Facilities Council
+'''
+import sys
+from PyQt4.QtGui import *
+from gui import nemo_ncml_generator as ncml_generator
+import logging
+# Logging set to info
+logging.basicConfig(level=logging.INFO)
+
+def main():
+    """ Command line execution method which check the input arguments and passes on to
+    method to open the ncml generator window"""
+    app = QApplication(sys.argv)
+    ex = ncml_generator.Ncml_generator(None)
+    sys.exit(app.exec_())
+
+if __name__ == '__main__':
+    main() 
\ No newline at end of file
diff --git a/pynemo/pynemo_settings_editor.py b/pynemo/pynemo_settings_editor.py
new file mode 100644
index 0000000000000000000000000000000000000000..0498bb3a7db269336afd5110c6614614308287b0
--- /dev/null
+++ b/pynemo/pynemo_settings_editor.py
@@ -0,0 +1,56 @@
+'''
+Created on 7 Jan 2015
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from PyQt4 import QtGui
+
+from gui.nemo_bdy_input_window import InputWindow
+import nemo_bdy_setup
+
+import sys, getopt
+
+def open_settings_window(fname):
+    """ Main method which starts a Qt application and gives user
+    an option to pick a namelist.bdy file to edit. Once user selects it
+    it will open a dialog box where users can edit the parameters"""
+    app = QtGui.QApplication(sys.argv)
+    if fname is None:
+        fname = QtGui.QFileDialog.getOpenFileName(None, 'Open file')
+
+    setup = nemo_bdy_setup.Setup(fname)#'../../data/namelisttest.bdy')
+    ex = InputWindow(setup)
+    ex.nl_editor.btn_cancel.clicked.connect(lambda: sys.exit(0))
+    return app.exec_(), ex.mpl_widget.mask
+
+def open_settings_dialog(setup):
+    """ This method is to start the settings window using the setup settings provided
+    in the input. On clicking the cancel button it doesn't shutdown the applicaiton
+    but carries on with the execution"""
+    app = QtGui.QApplication(sys.argv)
+    ex = InputWindow(setup)
+    ex.nl_editor.btn_cancel.clicked.connect(app.quit)
+    return app.exec_(), ex.mpl_widget.mask
+
+def main():
+    """ Command line execution method which check the input arguments and passes on to
+    method to open the settings window"""
+    setup_file = None
+    try:
+        opts, dummy_args = getopt.getopt(sys.argv[1:], "hs:", ["help", "setup="])
+    except getopt.GetoptError:
+        print "usage: pynemo_settings_editor -s <namelist.bdy> "
+        sys.exit(2)
+
+    for opt, arg in opts:
+        if opt == "-h":
+            print "usage: pynemo_settings_editor -s <namelist.bdy> "
+            sys.exit()
+        elif opt in ("-s", "--setup"):
+            setup_file = arg
+    sys.exit(open_settings_window(setup_file))
+
+if __name__ == '__main__':
+    main()
diff --git a/pynemo/reader/__init__.py b/pynemo/reader/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/pynemo/reader/directory.py b/pynemo/reader/directory.py
new file mode 100644
index 0000000000000000000000000000000000000000..59d32f4f09d50947970d61e1b16dd4cdfe4e98e3
--- /dev/null
+++ b/pynemo/reader/directory.py
@@ -0,0 +1,255 @@
+'''
+This is an abstraction for the data repository
+@author: Mr. Srikanth Nagella
+'''
+from os import listdir
+import numpy as np
+from netCDF4 import Dataset
+from netCDF4 import netcdftime
+import copy
+import logging
+class Reader(object):
+    '''
+    This provides a reader for all the files in the directory as one
+    single object.
+    Usage:
+        >>> reader = Reader("Folder path")
+        >>> reader['t']['votemper'][:,:,:,:]
+    '''
+    grid_type_list = ['t','u','v','i']
+    def __init__(self, directory, time_adjust):
+        '''
+        This takes in directory path as input and returns the required information to the
+        bdy.
+        Keyword arguments:
+        directory -- The directory in which to look for the files
+        time_adjust -- amount of time to be adjusted to the time read from file.
+        '''
+        self.directory = directory
+        self.day_interval = 1
+        self.hr_interval = 0
+        self.grid_source_data = {}
+        for grid_type in self.grid_type_list:
+            self.grid_source_data[grid_type] = self._get_source_timedata(grid_type, time_adjust)
+        if self.grid_type_list is not None and len(self.grid_source_data) != 0:
+            self._calculate_time_interval()
+
+    def _get_dir_list(self, grid):
+        """
+        This method scans the directory for netcdf files related to the input
+        grid, i.e. files ending with the grid name.
+        Keyword arguments:
+        grid -- grid name eg. 't','v','u','i'
+        """
+        fend = '%s.nc' %grid.upper()
+        dir_list = listdir(self.directory)
+        for i in range(len(dir_list)):
+            if dir_list[i][-4:] != fend:
+                dir_list[i] = ''
+            else:
+                dir_list[i] = self.directory + dir_list[i]
+
+        dir_list.sort()
+        return filter(None, dir_list)
+
+    def _delta_time_interval(self, time1, time2):
+        """ Get the difference between the two times in days and hours"""
+        timedif = time2 - time1
+        days = timedif / (60 * 60 * 24)
+        hrs = timedif % (60 * 60 * 24)
+        hrs = hrs / (60 * 60)
+        return days, hrs
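+    # e.g. a difference of 90000 seconds gives (1 day, 1 hr), assuming the
+    # time_counter values are expressed in seconds.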
+    
+    def _get_source_timedata(self, grid, t_adjust):
+        """ Get the source time data information. builds up sourcedata objects of a given grid """
+        dir_list = self._get_dir_list(grid)
+        group = GridGroup()
+        group.data_list = []
+        group.time_counter = []        
+        group.date_counter = []
+        for filename in dir_list:   
+            nc = Dataset(filename, 'r')
+            varid = nc.variables['time_counter'] 
+            for index in range(0,len(varid)):
+                x = [filename, index]
+                group.data_list.append(x)
+                group.time_counter.append(varid[index]+t_adjust)
+                group.date_counter.append(netcdftime.utime(varid.units,
+                                                           varid.calendar).num2date(varid[index]+t_adjust))
+            group.units = varid.units
+            group.calendar = varid.calendar
+            nc.close()
+        # sort the file/time entries chronologically, in case the
+        # alphabetical file order does not match the time order
+        sort_index = np.argsort(group.time_counter)
+        group.data_list = [group.data_list[i] for i in sort_index]
+        group.time_counter = [group.time_counter[i] for i in sort_index]
+        group.date_counter = [group.date_counter[i] for i in sort_index]
+        return group
+
+    def _calculate_time_interval(self):
+        """ This method will calculate the time interval of the each grid. If all the grids
+        get the same interval then it sets it to the days and hours otherwise it throws an
+        error"""
+        days = set()
+        hrs = set()
+        for grid_type in self.grid_source_data.keys():
+            day, hr = self._delta_time_interval(self.grid_source_data[grid_type].time_counter[0],
+                                                self.grid_source_data[grid_type].time_counter[1])
+            days.add(day)
+            hrs.add(hr)
+        if len(days) != 1 or len(hrs) != 1:
+            raise Exception('The time interval is not the same across all grids')
+        self.day_interval = list(days)[0]
+        self.hr_interval = list(hrs)[0] 
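+        # e.g. daily mean files spaced one day apart on every grid give
+        # day_interval = 1 and hr_interval = 0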
+        
+    def __getitem__(self,val):
+        if val in self.grid_type_list:
+            return self.grid_source_data[val]
+        else:
+            return None
+    
+class GridGroup:
+    """ Holds the file list and time information for one grid type """
+    logger = logging.getLogger(__name__)
+    def __init__(self):
+        pass
+    def __getitem__(self,val):
+        return Variable(self.data_list, val)
+    
+    def get_meta_data(self, variable, source_dic):
+        """ Returns a dictionary with meta data information correspoinding to the variable """
+        #source_dic = {}
+        try:
+            var = self.__getitem__(variable)
+            attrs = var.get_attribute_values(['missing_value','scale_factor', 'add_offset', '_FillValue'])
+            source_dic['sf'] = 1
+            source_dic['os'] = 0
+            if attrs['missing_value'] is not None:            
+                source_dic['mv'] = attrs['missing_value']
+            if attrs['scale_factor'] is not None:
+                source_dic['sf'] = attrs['scale_factor']
+            if attrs['add_offset'] is not None:                            
+                source_dic['os'] = attrs['add_offset']
+            if attrs['_FillValue'] is not None:                
+                source_dic['fv'] = attrs['_FillValue']
+            return source_dic            
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the source files for variable '+variable)
+        return None  
+    
+class Variable(object):
+    time_counter_const = "time_counter"
+    def __init__(self, filenames, variable):
+        self.logger = logging.getLogger(__name__)
+        self.variable = variable
+        self.file_names = filenames
+        self.dimensions = self._get_dimensions()
+        self._set_time_dimension_index()
+        
+    def __str__(self):
+        return "PyNEMO Data Object from files: %s and variable %s" % self.file_names, self.variable
+    
+    def __len__(self):
+        """ Returns the length of the variable """
+        try:
+            dataset = Dataset(self.file_names[0][0], 'r')
+            dvar = dataset.variables[self.variable]
+            val  = len(dvar)
+            dataset.close()
+            return val
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_names[0])
+        return None
+            
+    def __getitem__(self, val):
+        """ Returns the data requested """
+        try:
+            if self.time_dim_index == -1:
+                dataset = Dataset(self.file_names[0][0], 'r')
+                dvar = dataset.variables[self.variable]
+                retval  = dvar[val]
+                dataset.close()
+                return retval
+            else:
+                # select all the files that are required for the selected range
+                # read the data and merge them
+                val = list(val)
+                for index in range(len(val)):
+                    if type(val[index]) is not slice:
+                        if type(val[index]) is not np.ndarray:
+                            val[index] = slice(val[index], val[index] + 1)
+                val = tuple(val)
+                start = val[self.time_dim_index].start
+                stop = val[self.time_dim_index].stop
+                step = val[self.time_dim_index].step
+                if step is None:
+                    step = 1
+                if start is None:
+                    start = 0
+                if stop is None:
+                    stop = len(self.file_names)
+                finalval = None
+                for index in range(start, stop, step):
+                    dataset = Dataset(self.file_names[index][0], 'r')
+                    val = list(val)
+                    val[self.time_dim_index] = slice(self.file_names[index][1], self.file_names[index][1] + 1)
+                    val = tuple(val)
+                    dvar = dataset.variables[self.variable]
+                    retval = dvar[val]
+                    if finalval is None:
+                        finalval = retval
+                    else:
+                        finalval = np.concatenate((finalval, retval), axis=self.time_dim_index)
+                    dataset.close()
+                return finalval
+
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the files %s' % self.file_names)
+        return None
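+    # Illustrative read (hypothetical file list): with file_names built by the
+    # Reader as [['jan_T.nc', 0], ['feb_T.nc', 0], ...], a request such as
+    #     var[0:2, :, :, :]
+    # opens the first two files and concatenates their single time levels
+    # along the time axis.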
+      
+    def get_attribute_values(self, attr_name):
+        """ Returns the attribute value of the variable """
+        try:
+            dataset = Dataset(self.file_names[0][0], 'r')
+            dvar = dataset.variables[self.variable]
+            ret_val = {}
+            for name in attr_name:
+                try:
+                    val = dvar.getncattr(name)
+                    ret_val[name]=val
+                except AttributeError:
+                    ret_val[name] = None
+            dataset.close()
+            return ret_val
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_names[0])
+        return None
+            
+    def _get_dimensions(self):
+        """ Returns the dimensions of the variables """
+        try:        
+            dataset = Dataset(self.file_names[0][0], 'r')
+            dvar = dataset.variables[self.variable]
+            return dvar.dimensions
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_names[0])
+        return None
+    
+    def _set_time_dimension_index(self):
+        """ Sets the time dimension index """
+        self.time_dim_index = -1
+        for index in range(len(self.dimensions)):
+            if self.dimensions[index] == self.time_counter_const:
+                self.time_dim_index = index
+        
+
diff --git a/pynemo/reader/factory.py b/pynemo/reader/factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff8bf427f4b672b7bce5cd03b6d09d7dc73997c6
--- /dev/null
+++ b/pynemo/reader/factory.py
@@ -0,0 +1,70 @@
+'''
+This is generic file loader factory.
+
+@author: Mr. Srikanth Nagella
+'''
+
+#Global Imports
+import os
+#Local Imports
+from pynemo.reader.ncml import Reader as NcMLReader
+from pynemo.reader.ncml import NcMLFile
+from pynemo.reader.directory import Reader as DirectoryReader
+
+from netCDF4 import Dataset
+
+def GetReader(uri, t_adjust, reader_type=None):
+    if reader_type is None:
+        print uri
+        if uri.endswith(".ncml"):
+            reader_type = "NcML"
+        elif os.path.isdir(uri):
+            reader_type = "Directory"
+        else:
+            print "Error input should be a NcML file or URL or a Local directory"
+            return None
+    if reader_type == "NcML":
+        return NcMLReader(uri,t_adjust)
+    else:
+        return DirectoryReader(uri, t_adjust)
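+
+# Usage sketch: the reader type is inferred from the URI unless passed
+# explicitly (paths are illustrative):
+#     reader = GetReader('aggregation.ncml', 0)      # NcML reader
+#     reader = GetReader('/data/model_outputs/', 0)  # directory reader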
+    
+    
+class NetCDFFile(object):
+    def __init__(self, filename):
+        self.nc = Dataset(filename)
+    
+    def __getitem__(self,val):
+        return self.nc.variables[val]
+    
+    def close(self):
+        self.nc.close()
+
+
+def GetFile(uri):
+    if uri.endswith(".ncml"):
+        return NcMLFile(uri)
+    else:
+        return NetCDFFile(uri)
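+
+# e.g. profile.process_bdy reads the source vertical grid via:
+#     nc = GetFile(settings['src_zgr'])
+#     gdept = nc['gdept_0'][:]
+#     nc.close()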
+# from netcdf import GetFile as netcdf_get_file
+# from netcdf import GetRepository as netcdf_repository
+# from ncml import GetFile as ncml_get_file 
+# from ncml import GetRepository as ncml_repository
+# 
+#     
+#     
+# import os
+# def GetRepository(src_dir, grid, t_adjust, reader_type="netcdf"):
+#     """ Generic method to return the repository either netcdf or ncml based on some
+#     logic, now passing as reader_type to choose """
+#     if reader_type == "netcdf":
+#         return netcdf_repository(src_dir, grid, t_adjust)
+#     elif reader_type == "ncml":
+#         return ncml_repository(src_dir, grid, t_adjust)
+#       
+# def GetFile(file_path, reader_type="netcdf"):
+#     """ Generic method to return the file object either netcdf or ncml based on some
+#     logic, now passing as reader_type to choose"""
+#     if reader_type == "netcdf":
+#         return netcdf_get_file(file_path)
+#     elif reader_type == "ncml":
+#         return ncml_get_file(file_path)
\ No newline at end of file
diff --git a/pynemo/reader/jars/netcdfAll-4.6.jar b/pynemo/reader/jars/netcdfAll-4.6.jar
new file mode 100644
index 0000000000000000000000000000000000000000..6b067fe38b7dd588ee94b7af7106b7494a6bdfd3
Binary files /dev/null and b/pynemo/reader/jars/netcdfAll-4.6.jar differ
diff --git a/pynemo/reader/ncml.py b/pynemo/reader/ncml.py
new file mode 100644
index 0000000000000000000000000000000000000000..804f1f65f8a585428d8b758340dc4a5f705d9a3a
--- /dev/null
+++ b/pynemo/reader/ncml.py
@@ -0,0 +1,290 @@
+'''
+NcML reading implementation using pyjnius
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+#Loading of NCML jar file
+import os
+import string
+import logging
+import numpy as np
+import jnius_config
+from netCDF4 import netcdftime
+ncmlpath, file_name = os.path.split(__file__)
+ncmlpath = os.path.join(ncmlpath, "jars", "netcdfAll-4.6.jar") 
+jnius_config.set_classpath('.',ncmlpath)
+try:
+    if os.environ['http_proxy'] is not None:
+        #split the proxy name and port
+        proxylist = string.split(os.environ['http_proxy'],':')
+        proxy_host = proxylist[0]
+        proxy_port = proxylist[1]        
+        jnius_config.add_options('-Dhttp.proxyHost='+proxy_host,'-Dhttp.proxyPort='+proxy_port)
+except KeyError:
+    print "Didn't find a proxy environment variable"
+NetcdfDataset = None
+NcMLReader = None
+Section = None
+try:
+    from jnius import autoclass
+    def init_jnius():
+        global NetcdfDataset
+        global NcMLReader
+        global Section
+        NetcdfDataset = autoclass('ucar.nc2.dataset.NetcdfDataset')
+        NcMLReader = autoclass('ucar.nc2.ncml.NcMLReader')
+        Section = autoclass('ucar.ma2.Section')
+    init_jnius()
+except ImportError:
+    print 'Warning: Please make sure pyjnius is installed and jvm.dll/libjvm.so/libjvm.dylib is in the path'
+
+time_counter_const = "time_counter"
+class Reader(object):
+    """ This class is the high level of object for the NCML reader, from here using grid type
+    will return the grid data
+    usage: 
+        >>> reader = Reader("NCML Filename")
+        >>> reader['t']['votemper'][:,:,:,:]
+    """
+    grid_type_list = ['t','u','v','i']
+    time_counter = time_counter_const
+    def __init__(self, uri, time_adjust):
+        self.logger = logging.getLogger(__name__)
+        self.uri = uri
+        self.time_adjust = time_adjust
+        try:
+            self.dataset = NetcdfDataset.openFile(self.uri, None)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.uri)
+        self.grid = GridGroup(self.uri,self.dataset)                           
+        self._get_source_timedata(self.grid,self.time_adjust)
+    def __del__(self):
+        """ Destructor close the netcdf file """
+        if self.dataset is not None:
+            self.dataset.close()
+                    
+    def __getitem__(self,val):
+        """ Returns the grid. it doesn't matter what type of grid is requested all the
+        variables are in the same object. This is to keep it consistent with the Local directory
+        reader"""
+        if val in self.grid_type_list:
+            return self.grid
+        else:
+            return None    
+        
+    def _get_source_timedata(self, grid, t_adjust):
+        """ Get the source time data information. builds up sourcedata objects of a given grid """
+        timevar = grid[self.time_counter]    
+        grid.time_counter = timevar[:]+t_adjust
+        grid.date_counter = []
+        for index in range(0,len(grid.time_counter)):            
+            grid.date_counter.append(netcdftime.utime(grid.units,
+                                                      grid.calendar).num2date(grid.time_counter[index])) 
+
+    def close(self):
+        """ This is not yet implemented. TODO: keep the netcdf file open until its expicitly 
+        closed """
+        pass 
+    
+class GridGroup(object):
+    """ This class provides an indirection to the grid type. Since the ncml
+    file aggregates all the variables, this is just a placeholder"""
+    logger = logging.getLogger(__name__)
+    def __init__(self, filename, dataset):
+        """ This class is the source data that holds the dataset information """
+        self.file_name = filename
+        self.units = None
+        self.calendar = None
+        self.date_counter = None
+        self.seconds = None
+        self.time_counter = None
+        self.dataset = dataset
+        self.update_attributes()
+
+    def __del__(self):
+        self.dataset = None
+            
+    def __getitem__(self, val):
+        """ Returns the data requested """
+        try:
+            return Variable(self.dataset, val)
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+val)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_name)
+        return None
+       
+    def get_meta_data(self, variable, source_dic):
+        """ Returns a dictionary with meta data information correspoinding to the variable """
+        #source_dic = {}
+        try:
+            dvar = self.dataset.findVariable(variable)
+            if dvar is None:
+                raise KeyError()
+            source_dic['sf'] = 1
+            source_dic['os'] = 0
+            mv_attr = dvar.findAttributeIgnoreCase('missing_value')
+            if mv_attr is not None:
+                source_dic['mv'] = mv_attr.getValues().copyToNDJavaArray()
+            sf_attr = dvar.findAttributeIgnoreCase('scale_factor')
+            if sf_attr is not None:
+                source_dic['sf'] = sf_attr.getValues().copyToNDJavaArray()
+            os_attr = dvar.findAttributeIgnoreCase('add_offset')
+            if os_attr is not None:
+                source_dic['os'] = os_attr.getValues().copyToNDJavaArray()
+            fv_attr = dvar.findAttributeIgnoreCase('_FillValue')
+            if fv_attr is not None:
+                source_dic['fv'] = fv_attr.getValue(0)
+            return source_dic            
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_name)
+        return None          
+
+    def update_attributes(self):
+        """ Updates the units and calendar information for the grid """        
+        try:
+            var =  Variable(self.dataset, time_counter_const)
+            self.units = var.get_attribute_value("units")
+            self.calendar = var.get_attribute_value("calendar")
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+time_counter_const)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+self.file_name)
+        return None
+       
+class Variable(object):
+
+    def __init__(self, dataset, variable):
+        self.logger = logging.getLogger(__name__)
+        self.dataset = dataset
+        self.variable = variable
+        
+    def __str__(self):
+        return "PyNEMO NcML Object for variable %s" % self.variable
+
+    def __len__(self):
+        """ Returns the length of the variable """
+        try:
+            dvar = self.dataset.findVariable(self.variable) 
+            if dvar is None:
+                raise KeyError()
+            val  = dvar.getDimension(0).getLength()
+            return val
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        return None
+            
+    def __getitem__(self, val):
+        """ Returns the data requested """
+        if type(val) != tuple:
+            val = (val,)
+        try:
+            dvar = self.dataset.findVariable(self.variable)
+            if dvar is None:
+                raise KeyError()
+            dims = dvar.getShape()
+            # get the requested slice and extract that information from jnius
+            # Check if the request data is with in the dataset dimensions
+            start = [0]*len(dims)
+            stop = dims
+            stride = [1]*len(dims)
+            new_dims = tuple()
+            np_input = False
+            for idx in range(0,len(dims)):
+                try:
+                    if val[idx].start is not None:
+                        start[idx] = val[idx].start
+                    if val[idx].step is not None:
+                        stride[idx] = val[idx].step
+                    if val[idx].stop is not None:
+                        # slice objects are immutable, so clamp the requested
+                        # stop into a local variable instead of mutating it
+                        vstop = val[idx].stop
+                        if vstop == -1:
+                            vstop = stop[idx] - 1
+                        elif vstop > stop[idx]:
+                            vstop = stop[idx]
+                        stop[idx] = (vstop - start[idx])//stride[idx]
+                        if (vstop - start[idx])%stride[idx] != 0:
+                            stop[idx] = stop[idx] + 1
+                    new_dims = new_dims+(stop[idx],)
+                except IndexError:
+                    pass
+                except AttributeError:
+                    if isinstance(val[idx],int):
+                        start[idx] = val[idx]
+                        stop[idx] = 1
+                    elif isinstance(val[idx],np.ndarray):
+                        new_dims = new_dims + val[idx].shape
+                        np_input = True
+            # Create a section object that represents the requested slice 
+            start = [int(i) for i in start]
+            stop = [int(i) for i in stop]
+            stride = [int(i) for i in stride]
+            selected_section = Section(start,stop,stride)
+            data_array = dvar.read(selected_section)
+            retval = data_array.copyToNDJavaArray() 
+            #TODO: copy it into numpy instead of Java array and then convert to numpy
+            # convert to numpy array
+            retval = np.asarray(retval)
+            self.logger.info(retval.shape)
+            if np_input: #if an array is passed as selection
+                ret_dim_list = ()
+                for idx in range(0,len(dims)):
+                    if isinstance(val[idx], np.ndarray): 
+                        ret_dim_list2 = ret_dim_list+(val[idx],)
+                        # can't do all the reductions at once due to Index Error: shape mismatch
+                        retval = retval[ret_dim_list2]  
+                    ret_dim_list = ret_dim_list+(slice(None,None,None),)
+                self.logger.info(ret_dim_list)                        
+                self.logger.info(retval.shape)
+                self.logger.info(ret_dim_list)
+            # reshape to reflect the request
+            retval = np.reshape(retval, new_dims)
+            return retval
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot read the requested data for variable '+self.variable)
+        return None
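+    # Illustrative mapping: a request var[0:2, :, 0:10] on a (nt, ny, nx)
+    # variable becomes Section(start=[0, 0, 0], shape=[2, ny, 10],
+    # stride=[1, 1, 1]) before being read through the netcdf-java library.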
+         
+    def _get_dimensions(self):
+        """ Returns the dimensions of the variables """
+        try:
+            dvar = self.dataset.findVariable(self.variable)
+            if dvar is None:
+                raise KeyError()
+            retval = tuple(dvar.getDimensionsString().split(' '))
+            return retval
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        return None     
+    
+    def get_attribute_value(self, attr_name):
+        """ Returns the attribute value of the variable """
+        try:
+            dvar = self.dataset.findVariable(self.variable)
+            if dvar is None:
+                raise KeyError()
+            attr = dvar.findAttributeIgnoreCase(attr_name)
+            retval = None
+            if attr is not None:
+                retval = attr.getValue(0)
+            return retval
+        except KeyError:
+            self.logger.error('Cannot find the requested variable '+self.variable)
+        return None  
+    
+    
+class NcMLFile(object):
+    def __init__(self,filename):
+        self.logger = logging.getLogger(__name__)
+        self.dataset = None
+        try:
+            self.dataset = NetcdfDataset.openFile(filename, None)
+        except (IOError, RuntimeError):
+            self.logger.error('Cannot open the file '+filename)
+    
+    def __getitem__(self,val):
+        return Variable(self.dataset,val)
+    
+    def close(self):
+        if self.dataset is not None:
+            self.dataset.close()
diff --git a/pynemo/tests/__init__.py b/pynemo/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..afef08ad860fa78d58c04d388dddd1dcfbefa7f5
--- /dev/null
+++ b/pynemo/tests/__init__.py
@@ -0,0 +1,5 @@
+'''
+Created on 4 Nov 2014
+
+@author: Mr. Srikanth Nagella
+'''
diff --git a/pynemo/tests/gcoms_break_depth_test.py b/pynemo/tests/gcoms_break_depth_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..12cdbee87762d58afff6006d6d77c6bd828d6fd5
--- /dev/null
+++ b/pynemo/tests/gcoms_break_depth_test.py
@@ -0,0 +1,62 @@
+'''
+This is unit test for gcoms_break_depth
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import unittest
+from pynemo.utils import gcoms_break_depth
+from netCDF4 import Dataset
+import numpy as np
+class Test(unittest.TestCase):
+
+
+    def setUp(self):
+        self.nc = Dataset('data/gebco_1.nc')
+        self.bathy = self.nc.variables['topo'][2::6,2::6]
+        self.lat = self.nc.variables['latitude'][2::6]
+        self.lon = self.nc.variables['longitude'][2::6]
+
+        self.lon,self.lat = np.meshgrid(self.lon, self.lat)
+        #gcoms_break_depth.gcoms_boundary_masks(self.bathy, -1, 0)
+        
+        print self.bathy.shape
+        print self.bathy
+
+
+    def tearDown(self):
+        self.nc.close()
+
+
+    def testPolcoms_select_domain(self):
+        roi = [1045, 1537, 975, 1328]
+        self.bathy = self.bathy[...]
+        self.bathy[self.bathy>=0] = 0
+        self.bathy = self.bathy*-1        
+        tmp  = gcoms_break_depth.polcoms_select_domain(self.bathy, self.lat, self.lon, roi, 200)
+        self.assertEquals(tmp[32,0],1,"Set the break select correctly")
+        self.assertEquals(tmp[40,0],1,"Set the break select correctly 40")        
+        self.assertEquals(tmp[50,0],1,"Set the break select correctly 50")        
+        self.assertEquals(tmp[60,0],1,"Set the break select correctly 60")        
+                
+    def testGcomsBreakDepth(self):
+        r = 18
+        self.bathy = self.bathy[991-r:1295+r,1556-r:1801+r]
+        self.bathy[self.bathy>=0]=0
+        self.bathy = -1*self.bathy
+        self.bathy[np.isnan(self.bathy)] = -1
+        self.lat = self.lat[1556-r:1801+r]
+        self.lon = self.lon[991-r:1295+r]
+        gcoms_break_depth.gcoms_break_depth(self.bathy)
+
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/nemo_bdy_gen_c_test.py b/pynemo/tests/nemo_bdy_gen_c_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..25a7eb338a6e3d0aa42f6d25e458c42c21408744
--- /dev/null
+++ b/pynemo/tests/nemo_bdy_gen_c_test.py
@@ -0,0 +1,32 @@
+'''
+Unit test for nemo_bdy_gen_c. Boundary class
+
+@author: Mr. Srikanth Nagella
+'''
+import unittest
+from pynemo.nemo_bdy_gen_c import *
+from pynemo.gui.nemo_bdy_mask import *
+class Test(unittest.TestCase):
+
+    def setUp(self):
+        self.settings = {'rimwidth': 9}
+        self.Mask = Mask('data/grid_C/NNA_R12_bathy_meter_bench.nc')
+
+    def tearDown(self):
+        self.settings={}
+
+
+    def testInvalidGridType(self):
+        self.assertRaises(ValueError, Boundary, self.Mask.data, self.settings, 'x')
+       
+    def testGridTypeT(self):
+        Grid_T = Boundary(self.Mask.data,self.settings,'t')
+        self.assertEqual(Grid_T.grid_type,'t','Grid type is not T')
+        
+
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/nemo_bdy_msk_c_test.py b/pynemo/tests/nemo_bdy_msk_c_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..54d6425c79988259b0c86f295248246cadbeeac2
--- /dev/null
+++ b/pynemo/tests/nemo_bdy_msk_c_test.py
@@ -0,0 +1,20 @@
+'''
+Unit test to nemo_bdy_msk_c
+
+@author: Srikanth Nagella
+'''
+import unittest
+from pynemo.gui.nemo_bdy_mask import * 
+
+class Test(unittest.TestCase):
+
+
+    def testMaskData(self):
+        mask = Mask('data/grid_C/NNA_R12_bathy_meter_bench.nc')
+        print mask.data.shape
+        self.assertEqual(mask.data.shape,(401L,351L),'Mask reading failed')
+
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/nemo_bdy_ncgen_test.py b/pynemo/tests/nemo_bdy_ncgen_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c376955905b115724909cbf3580e1f315a105e4d
--- /dev/null
+++ b/pynemo/tests/nemo_bdy_ncgen_test.py
@@ -0,0 +1,24 @@
+'''
+Created on 6 Oct 2014
+
+@author: sn65
+'''
+import unittest
+
+import os
+from pynemo.nemo_bdy_ncgen import CreateBDYNetcdfFile
+class Test(unittest.TestCase):
+
+    def setUp(self):
+        unittest.TestCase.setUp(self)
+
+    def testCreateNCFileMain(self):
+        CreateBDYNetcdfFile('Test.nc',7699,351,401,75,9,'EB bdy files produced by jdha from ORCA0083-N001 global run provided by acc',
+                        '1960-01-01 00:00:00', -1e+20,'gregorian','T')
+    
+    def tearDown(self):
+        os.remove('Test.nc')
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testCreateNCFileMain']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/nemo_bdy_setup_test.py b/pynemo/tests/nemo_bdy_setup_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1986604afea1c2081afe1e24b62e3f89ac1ab23
--- /dev/null
+++ b/pynemo/tests/nemo_bdy_setup_test.py
@@ -0,0 +1,79 @@
+'''
+Unit test for setup of nemo bdy namelist
+Will be testing the reading of the file and expected settings
+@author: Mr. Srikanth Nagella
+'''
+import unittest
+from pynemo.nemo_bdy_setup import * 
+
+import os
+class Test(unittest.TestCase):
+
+
+    def testReadingOfNonExistingFile(self):
+        self.assertRaises(IOError,Setup,'emptynamelist.bdy')
+  
+    def testReadingEmptyFile(self):
+        #create an empty file and load it
+        fo = open("test.bdy","wb")
+        fo.close()
+        setup = Setup('test.bdy')   
+        #test settings after reading empty file   
+        self.assertEqual(setup.settings,{} ,"There are some default settings after reading empty file")
+        #delete empty file
+        os.remove('test.bdy')
+
+    def testAmbiguousEntriesInFile(self):
+        #create an ambiguous entry in file
+        fo = open('test.bdy','wb')
+        fo.write("! Ambiguous Entry\n")
+        fo.write("ambigious = true")
+        fo.close()
+        
+        #test
+        self.assertRaises(ValueError,Setup,'test.bdy')
+        
+        #delete file
+        os.remove('test.bdy')
+        
+    def testEntryWithoutSpaceOnEitherSideOfEquals(self):
+        #create an entry in file with no spaces around the equals sign
+        fo = open('test.bdy','wb')
+        fo.write("! Unambiguous Entry\n")
+        fo.write("ln_nonambigious=true")
+        fo.close()
+        
+        #test
+        setup = Setup('test.bdy')
+        self.assertEquals(setup.settings,{'nonambigious':True},"Didn't recognize valid setting")
+        
+        #delete file
+        os.remove('test.bdy')                
+
+    def testEntryDifferentTypeEntries(self):
+        #create entries of different types in file
+        fo = open('test.bdy','wb')
+        fo.write("! Entries of different types\n")
+        fo.write("ln_nonambigious=false !Comment testing false logical value\n")
+        fo.write("rn_floatval=10.9\n")
+        fo.write("nn_floatval2 = 20.9\n")
+        fo.write("!Comments in middle of file\n")
+        fo.write("cn_stringval='coordinates.nc'\n")
+        fo.write("sn_stringval2 = 'gregorian'\n")        
+        fo.close()
+                
+        #test
+        setup = Setup('test.bdy')
+        print setup.settings
+        self.assertEquals(setup.settings['nonambigious'],False,"Didn't recognize logical setting")
+        self.assertEquals(setup.settings['floatval'],10.9,"Didn't recognize rn value in setting")
+        self.assertEquals(setup.settings['floatval2'],20.9,"Didn't recognize nn value in setting")
+        self.assertEquals(setup.settings['stringval'],'coordinates.nc',"Didn't recognize cn string value in setting")
+        self.assertEquals(setup.settings['stringval2'],'gregorian',"Didn't recognize sn string value in setting")
+        
+        #delete file
+        os.remove('test.bdy')
+        
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/reader_ncml_test.py b/pynemo/tests/reader_ncml_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6fa862d13d716346f138ecf805207a428cc9270
--- /dev/null
+++ b/pynemo/tests/reader_ncml_test.py
@@ -0,0 +1,91 @@
+'''
+Unit tests for Ncml data reading module using pyjnius
+
+@author: Mr. Srikanth Nagella
+'''
+import unittest
+
+#from pynemo.reader.ncml_back import init_jnius
+#from pynemo.reader.ncml_back import Data
+from pynemo.reader.ncml import init_jnius
+from pynemo.reader.ncml import Variable as Data2
+from pynemo.reader.ncml import GridGroup
+from pynemo.reader.ncml import Reader
+from pynemo.reader.ncml import NcMLFile
+
+import os
+class Test(unittest.TestCase):
+
+#     @unittest.skip("Remote testing skipping")
+#     def testDataInit(self):
+#         init_jnius()
+#         testpath, file_name = os.path.split(__file__)
+#         testfile = os.path.join(testpath, "test.ncml")
+#         dataset = Data(testfile,"votemper")
+#         data = dataset[0,0,0,0:10]
+#         self.assertEquals(data.shape[0],10,"Extracted requested dimension of data")
+#         self.assertAlmostEquals(data[0],18.945175,6)
+
+
+                
+    def testTimeCounter(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml")
+        sd = Reader(testfile,0).grid
+        dataset = Data2(sd.dataset,"time_counter")
+        self.assertEquals(len(dataset), 8, "There should be 8 datasets")
+    
+    def testVariable(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml")      
+        sd = Reader(testfile,0).grid
+        dataset = Data2(sd.dataset,"votemper")
+        val = dataset[0,0,0,0]
+        val2 = dataset[2,10,0,0]
+        self.assertAlmostEqual(dataset[0,0,0,0], 18.945175, 6,"First value should be 18.9")
+        self.assertAlmostEqual(dataset[2,10,0,0], 20.314891, 5,"2, 10, 0,0  value should be 18.9")
+        
+    def testNcMLFile(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml")
+        sd = NcMLFile(testfile)
+        val = sd['votemper'][0,0,0,0]
+        self.assertAlmostEqual(val, 18.945175, 6, "First value should be 18.945175")
+
+        
+    def testSrcDataVariable(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml")
+        sd = Reader(testfile,0).grid    
+        dataset = sd["votemper"]
+        val = dataset[0,0,0,0]
+        val2 = dataset[2,10,0,0]
+        self.assertAlmostEqual(val, 18.945175, 6, "First value should be 18.945175")
+        self.assertAlmostEqual(val2, 20.314891, 5, "Value at [2,10,0,0] should be 20.314891")
+        
+    def testRepoMatching(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml") 
+        repo = Reader(testfile,0)
+        self.assertAlmostEqual(repo['t']['votemper'][0,0,0,0], 18.945175, 6, "First value should be 18.945175")
+        self.assertAlmostEqual(repo['t']['votemper'][2,10,0,0], 20.314891, 5, "Value at [2,10,0,0] should be 20.314891")
+        
+    def testGridGroupTimeCounter(self):
+        init_jnius()
+        testpath, file_name = os.path.split(__file__)
+        testfile = os.path.join(testpath, "testremote.ncml")
+        repo = Reader(testfile,0)
+        self.assertEquals(len(repo['t'].time_counter), 8, "Time counter should be 8")
+        self.assertEquals(repo['t'].time_counter[0], 691416000, "The first time value doesn't match")
+        repo = Reader(testfile,100)
+        self.assertEquals(len(repo['t'].time_counter), 8, "Time counter should be 8")
+        self.assertEquals(repo['t'].time_counter[0], 691416100, "The first time value doesn't match")
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/pynemo/tests/test.ncml b/pynemo/tests/test.ncml
new file mode 100644
index 0000000000000000000000000000000000000000..49b43201d26ed53f93f8367de4181f99d57716fb
--- /dev/null
+++ b/pynemo/tests/test.ncml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2" location="http://esurgeod.noc.soton.ac.uk:8080/thredds/fileServer/PyNEMO/extra_data/NN_ORCA025-N206_19791206d05T.nc">
+  <variable name="votemper" orgName="temp" />
+  <variable name="vosaline" orgName="salt" />  
+  <variable name="sossheig" orgName="ssh" />
+  <variable name="deptht" orgName="zt" />
+  <variable name="time_counter" orgName="t" />  
+</netcdf>
\ No newline at end of file
diff --git a/pynemo/tests/testremote.ncml b/pynemo/tests/testremote.ncml
new file mode 100644
index 0000000000000000000000000000000000000000..68deba0f829908206fc307afbff3ea360b8717a1
--- /dev/null
+++ b/pynemo/tests/testremote.ncml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<netcdf title="aggregation example" xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+  <variable name="u" orgName="vozocrtx" />
+  <variable name="v" orgName="vomecrty" />
+  <aggregation type="union" >
+     <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+        <aggregation type="joinExisting" dimName="time_counter" >
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791206d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791201d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791126d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791121d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791116d05V.nc" />  
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791111d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791106d05V.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791101d05V.nc" />		   		   
+        </aggregation>
+     </netcdf>
+     <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+        <aggregation type="joinExisting" dimName="time_counter" >
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791206d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791201d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791126d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791121d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791116d05U.nc" />  
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791111d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791106d05U.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791101d05U.nc" />
+        </aggregation>
+     </netcdf>
+     <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+        <aggregation type="joinExisting" dimName="time_counter" >
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791206d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791201d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791126d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791121d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791116d05T.nc" />  
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791111d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791106d05T.nc" />
+           <netcdf location="http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/ORCA025-N206_19791101d05T.nc" />
+        </aggregation>
+     </netcdf>	 
+  </aggregation>
+</netcdf>
\ No newline at end of file
diff --git a/pynemo/tide/__init__.py b/pynemo/tide/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/pynemo/tide/nemo_bdy_tide.py b/pynemo/tide/nemo_bdy_tide.py
new file mode 100644
index 0000000000000000000000000000000000000000..983a77c81b140d7ccb4e16be4839f4cb772db7e9
--- /dev/null
+++ b/pynemo/tide/nemo_bdy_tide.py
@@ -0,0 +1,161 @@
+'''
+Extracts tidal harmonic constituents from the TPXO7.2 global tide model
+and interpolates them onto the open boundary points.
+'''
+import numpy as np
+import scipy.spatial as sp
+from netCDF4 import Dataset
+import copy # DEBUG ONLY- allows multiple runs without corruption
+from pynemo import nemo_bdy_grid_angle
+#from nemo_bdy_extr_tm3 import rot_rep
+
+class Extract:
+
+    def __init__(self, setup, DstCoord, Grid):
+
+        self.g_type = Grid.grid_type
+        DC = copy.deepcopy(DstCoord)
+        dst_lon = DC.bdy_lonlat[self.g_type]['lon'][Grid.bdy_r == 0]
+        dst_lat = DC.bdy_lonlat[self.g_type]['lat'][Grid.bdy_r == 0]
+        self.dst_dep = DC.depths[self.g_type]['bdy_hbat'][Grid.bdy_r == 0]
+        self.harm_Im = {} # tidal boundary data: Imaginary
+        self.harm_Re = {} # tidal boundary data: Real
+        self.harm_Im_rot = {} # rotated tidal boundary data: Imaginary
+        self.harm_Re_rot = {} # rotated tidal boundary data: Real
+        
+        # Modify lon for 0-360 TODO this needs to be auto-detected
+        
+        dst_lon = np.array([x if x > 0 else x+360 for x in dst_lon])
+      
+        fileIDb = '/Users/jdha/Projects/pynemo_data/DATA/grid_tpxo7.2.nc' # TPX bathymetry file
+        nb = Dataset(fileIDb) # Open the TPX bathymetry file using the NetCDF4-Python library
+
+        # Open the TPX Datafiles using the NetCDF4-Python library
+#            T_GridAngles = nemo_bdy_grid_angle.GridAngle(
+#                       self.settings['src_hgr'], imin, imax, jmin, jmax, 't')
+#            RotStr_GridAngles = nemo_bdy_grid_angle.GridAngle(
+#                         self.settings['dst_hgr'], 1, maxI, 1, maxJ, self.rot_str)
+            
+#            self.gcos = T_GridAngles.cosval
+#            self.gsin = T_GridAngles.sinval
+            
+        if self.g_type == 't':    
+            self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/h_tpxo7.2.nc' # TPX sea surface height file
+            self.var_Im = 'hIm' 
+            self.var_Re = 'hRe' 
+            nc = Dataset(self.fileID) # pass variable ids to nc
+            lon = np.ravel(nc.variables['lon_z'][:,:]) # need to add in a east-west wrap-around
+            lat = np.ravel(nc.variables['lat_z'][:,:])
+            bat = np.ravel(nb.variables['hz'][:,:])
+            msk = np.ravel(nb.variables['mz'][:,:])
+        elif self.g_type == 'u':
+            self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/u_tpxo7.2.nc' # TPX velocity file
+            self.var_Im = 'UIm' 
+            self.var_Re = 'URe' 
+            self.key_tr = setup['tide_trans']
+            nc = Dataset(self.fileID) # pass variable ids to nc
+            lon = np.ravel(nc.variables['lon_u'][:,:])
+            lat = np.ravel(nc.variables['lat_u'][:,:])
+            bat = np.ravel(nb.variables['hu'][:,:])
+            msk = np.ravel(nb.variables['mu'][:,:])
+        else:
+            self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/u_tpxo7.2.nc' # TPX velocity file
+            self.var_Im = 'VIm' 
+            self.var_Re = 'VRe' 
+            self.key_tr = setup['tide_trans']
+            nc = Dataset(self.fileID) # pass variable ids to nc
+            lon = np.ravel(nc.variables['lon_v'][:,:])
+            lat = np.ravel(nc.variables['lat_v'][:,:]) 
+            bat = np.ravel(nb.variables['hv'][:,:])
+            msk = np.ravel(nb.variables['mv'][:,:])
+              
+        # Pull out the constituents that are available
+        self.cons = []
+        for ncon in range(nc.variables['con'].shape[0]):
+            self.cons.append(nc.variables['con'][ncon,:].tostring().strip())
+                        
+        nc.close() # Close Datafile
+        nb.close() # Close Bathymetry file
+
+        # Find nearest neighbours on the source grid to each dst bdy point
+        source_tree = sp.cKDTree(zip(lon, lat))
+        dst_pts = zip(dst_lon, dst_lat)
+        nn_dist, self.nn_id = source_tree.query(dst_pts, k=4, eps=0, p=2, 
+                                                distance_upper_bound=0.5)
+        
+        # Create a weighting index for interpolation onto dst bdy point 
+        # need to check for missing values
+        
+        ind = nn_dist == np.inf
+
+        self.nn_id[ind] = 0  # better way of carrying None in the indices?      
+        dx = (lon[self.nn_id] - np.repeat(np.reshape(dst_lon,[dst_lon.size, 1]),4,axis=1) ) * np.cos(np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1) * np.pi / 180.)
+        dy =  lat[self.nn_id] - np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1)
+        
+        dist_tot = np.power((np.power(dx, 2) + np.power(dy, 2)), 0.5)
+
+        self.msk = msk[self.nn_id]
+        self.bat = bat[self.nn_id]
+        
+        dist_tot[ind | self.msk] = np.nan
+        
+        dist_wei = 1/( np.divide(dist_tot,(np.repeat(np.reshape(np.nansum(dist_tot,axis=1),[dst_lat.size, 1]),4,axis=1)) ) )
+        
+        self.nn_wei = dist_wei/np.repeat(np.reshape(np.nansum(dist_wei, axis=1),[dst_lat.size, 1]),4,axis=1)       
+        self.nn_wei[ind | self.msk] = 0.
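+        # Worked example of the weighting (values illustrative): two valid
+        # neighbours at distances 0.1 and 0.3 degrees, with the other two at
+        # inf, receive normalised weights of 0.75 and 0.25 respectively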
+        
+        # Need to identify missing points and throw a warning and set values to zero
+        
+        mv = np.sum(self.nn_wei,axis=1) == 0
+        print '##WARNING## There are', np.sum(mv), 'missing values, these will be set to ZERO'
+        
+    def extract_con(self, con):        
+        
+        if con in self.cons:
+            con_ind = self.cons.index(con)
+            
+            # Extract the complex amplitude components
+            
+            nc = Dataset(self.fileID) # pass variable ids to nc
+            
+            vIm = np.ravel(nc.variables[self.var_Im][con_ind,:,:])
+            vRe = np.ravel(nc.variables[self.var_Re][con_ind,:,:])
+        
+            nc.close()
+            
+            if self.g_type == 't':
+
+                self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)
+                self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)
+
+            else: # Convert transports to velocities
+
+                if self.key_tr: # We convert to velocity using tidal model bathymetry
+                
+                    self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+                    self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+      
+                else: # We convert to velocity using the regional model bathymetry
+                
+                    self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+                    self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+      
+                
+                # Rotate vectors
+            
+                self.harm_Im_rot[con] = self.rot_rep(self.harm_Im[con], self.harm_Im[con], self.rot_str,
+                                      'en to %s' %self.rot_dir, self.dst_gcos, self.dst_gsin)
+                self.harm_Re_rot[con] = self.rot_rep(self.harm_Re[con], self.harm_Re[con], self.rot_str,
+                                      'en to %s' %self.rot_dir, self.dst_gcos, self.dst_gsin)
+                                      
+        else:
+            
+            # throw some warning
+            print '##WARNING## Missing constituent values will be set to ZERO'
+        
+            self.harm_Im[con] = np.zeros(self.nn_id[:,0].size)
+            self.harm_Re[con] = np.zeros(self.nn_id[:,0].size)
+            
+
+
+
+
diff --git a/pynemo/tide/nemo_bdy_tide2.py b/pynemo/tide/nemo_bdy_tide2.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fe69e43d010bc7fd249616954e5afa2bca686bf
--- /dev/null
+++ b/pynemo/tide/nemo_bdy_tide2.py
@@ -0,0 +1,219 @@
+'''
+Extracts tidal harmonic constituents from the TPXO7.2 global tide model,
+averaging the velocity components onto the T-grid and rotating them onto
+the destination grid orientation.
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import numpy as np
+import scipy.spatial as sp
+from netCDF4 import Dataset
+import copy # DEBUG ONLY- allows multiple runs without corruption
+from pynemo import nemo_bdy_grid_angle
+from pynemo.nemo_bdy_lib import rot_rep
+
+class Extract:
+
+    def __init__(self, setup, DstCoord, Grid):
+
+        self.g_type = Grid.grid_type
+        DC = copy.deepcopy(DstCoord)
+        dst_lon = DC.bdy_lonlat[self.g_type]['lon'][Grid.bdy_r == 0]
+        dst_lat = DC.bdy_lonlat[self.g_type]['lat'][Grid.bdy_r == 0]
+        self.dst_dep = DC.depths[self.g_type]['bdy_hbat'][Grid.bdy_r == 0]
+        self.harm_Im = {} # tidal boundary data: Imaginary
+        self.harm_Re = {} # tidal boundary data: Real
+        
+        # Modify lon for 0-360 TODO this needs to be auto-detected
+        
+        dst_lon = np.array([x if x > 0 else x+360 for x in dst_lon])
+      
+        fileIDb = '../data/tide/grid_tpxo7.2.nc' # TPX bathymetry file
+        nb = Dataset(fileIDb) # Open the TPX bathymetry file using the NetCDF4-Python library
+
+        # Open the TPX Datafiles using the NetCDF4-Python library
+            
+        if self.g_type == 't':    
+            self.fileID = '../data/tide/h_tpxo7.2.nc' # TPX sea surface height file
+            self.var_Im = 'hIm' 
+            self.var_Re = 'hRe' 
+        elif (self.g_type == 'u') or (self.g_type == 'v') :
+            self.fileID = '../data/tide/u_tpxo7.2.nc' # TPX velocity file
+            self.var_Im = 'UIm' 
+            self.var_Re = 'URe' 
+            self.var_Im2 = 'VIm' 
+            self.var_Re2 = 'VRe' 
+            self.key_tr = setup['trans']
+            
+            # Determine the grid angle for rotating vector quantities
+            maxJ = DC.lonlat['t']['lon'].shape[0]
+            maxI = DC.lonlat['t']['lon'].shape[1]
+            GridAngles = nemo_bdy_grid_angle.GridAngle(setup['dst_hgr'], 1, maxI, 1, maxJ, self.g_type)
+            dst_gcos = np.ones([maxJ, maxI])
+            dst_gsin = np.zeros([maxJ,maxI])            
+            dst_gcos[1:,1:] = GridAngles.cosval
+            dst_gsin[1:,1:] = GridAngles.sinval
+
+            # Retain only boundary points rotation information
+            self.gcos = np.zeros(Grid.bdy_i.shape[0])
+            self.gsin = np.zeros(Grid.bdy_i.shape[0])
+            for p in range(Grid.bdy_i.shape[0]):
+                self.gcos[p] = dst_gcos[Grid.bdy_i[p,1], Grid.bdy_i[p,0]]
+                self.gsin[p] = dst_gsin[Grid.bdy_i[p,1], Grid.bdy_i[p,0]]
+            
+            if self.g_type == 'u':
+                self.rot_dir = 'i'
+            elif self.g_type == 'v':
+                self.rot_dir = 'j'
+        else:
+            print 'You should not see this message!'
+              
+        # We will average velocities onto the T grid as there is a rotation to be done 
+        # Also need to account for east-west wrap-around
+        nc = Dataset('../data/tide/h_tpxo7.2.nc')
+        lon = np.ravel(np.concatenate([nc.variables['lon_z'][-2:,:], 
+                                       nc.variables['lon_z'][:,:], 
+                                       nc.variables['lon_z'][0:2,:]]))
+        lat = np.ravel(np.concatenate([nc.variables['lat_z'][-2:,:], 
+                                       nc.variables['lat_z'][:,:], 
+                                       nc.variables['lat_z'][0:2,:]]))
+        bat = np.ravel(np.concatenate([nb.variables['hz'][-2:,:], 
+                                       nb.variables['hz'][:,:], 
+                                       nb.variables['hz'][0:2,:]]))
+        msk = np.ravel(np.concatenate([nb.variables['mz'][-2:,:], 
+                                       nb.variables['mz'][:,:], 
+                                       nb.variables['mz'][0:2,:]]))
+     
+        # Pull out the constituents that are available
+        self.cons = []
+        for ncon in range(nc.variables['con'].shape[0]):
+            self.cons.append(nc.variables['con'][ncon,:].tostring().strip())
+                        
+        nc.close() # Close Datafile
+        nb.close() # Close Bathymetry file
+
+        # Find nearest neighbours on the source grid to each dst bdy point
+        source_tree = sp.cKDTree(zip(lon, lat))
+        dst_pts = zip(dst_lon, dst_lat)
+        # Upper bound set at 0.5 deg as the TPXO7.2 data are at 0.25 deg resolution and 
+        # we don't want to grab points from further afield
+        nn_dist, self.nn_id = source_tree.query(dst_pts, k=4, eps=0, p=2, 
+                                                distance_upper_bound=0.5)
+        
+        # Create a weighting index for interpolation onto dst bdy point 
+        # need to check for missing values
+        
+        ind = nn_dist == np.inf
+
+        self.nn_id[ind] = 0  # better way of carrying None in the indices?      
+        dx = (lon[self.nn_id] - np.repeat(np.reshape(dst_lon,[dst_lon.size, 1]),4,axis=1) ) * np.cos(np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1) * np.pi / 180.)
+        dy =  lat[self.nn_id] - np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1)
+        
+        dist_tot = np.power((np.power(dx, 2) + np.power(dy, 2)), 0.5)
+
+        self.msk = msk[self.nn_id]
+        self.bat = bat[self.nn_id]
+        
+        dist_tot[ind | self.msk] = np.nan
+        
+        dist_wei = 1/( np.divide(dist_tot,(np.repeat(np.reshape(np.nansum(dist_tot,axis=1),[dst_lat.size, 1]),4,axis=1)) ) )
+        
+        self.nn_wei = dist_wei/np.repeat(np.reshape(np.nansum(dist_wei, axis=1),[dst_lat.size, 1]),4,axis=1)       
+        self.nn_wei[ind | self.msk] = 0.
+        
+        # Need to identify missing points and throw a warning and set values to zero
+        
+        mv = np.sum(self.nn_wei,axis=1) == 0
+        if np.sum(mv) > 1:
+            print '##WARNING## There are', np.sum(mv), 'missing values, these will be set to ZERO'
+        else:
+            print '##WARNING## There is', np.sum(mv), 'missing value, this will be set to ZERO'
+        
+    def extract_con(self, con):        
+        
+        if con in self.cons:
+            con_ind = self.cons.index(con)
+            
+            # Extract the complex amplitude components
+            nc = Dataset(self.fileID) # pass variable ids to nc
+            
+            if self.g_type == 't':
+                
+                
+                vIm = np.ravel(np.concatenate([nc.variables[self.var_Im][con_ind,-2:,:],
+                                               nc.variables[self.var_Im][con_ind,:,:],
+                                               nc.variables[self.var_Im][con_ind,0:2,:]]))
+                vRe = np.ravel(np.concatenate([nc.variables[self.var_Re][con_ind,-2:,:],
+                                               nc.variables[self.var_Re][con_ind,:,:],
+                                               nc.variables[self.var_Re][con_ind,0:2,:]]))
+        
+                self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)
+                self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)
+            
+            else: 
+                
+                uIm = np.concatenate([nc.variables[self.var_Im][con_ind,-2:,:],
+                                      nc.variables[self.var_Im][con_ind,:,:],
+                                      nc.variables[self.var_Im][con_ind,0:3,:]])
+                uRe = np.concatenate([nc.variables[self.var_Re][con_ind,-2:,:],
+                                      nc.variables[self.var_Re][con_ind,:,:],
+                                      nc.variables[self.var_Re][con_ind,0:3,:]])
+                                               
+                vIm = np.concatenate([nc.variables[self.var_Im2][con_ind,-2:,:],
+                                      nc.variables[self.var_Im2][con_ind,:,:],
+                                      nc.variables[self.var_Im2][con_ind,0:2,:]])
+                vRe = np.concatenate([nc.variables[self.var_Re2][con_ind,-2:,:],
+                                      nc.variables[self.var_Re2][con_ind,:,:],
+                                      nc.variables[self.var_Re2][con_ind,0:2,:]])
+                # Deal with north pole. NB in TPXO7.2 data U and Z at 90N have different
+                # values for each Longitude value! Plus there's something odd with the 
+                # hu and hv depths not being the min of surrounding T-grid depths
+                # TODO remove hardwired 722 index point and make generic
+                vIm = np.concatenate([vIm[:,:], np.concatenate([vIm[722:,-1],vIm[:722,-1]])[:,np.newaxis]],axis=1)
+                vRe = np.concatenate([vRe[:,:], np.concatenate([vRe[722:,-1],vRe[:722,-1]])[:,np.newaxis]],axis=1)
+                                      
+                # Average U and V onto the T-grid
+                                      
+                uIm = np.ravel((uIm[:-1,:] + uIm[1:,:])/2)
+                uRe = np.ravel((uRe[:-1,:] + uRe[1:,:])/2)
+                vIm = np.ravel((vIm[:,:-1] + vIm[:,1:])/2)
+                vRe = np.ravel((vRe[:,:-1] + vRe[:,1:])/2)
+                
+                if self.key_tr: # We convert to velocity using tidal model bathymetry
+                
+                    harm_Im = np.sum(uIm[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+                    harm_Re = np.sum(uRe[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+                    harm_Im2 = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+                    harm_Re2 = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat*self.nn_wei,axis=1)
+      
+                else: # We convert to velocity using the regional model bathymetry
+                
+                    harm_Im = np.sum(uIm[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+                    harm_Re = np.sum(uRe[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+                    harm_Im2 = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+                    harm_Re2 = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
+      
+                
+                # Rotate vectors
+            
+                self.harm_Im[con] = rot_rep(harm_Im, harm_Im2, self.g_type,
+                                      'en to %s' %self.rot_dir, self.gcos, self.gsin)
+                self.harm_Re[con] = rot_rep(harm_Re, harm_Re2, self.g_type,
+                                      'en to %s' %self.rot_dir, self.gcos, self.gsin)
+                self.harm_Im[con][self.msk]=0.
+                self.harm_Re[con][self.msk]=0.
+              
+            nc.close()
+                                    
+        else:
+            
+            # throw some warning
+            print '##WARNING## Missing constituent values will be set to ZERO'
+        
+            self.harm_Im[con] = np.zeros(self.nn_id[:,0].size)
+            self.harm_Re[con] = np.zeros(self.nn_id[:,0].size)
+            
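+
+# A minimal, self-contained sketch of the nearest-neighbour search used in
+# Extract.__init__ above (the coordinates and the 0.5 deg bound are
+# illustrative only):
+if __name__ == '__main__':
+    src_pts = zip([0.0, 1.0, 0.0, 1.0], [0.0, 0.0, 1.0, 1.0])
+    tree = sp.cKDTree(src_pts)
+    nn_dist, nn_id = tree.query([(0.2, 0.2)], k=4, eps=0, p=2,
+                                distance_upper_bound=0.5)
+    # neighbours beyond the bound come back with nn_dist == inf and an
+    # out-of-range index, which is why they are masked before the
+    # inverse-distance weights are formed above
+    print nn_dist, nn_id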
+
+
+
+
diff --git a/pynemo/tide/nemo_bdy_tide3.py b/pynemo/tide/nemo_bdy_tide3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3dc5951080e5642d1fb47d491bb49646af35cfb0
--- /dev/null
+++ b/pynemo/tide/nemo_bdy_tide3.py
@@ -0,0 +1,272 @@
+'''
+Module to extract constituents for the input grid mapped onto output grid
+
+@author: Mr. Srikanth Nagella
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import copy
+import tpxo_extract_HC
+import numpy as np
+from netCDF4 import Dataset
+from pynemo import nemo_bdy_grid_angle
+from pynemo.utils.nemo_bdy_lib import rot_rep
+from pynemo.reader.factory import GetFile
+
+import logging
+
+def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
+    """ TPXO Global Tidal model interpolation including rotation grid"""
+    key_transport = 0 # 1: compute velocities from transport; 0: use model velocities scaled from cm/s to m/s
+    numharm = len(comp)
+    logger = logging.getLogger(__name__)
+    g_type = Grid_T.grid_type
+    DC = copy.deepcopy(DstCoord)
+    dst_lon = DC.bdy_lonlat[g_type]['lon'][Grid_T.bdy_r == 0]
+    dst_lat = DC.bdy_lonlat[g_type]['lat'][Grid_T.bdy_r == 0]
+
+    #nbdyz = len(Grid_T.bdy_i)
+    nbdyu = len(Grid_U.bdy_i)
+    nbdyv = len(Grid_V.bdy_i)
+
+    #convert the dst_lon into TMD Conventions (0E/360E)
+    dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
+    #extract the surface elevation at each z-point
+    tpxo_z = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, g_type)
+    #convert back the z-longitudes into the usual conventions (-180E/+180E)
+    dst_lon[dst_lon > 180.0] = dst_lon[dst_lon > 180.0]-360.0
+    #check if elevation data are missing
+    ind = np.where((np.isnan(tpxo_z.amp)) | (np.isnan(tpxo_z.gph)))
+    if ind[0].size > 0:
+        logger.warning('Missing elevation along the open boundary')
+
+    ampz = tpxo_z.amp
+    phaz = tpxo_z.gph
+    ampz[ind] = 0.0
+    phaz[ind] = 0.0
+
+    #extract U values of constituents
+    dst_lon = DC.bdy_lonlat[Grid_U.grid_type]['lon'][Grid_U.bdy_r == 0]
+    dst_lat = DC.bdy_lonlat[Grid_U.grid_type]['lat'][Grid_U.bdy_r == 0]
+
+    #convert the U-longitudes into the TMD conventions (0/360E)
+    dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
+
+    tpxo_ux = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+    tpxo_vx = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
+
+    ampuX = tpxo_ux.amp
+    phauX = tpxo_ux.gph
+    ampvX = tpxo_vx.amp
+    phavX = tpxo_vx.gph
+
+    #check if ux data are missing
+    ind = np.where((np.isnan(ampuX)) | (np.isnan(phauX)))
+    if ind[0].size > 0:
+        logger.warning('Missing zonal velocity along the x open boundary')
+    ampuX[ind] = 0
+    phauX[ind] = 0
+    #check if vx data are missing
+    ind = np.where((np.isnan(ampvX)) | (np.isnan(phavX)))
+    if ind[0].size > 0:
+        logger.warning('Missing meridional velocity along the x open boundary')
+    ampvX[ind] = 0
+    phavX[ind] = 0
+
+    #convert back the u-longitudes into the usual conventions (-180E/+180E)
+    dst_lon[dst_lon > 180.0] = dst_lon[dst_lon > 180.0]-360.0
+
+    #extract V values of constituents
+    dst_lon = DC.bdy_lonlat[Grid_V.grid_type]['lon'][Grid_V.bdy_r == 0]
+    dst_lat = DC.bdy_lonlat[Grid_V.grid_type]['lat'][Grid_V.bdy_r == 0]
+
+    #convert the V-longitudes into the TMD conventions (0/360E)
+    dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
+    tpxo_uy = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+    tpxo_vy = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
+
+    ampuY = tpxo_uy.amp
+    phauY = tpxo_uy.gph
+    ampvY = tpxo_vy.amp
+    phavY = tpxo_vy.gph
+
+    #check if uy data are missing
+    ind = np.where((np.isnan(ampuY)) | (np.isnan(phauY)))
+    if ind[0].size > 0:
+        logger.warning('Missing zonal velocity along the y open boundary')
+    ampuY[ind] = 0
+    phauY[ind] = 0
+    #check if vy data are missing
+    ind = np.where((np.isnan(ampvY)) | (np.isnan(phavY)))
+    if ind[0].size > 0:
+        logger.warning('Missing meridional velocity along the y open boundary')
+    ampvY[ind] = 0
+    phavY[ind] = 0
+
+    #convert back the u-longitudes into the usual conventions (-180E/+180E)
+    dst_lon[dst_lon > 180.0] = dst_lon[dst_lon > 180.0]-360.0
+
+    #extract the depths along the U-point open boundary
+    zgr = GetFile(setup.settings['dst_zgr'])#Dataset(settings['dst_zgr'], 'r')
+    mbathy = zgr['mbathy'][:,:,:].squeeze() #zgr.variables['mbathy'][:,:,:]
+
+    #summing over scale factors as zps doesn't have hbat variable
+    #e3X = zgr.variables['e3u']
+    #e3X = np.squeeze(e3X)
+    try: # Read in either 3D or 4D data. 
+        e3X = zgr['e3u'][:,:,:].squeeze()
+    except ValueError:
+        e3X = zgr['e3u'][:,:,:,:].squeeze()
+    if len(np.shape(e3X)) != 3:
+        logger.warning('Expected a 3D array for e3u field')
+
+    heightrange = np.arange(1, e3X.shape[0]+1)
+    regular_heightprofile = np.tile(heightrange,
+                                    e3X.shape[1]*e3X.shape[2]\
+                                    ).reshape(heightrange.shape[0],
+                                              e3X.shape[1],
+                                              e3X.shape[2],
+                                              order='F')
+    ind = np.tile(mbathy, [e3X.shape[0], 1, 1]) >= regular_heightprofile
+
+    # in u direction blank cells neighbouring T-point land as defined by mbathy
+    ind[:, :, 1:] = ind[:, :, 0:-1] | ind[:, :, 1:]
+    hbatX = np.sum(e3X*ind, 0)
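+    # Worked example of the scale-factor sum (values illustrative): away from
+    # land-adjacent points, mbathy == 3 with an e3u column starting
+    # [1.0, 2.0, 2.5, ...] gives hbatX = 1.0 + 2.0 + 2.5 = 5.5 m of water depth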
+
+    depu = np.zeros((1, Grid_U.bdy_i.shape[0]))
+    for n in range(0, Grid_U.bdy_i.shape[0]):
+        depu[0, n] = hbatX[Grid_U.bdy_i[n, 1], Grid_U.bdy_i[n, 0]]
+
+    #extract the depths along the V-point open boundary
+    #summing over scale factors as zps doesn't have hbat variable
+    #e3X = zgr.variables['e3v']
+    #e3X = np.squeeze(e3X)
+    try: # Read in either 3D or 4D data. 
+        e3X = zgr['e3v'][:,:,:].squeeze()
+    except ValueError:
+        e3X = zgr['e3v'][:,:,:,:].squeeze()
+    if len(np.shape(e3X)) != 3:
+        logger.warning('Expected a 3D array for e3v field')
+
+    heightrange = np.arange(1, e3X.shape[0]+1)
+    regular_heightprofile = np.tile(heightrange,
+                                    e3X.shape[1]*e3X.shape[2]\
+                                    ).reshape(heightrange.shape[0],
+                                              e3X.shape[1],
+                                              e3X.shape[2],
+                                              order='F')
+    ind = np.tile(mbathy, [e3X.shape[0], 1, 1]) >= regular_heightprofile
+
+    # in u direction blank cells neighbouring T-point land as defined by mbathy
+    ind[:, 1:, :] = ind[:, 0:-1, :] | ind[:, 1:, :]
+    hbatX = np.sum(e3X*ind, 0)
+
+    depv = np.zeros((1, Grid_V.bdy_i.shape[0]))
+    for n in range(0, Grid_V.bdy_i.shape[0]):
+        depv[0, n] = hbatX[Grid_V.bdy_i[n, 1], Grid_V.bdy_i[n, 0]]
+
+    cosz = np.zeros((numharm, ampz.shape[1]))
+    sinz = np.zeros((numharm, ampz.shape[1]))
+    cosuX = np.zeros((numharm, nbdyu))
+    sinuX = np.zeros((numharm, nbdyu))
+    cosvX = np.zeros((numharm, nbdyu))
+    sinvX = np.zeros((numharm, nbdyu))
+    cosuY = np.zeros((numharm, nbdyv))
+    sinuY = np.zeros((numharm, nbdyv))
+    cosvY = np.zeros((numharm, nbdyv))
+    sinvY = np.zeros((numharm, nbdyv))
+
+    compindx = constituents_index(tpxo_z.cons, comp)
+    for h in range(0, numharm):
+        c = int(compindx[h])
+        if c != -1:
+            cosz[h, :] = ampz[c, :] * np.cos(np.deg2rad(phaz[c, :]))
+            sinz[h, :] = ampz[c, :] * np.sin(np.deg2rad(phaz[c, :]))
+
+            if key_transport == 1:
+                if (np.sum(depu[:] <= 0.0) > 0) | (np.sum(depv[:] <= 0.0) > 0):
+                    logger.error('Error: land or mask contamination')
+
+                cosuX[h, :] = ampuX[c, :] * np.cos(np.deg2rad(phauX[c, :])) / depu
+                sinuX[h, :] = ampuX[c, :] * np.sin(np.deg2rad(phauX[c, :])) / depu
+                cosvX[h, :] = ampvX[c, :] * np.cos(np.deg2rad(phavX[c, :])) / depu
+                sinvX[h, :] = ampvX[c, :] * np.sin(np.deg2rad(phavX[c, :])) / depu
+                cosuY[h, :] = ampuY[c, :] * np.cos(np.deg2rad(phauY[c, :])) / depv
+                sinuY[h, :] = ampuY[c, :] * np.sin(np.deg2rad(phauY[c, :])) / depv
+                cosvY[h, :] = ampvY[c, :] * np.cos(np.deg2rad(phavY[c, :])) / depv
+                sinvY[h, :] = ampvY[c, :] * np.sin(np.deg2rad(phavY[c, :])) / depv
+            else:
+                cosuX[h, :] = 0.01 * ampuX[c, :] * np.cos(np.deg2rad(phauX[c, :]))
+                sinuX[h, :] = 0.01 * ampuX[c, :] * np.sin(np.deg2rad(phauX[c, :]))
+                cosvX[h, :] = 0.01 * ampvX[c, :] * np.cos(np.deg2rad(phavX[c, :]))
+                sinvX[h, :] = 0.01 * ampvX[c, :] * np.sin(np.deg2rad(phavX[c, :]))
+                cosuY[h, :] = 0.01 * ampuY[c, :] * np.cos(np.deg2rad(phauY[c, :]))
+                sinuY[h, :] = 0.01 * ampuY[c, :] * np.sin(np.deg2rad(phauY[c, :]))
+                cosvY[h, :] = 0.01 * ampvY[c, :] * np.cos(np.deg2rad(phavY[c, :]))
+                sinvY[h, :] = 0.01 * ampvY[c, :] * np.sin(np.deg2rad(phavY[c, :]))
+
+# TODO: Do we need to rotate? And is this method correct?
+    maxJ = DC.lonlat['t']['lon'].shape[0]
+    maxI = DC.lonlat['t']['lon'].shape[1]
+    dst_gcos = np.ones([maxJ, maxI])
+    dst_gsin = np.zeros([maxJ, maxI])
+    #lets start with the u-points
+    grid_angles = nemo_bdy_grid_angle.GridAngle(setup.settings['dst_hgr'], 0, maxI, 0, maxJ, 'u')
+    dst_gcos = grid_angles.cosval
+    dst_gsin = grid_angles.sinval
+
+    #retain only boundary points rotation information
+    tmp_gcos = np.zeros(Grid_U.bdy_i.shape[0])
+    tmp_gsin = np.zeros(Grid_U.bdy_i.shape[0])
+    for index in range(Grid_U.bdy_i.shape[0]):
+        tmp_gcos[index] = dst_gcos[Grid_U.bdy_i[index, 1], Grid_U.bdy_i[index, 0]]
+        tmp_gsin[index] = dst_gsin[Grid_U.bdy_i[index, 1], Grid_U.bdy_i[index, 0]]
+    dst_gcos = tmp_gcos
+    dst_gsin = tmp_gsin
+
+    cosu = rot_rep(cosuX, cosvX, 'u', 'en to i', dst_gcos, dst_gsin)
+    sinu = rot_rep(sinuX, sinvX, 'u', 'en to i', dst_gcos, dst_gsin)
+
+    #let do the v points
+    dst_gcos = np.ones([maxJ, maxI])
+    dst_gsin = np.zeros([maxJ, maxI])
+    grid_angles = nemo_bdy_grid_angle.GridAngle(setup.settings['dst_hgr'], 0, maxI, 0, maxJ, 'v')
+    dst_gcos = grid_angles.cosval
+    dst_gsin = grid_angles.sinval
+
+    #retain only boundary points rotation information
+    tmp_gcos = np.zeros(Grid_V.bdy_i.shape[0])
+    tmp_gsin = np.zeros(Grid_V.bdy_i.shape[0])
+    for index in range(Grid_V.bdy_i.shape[0]):
+        tmp_gcos[index] = dst_gcos[Grid_V.bdy_i[index, 1], Grid_V.bdy_i[index, 0]]
+        tmp_gsin[index] = dst_gsin[Grid_V.bdy_i[index, 1], Grid_V.bdy_i[index, 0]]
+    dst_gcos = tmp_gcos
+    dst_gsin = tmp_gsin
+
+    cosv = rot_rep(cosuY, cosvY, 'v', 'en to j', dst_gcos, dst_gsin)
+    sinv = rot_rep(sinuY, sinvY, 'v', 'en to j', dst_gcos, dst_gsin)
+
+    #return the values
+    return cosz, sinz, cosu, sinu, cosv, sinv
+
+
+def constituents_index(constituents, inputcons):
+    """
+    Converts the input constituents to indices into the source tidal constituents list.
+    Inputs:     constituents: The list of constituents available from the source data
+                        e.g. TPXO: ['m2', 's2', 'n2', 'k2', 'k1', 'o1', 'p1', 'q1', 'mf', 'mm', 'm4', 'ms4', 'mn4']
+                inputcons: The dictionary of constituents from the namelist with their numbers 
+                        e.g. {'1': "'M2'", '3': "'K2'", '2': "'S2'", '4': "'M4'"}
+    Output:     retindx: The indices (relative to the source data list) of the dictionary items from the namelist,
+                        or -1 where a constituent is not available in the source data
+                        e.g. [  0.   3.   1.  10.]
+    """
+    retindx = np.zeros(len(inputcons))
+    count = 0
+    for value in inputcons.values():
+        const_name = value.replace("'", "").lower() # force inputcons entries to lowercase
+        try:
+            retindx[count] = [x.lower() for x in constituents].index(const_name) # force constituents to lowercase
+        except ValueError:
+            retindx[count] = -1 # constituent not present in the source data
+        count = count+1
+    return retindx
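+
+# Example (values illustrative): with source constituents
+# ['m2', 's2', 'n2', 'k2', ...] and the namelist dictionary
+# {'1': "'M2'", '2': "'K2'"}, constituents_index returns array([ 0.,  3.])
+# (ordering follows the dictionary iteration order)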
diff --git a/pynemo/tide/nemo_bdy_tide_ncgen.py b/pynemo/tide/nemo_bdy_tide_ncgen.py
new file mode 100644
index 0000000000000000000000000000000000000000..43943fa72ab43383cc3d643a45d774e966cdc350
--- /dev/null
+++ b/pynemo/tide/nemo_bdy_tide_ncgen.py
@@ -0,0 +1,131 @@
+'''
+Creates Tide netcdf file ready for population
+
+@author: Mr. Srikanth Nagella
+'''
+
+from netCDF4 import Dataset 
+import datetime
+import logging
+
+def CreateBDYTideNetcdfFile(filename, N, I, J, h, fv, grd):
+    
+    # Dimension Lengths
+    xb_len = N
+    yb_len = 1
+    x_len  = I
+    y_len  = J
+    
+    # Enter define mode
+    ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
+    
+    #define dimensions
+    dimxbID = ncid.createDimension('xb',xb_len)
+    dimybID = ncid.createDimension('yb',yb_len)
+    dimxID  = ncid.createDimension('x', x_len)
+    dimyID  = ncid.createDimension('y', y_len)
+
+    #define variable  
+    varlonID = ncid.createVariable('nav_lon','f4',('y','x',))
+    varlatID = ncid.createVariable('nav_lat','f4',('y','x',))
+    
+    
+    if grd =='T':
+        varmskID = ncid.createVariable('bdy_msk','f4',('y','x',),fill_value=fv)
+        varz1ID = ncid.createVariable('z1','f4',('yb','xb',),fill_value=fv)
+        varz2ID = ncid.createVariable('z2','f4',('yb','xb',),fill_value=fv)
+    elif grd == 'U':
+        varu1ID = ncid.createVariable('u1','f4',('yb','xb',),fill_value=fv)
+        varu2ID = ncid.createVariable('u2','f4',('yb','xb',),fill_value=fv)
+    elif grd == 'V':
+        varv1ID = ncid.createVariable('v1','f4',('yb','xb',),fill_value=fv)
+        varv2ID = ncid.createVariable('v2','f4',('yb','xb',),fill_value=fv)
+    else :
+        logging.error("Unknown Grid input")
+        
+    
+    varnbiID = ncid.createVariable('nbidta','i4',('yb','xb',))
+    varnbjID = ncid.createVariable('nbjdta','i4',('yb','xb',))
+    varnbrID = ncid.createVariable('nbrdta','i4',('yb','xb',))
+    #Global Attributes
+    ncid.file_name = filename
+    ncid.creation_date = str(datetime.datetime.now())
+    ncid.history = h
+    ncid.institution = 'National Oceanography Centre, Liverpool, U.K.'
+    
+    #Longitude axis attributes
+    varlonID.axis = 'Longitude'
+    varlonID.short_name = 'nav_lon'
+    varlonID.units = 'degrees_east'
+    varlonID.long_name = 'Longitude'
+    
+    #Latitude axis attributes
+    varlatID.axis = 'Latitude'
+    varlatID.short_name = 'nav_lat'
+    varlatID.units = 'degrees_north'
+    varlatID.long_name = 'Latitude'
+    
+    #nbidta attributes
+    varnbiID.short_name = 'nbidta'
+    varnbiID.units = 'unitless'
+    varnbiID.long_name = 'Bdy i indices'
+    
+    #nbjdta attributes
+    varnbjID.short_name = 'nbjdta'
+    varnbjID.units = 'unitless'
+    varnbjID.long_name = 'Bdy j indices'
+    
+    #nbrdta attributes
+    varnbrID.short_name = 'nbrdta'
+    varnbrID.units = 'unitless'
+    varnbrID.long_name = 'Bdy discrete distance'
+    if grd == 'T' :
+      
+        varmskID.short_name = 'bdy_msk'
+        varmskID.units = 'unitless'
+        varmskID.long_name = 'Structured boundary mask'
+        
+        varz1ID.units = 'm'
+        varz1ID.short_name = 'z1'
+        varz1ID.long_name = 'tidal elevation: cosine'
+        varz1ID.grid = 'bdyT'
+        
+        varz2ID.units = 'm'
+        varz2ID.short_name = 'z2'
+        varz2ID.long_name = 'tidal elevation: sine'
+        varz2ID.grid = 'bdyT'
+        
+    elif grd == 'U' :
+        
+        varu1ID.units = 'm/s'
+        varu1ID.short_name = 'u1'
+        varu1ID.long_name = 'tidal east velocity: cosine'
+        varu1ID.grid = 'bdyU'
+        
+        varu2ID.units = 'm/s'
+        varu2ID.short_name = 'u2'
+        varu2ID.long_name = 'tidal east velocity: sine'
+        varu2ID.grid = 'bdyU'
+        
+    elif grd == 'V':
+        
+        varv1ID.units = 'm/s'
+        varv1ID.short_name = 'v1'
+        varv1ID.long_name = 'tidal north velocity: cosine'
+        varv1ID.grid = 'bdyV'
+        
+        varv2ID.units = 'm/s'
+        varv2ID.short_name = 'v2'
+        varv2ID.long_name = 'tidal north velocity: sine'
+        varv2ID.grid = 'bdyV'
+        
+    else :
+        logging.error('Unknown Grid')
+        
+    ncid.close()
+
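+# A minimal usage sketch (argument values are illustrative only):
+#
+#   CreateBDYTideNetcdfFile('tide_m2_grid_T.nc', 100, 351, 401,
+#                           'Tidal harmonics produced by pynemo', -1e+20, 'T')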
+              
+    
+
+
diff --git a/pynemo/tide/tpxo_extract_HC.py b/pynemo/tide/tpxo_extract_HC.py
new file mode 100644
index 0000000000000000000000000000000000000000..c73b48bcd9d86148e8f6686a8b35dffff85f6f60
--- /dev/null
+++ b/pynemo/tide/tpxo_extract_HC.py
@@ -0,0 +1,270 @@
+'''
+This is to extract the tidal harmonic constants out of a tidal model
+for given locations
+[amp,Gph] = tpxo_extract_HC(Model,lat,lon,type,Cid)
+
+@author: Mr. Srikanth Nagella
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+from scipy import interpolate
+import numpy as np
+
+class TpxoExtract(object):
+    """ This is TPXO model extract_hc.c implementation in python"""
+    def __init__(self, settings, lat, lon, grid_type):
+        """initialises the Extract of tide information from the netcdf
+           Tidal files"""
+        # Set tide model
+        tide_model = 'TPXO'
+
+        if tide_model == 'TPXO':  # Define stuff to generalise Tide model
+            hRe_name = 'hRe'
+            hIm_name = 'hIm'
+            lon_z_name = 'lon_z'
+            lat_z_name = 'lat_z'
+            URe_name = 'URe'
+            UIm_name = 'UIm'
+            lon_u_name = 'lon_u'
+            lat_u_name = 'lat_u'
+            VRe_name = 'VRe'
+            VIm_name = 'VIm'
+            lon_v_name = 'lon_v'
+            lat_v_name = 'lat_v'
+            mz_name = 'mz'
+            mu_name = 'mu'
+            mv_name = 'mv'
+            self.grid = Dataset(settings['tide_grid']) #../data/tide/grid_tpxo7.2.nc
+            #read the height_dataset file
+            self.height_dataset = Dataset(settings['tide_h']) #../data/tide/h_tpxo7.2.nc
+            #read the velocity_dataset file
+            self.velocity_dataset = Dataset(settings['tide_u']) #../data/tide/u_tpxo7.2.nc
+
+            height_z = self.grid.variables['hz']
+            mask_z = self.grid.variables['mz']
+            lon_z = self.height_dataset.variables[lon_z_name][:, 0]
+            lat_z = self.height_dataset.variables[lat_z_name][0, :]
+            lon_resolution = lon_z[1] - lon_z[0]
+            data_in_km = 0 # added to maintain the reference to matlab tmd code
+            # Pull out the constituents that are available
+            self.cons = []
+            for ncon in range(self.height_dataset.variables['con'].shape[0]):
+                self.cons.append(self.height_dataset.variables['con'][ncon, :].tostring().strip())
+
+        elif tide_model == 'FES':
+            constituents = ['2N2','EPS2','J1','K1','K2','L2','LA2','M2','M3','M4','M6','M8','MF','MKS2','MM','MN4','MS4','MSF','MSQM','MTM','MU2','N2','N4','NU2','O1','P1','Q1','R2','S1','S2','S4','SA','SSA','T2']
+            print 'FES is not implemented in this routine, though that would be ideal; see fes_extract_HC.py instead'
+
+        else:
+            print "Don't know that tide model"
+
+        # Wrap coordinates in longitude if the domain is global
+        glob = 0
+        if lon_z[-1]-lon_z[0] == 360-lon_resolution:
+            glob = 1
+        if glob == 1:
+            lon_z = np.concatenate(([lon_z[0]-lon_resolution, ], lon_z,
+                                    [lon_z[-1]+lon_resolution, ]))
+            height_z = np.concatenate(([height_z[-1, :], ], height_z, [height_z[0, :],]), axis=0)
+            mask_z = np.concatenate(([mask_z[-1, :], ], mask_z, [mask_z[0, :], ]), axis=0)
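+            # e.g. the 0.25-deg TPXO7.2 height grid runs 0.125..359.875, so
+            # lon_z[-1]-lon_z[0] == 360-0.25 and the domain is treated as
+            # global, with one wrap row added at each end so interpolation
+            # works across the dateline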
+
+        #adjust lon convention
+        xmin = np.min(lon)
+
+        if data_in_km == 0:
+            if xmin < lon_z[0]:
+                lon[lon < 0] = lon[lon < 0] + 360
+            if xmin > lon_z[-1]:
+                lon[lon > 180] = lon[lon > 180]-360
+
+        height_z[height_z == 0] = np.NaN
+        lonlat = np.concatenate((lon, lat))
+        lonlat = np.reshape(lonlat, (lon.size, 2), order='F')
+
+        depth = interpolate.interpn((lon_z, lat_z), height_z, lonlat)
+        depth_mask = interpolate.interpn((lon_z, lat_z), mask_z, lonlat)
+        index = np.where((np.isnan(depth)) & (depth_mask > 0))
+
+        if index[0].size != 0:
+            depth[index] = bilinear_interpolation(lon_z, lat_z, height_z, lon[index], lat[index])
+
+        if grid_type == 'z' or grid_type == 't':
+            self.amp, self.gph = self.interpolate_constituents(self.height_dataset,
+                                                               hRe_name, hIm_name, lon_z_name, lat_z_name,
+                                                               lon, lat, maskname=mz_name)
+        elif grid_type == 'u':
+            self.amp, self.gph = self.interpolate_constituents(self.velocity_dataset,
+                                                               URe_name, UIm_name, lon_u_name, lat_u_name,
+                                                               lon, lat, depth, maskname=mu_name)
+        elif grid_type == 'v':
+            self.amp, self.gph = self.interpolate_constituents(self.velocity_dataset,
+                                                               VRe_name, VIm_name, lon_v_name, lat_v_name,
+                                                               lon, lat, depth, maskname=mv_name)
+        else:
+            print 'Unknown grid_type'
+            return
+
+    def interpolate_constituents(self, nc_dataset, real_var_name, img_var_name, lon_var_name,
+                                 lat_var_name, lon, lat, height_data=None, maskname=None):
+        """ Interpolates the tidal constituents along the given lat lon coordinates """
+        amp = np.zeros((nc_dataset.variables['con'].shape[0], lon.shape[0],))
+        gph = np.zeros((nc_dataset.variables['con'].shape[0], lon.shape[0],))
+        data = np.array(np.ravel(nc_dataset.variables[real_var_name]), dtype=complex)
+        data.imag = np.array(np.ravel(nc_dataset.variables[img_var_name]))
+
+        data = data.reshape(nc_dataset.variables[real_var_name].shape)
+        #data[data==0] = np.NaN
+
+        #Lat Lon values
+        x_values = nc_dataset.variables[lon_var_name][:, 0]
+        y_values = nc_dataset.variables[lat_var_name][0, :]
+        x_resolution = x_values[1] - x_values[0]
+        glob = 0
+        if x_values[-1]-x_values[0] == 360-x_resolution:
+            glob = 1
+
+        if glob == 1:
+            x_values = np.concatenate(([x_values[0]-x_resolution,], x_values,
+                                       [x_values[-1]+x_resolution, ]))
+
+        #adjust lon convention
+        xmin = np.min(lon)
+        if xmin < x_values[0]:
+            lon[lon < 0] = lon[lon < 0] + 360
+        if xmin > x_values[-1]:
+            lon[lon > 180] = lon[lon > 180]-360
+
+        lonlat = np.concatenate((lon, lat))
+        lonlat = np.reshape(lonlat, (lon.size, 2), order='F')
+
+        mask = self.grid.variables[maskname]
+        mask = np.concatenate(([mask[-1, :], ], mask, [mask[0, :], ]), axis=0)
+        #interpolate the mask values
+        maskedpoints = interpolate.interpn((x_values, y_values), mask, lonlat)
+
+        data_temp = np.zeros((data.shape[0], lon.shape[0], 2, ))
+        for cons_index in range(data.shape[0]):
+            #interpolate real values
+            data_temp[cons_index, :, 0] = interpolate_data(x_values, y_values,
+                                                                data[cons_index, :, :].real,
+                                                                maskedpoints, lonlat)
+            #interpolate imag values
+            data_temp[cons_index, :, 1] = interpolate_data(x_values, y_values,
+                                                                data[cons_index, :, :].imag,
+                                                                maskedpoints, lonlat)
+
+            # Velocity components are depth-integrated transports: divide by
+            # the water depth (the factor of 100 presumably converts to cm/s)
+            if height_data is not None:
+                data_temp[cons_index, :, 0] = data_temp[cons_index, :, 0]/height_data*100
+                data_temp[cons_index, :, 1] = data_temp[cons_index, :, 1]/height_data*100
+
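+            # Recombine the interpolated real and imaginary parts; amplitude is
+            # the modulus and the phase lag is atan2(-Im, Re), converted to
+            # degrees in [0, 360) after the loop.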
+            zcomplex = np.array(data_temp[cons_index, :, 0], dtype=complex)
+            zcomplex.imag = data_temp[cons_index, :, 1]
+
+            amp[cons_index, :] = np.absolute(zcomplex)
+            gph[cons_index, :] = np.arctan2(-1*zcomplex.imag, zcomplex.real)
+        gph = gph*180.0/np.pi
+        gph[gph < 0] = gph[gph < 0]+360.0
+        return amp, gph
+
+def interpolate_data(lon, lat, data, mask, lonlat):
+    """ Interpolate data data on regular grid for given lonlat coordinates """
+    result = np.zeros((lonlat.shape[0], ))
+    data[data == 0] = np.NaN
+    data = np.concatenate(([data[-1, :], ], data, [data[0, :], ]), axis=0)
+    result[:] = interpolate.interpn((lon, lat), data, lonlat)
+    index = np.where((np.isnan(result)) & (mask > 0))
+    if index[0].size != 0:
+        result[index] = bilinear_interpolation(lon, lat, data, np.ravel(lonlat[index, 0]),
+                                               np.ravel(lonlat[index, 1]))
+    return result
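+
+# A minimal usage sketch (names illustrative): `lon` here is the wrap-extended
+# axis and `maskedpoints` holds mask values pre-interpolated at lonlat:
+#   lonlat = np.reshape(np.concatenate((pts_lon, pts_lat)), (pts_lon.size, 2), order='F')
+#   maskedpoints = interpolate.interpn((lon, lat), mask_grid, lonlat)
+#   values = interpolate_data(lon, lat, grid_data, maskedpoints, lonlat)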
+
+def bilinear_interpolation(lon, lat, data, lon_new, lat_new):
+    """ Does a bilinear interpolation of grid where the data values are NaN's"""
+    glob = 0
+    lon_resolution = lon[1] - lon[0]
+    if lon[-1] - lon[0] == 360 - lon_resolution:
+        glob = 1
+    inan = np.where(np.isnan(data))
+    data[inan] = 0
+    mask = np.zeros(data.shape)
+    mask[data != 0] = 1
+    if lon.size != data.shape[0] or lat.size != data.shape[1]:
+        print 'bilinear_interpolation: lon/lat sizes do not match the data shape'
+        return np.NaN
+    if glob == 1:
+        lon = np.concatenate(([lon[0] - 2 * lon_resolution, lon[0] - lon_resolution, ],
+                              lon, [lon[-1] + lon_resolution, lon[-1] + 2 * lon_resolution]))
+        data = np.concatenate((data[-2:, :], data, data[:2, :]), axis=0)
+        mask = np.concatenate((mask[-2:, :], mask, mask[:2, :]), axis=0)
+    lon_new_copy = lon_new.copy()
+
+    # points falling outside the grid's longitude range need the convention shifted
+    nonmask_index = np.where((lon_new_copy < lon[0]) | (lon_new_copy > lon[-1]))
+    if lon[-1] > 180:
+        lon_new_copy[nonmask_index] = lon_new_copy[nonmask_index] + 360
+    if lon[-1] < 0:
+        lon_new_copy[nonmask_index] = lon_new_copy[nonmask_index] - 360
+    lon_new_copy[lon_new_copy > 360] = lon_new_copy[lon_new_copy > 360] - 360
+    lon_new_copy[lon_new_copy < -180] = lon_new_copy[lon_new_copy < -180] + 360
+
+    weight_factor_0 = 1 / (4 + 2 * np.sqrt(2))
+    weight_factor_1 = weight_factor_0 / np.sqrt(2)
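+    # The eight neighbours are combined with inverse-distance weights: direct
+    # neighbours get weight_factor_0, diagonals weight_factor_0/sqrt(2), and a
+    # full stencil sums to one (4*w0 + 4*w0/sqrt(2) = w0*(4 + 2*sqrt(2)) = 1).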
+    height_temp = weight_factor_1 * data[0:-2, 0:-2] + weight_factor_0 * data[0:-2, 1:-1] + \
+                  weight_factor_1 * data[0:-2, 2:] + weight_factor_1 * data[2:, 0:-2] + \
+                  weight_factor_0 * data[2:, 1:-1] + weight_factor_1 * data[2:, 2:] + \
+                  weight_factor_0 * data[1:-1, 0:-2] + weight_factor_0 * data[1:-1, 2:]
+    mask_temp = weight_factor_1 * mask[0:-2, 0:-2] + weight_factor_0 * mask[0:-2, 1:-1] + \
+                weight_factor_1 * mask[0:-2, 2:] + weight_factor_1 * mask[2:, 0:-2] + \
+                weight_factor_0 * mask[2:, 1:-1] + weight_factor_1 * mask[2:, 2:] + \
+                weight_factor_0 * mask[1:-1, 0:-2] + weight_factor_0 * mask[1:-1, 2:]
+    mask_temp[mask_temp == 0] = 1
+    data_copy = data.copy()
+    data_copy[1:-1, 1:-1] = np.divide(height_temp, mask_temp)
+    nonmask_index = np.where(mask == 1)
+    data_copy[nonmask_index] = data[nonmask_index]
+    data_copy[data_copy == 0] = np.NaN
+    lonlat = np.concatenate((lon_new_copy, lat_new))
+    lonlat = np.reshape(lonlat, (lon_new_copy.size, 2), order='F')
+    result = interpolate.interpn((lon, lat), data_copy, lonlat)
+
+    return result
+
+
diff --git a/pynemo/utils/Constants.py b/pynemo/utils/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e38c4face83120d7cf90692e40a5bc7f67aaace
--- /dev/null
+++ b/pynemo/utils/Constants.py
@@ -0,0 +1,7 @@
+'''
+This file defines all the constants that will be used.
+
+@author: Mr. Srikanth Nagella
+'''
+
+DEFAULT_MASK_PIXELS = 1
\ No newline at end of file
diff --git a/pynemo/utils/__init__.py b/pynemo/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/pynemo/utils/e3_to_depth.py b/pynemo/utils/e3_to_depth.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e1b551b1f5a33daf622f0d00443ce52595b8e87
--- /dev/null
+++ b/pynemo/utils/e3_to_depth.py
@@ -0,0 +1,23 @@
+'''
+function e3_to_depth
+Purpose :   compute t- & w-depths of model levels from e3t & e3w scale factors
+Method  :   the t- & w-depths are given by the summation of e3w & e3t, respectively
+Action  :   pe3t, pe3w : scale factors of t- and w-points (m)
+Usage: [gdept, gdepw] = e3_to_depth(e3t, e3w, jpk)
+'''
+
+import numpy as np
+## COMMENT(jelt 5Feb18). Since the move from NEMOv3.6 to NEMOv4 we now need to compute the depth variables from e3[tw] metrics
+## Older namelist_dst.ncml files did not have an e3w definition. This is now needed to reconstruct depth at w-points
+## However if e3w isn't defined in the *ncml file it is likely that PyNEMO will fail here.
+def e3_to_depth(pe3t, pe3w, jpk):
+  pdepw      = np.zeros_like(pe3w)
+  pdepw[0,:] = 0.
+  pdept      = np.zeros_like(pe3t)
+  pdept[0,:] = 0.5 * pe3w[0,:]
+
+  for jk in np.arange(1,jpk,1):
+    pdepw[jk,:] = pdepw[jk-1,:] + pe3t[jk-1,:]
+    pdept[jk,:] = pdept[jk-1,:] + pe3w[jk  ,:]
+
+  return pdept, pdepw
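+
+# A worked sanity check (illustrative values): with uniform 10 m cells on a
+# single column,
+#   e3t = np.full((5, 1), 10.); e3w = np.full((5, 1), 10.)
+#   gdept, gdepw = e3_to_depth(e3t, e3w, 5)
+# gives w-depths [0, 10, 20, 30, 40] and t-depths [5, 15, 25, 35, 45].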
diff --git a/pynemo/utils/gcoms_break_depth.py b/pynemo/utils/gcoms_break_depth.py
new file mode 100644
index 0000000000000000000000000000000000000000..eab5d74c86cdf577998735ecb5cdcc6bb8a0f363
--- /dev/null
+++ b/pynemo/utils/gcoms_break_depth.py
@@ -0,0 +1,282 @@
+'''
+Rewriting of the break-depth implementation from the MATLAB version
+
+@author: Mr. Srikanth Nagella
+'''
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+import numpy as np
+import math
+import logging
+#import pyproj
+
+import scipy.ndimage as ndimage
+import seawater
+def gcoms_break_depth(bathy):
+    """ This creates a mask for the break depth using histograms """
+    ocean_depth = bathy[...]
+    ocean_depth = ocean_depth[ocean_depth > 0]
+
+    depth_bin = 10.0    
+    depth_max = np.max(ocean_depth)
+    num_bin = int(math.floor(depth_max/depth_bin))
+
+    # Compute the histogram of depth values over the whole Domain
+    depth_vec = (np.arange(1,num_bin+1)+0.5)*depth_bin    
+    histbat, dummy = np.histogram(ocean_depth, num_bin)
+    max_hist_value_index = np.argmax(histbat)
+    z_smo = 100.0
+    nsmo = math.floor(z_smo/depth_bin)
+    hist_smooth = ndimage.uniform_filter(histbat.astype(float), int(nsmo)*2+1, mode='nearest')
+
+    kshelf = -1
+    kbreak = -1
+    kplain = -1
+    kfloor = -1
+    histfloor = 0.0
+    for depth_bin_index in range(0,num_bin-1): 
+        if kshelf == -1:
+            if hist_smooth[depth_bin_index] > hist_smooth[depth_bin_index+1]:
+                kshelf = depth_bin_index
+        elif kbreak == -1:
+            if hist_smooth[depth_bin_index] < hist_smooth[depth_bin_index+1]:
+                kbreak = depth_bin_index
+        elif kplain == -1:
+            if hist_smooth[depth_bin_index] > hist_smooth[depth_bin_index+1]:
+                kplain = depth_bin_index
+                histfloor = hist_smooth[depth_bin_index]
+    
+    depth_shelf = depth_vec[kshelf]
+    depth_break = depth_vec[kbreak]
+    depth_plain = depth_vec[kplain]
+    h_max = math.floor(depth_break/100)*100
+    return depth_shelf, h_max
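+
+# A hedged sketch (synthetic values): a bimodal depth distribution with a
+# shelf near 100 m and an abyssal plain near 4000 m,
+#   bathy = np.concatenate((np.random.uniform(50., 150., 5000),
+#                           np.random.uniform(3500., 4500., 5000)))
+#   depth_shelf, h_max = gcoms_break_depth(bathy)
+# should place depth_shelf near the shelf mode and h_max near the break depth.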
+
+
+def gcoms_boundary_masks(bathy, ov, lv):
+    """
+    :param bathy: input bathymetry data
+    :param ov: fill value marking out-of-domain (open-ocean) points, e.g. -1
+    :param lv: fill value marking land points, e.g. 0
+    :type bathy: numpy array
+    :type ov: float
+    :type lv: float
+    :return: ob, lb boolean masks flagging valid points adjacent to
+             out-of-domain (or grid-edge) cells and to land cells respectively
+    :rtype: numpy arrays
+
+    :Example:
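+
+    A hedged sketch with toy values (using -1 for out-of-domain and 0 for land,
+    as in polcoms_select_domain):
+
+    >>> import numpy as np
+    >>> bathy = np.array([[-1., -1., -1.],
+    ...                   [-1., 50., 20.],
+    ...                   [-1., 40.,  0.]])
+    >>> ob, lb = gcoms_boundary_masks(bathy, -1, 0)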
+    """    
+    tmp = np.pad(bathy, (1, 1), 'constant', constant_values=(np.NaN, np.NaN))
+    tmp[tmp==ov] = np.NaN
+    
+    tmp1 = tmp[1:-1, :-2] + tmp[1:-1, 2:] + tmp[:-2, 1:-1] + tmp[2:, 1:-1]
+
+    ob = np.logical_and(np.logical_and(np.isnan(tmp1), bathy != ov) , bathy != lv)
+    
+    tmp = np.pad(bathy, (1, 1), 'constant', constant_values=(-1,-1))
+    tmp[tmp==lv] = np.NaN
+    
+    tmp1 = tmp[1:-1, :-2] + tmp[1:-1, 2:] + tmp[:-2, 1:-1] + tmp[2:, 1:-1]
+
+    lb = np.logical_and(np.logical_and(np.isnan(tmp1), bathy!=ov), bathy!=lv)
+    return ob, lb
+
+
+def polcoms_select_domain(bathy, lat, lon, roi, dr):
+    """ Calculate the shelf break and return a mask selecting the region of
+    interest extended out to the break
+    :param bathy: input bathymetry data
+    :param lat: latitude array
+    :param lon: longitude array
+    :param roi: region of interest index box [i0, i1, j0, j1]
+    :param dr: distance from the shelf break (km)
+    :type bathy: numpy array
+    :type lat: numpy array
+    :type lon: numpy array
+    :type roi: python array
+    :type dr: float
+    :return: boolean mask over bathy selecting the domain
+    :rtype: numpy array
+
+    :Example:
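+
+    A hedged sketch (names and values illustrative only):
+
+    >>> # bathy, lat, lon: 2-D arrays of equal shape
+    >>> # roi = [i0, i1, j0, j1] index box; dr = 200.0 (km)
+    >>> # sel = polcoms_select_domain(bathy, lat, lon, roi, dr)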
+    """
+    logger = logging.getLogger(__name__)
+
+    # create a copy of bathy
+    bathy_copy = bathy.copy()
+
+    tmp = bathy_copy[roi[2]:roi[3],roi[0]:roi[1]]
+    lat = lat[roi[2]:roi[3],roi[0]:roi[1]]
+    lon = lon[roi[2]:roi[3],roi[0]:roi[1]]
+    
+    nanind = np.isnan(tmp) 
+    tmp[nanind] = -1
+    dummy, lb = gcoms_boundary_masks(tmp, -1,0)
+    Zshelf, Hmax = gcoms_break_depth(tmp)
+    tmp[tmp>Hmax] = -1
+    tmp[np.logical_and(np.logical_and(tmp!=0, np.logical_not(np.isnan(tmp))), tmp!=-1)] = 1
+    
+    ob, dummy = gcoms_boundary_masks(tmp, -1, 0)
+    
+    lat_ob = np.ravel(lat,order='F')[np.ravel(ob,order='F')]
+    lon_ob = np.ravel(lon,order='F')[np.ravel(ob,order='F')]
+
+    len_lat = len(lat[:, 0])
+    len_lon = len(lon[0, :])
+    for idx in range(0, len(lat_ob)):
+        lat_lon_index = np.nonzero(np.logical_and(lat == lat_ob[idx], lon == lon_ob[idx]))
+        jj = int(lat_lon_index[0][0])
+        ii = int(lat_lon_index[1][0])
+        # messy fudge to determine local dx,dy TODO tidy and formalise
+        j_0 = max(jj, 0)
+        j_e = min(jj+1+1, len_lat)
+        i_0 = max(ii, 0)
+        i_e = min(ii+1+1, len_lon)
+        if j_e > len_lat-2:
+            j_0 = j_0 - 3
+            j_e = j_0 + 2
+        if i_e > len_lon-2:
+            i_0 = i_0 - 3
+            i_e = i_0 + 2
+        lat_slice = slice(j_0, j_e)
+        lon_slice = slice(i_0, i_e)
+        lat_pts = lat[lat_slice, lon_slice]
+        lon_pts = lon[lat_slice, lon_slice]
+        dy, py = seawater.dist(lat_pts[:, 0], lon_pts[:, 0])
+        dx, px = seawater.dist(lat_pts[0, :], lon_pts[0, :])
+        r = int(np.ceil(dr/np.amax([dx, dy])))
+        lat_slice = slice(max(jj-r, 0), min(jj+r+1, len_lat))
+        lon_slice = slice(max(ii-r, 0), min(ii+r+1, len_lon))
+        lat_pts = lat[lat_slice, lon_slice]
+        lon_pts = lon[lat_slice, lon_slice]
+        lat_pts_shape = lat_pts.shape
+        lat_pts = np.ravel(lat_pts)
+        lon_pts = np.ravel(lon_pts)
+        # NOTE: seawater package calculates the distance from point to the next point in the array
+        # that is the reason to insert reference point before every point
+        lat_pts = np.insert(lat_pts,range(0,len(lat_pts)), lat_ob[idx])
+        lon_pts = np.insert(lon_pts,range(0,len(lon_pts)), lon_ob[idx])
+        distance_pts = seawater.dist(lat_pts, lon_pts)
+        #distances repeat themselves so only pick every alternative distance
+        distance_pts = distance_pts[0][::2]
+        
+        #Using pyproj
+        #geod = pyproj.Geod(ellps='WGS84')
+        #dummy,dummy, distance_pts = geod.inv(len(lon_pts)*[lon_ob[idx]],len(lat_pts)*[lat_ob[idx]], lon_pts, lat_pts)
+        #distance_pts=distance_pts/1000.0
+                         
+        distance_pts = np.reshape(distance_pts, lat_pts_shape)
+        distance_pts[distance_pts>dr] = np.NaN
+        distance_pts[np.logical_not(np.isnan(distance_pts))] = 1
+        tmp1 = tmp[lat_slice, lon_slice]
+        tmp1[np.logical_and(tmp1==-1, distance_pts==1)] = 1
+        tmp[lat_slice, lon_slice] = tmp1
+        
+    lat_lb = lat[lb]
+    lon_lb = lon[lb]
+    
+    for idx in range(0, len(lat_lb)):
+        lat_lon_index = np.nonzero(np.logical_and(lat == lat_lb[idx], lon == lon_lb[idx]))
+        jj = int(lat_lon_index[0][0])
+        ii = int(lat_lon_index[1][0])
+        # messy fudge to determine local dx,dy TODO tidy and formalise
+        j_0 = max(jj, 0)
+        j_e = min(jj+1+1, len_lat)
+        i_0 = max(ii, 0)
+        i_e = min(ii+1+1, len_lon)
+        if j_e > len_lat-2:
+            j_0 = j_0 - 3
+            j_e = j_0 + 2
+        if i_e > len_lon-2:
+            i_0 = i_0 - 3
+            i_e = i_0 + 2
+        lat_slice = slice(j_0, j_e)
+        lon_slice = slice(i_0, i_e)
+        lat_pts = lat[lat_slice, lon_slice]
+        lon_pts = lon[lat_slice, lon_slice]
+        dy, py = seawater.dist(lat_pts[:, 0], lon_pts[:, 0])
+        dx, px = seawater.dist(lat_pts[0, :], lon_pts[0, :])
+        r = int(np.ceil(dr/np.amax([dx, dy])))
+        lat_slice = slice(max(jj-r, 0), min(jj+r+1, len_lat))
+        lon_slice = slice(max(ii-r, 0), min(ii+r+1, len_lon))
+        lat_pts = lat[lat_slice, lon_slice]
+        lon_pts = lon[lat_slice, lon_slice]
+        lat_pts_shape = lat_pts.shape
+        lat_pts = np.ravel(lat_pts)
+        lon_pts = np.ravel(lon_pts)
+        # NOTE: seawater package calculates the distance from point to the next point in the array
+        # that is the reason to insert reference point before every point
+        lat_pts = np.insert(lat_pts,range(0,len(lat_pts)), lat_lb[idx])
+        lon_pts = np.insert(lon_pts,range(0,len(lon_pts)), lon_lb[idx])
+        distance_pts = seawater.dist(lat_pts, lon_pts)
+        #distances repeat themselves so only pick every alternative distance
+        distance_pts = distance_pts[0][::2]
+        
+        #Using pyproj
+        #geod = pyproj.Geod(ellps='WGS84')
+        #dummy,dummy, distance_pts = geod.inv(len(lon_pts)*[lon_lb[idx]],len(lat_pts)*[lat_lb[idx]], lon_pts, lat_pts) 
+        #distance_pts=distance_pts/1000.0
+        
+        distance_pts = np.reshape(distance_pts, lat_pts_shape)
+        distance_pts[distance_pts>dr] = np.NaN
+        distance_pts[np.logical_not(np.isnan(distance_pts))] = 1
+        tmp1 = tmp[lat_slice, lon_slice]
+        tmp1[np.logical_and(tmp1==-1, distance_pts==1)] = 1
+        tmp[lat_slice, lon_slice] = tmp1        
+         
+    # restore the NaNs and embed the selection back into the full-size grid
+    tmp[nanind] = np.NaN
+    ret_val = np.ones(bathy.shape)
+    ret_val[roi[2]:roi[3], roi[0]:roi[1]] = tmp
+    return ret_val == 1
diff --git a/pynemo/utils/nemo_bdy_lib.py b/pynemo/utils/nemo_bdy_lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ee96f47f0afe5cb011bd96ebcac8426c1ae9fd3
--- /dev/null
+++ b/pynemo/utils/nemo_bdy_lib.py
@@ -0,0 +1,127 @@
+"""
+ Library of some functions used by multiple classes
+ Written by John Kazimierz Farey, Sep 2012
+"""
+import scipy.spatial as sp
+import numpy as np
+
+def sub2ind(shap, subx, suby):
+    """subscript to index of a 1d array"""
+    ind = (subx * shap[0]) + suby
+    return ind
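+
+# e.g. sub2ind((3, 4), 2, 1) -> (2 * 3) + 1 = 7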
+
+# THIS FUNCTION MAY BE BROKEN
+def rot_rep(pxin, pyin, dummy, cd_todo, gcos, gsin):
+    """rotate function"""
+    if cd_todo.lower() in ['en to i', 'ij to e']:
+        x_var, y_var = pxin, pyin
+    elif cd_todo.lower() in ['en to j', 'ij to n']:
+        x_var, y_var = pyin, pxin*-1
+    else:
+        raise SyntaxError('rot_rep cd_todo %s is invalid' %cd_todo)
+    return x_var * gcos + y_var * gsin
+
+def get_output_filename(setup_var, year, month, var_type):
+    """This returns a output filename constructed for a given var_type, year and month"""
+    if var_type == 'ice':
+        return setup_var.settings['dst_dir']+setup_var.settings['fn']+'_bdyT_y'+str(year)+ \
+               'm'+str(month)+'.nc'
+    elif var_type == 'bt':
+        return setup_var.settings['dst_dir']+setup_var.settings['fn']+'_bt_bdyT_y'+str(year)+ \
+               'm'+str(month)+'.nc'
+    elif var_type == 'u':
+        return setup_var.settings['dst_dir'] + setup_var.settings['fn'] + '_bdyU_y' + \
+               str(year) + 'm' + str(month) + '.nc'
+    elif var_type == 'v':
+        return setup_var.settings['dst_dir'] + setup_var.settings['fn'] + '_bdyV_y' + \
+               str(year) + 'm' + str(month) + '.nc'
+
+def get_output_tidal_filename(setup_var, const_name, grid_type):
+    """This method returns a output filename constructed for a given tidal constituent and
+    grid type"""
+    return setup_var.settings['dst_dir']+setup_var.settings['fn']+"_bdytide_rotT_"+const_name+ \
+           "_grid_"+grid_type.upper()+".nc"
+
+def psi_field(U, V):
+    """ Build a streamfunction field from velocity components by cumulative summation """
+    psiu = np.cumsum(U[1:,:], axis=0) - np.cumsum(V[0,:])
+    psiv = ( np.cumsum(U[:,0]) - np.cumsum(V[:,1:], axis=1).T ).T
+    return psiu[:,1:], psiv[1:,:]
+
+def velocity_field(psi):
+    """ Recover velocity components from a streamfunction by finite differencing """
+    U = np.diff(psi, n=1, axis=0)
+    V = - np.diff(psi, n=1, axis=1)
+    return U, V
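+
+# A quick sanity check (toy values): for psi increasing linearly along axis 0,
+#   psi = np.outer(np.arange(3.), np.ones(3))
+#   U, V = velocity_field(psi)   # U == ones((2, 3)), V == zeros((3, 2))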
+
+def bdy_sections(nbidta,nbjdta,nbrdta,rw):
+    """Extract individual bdy sections
+
+    Keyword arguments:
+    nbidta, nbjdta -- i and j indices of the boundary points
+    nbrdta -- rim index of each boundary point
+    rw -- rim value identifying the outermost rim
+    """
+    
+    # TODO Need to put a check in here to STOP if we have E-W wrap
+    # as this is not coded yet
+    
+    # Define the outer most halo
+    outer_rim_i = nbidta[nbrdta==rw]
+    outer_rim_j = nbjdta[nbrdta==rw]
+
+    # Set initial constants
+
+    nbdy = len(outer_rim_i)
+    count = 0
+    flag = 0
+    mark = 0
+    source_tree = sp.cKDTree(zip(outer_rim_i, outer_rim_j)) 
+    id_order = np.ones((nbdy,), dtype=np.int)*source_tree.n
+    id_order[count] = 0 # use index 0 as the starting point 
+    count += 1
+    end_pts = {}
+    nsec = 0
+    
+    # Search for individual sections and order
+
+    while count <= nbdy:
+        
+        lcl_pt = zip([outer_rim_i[id_order[count-1]]],
+                     [outer_rim_j[id_order[count-1]]])
+        junk, an_id = source_tree.query(lcl_pt, k=3, distance_upper_bound=1.1)
+        
+        if an_id[0,1] in id_order:
+            if (an_id[0,2] in id_order) or (an_id[0,2] == source_tree.n) : # we are now at an end point and ready to sequence a section
+                if flag == 0:
+                    flag = 1  
+                    end_pts[nsec] = [id_order[count-1], id_order[count-1]] # make a note of the starting point
+                    id_order[mark] = id_order[count-1]
+                    id_order[mark+1:] = source_tree.n # remove previous values
+                    count = mark + 1
+                else:
+                    i = 0
+                    end_pts[nsec][1] = id_order[count-1] # update the end point of the section
+                    nsec += 1
+                    
+                    while i in id_order:
+                        i += 1
+                        
+                    if count < nbdy:
+                        id_order[count] = i
+                    flag = 0
+                    mark = count
+                    count += 1
+
+            else: # lets add the next available point to the sequence
+                id_order[count] = an_id[0,2]
+                count += 1
+
+        else: # lets add the next available point to the sequence
+            id_order[count] = an_id[0,1]
+            count += 1
+        
+    return id_order, end_pts
+    
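+# A hedged usage sketch (toy values): three rim points in a row along j=2,
+#   id_order, end_pts = bdy_sections(np.array([5, 6, 7]),
+#                                    np.array([2, 2, 2]),
+#                                    np.array([1, 1, 1]), 1)
+# id_order sequences the points end-to-end; end_pts maps each section index
+# to its [start, end] positions in that ordering.
+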
+def bdy_transport():
+    """Calculate transport across individual bdy sections
+
+    Keyword arguments:
+    """
+    raise NotImplementedError
+    
\ No newline at end of file
diff --git a/pynemo/variable.info b/pynemo/variable.info
new file mode 100644
index 0000000000000000000000000000000000000000..8d46df5675da0493e04c45a0c4ddb26ddefc4999
--- /dev/null
+++ b/pynemo/variable.info
@@ -0,0 +1,50 @@
+ln_zco = z-coordinate - full    steps   (T/F)
+ln_zps = z-coordinate - partial steps   (T/F)
+ln_sco = s- or hybrid z-s-coordinate    (T/F)
+rn_hmin = min depth of the ocean (>0) or min number of ocean levels (<0)
+rn_sbot_min = minimum depth of s-bottom surface (>0) (m)
+rn_sbot_max = maximum depth of s-bottom surface (= ocean depth) (>0) (m)
+ln_s_sigma = hybrid s-sigma coordinates
+rn_hc = critical depth with s-sigma
+sn_src_hgr = source grid hgr
+sn_src_zgr = source grid zgr
+sn_dst_hgr = destination grid hgr
+sn_dst_zgr = destination grid zgr
+sn_src_msk = source mask
+sn_bathy = bathymetry file path
+sn_src_dir = data directory or thredds server url path
+sn_dst_dir = directory where the output data files need to be stored
+sn_fn = prefix for output files
+nn_fv = set fill value for output files
+nn_src_time_adj = src time adjustment
+sn_dst_metainfo = meta data information written to the output files. 
+ln_coords_file = If true : produce bdy coordinates files
+cn_coords_file = name of bdy coordinates files (if ln_coords_file is set to .TRUE.)
+ln_mask_file = If .true. : read mask from file
+cn_mask_file = name of mask file (if ln_mask_file is set to .TRUE.)
+ln_dyn2d = boundary conditions for barotropic fields
+ln_dyn3d = boundary conditions for baroclinic velocities
+ln_tra = boundary conditions for T and S
+ln_ice = ice boundary condition
+nn_rimwidth = width of the relaxation zone
+ln_tide = if .true. : produce bdy tidal conditions
+clname(0) = constituent name
+ln_trans = 
+nn_year_000 = year start
+nn_year_end = year end
+nn_month_000 = month start (default = 1 if years > 1)
+nn_month_end = month end (default = 12 if years > 1)
+sn_dst_calendar = output calendar format
+nn_base_year = base year for time counter
+sn_tide_grid = 
+sn_tide_h =
+sn_tide_u =
+nn_wei = smoothing filter weights
+rn_r0 = decorrelation distance used in Gaussian smoothing onto dst points. Needs to be made a function of dlon
+sn_history = history for netcdf file
+ln_nemo3p4 = if .true. : NEMO v3.4 output; else presume v3.2 or v3.3
+nn_alpha = Euler rotation angle
+nn_beta = Euler rotation angle
+nn_gamma = Euler rotation angle
+rn_mask_max_depth = Maximum depth to be ignored for the mask
+rn_mask_shelfbreak_dist = Distance from the shelf break
\ No newline at end of file