diff --git a/commands/dos2unix b/commands/dos2unix
new file mode 100755
index 0000000000000000000000000000000000000000..c0c5c47a0da3c8f891d4e2587e048e403f7bf63b
Binary files /dev/null and b/commands/dos2unix differ
diff --git a/commands/mac2unix b/commands/mac2unix
new file mode 100755
index 0000000000000000000000000000000000000000..c0c5c47a0da3c8f891d4e2587e048e403f7bf63b
Binary files /dev/null and b/commands/mac2unix differ
diff --git a/ctd_linkscript~ b/ctd_linkscript~
new file mode 100755
index 0000000000000000000000000000000000000000..2626940d441eadf5149d2e77d29e451a093da3b3
--- /dev/null
+++ b/ctd_linkscript~
@@ -0,0 +1,41 @@
+#!/bin/csh -f
+#
+#copy the raw and SBE-processed CTD files from the ship machine,
+#then make links to them using the name format expected by mexec
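+#e.g. (illustrative) JC159_001_Align_CTM.cnv -> ctd_jc159_001_ctm.cnv
+#                    JC159_001.bl            -> ctd_jc159_001.bl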
+
+cd /local/users/pstar/cruise/data
+
+set cpre = JC159
+set cruise = jc159
+set ctdloc = /local/users/pstar/mounts/mnt_cruise_data/Specific_Equipment/CTD/
+
+rsync -av ${ctdloc}/Processed\ Data/*.cnv ctd/ASCII_FILES/
+rsync -av ${ctdloc}/Raw\ Data/*.bl ctd/ASCII_FILES/
+rsync -av ${ctdloc}/Processed\ Data/*.ros ctd/ASCII_FILES/
+rsync -av ${ctdloc}/Raw\ Data/*.hex ctd/RAW_CTD_FILES/
+rsync -av ${ctdloc}/Raw\ Data/*.XMLCON ctd/RAW_CTD_FILES/
+rsync -av ${ctdloc}/Raw\ Data/*.hdr ctd/RAW_CTD_FILES/
+rsync -av ${ctdloc}/Processed\ Data/*.btl ctd/ASCII_FILES/
+
+cd ctd/ASCII_FILES
+
+foreach i (`ls ${cpre}_???_Align_CTM.cnv`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,"_Align")-3,3)}'`
+   set linkfile = ctd_${cruise}_${num}_ctm.cnv 
+   echo $linkfile
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s $i $linkfile
+   endif
+end
+
+foreach i (`ls ${cpre}_???.bl`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".bl")-3,3)}'`
+   set linkfile = ctd_${cruise}_${num}.bl
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s $i $linkfile
+   endif
+end
+
+cd ../..
diff --git a/finddir.noread b/finddir.noread
new file mode 100755
index 0000000000000000000000000000000000000000..37c3b5404ed27d49f5907461672199a74dd87b8a
--- /dev/null
+++ b/finddir.noread
@@ -0,0 +1,15 @@
+#!/bin/csh -f
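+#list directories whose 'ls -ld' mode string lacks r?x for user, group or
+#other, e.g. 'drwxr-x---' is reported but 'drwxr-xr-x' is filtered out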
+find . -type d -exec ls -ld {} \; | egrep -ve "r.xr.xr.x"
+
+if($status == "0") then
+
+#the egrep found something (it exited with status zero), i.e. some
+#directories are missing read or execute permission for group or other
+
+echo " "
+echo "----------------------------------------------------"
+echo " The above directories do not have ugo rx permission"
+echo " This makes them unreadable to some users           "
+echo "----------------------------------------------------"
+
+endif
+
diff --git a/findfil.noread b/findfil.noread
new file mode 100755
index 0000000000000000000000000000000000000000..df3fc476601164554f6bb7b5c2bc07654c6ff51e
--- /dev/null
+++ b/findfil.noread
@@ -0,0 +1,14 @@
+#!/bin/csh -f
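+#list files whose 'ls -ld' mode string lacks read permission for user,
+#group or other, e.g. '-rw-r-----' is reported but '-rw-r--r--' is filtered out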
+find . -type f -exec ls -ld {} \; | egrep -ve "-r..r..r.."
+
+if($status == "0") then
+
+#the egrep found something (it exited with status zero), i.e. some
+#files are missing read permission for user, group or other
+ 
+echo " "
+echo "----------------------------------------------------"
+echo " The above files do not have ugo r permission"
+echo " This makes them unreadable to some users           "
+echo "----------------------------------------------------"
+
+endif
diff --git a/fixperms2 b/fixperms2
new file mode 100755
index 0000000000000000000000000000000000000000..ad6c3ba62a680d9af1f974f4c0132a448f7666db
--- /dev/null
+++ b/fixperms2
@@ -0,0 +1,21 @@
+#!/bin/csh -f
+#BAK 18 Sep 2000
+#simple script to fix file permissions to make files more
+#readable. If a file has user read permission, give read
+#permission to group and other. If it has user execute
+#permission (i.e. it is executable or a directory), add rx
+#for group and other. A find perm of '-400' means
+#'at least 400', i.e. the user-read bit is set.
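+#
+#e.g. (illustrative) a file with mode 640 matches '-perm -400' and gets
+#go+r; a directory with mode 750 matches '-perm -500' and gets go+rx,
+#ending up as 755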
+
+# I don't think the 'find .' construct works on SGIs
+
+find . -perm -400 -exec chmod go+r {} \;
+
+find . -perm -500 -exec chmod go+rx {} \;
+
+#check for any problems
+
+#~bak/commands/finddir.noread
+#~bak/commands/findfil.noread
+finddir.noread
+findfil.noread
diff --git a/jcr/conf_script_TPL b/jcr/conf_script_TPL
new file mode 100755
index 0000000000000000000000000000000000000000..b03cf17f5f9bc2b9ce60e9644c3484a9efef6554
--- /dev/null
+++ b/jcr/conf_script_TPL
@@ -0,0 +1,23 @@
+# copy TPLs and edit oceanlogger
+
+set in = scs_raw
+set ot = scs_sed
+
+cd ~
+cd cruise
+cd data
+
+/bin/cp -p $in/*TPL $ot
+
+cd $ot
+
+cat << ! >! wk
+126,oceanlogger-sampletimeyyyy,YYYY
+126,oceanlogger-sampletimeddd,DDD
+126,oceanlogger-sampletimehhmmss,HH:MM:SS
+!
+
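+# replace the first line of oceanlogger.TPL with the three header lines written to wk above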
+cat oceanlogger.TPL | sed -n '2,$p' >> wk
+
+mv oceanlogger.TPL oceanlogger.TPL_original
+mv wk oceanlogger.TPL
diff --git a/jcr/vmadcp_linkscript_jc b/jcr/vmadcp_linkscript_jc
new file mode 100755
index 0000000000000000000000000000000000000000..b07dc781d582304a03330317196712664810df7b
--- /dev/null
+++ b/jcr/vmadcp_linkscript_jc
@@ -0,0 +1,25 @@
+#!/bin/csh -f
+#
+# Go through loop once for each adcp
+
+set cruise = jc159
+set CRUISE = JC159
+set vmloc = /local/users/pstar/mounts/mnt_cruise_data/Ship_Systems/Acoustics/
+
+foreach adp ('75' '150')
+  echo $adp
+  set pre = ${CRUISE}_OS
+  cd /local/users/pstar/cruise/data/vmadcp
+  cd ${cruise}_os${adp}
+
+  echo "Synchronising raw data for the $adp"
+  rsync -av ${vmloc}/OS${adp}kHz/raw_data/${pre}* vmdas_data/
+
+end
+
+#now sync raw data to banba
+rsync -avz --exclude NMEA --exclude adcp_pyproc/ --exclude fake_uhdas_data/ --exclude mproc/ /local/users/pstar/cruise/data/vmadcp/ pstar@192.168.62.110:/local/users/pstar/codas_shared/$cruise/
+
+#and sync processed data back
+rsync -avz --exclude Temporary_bin/ --exclude 'vmdas_data*'/ --exclude fake_uhdas_data/ --exclude mproc/ pstar@192.168.62.110:/local/users/pstar/codas_shared/$cruise/ /local/users/pstar/cruise/data/vmadcp/
+
diff --git a/jcr/vmadcp_linkscript_jcr b/jcr/vmadcp_linkscript_jcr
new file mode 100755
index 0000000000000000000000000000000000000000..fa6e67df4084ff45fe8e9b9060504faa5ca648c8
--- /dev/null
+++ b/jcr/vmadcp_linkscript_jcr
@@ -0,0 +1,34 @@
+#!/bin/csh -f
+#
+#foreach i (`ls rawdata/*JC32*`)
+
+# overhaul on jr302 may 2014, to fit with new python codas
+# may need modification on cook/discovery to handle os75 and os150
+cd /local/users/pstar/cruise/data/vmadcp/
+
+echo "synchronising raw data"
+rsync -av ../jcrfs/current/adcp/ jr17001_os75/vmdas_data/
+# jcrfs is link to raw data on jrlb
+
+echo "making links to individual sequence directories"
+cd jr17001_os75/vmdas_data
+foreach i (`ls *.*`) # list all *.* to avoid collecting N1R directory
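+# num = the 3-digit sequence number near the end of the VmDAS file stem
+# (assuming names ending ...NNN_MMMMMM.<ext>)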
+set num = `echo $i | awk -F'.' '{print substr($1,length($1)-9,3)}'`
+
+set linkdir = rawdata{$num}
+
+if (! -e $linkdir) then
+  echo making $linkdir
+  mkdir $linkdir
+endif
+
+set rfile = $i
+set lfile = $i
+cd $linkdir
+if (! -e $lfile) then
+  echo linking ${lfile} to ${rfile}
+  ln -s ../${rfile} ${lfile}
+endif
+cd ..
+
+end
diff --git a/lad_linkscript_uh b/lad_linkscript_uh
new file mode 100755
index 0000000000000000000000000000000000000000..3a27299a83a5ea82ef369629e645fb3fa9b7be78
--- /dev/null
+++ b/lad_linkscript_uh
@@ -0,0 +1,59 @@
+#!/bin/csh
+#
+# copy the raw .000 ladcp data into cruise/data/ladcp/rawdata
+# then make links to those files using the filename formats expected by 
+# uh/woce (perl) processing
+#
+# j001_01.000
+# j001_02.000
+# j001_03.000
+#
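+# e.g. (illustrative) rawdata/Master/data/JC159_001m.000 -> j001_02.000
+#                     rawdata/Slave/data/JC159_001s.000  -> j001_03.000
+#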
+
+set cpre = JC159
+set cruise = jc159
+set ladloc = /local/users/pstar/mounts/mnt_cruise_data/Specific_Equipment/CTD/LADCP
+set cl = j
+set uhldir = jc1802
+
+#sync file
+cd /local/users/pstar/cruise/data/ladcp/rawdata
+rsync -av ${ladloc}/ ./
+
+#make links
+
+cd /local/users/pstar/cruise/data/ladcp/rawdata/Master/data
+foreach i (`ls ${cpre}_???m.000`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".000")-4,3)}'`
+   set linkfile = {$cl}{$num}_02.000 #use cast 02 to indicate WHM
+   cd /local/users/pstar/cruise/data/ladcp/uh/raw/$uhldir/ladcp
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s ../../../../rawdata/Master/data/$i $linkfile
+   endif
+   cd /local/users/pstar/cruise/data/ladcp/rawdata/Master/data
+end
+
+cd /local/users/pstar/cruise/data/ladcp/rawdata/Slave/data
+foreach i (`ls ${cpre}_???s.000`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".000")-4,3)}'`
+   set linkfile = {$cl}{$num}_03.000 #cast 03 to indicate WHS
+   cd /local/users/pstar/cruise/data/ladcp/uh/raw/$uhldir/ladcp
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s ../../../../rawdata/Slave/data/$i $linkfile
+   endif
+   cd /local/users/pstar/cruise/data/ladcp/rawdata/Slave/data
+end
+
+#and for ctd data
+cd /local/users/pstar/cruise/data/ladcp/ctd
+foreach i (`ls ctd.???.02.asc`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".02.asc")-3,3)}'`
+   set linkfile = ctd.{$num}.03.asc
+   if (! -e $linkfile) then
+      echo Making link $linkfile
+      ln -s $i $linkfile
+   endif
+end
+cd ..
+
diff --git a/lad_linkscript_uhpy b/lad_linkscript_uhpy
new file mode 100755
index 0000000000000000000000000000000000000000..85bc21d33b5674f4ebba87072cc35109d2beb630
--- /dev/null
+++ b/lad_linkscript_uhpy
@@ -0,0 +1,53 @@
+#!/bin/csh
+#
+# copy the raw .000 ladcp data into cruise/data/ladcp/rawdata
+# then make links to those files using the filename formats expected by 
+# uh/clivar (python) processing
+#
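+# e.g. (illustrative) rawdata/Master/data/JC159_001m.000 -> wh001_02.dat
+#                     rawdata/Slave/data/JC159_001s.000  -> wh001_03.dat
+#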
+
+set cpre = JC159
+set cruise = jc159
+set ladloc = /local/users/pstar/mounts/mnt_cruise_data/Specific_Equipment/CTD/LADCP
+
+#sync file
+cd /local/users/pstar/cruise/data/ladcp/rawdata
+rsync -av ${ladloc}/ ./
+
+#make links
+
+cd /local/users/pstar/cruise/data/ladcp/rawdata/Master/data
+foreach i (`ls ${cpre}_???m.000`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".000")-4,3)}'`
+   set linkfile = wh{$num}_02.dat #use cast 02 to indicate WHM
+   cd /local/users/pstar/cruise/data/ladcp/uhpy/raw
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s ../../rawdata/Master/data/$i $linkfile
+   endif
+   cd /local/users/pstar/cruise/data/ladcp/rawdata/Master/data
+end
+
+cd /local/users/pstar/cruise/data/ladcp/rawdata/Slave/data
+foreach i (`ls ${cpre}_???s.000`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".000")-4,3)}'`
+   set linkfile = wh{$num}_03.dat #cast 03 to indicate WHS
+   cd /local/users/pstar/cruise/data/ladcp/uhpy/raw
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s ../../rawdata/Slave/data/$i $linkfile
+   endif
+   cd /local/users/pstar/cruise/data/ladcp/rawdata/Slave/data
+end
+
+#and for ctd data
+cd /local/users/pstar/cruise/data/ladcp/ctd
+foreach i (`ls ctd.???.02.asc`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".02.asc")-3,3)}'`
+   set linkfile = ctd.{$num}.03.asc
+   if (! -e $linkfile) then
+      echo Making link $linkfile
+      ln -s $i $linkfile
+   endif
+end
+cd ..
+
diff --git a/mexec_cruise_backup_jc184 b/mexec_cruise_backup_jc184
new file mode 100755
index 0000000000000000000000000000000000000000..c6d86e6b582079809414124f0f1e82f329d2d423
--- /dev/null
+++ b/mexec_cruise_backup_jc184
@@ -0,0 +1,90 @@
+# backup mexec cruise directory from workstation to external hard drive using rsync
+# usage: > mexec_cruise_backup drivename
+# e.g. > mexec_cruise_backup jr16002a
+# the external hard drives must be named/mounted as $cruise$letter
+# e.g. for cruise jr16002: jr16002a, jr16002b, etc.
+#
+
+# setup
+
+
+set target = $1
+set thiscruise = `awk -v nm="$target" 'BEGIN {print substr(nm,1,length(nm)-2)}'`
+#set thiscruise = `echo $target | sed s'/.$//'` 
+set target1 = /media/${target}/this_is_${target}
+set root =  /local/users/pstar/cruise
+set progroot = /local/users/pstar
+set now = `date +%Y%m%d%H%M%S`
+set backuplog = $root/data/other_backups/backup_log_${now}
+
+set thiscruise = jc184
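+# NB the hard-coded cruise name above overrides the value derived from the drive name $target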
+
+if (-e ${target1}) then
+  set backupdir = /media/${target}/${thiscruise}/backup_${now}
+  set location = /media/${target}
+else
+  echo "disk ${target} not found"
+  exit
+endif
+
+if (-e $backuplog) then
+  echo "backup log ${backuplog} already exists; exiting"
+  exit
+else
+  touch $backuplog
+endif
+
+echo $backupdir
+if (-e $backupdir) then
+  echo "backup directory ${backupdir} already exists; exiting"
+  exit
+else
+  mkdir $backupdir
+endif
+
+pushd $root
+
+echo "disk ${target} found at ${location}" >> $backuplog
+
+# dot login and cshrc
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo copying login and cshrc  >> $backuplog
+echo ------               >> $backuplog
+/bin/cp -p ~/.login  $root/data/other_backups/dot_login
+/bin/cp -p ~/.cshrc  $root/data/other_backups/dot_cshrc
+/bin/cp -p ~/.bashrc  $root/data/other_backups/dot_bashrc
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo done login and cshrc  >> $backuplog
+echo ------               >> $backuplog
+
+# software
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo start copying software >> $backuplog
+echo ------               >> $backuplog
+cd $root
+rsync -a sw ${backupdir}
+cd $progroot
+rsync -a programs ${backupdir}
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo done copying software >> $backuplog
+echo ------               >> $backuplog
+
+# backup everything
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo start copy to disk   >> $backuplog
+echo ------               >> $backuplog
+cd $root
+rsync -a --exclude jcrfs data ${backupdir}
+echo -------------        >> $backuplog
+echo Date `date`          >> $backuplog
+echo done copy to disk    >> $backuplog
+echo ------               >> $backuplog
+
+popd
+
+
diff --git a/mexec_cruise_backup_public b/mexec_cruise_backup_public
new file mode 100755
index 0000000000000000000000000000000000000000..f850f5fe7a49b6598b372acff9e6e461e520806f
--- /dev/null
+++ b/mexec_cruise_backup_public
@@ -0,0 +1,2 @@
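+# sync the ship's public share for JC159 to the local from_public_jc159 copy, excluding photos and outreach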
+rsync -av --exclude photos --exclude outreach /local/users/pstar/mounts/mnt_smb_public/JC159/ /local/users/pstar/jc159/from_public_jc159/
+
diff --git a/modsal_unix b/modsal_unix
index 1d3f63c406e203ce397d5c80fdcff97ad9e1a174..10920bf33f5521e8e02586ad4cab6c49099013f0 100755
--- a/modsal_unix
+++ b/modsal_unix
@@ -1,22 +1,31 @@
-#!/bin/csh
+#!/bin/csh 
 #
 #Script by DADES on cruise jr302 to make mac files readable in unix.
+# tidied up by bak for dy040
 
 #cd data
 #cd ctd
+
+set cruise = 'jc159'
+
 cd ctd
 cd BOTTLE_SAL
-foreach i (`ls /local/users/pstar/cruise/data/ctd/BOTTLE_SAL/sal_jr302_???.csv`)
+foreach i (`ls /local/users/pstar/cruise/data/ctd/BOTTLE_SAL/sal_${cruise}_???.csv`)
+
 mac2unix -n $i ${i}_linux
 echo " " >> ${i}_linux
+echo " " >> ${i}_linux # extra newline added by bak for dy040; may not be needed
 end
 
-foreach i (`ls /local/users/pstar/cruise/data/ctd/BOTTLE_SAL/sal_jr302_????.csv`)
+foreach i (`ls /local/users/pstar/cruise/data/ctd/BOTTLE_SAL/tsg_${cruise}_???.csv`)
 mac2unix -n $i ${i}_linux
 echo " " >> ${i}_linux
+echo " " >> ${i}_linux
 end
 
-rm sal_jr302_01.csv
-cat *linux >>! sal_jr302_01.csv
+touch sal_${cruise}_01.csv
+/bin/rm sal_${cruise}_01.csv
+cat *linux >>! sal_${cruise}_01.csv
 
 
+chmod ugo+r *linux
diff --git a/sal_linkscript_incomplete b/sal_linkscript_incomplete
new file mode 100755
index 0000000000000000000000000000000000000000..660c3a1536aae02a4ffece5bf4b123b51bb8b511
--- /dev/null
+++ b/sal_linkscript_incomplete
@@ -0,0 +1,47 @@
+#!/bin/csh -f
+#
+#run this after editing the .xls files to add a sampnum column and saving as .csv on 
+#the ship machine
+#
+#copies salinometer logs from the ship machine, makes links, and converts to unix format
+#then concatenates all the .csv_linux files into sal_jc159_01.csv
+#
+#if run with three input arguments: crate number, day, month (of analysis), it
+#will copy and convert only the files with name JC159*CTDnn*dd*mmm*.*
+#if run with no input arguments, it will copy and convert everything, clobbering
+#the existing .csv_linux files, which means that if you are going to make edits 
+#you should make them in the original .xls or .csv files
+#(or introduce them in cruise_options/opt_jc159.m)
+
+cd /local/users/pstar/cruise/data/ctd/BOTTLE_SAL
+
+set cpre = JC159*CTD
+set cruise = jc159
+set rloc = /local/users/pstar/mounts/mnt_cruise_data/Specific_Equipment/CTD/Autosal
+
+if (-e $1) #do all files
+   rsync -av ${rloc}/*.csv ./
+   rsync -av ${rloc}/*.xls ./
+   foreach i (`ls ${cpre}*.csv`)
+      set n
+
+foreach i (`ls ${cpre}_???_Align_CTM.cnv`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,"_Align")-3,3)}'`
+   set linkfile = ctd_${cruise}_${num}_ctm.cnv 
+   echo $linkfile
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s $i $linkfile
+   endif
+end
+
+foreach i (`ls ${cpre}_???.bl`)
+   set num = `awk -v nm="$i" 'BEGIN {print substr(nm,index(nm,".bl")-3,3)}'`
+   set linkfile = ctd_${cruise}_${num}.bl
+   if (! -e $linkfile) then
+      echo Making link $linkfile for raw file $i
+      ln -s $i $linkfile
+   endif
+end
+
+cd ../..
diff --git a/techsas_linkscript b/techsas_linkscript
index 8efc612043e835e0bc5e62fa8e345d34f2fac06d..a72975430a4937e655cc1d2985c4cb7ae8258566 100755
--- a/techsas_linkscript
+++ b/techsas_linkscript
@@ -7,9 +7,7 @@
 
 
 cd /local/users/pstar #~/h # cludge on eriu when pstar home directory is wrong
-cd cruise
-cd data
-cd techsas
+cd cruise/data/techsas
 cd netcdf_files_links
 
 set now = `date +%Y%m%d%H%M%S`
diff --git a/uhdas_00 b/uhdas_00
new file mode 120000
index 0000000000000000000000000000000000000000..54e0485825c2b397be7a0956946478e9e4522f55
--- /dev/null
+++ b/uhdas_00
@@ -0,0 +1 @@
+uhdas_00_linkmerge
\ No newline at end of file
diff --git a/uhdas_00_linkmerge b/uhdas_00_linkmerge
new file mode 100755
index 0000000000000000000000000000000000000000..432fc1dd142375101ea5397d408078f94d9ca78f
--- /dev/null
+++ b/uhdas_00_linkmerge
@@ -0,0 +1,31 @@
+#!/bin/csh -f
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# 1) use link_uhdaslegs.py to link each cruise segment under ../atsea
+#    into the merged dataset named by the first argument
+#
+# 2) ensure a config directory exists, seeded with a *_proc.py file
+#    copied from the last segment's raw config
+#
+#
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
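+#
+# usage: uhdas_00_linkmerge <merged_name>   (e.g. jc184all; the name here is illustrative)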
+
+cd /local/users/pstar/cruise/data/vmadcp/spprocessing
+
+foreach i (`cat ~/cruise/data/vmadcp/cruise_segments`)
+echo linking ${i} to $1
+link_uhdaslegs.py ../atsea/${i} $1
+end
+
+if (! -e config) then
+mkdir config
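+# NB ${i} here is the last segment left over from the foreach loop above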
+cp ../atsea/${i}/raw/config/*_proc.py config/$1_proc.py
+echo 'edit config/*proc.py cruiseid, uhdas_dir'
+endif
+
diff --git a/uhdas_01 b/uhdas_01
new file mode 120000
index 0000000000000000000000000000000000000000..4714c756eb3adb27f049da80a3188824cdda8a69
--- /dev/null
+++ b/uhdas_01
@@ -0,0 +1 @@
+uhdas_01_syncraw
\ No newline at end of file
diff --git a/uhdas_01_syncraw b/uhdas_01_syncraw
new file mode 100755
index 0000000000000000000000000000000000000000..1bdcc173bc0f888652edc8812a855722cb14f9aa
--- /dev/null
+++ b/uhdas_01_syncraw
@@ -0,0 +1,34 @@
+#!/bin/csh -f
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# 1) rsync uhdas data from uhdas server to local machine
+#
+# 2) ensure postprocessing directories exist
+#
+# If the script hasn't been run recently, there will 
+# be a lot of files to sync across. The processing on the uhdas
+# server updates plenty of things in routine processing every few minutes.
+# If the script is then immediately run again, there will be a few files
+# that are updated every ping.
+#
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
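+# (each line of that file names one UHDAS cruise-segment directory under the uhdas_data mount)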
+
+set rdir = ~/mounts/uhdas_data
+set ldir = ~/cruise/data/vmadcp
+
+mkdir -p ${ldir}/atsea
+mkdir -p ${ldir}/postprocessing
+
+foreach i (`cat ~/cruise/data/vmadcp/cruise_segments`)
+echo syncing ${i} to ${ldir}
+rsync -auv ${rdir}/${i} ${ldir}/atsea/
+
+mkdir -p ${ldir}/postprocessing/${i}/proc_editing
+mkdir -p ${ldir}/postprocessing/${i}/proc_archive
+end
+
+echo "end of syncing cruise segments to ${ldir}"
+
diff --git a/uhdas_02 b/uhdas_02
new file mode 120000
index 0000000000000000000000000000000000000000..44ee64511d9f9a39423c178d331b16c63e69c0b9
--- /dev/null
+++ b/uhdas_02
@@ -0,0 +1 @@
+uhdas_02_sync_postprocessing_from_raw
\ No newline at end of file
diff --git a/uhdas_02_sync_postprocessing_from_raw b/uhdas_02_sync_postprocessing_from_raw
new file mode 100755
index 0000000000000000000000000000000000000000..ceb78f04eb95010bbf627ee7f9fbe2ebf8c56dce
--- /dev/null
+++ b/uhdas_02_sync_postprocessing_from_raw
@@ -0,0 +1,32 @@
+#!/bin/csh -f
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# 1) rsync proc directory from the local atsea copy to the editing directory
+#
+# If the script hasn't been run recently, there will 
+# be a lot of files to sync across. The processing on the uhdas
+# server updates plenty of things in routine processing every few minutes.
+# If the script is then immediately run again, there will be a few files
+# that are updated every ping.
+#
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
+
+set ldir = ~/cruise/data/vmadcp
+
+
+foreach i (`cat ~/cruise/data/vmadcp/cruise_segments`)
+echo
+echo
+echo syncing 
+echo ${ldir}/atsea/${i}/proc 
+echo to 
+echo ${ldir}/postprocessing/${i}/proc_editing
+rsync -auv ${ldir}/atsea/${i}/proc/ ${ldir}/postprocessing/${i}/proc_editing/
+echo
+echo
+
+end
+
diff --git a/uhdas_03 b/uhdas_03
new file mode 120000
index 0000000000000000000000000000000000000000..ec33a586c3bc927fb227fbcf1fd7582b7bc7d86e
--- /dev/null
+++ b/uhdas_03
@@ -0,0 +1 @@
+uhdas_03_copy_asclog_for_editing
\ No newline at end of file
diff --git a/uhdas_03_copy_asclog_for_editing b/uhdas_03_copy_asclog_for_editing
new file mode 100755
index 0000000000000000000000000000000000000000..ef5af4147dcc45c5f2f4cfb4d93ea534fb9b756e
--- /dev/null
+++ b/uhdas_03_copy_asclog_for_editing
@@ -0,0 +1,73 @@
+#!/bin/bash
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# This is run as a bash script, because we want quick_adcp.py to run in the
+# python 3 environment
+# 
+# This script merges the edits stored in the proc_archive .asclog files with
+# any stored in the proc_editing copy
+#
+# The combined, sorted edits are written to both the .asc and .asclog files in proc_editing
+# The editing process takes past input from .asc, and appends new edits to .asclog
+#
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
+#
+
+# make sure the bash environment is set up
+source ~/.bashrc
+
+# no set command in bash
+ldir=~/cruise/data/vmadcp
+
+for segname in `cat ~/cruise/data/vmadcp/cruise_segments`
+do
+
+pushd ${ldir}/postprocessing/${segname}/proc_editing
+
+
+for pingtype in `ls -d *`
+do
+echo ${segname}/${pingtype}
+data=0
+if test -f ${ldir}/postprocessing/${segname}/proc_editing/${pingtype}/adcpdb/*dir.blk
+then
+data=1
+fi
+
+echo data ${data}
+
+if [ ${data} -eq  1 ]
+then
+pushd ${ldir}/postprocessing/${segname}/proc_editing/${pingtype}
+thisdir=`pwd`
+echo working in ${thisdir}
+
+# now fix the asc files
+
+for asctype in abadbin abottom abadprf
+do
+echo ${asctype}
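+# f1/f2 are the proc_editing and proc_archive .asclog files for this asctype;
+# their sorted union becomes the new proc_editing .asc (f3) and .asclog (f4)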
+f1=${ldir}/postprocessing/${segname}/proc_editing/${pingtype}/edit/${asctype}.asclog
+f2=${ldir}/postprocessing/${segname}/proc_archive/${pingtype}/edit/${asctype}.asclog
+f3=${ldir}/postprocessing/${segname}/proc_editing/${pingtype}/edit/${asctype}.asc
+f4=${ldir}/postprocessing/${segname}/proc_editing/${pingtype}/edit/${asctype}.asclog
+cat ${f1} ${f2} | sort -u > ${f3}
+/bin/cp -p ${f3} ${f4}
+done
+
+thisdir=`pwd`
+echo quick_adcp.py in ${thisdir}
+which quick_adcp.py
+quick_adcp.py --steps2rerun apply_edit:navsteps:calib --auto
+
+
+popd
+fi
+done
+popd
+
+
+done
diff --git a/uhdas_04 b/uhdas_04
new file mode 120000
index 0000000000000000000000000000000000000000..a7461204c8b8e94bc83d734209908203ee61d123
--- /dev/null
+++ b/uhdas_04
@@ -0,0 +1 @@
+uhdas_04_export_nc
\ No newline at end of file
diff --git a/uhdas_04_export_nc b/uhdas_04_export_nc
new file mode 100755
index 0000000000000000000000000000000000000000..2d4c3ba0e730e7458201985da12b77114bf8da0d
--- /dev/null
+++ b/uhdas_04_export_nc
@@ -0,0 +1,57 @@
+#!/bin/bash
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# This is run as a bash script, because we want adcp_nc.py to run in the
+# python 3 environment
+# 
+# This script remakes the contour/<pingtype>.nc files (e.g. contour/os150nb.nc) after dataviewer editing
+#
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
+#
+
+# make sure the bash environment is set up
+source ~/.bashrc
+
+# no set command in bash
+ldir=~/cruise/data/vmadcp
+
+for segname in `cat ~/cruise/data/vmadcp/cruise_segments`
+do
+
+pushd ${ldir}/postprocessing/${segname}/proc_editing
+
+
+for pingtype in `ls -d *`
+do
+echo ${segname}/${pingtype}
+data=0
+if test -f ${ldir}/postprocessing/${segname}/proc_editing/${pingtype}/adcpdb/*dir.blk
+then
+data=1
+fi
+
+echo data ${data}
+
+if [ ${data} -eq  1 ]
+then
+pushd ${ldir}/postprocessing/${segname}/proc_editing/${pingtype}
+thisdir=`pwd`
+echo working in ${thisdir}
+
+
+thisdir=`pwd`
+echo adcp_nc.py in ${thisdir}
+which adcp_nc.py
+adcp_nc.py adcpdb contour/${pingtype} jc184 ${pingtype}
+
+
+popd
+fi
+done
+popd
+
+
+done
diff --git a/uhdas_05 b/uhdas_05
new file mode 120000
index 0000000000000000000000000000000000000000..da43b7c46e6059557f368ebff634fd3d5d8e30d5
--- /dev/null
+++ b/uhdas_05
@@ -0,0 +1 @@
+uhdas_05_sync_edited_to_archive
\ No newline at end of file
diff --git a/uhdas_05_sync_edited_to_archive b/uhdas_05_sync_edited_to_archive
new file mode 100755
index 0000000000000000000000000000000000000000..3fe6827e5e2eb1f3c699d8a4310324a3bb68e8f7
--- /dev/null
+++ b/uhdas_05_sync_edited_to_archive
@@ -0,0 +1,32 @@
+#!/bin/csh -f
+#
+# bak in Amsterdam before start of jc184 uhdas trial
+# 3 July 2019
+#
+# 1) rsync the proc_editing directory back to proc_archive after editing with dataviewer.py -e
+#
+
+# List of cruise segments to be processed is found in
+# ~/cruise/data/vmadcp/cruise_segments
+#
+set ldir = ~/cruise/data/vmadcp
+#
+#
+foreach i (`cat ~/cruise/data/vmadcp/cruise_segments`)
+echo
+echo
+echo syncing 
+echo ${ldir}/postprocessing/${i}/proc_editing
+echo to 
+echo ${ldir}/postprocessing/${i}/proc_archive
+rsync -av ${ldir}/postprocessing/${i}/proc_editing/ ${ldir}/postprocessing/${i}/proc_archive/
+echo
+echo
+#
+end
+#
+
+
+
+
+