Commit acc7e4eb authored by PStar User Account

documentation for uhdas scripts, added mexec-exec to backup script

parent da2f61d1
#!/bin/csh
# DAS adapt from previous version for remote sync
if ($#argv != 4) then
  echo Wrong number of arguments
  exit
else
  echo - root_path = $1
  echo - sync_dir = $2
  echo - back_dir = $3
  echo - backuplog = $4
endif
set root_path = $1
set sync_dir = $2
set back_dir = $3
set backuplog = $4
if (-e $back_dir) then
  echo $back_dir exists
  exit
else
  mkdir $back_dir
endif
if (-e $sync_dir) then
  echo ' '
else
  echo $sync_dir does not exist
  exit
endif
# Sync all of the cruise directory
echo Going to sync: $root_path
echo to: $sync_dir,
echo backup changed files to $back_dir
echo and record in logfile $backuplog,
echo ' '
echo ------------- >> $backuplog
echo Syncing $root_path to $sync_dir >> $backuplog
echo Start `date` >> $backuplog
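# rsync options: -a archive, -v verbose, --delete removes files from $sync_dir that have
# gone from $root_path, and -b with --backup-dir moves any replaced or deleted files
# into $back_dir instead of discarding them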
rsync -v -a --delete -b --backup-dir=$back_dir $root_path $sync_dir
set how_much1 = `du -sm $sync_dir`
set how_much2 = `du -sm $back_dir`
echo done copying
echo Finish `date` >> $backuplog
echo Total size of sync dir '(Mb)': $how_much1 >> $backuplog
echo Total size of backup dir '(Mb)': $how_much2 >> $backuplog
echo ------------- >> $backuplog
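# Illustrative call (hypothetical timestamp; assumes this helper is installed as
# backup_core_local, which is how the main backup script below invokes it with
# arguments built from $root, $cru and the chosen drive):
#   backup_core_local /local/users/pstar/rpdmoc/jc145 \
#       /media/JC145_back_1/cruise_jc145_sync_1 \
#       /media/JC145_back_1/cruise_jc145_back_1/backup_20170328120000 \
#       /media/JC145_back_1/backup_logs/backup_log_20170328120000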
#!/bin/csh
# Backup script for use on RAPID cruises
# Data is copied to an external drive connected to the main workstation
# Drive mounted in /media
#
# This version by DAS. Updated Tue 28th 2017. Edited from previous versions used
# for remote backup on RAPID cruises.
# 1) syncs data from "cruise" directory, and creates a backup of all changed files
# 2) syncs data from "rapid/osnap" directory, and creates a backup of all changed files
# 3) syncs data from "Desktop" directory, and creates a backup of all changed files
# When setting up for a new cruise, change:
# - drive names
# - $root
# - $cru
# Variables for pathnames
set cru = jc145
set CRU = JC145
set root = /local/users/pstar/rpdmoc/
set rapiddir = rapid # Could be osnap
set usersdir = users
# Which drive are we using and is it there?
echo ' '
echo `date` ' Starting backup'
echo ' '
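# ($< below reads one line of input from the terminal - csh builtin)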
echo -n 'Which disk are you using? Enter 1, 2, or 0: '
set drq = $<
echo $drq
if ($drq == 1) then
  set drive_name = ${CRU}_back_1
else if ($drq == 2) then
  set drive_name = ${CRU}_back_22
else if ($drq == 0) then
  set drive_name = ${CRU}_back
else
  echo 'Input not recognised'
  exit
endif
# Drive pathname
set drive_path = '/media/'${drive_name}
if (-e ${drive_path}) then
  echo Drive connected
else
  echo ${drive_path} ' does not seem to be there'
  exit
endif
# Create a logfile for the backup
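# (date +%Y%m%d%H%M%S gives a timestamp like 20170328120000, so each log and backup directory name is unique)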
set now = `date +%Y%m%d%H%M%S`
if (! -d ${drive_path}/backup_logs) mkdir ${drive_path}/backup_logs
set backuplog = ${drive_path}/backup_logs/backup_log_${now}
if (-e $backuplog) then
  echo $backuplog exists
  exit
else
  touch $backuplog
endif
# -----------------------------------------------------------------
# First backup the cruise directory
set root_path_cru = ${root}${cru}
set sync_cru = ${drive_path}/cruise_${cru}_sync_$drq
set back_cru = ${drive_path}/cruise_${cru}_back_$drq/backup_${now}
# Call the main routine
# ----------------------------------------------------
backup_core_local $root_path_cru $sync_cru $back_cru $backuplog
# ----------------------------------------------------
# --------------------------------------------------------------------
# Sync all of the RAPID directory
set now = `date +%Y%m%d%H%M%S`
set root_path_rap = $root$rapiddir
set sync_rap = ${drive_path}/${rapiddir}_${cru}_sync_$drq
set back_rap = ${drive_path}/${rapiddir}_${cru}_back_$drq/backup_${now}
# Call the main routine
# ----------------------------------------------------
backup_core_local $root_path_rap $sync_rap $back_rap $backuplog
# ----------------------------------------------------
# --------------------------------------------------------------------
# Sync all of the users directory
set now = `date +%Y%m%d%H%M%S`
set root_path_users = $root$usersdir
set sync_users = ${drive_path}/${usersdir}_${cru}_sync_$drq
set back_users = ${drive_path}/${usersdir}_${cru}_back_$drq/backup_${now}
# Call the main routine
# ----------------------------------------------------
backup_core_local $root_path_users $sync_users $back_users $backuplog
# ----------------------------------------------------
# --------------------------------------------------------------------
# Sync all of the Desktop directory
set now = `date +%Y%m%d%H%M%S`
set deskdir = '../Desktop'
set root_path_desk = $root$deskdir
set sync_desk = ${drive_path}/desk_${cru}_sync_$drq
set back_desk = ${drive_path}/desk_${cru}_back_$drq/backup_${now}
# Call the main routine
# ----------------------------------------------------
backup_core_local $root_path_desk $sync_desk $back_desk $backuplog
# ----------------------------------------------------
# ------------------------------------------------------------------
# And finally copy log of backup to oceanus
set olog = /backup_logs
# Pause here so the scp password prompt below is not left waiting unattended
echo 'Press return to copy:' $backuplog $root_path_cru$olog
set qqq = $<
scp $backuplog $root_path_cru$olog
echo ' '
echo `date` - Backup finished
#!/bin/csh
# Where we will copy from
set remote_fs = 'surman@neyman:'
set fromDir = '/noc/mpoc/rpdmoc/'
#
set local_path = '/local/users/pstar/rpdmoc'
# Edit following to make list of things to copy
set dirs2copy = 'doc_library bathym_data tide_model hydro CTD_archive jc145 dy039 jc103 d382 jc064 rapid mocha_ab1403 mocha_ab1705 mocha_ab1209 mocha_ab1104 waveglider_telem_trial_2018 Florida_Straits'
# Now loop through the list
foreach dd ($dirs2copy)
echo --- $fromDir$dd ---
set from_path = $remote_fs$fromDir$dd
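# e.g. for the first entry, from_path expands to surman@neyman:/noc/mpoc/rpdmoc/doc_library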
rsync -v -a $from_path $local_path
echo done copying
echo Finish `date`
echo -------------
end
#!/usr/bin/env ruby
# encoding: UTF-8
$VERBOSE = true # -w
$KCODE = "U" if RUBY_VERSION < "1.9" # -KU
require 'optparse'
require 'socket'
require 'tempfile'
require 'yaml'
require 'fileutils'
module Rmate
DATE = "2017-02-10"
VERSION = "1.5.9"
VERSION_STRING = "rmate version #{Rmate::VERSION} (#{Rmate::DATE})"
class Settings
attr_accessor :host, :port, :unixsocket, :wait, :force, :verbose, :lines, :names, :types
def initialize
@host, @port, @unixsocket = 'localhost', 52698, '~/.rmate.socket'
@wait = false
@force = false
@verbose = false
@lines = []
@names = []
@types = []
read_disk_settings
@host = ENV['RMATE_HOST'].to_s if ENV.has_key? 'RMATE_HOST'
@port = ENV['RMATE_PORT'].to_i if ENV.has_key? 'RMATE_PORT'
@unixsocket = ENV['RMATE_UNIXSOCKET'].to_s if ENV.has_key? 'RMATE_UNIXSOCKET'
parse_cli_options
@host = parse_ssh_connection if @host == 'auto'
end
def read_disk_settings
[ "/etc/rmate.rc", "/usr/local/etc/rmate.rc", "~/.rmate.rc"].each do |current_file|
file = File.expand_path current_file
if File.exist? file
params = YAML::load(File.open(file))
@host = params["host"] unless params["host"].nil?
@port = params["port"] unless params["port"].nil?
@unixsocket = params["unixsocket"] unless params["unixsocket"].nil?
end
end
end
def parse_cli_options
OptionParser.new do |o|
o.on( '--host=name', "Connect to host.", "Use 'auto' to detect the host from SSH.", "Defaults to #{@host}.") { |v| @host = v }
o.on('-s', '--unixsocket=name', "UNIX socket path.", "Takes precedence over host/port if the file exists", \
"Default #{@unixsocket}") { |v| @unixsocket = v }
o.on('-p', '--port=#', Integer, "Port number to use for connection.", "Defaults to #{@port}.") { |v| @port = v }
o.on('-w', '--[no-]wait', 'Wait for file to be closed by TextMate.') { |v| @wait = v }
o.on('-l', '--line [NUMBER]', 'Place caret on line [NUMBER] after loading file.') { |v| @lines <<= v }
o.on('-m', '--name [NAME]', 'The display name shown in TextMate.') { |v| @names <<= v }
o.on('-t', '--type [TYPE]', 'Treat file as having [TYPE].') { |v| @types <<= v }
o.on('-f', '--force', 'Open even if the file is not writable.') { |v| @force = v }
o.on('-v', '--verbose', 'Verbose logging messages.') { |v| @verbose = v }
o.on_tail('-h', '--help', 'Show this message.') { puts o; exit }
o.on_tail( '--version', 'Show version.') { puts VERSION_STRING; exit }
o.parse!
end
end
def parse_ssh_connection
ENV['SSH_CONNECTION'].nil? ? 'localhost' : ENV['SSH_CONNECTION'].split(' ').first
end
end
class Command
def initialize(name)
@command = name
@variables = {}
@data = nil
@size = nil
end
def []=(name, value)
@variables[name] = value
end
def read_file(path)
@size = File.size(path)
@data = File.open(path, "rb") { |io| io.read(@size) }
end
def read_stdin
@data = $stdin.read
@size = @data.bytesize
end
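# Wire format written by #send (and parsed back in handle_cmd below): the command
# name on its own line, then "name: value" variable lines, then optionally
# "data: <byte count>" followed by that many bytes of content, then a blank line.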
def send(socket)
socket.puts @command
@variables.each_pair do |name, value|
value = 'yes' if value === true
socket.puts "#{name}: #{value}"
end
if @data
socket.puts "data: #{@size}"
socket.puts @data
end
socket.puts
end
end
module_function
def handle_save(socket, variables, data)
path = variables["token"]
if File.writable?(path) || !File.exist?(path)
$stderr.puts "Saving #{path}" if $settings.verbose
begin
backup_path = "#{path}~"
backup_path = "#{backup_path}~" while File.exist? backup_path
FileUtils.cp(path, backup_path, :preserve => true) if File.exist?(path)
open(path, 'wb') { |file| file << data }
File.unlink(backup_path) if File.exist? backup_path
rescue
# TODO We probably want some way to notify the server app that the save failed
$stderr.puts "Save failed! #{$!}" if $settings.verbose
end
else
$stderr.puts "Skipping save, file not writable." if $settings.verbose
end
end
def handle_close(socket, variables, data)
path = variables["token"]
$stderr.puts "Closed #{path}" if $settings.verbose
end
def handle_cmd(socket)
cmd = socket.readline.chomp
variables = {}
data = ""
while line = socket.readline.chomp
break if line.empty?
name, value = line.split(': ', 2)
variables[name] = value
data << socket.read(value.to_i) if name == "data"
end
variables.delete("data")
case cmd
when "save" then handle_save(socket, variables, data)
when "close" then handle_close(socket, variables, data)
else abort "Received unknown command “#{cmd}”, exiting."
end
end
def connect_and_handle_cmds(host, port, unixsocketpath, cmds)
socket = nil
unixsocketpath = File.expand_path(unixsocketpath) unless unixsocketpath.nil?
if unixsocketpath.nil? || !File.exist?(unixsocketpath)
$stderr.puts "Using TCP socket to connect: ‘#{host}:#{port}’" if $settings.verbose
begin
socket = TCPSocket.new(host, port)
rescue Exception => e
abort "Error connecting to ‘#{host}:#{port}’: #{e.message}"
end
else
$stderr.puts "Using UNIX socket to connect: ‘#{unixsocketpath}’" if $settings.verbose
socket = UNIXSocket.new(unixsocketpath)
end
server_info = socket.readline.chomp
$stderr.puts "Connect: ‘#{server_info}’" if $settings.verbose
cmds.each { |cmd| cmd.send(socket) }
socket.puts "."
handle_cmd(socket) while !socket.eof?
socket.close
$stderr.puts "Done" if $settings.verbose
end
end
## MAIN
$settings = Rmate::Settings.new
## Parse arguments.
cmds = []
ARGV << '-' if ARGV.empty? and (!$stdin.tty? or $settings.wait)
ARGV.each_index do |idx|
path = ARGV[idx]
if path == '-'
$stderr.puts "Reading from stdin, press ^D to stop" if $stdin.tty?
else
abort "'#{path}' is a directory! Aborting." if File.directory? path
abort "File #{path} is not writable! Use -f/--force to open anyway." unless $settings.force or File.writable? path or not File.exist? path
$stderr.puts "File #{path} is not writable. Opening anyway." if not File.writable? path and File.exist? path and $settings.verbose
end
cmd = Rmate::Command.new("open")
cmd['display-name'] = "#{Socket.gethostname}:untitled (stdin)" if path == '-'
cmd['display-name'] = "#{Socket.gethostname}:#{path}" unless path == '-'
cmd['display-name'] = $settings.names[idx] if $settings.names.length > idx
cmd['real-path'] = File.expand_path(path) unless path == '-'
cmd['data-on-save'] = true
cmd['re-activate'] = true
cmd['token'] = path
cmd['selection'] = $settings.lines[idx] if $settings.lines.length > idx
cmd['file-type'] = 'txt' if path == '-'
cmd['file-type'] = $settings.types[idx] if $settings.types.length > idx
cmd.read_stdin if path == '-'
cmd.read_file(path) if path != '-' and File.exist? path
cmd['data'] = "0" unless path == '-' or File.exist? path
cmds << cmd
end
unless $settings.wait
pid = fork do
Rmate::connect_and_handle_cmds($settings.host, $settings.port, $settings.unixsocket, cmds)
end
Process.detach(pid)
else
Rmate::connect_and_handle_cmds($settings.host, $settings.port, $settings.unixsocket, cmds)
end
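# Typical use (assuming a local editor is listening on port 52698 and the SSH session
# forwards that port back to it): run `rmate -w somefile` on the remote host to edit
# the file in the local editor; `rmate -` reads from stdin.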
#!/bin/csh
# Where we will copy from
set remote_fs = 'surman@neyman:'
set fromDir = '/noc/mpoc/rpdmoc/'
#
set local_path = '/data/pstar'
# Edit following to make list of things to copy
set dirs2copy = 'cd170 cd177 d279 d304 d324 d333.tar d334 d344 d345 d346 d359 en517 kn182 kn200-4 oc459 p343 p345 rb0602 rb0701 rb0901 rb1009 rb1201 sj0614 sj08'
# Now loop through the list
foreach dd ($dirs2copy)
echo --- $fromDir$dd ---
set from_path = $remote_fs$fromDir$dd
rsync -v -a $from_path $local_path
echo done copying
echo Finish `date`
echo -------------
end
rsync -a -v ${remote_fs}/noc/mpoc/drake/jc159/backup_20180411170013/ ${local_path}/jc159
rsync -a -v --exclude 'bim' ${remote_fs}/noc/mpoc/rpdmoc/users ${local_path}
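# Note on the two rsyncs above: the trailing slash on backup_20180411170013/ copies that
# directory's contents into ${local_path}/jc159, while 'users' (no trailing slash) is
# recreated as ${local_path}/users (with 'bim' excluded)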
#
# Clean up by DAS September 2018
# Almost all is in .cshrc; not sure anything is needed here
#
# This script is executed on each login - will be after .cshrc
# Load modules - these will be put at front of path
# #module load matlab/2015b
-module load matlab/2011a
+module load matlab/2011b
#module load anaconda2
@@ -56,11 +56,15 @@ echo Date `date` >> $backuplog
echo start copying mexec software >> $backuplog
echo ------ >> $backuplog
cd $root
-rsync -a sw/mexec/ ${backupdir}/
+rsync -a ~/programs/mexec_v3 ${backupdir}/
echo ------------- >> $backuplog
echo Date `date` >> $backuplog
echo done copying mexec software >> $backuplog
echo ------ >> $backuplog
+rsync -a ~/programs/mexec-exec ${backupdir}/
+echo ------------ >> $backuplog
+echo Date `date` >> $backuplog
+echo done copying mexec shell scripts >> $backuplog
# backup everything in data
echo ------------- >> $backuplog
@@ -68,13 +72,13 @@ echo Date `date` >> $backuplog
echo start copy to disk of data >> $backuplog
echo ------ >> $backuplog
cd $root
-rsync -aL --exclude legwork --exclude scs_raw --exclude jcrfs --exclude netcdf_files_rawdir_ship data ${backupdir}
+rsync -aL --exclude legwork --exclude scs_raw --exclude jcrfs --exclude netcdf_files_rawdir_ship --exclude from_public data ${backupdir}
echo ------------- >> $backuplog
echo Date `date` >> $backuplog
echo done copy to disk of data >> $backuplog
echo ------ >> $backuplog
-echo general_sw, legdata and legwork not backed up
+echo general_sw, legdata, legwork, public, scs/techsas raw not backed up
popd
@@ -3,15 +3,12 @@
# bak in Amsterdam before start of jc184 uhdas trial
# 3 July 2019
#
-# 1) rsync uhdas data from uhdas server to local machine
-#
-# 2) ensure postprocessing directories exist
-#
-# If the script hasn't been run recently, there will
-# be a lot of files to sync across. The processing on the uhdas
-# server updates plenty of things in routine processing every few minutes.
-# If the script is then immediately run again, there will be a few files
-# that are updated every ping.
+# prepare for reprocessing of single-ping data (i.e. running
+# quick_adcp.py)
+# make links to contents of atsea and copy over config files
+# as a starting point
+#
+# run after uhdas_01!
#
# List of cruise segments to be processed is found in
# ~/cruise/data/vmadcp/cruise_segments
@@ -3,7 +3,7 @@
# bak in Amsterdam before start of jc184 uhdas trial
# 3 July 2019
#
-# 1) rsync uhdas data from uhdas server to local machine
+# 1) rsync uhdas data from uhdas server to local machine directory atsea
#
# 2) ensure postprocessing directories exist
#
@@ -3,7 +3,8 @@
# bak in Amsterdam before start of jc184 uhdas trial
# 3 July 2019
#
-# 1) rsync proc directory from the local atsea copy to the editing directory
+# 1) rsync proc directory from the local atsea copy to the
+# editing directory, postprocessing/proc_editing
#
# If the script hasn't been run recently, there will
# be a lot of files to sync across. The processing on the uhdas