#!/usr/bin/tclsh
#
# This utility stuffs the latest Oracle data onto tape. To be run
# nightly. Name this file "backup-to-tape" and leave it somewhere in the
# path (e.g. /usr/bin). Try the following (or similar) in /etc/crontab:
# 0 5 * * * root backup-to-tape
#
# Deane R. Mathewson deane@civilution.com
#####

#####
# The first part here makes sure the tape is at the end of the existing
# data. If you really want to overwrite that data, you are going to
# have to erase it yourself before you run this utility.
#####

exec mt -f /dev/st0 rewind
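# The positioning reads below go through the non-rewinding device /dev/nst0,
# so each tar/mt command picks up where the previous one left off (the plain
# /dev/st0 device rewinds when it is closed).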

if {[catch {set filename [exec tar -tf /dev/nst0]} result]} {

set filename ""
}

while {$filename != ""} {

exec mt -f /dev/nst0 fsf 1

# A failed listing here just means we have run out of archives, so treat
# it the same way as the blank-tape case above instead of letting the
# error kill the script.
if {[catch {set filename [exec tar -tf /dev/nst0]} result]} {
set filename ""
}

}
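# The head should now be sitting at end of data, just past the last
# filemark, which is where the final tar write below will land.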

#####
# The most recently archived Oracle redo logs must be included in the
# backup. The following section identifies those and adds them to the
# string of filenames to be backed up. (A native-Tcl sketch of the same
# selection appears after the loop, for reference.) It really would be
# nice to back up each archived redo log as soon as it is created, but I
# haven't figured out a sexy way to do that yet. Obviously it would suck
# if my strategy meant that the tape containing all the most recent
# backups had to be left in the drive 24x7 to receive the aforementioned
# logs. Dammit, whose beer was this?
#####

set source_dir "/u04/oracle/log-arch"
set ls_results [exec ls $source_dir]
set files_to_backup ""

set cur_date [exec date +%y%m%d%H%M%S]
set cutoff_date [exec expr $cur_date - 1000000]
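# Subtracting 1000000 from a yymmddHHMMSS stamp backs the cutoff up one
# day (give or take a month boundary); the external expr is used here,
# presumably so the leading zero a %y year produces doesn't get read by
# Tcl's own expr as octal.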

while {[regexp {(arch_[0-9]+_[0-9]+\.arc)(.*)} $ls_results match filename rest] != 0} {

set source_file "$source_dir/$filename"
set file_mod_date [exec date -r $source_file +%y%m%d%H%M%S]
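# Strip the leading zero off the stamp so the comparison below sees a
# plain decimal number rather than something Tcl might take for octal.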
set file_mod_date [string trimleft $file_mod_date 0]

if {$file_mod_date > $cutoff_date} {

append files_to_backup " $source_dir/$filename"
}

set ls_results $rest

}
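
#####
# For reference only, and not wired into anything above: a minimal sketch
# of the same "recently modified files" selection done natively in Tcl
# with glob and file mtime instead of parsing ls and date output. Assumes
# Tcl 8.3+ for [glob -directory]; the proc is never called by this script.
#####

proc recently_modified {dir pattern max_age_secs} {
# Return the files in $dir matching $pattern whose modification time
# falls within the last $max_age_secs seconds.
set cutoff [expr {[clock seconds] - $max_age_secs}]
set result {}
foreach f [glob -nocomplain -directory $dir $pattern] {
if {[file mtime $f] > $cutoff} {
lappend result $f
}
}
return $result
}

# e.g. recently_modified $source_dir {arch_*_*.arc} 86400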

#####
# The next step is to add the latest export*.dmp and exp-log* files to
# files_to_backup. No, there isn't any good reason that the regexp below
# is so different from the last one; it just peels the ls output apart one
# line at a time, which means it picks up every file in the directory. It
# would be pretty amusing if someone decided source_dir was a good place
# for storing the results of illicit late-night web-surfing sessions.
# Better that than during business hours, I suppose. Oh wait, those ARE
# business hours.
#####

set source_dir "/u04/oracle/backup"
set ls_results [exec ls $source_dir]

# exec strips the trailing newline from the ls output, so the last line in
# the listing has to be allowed to match without a newline after it.
while {[regexp "(\[^\n\]+)\n?(.*)" $ls_results match filename rest] != 0} {

set source_file "$source_dir/$filename"
set file_mod_date [exec date -r $source_file +%y%m%d%H%M%S]
set file_mod_date [string trimleft $file_mod_date 0]

if {$file_mod_date > $cutoff_date} {

append files_to_backup " $source_dir/$filename"
}

set ls_results $rest

}

#####
# We must also add the most recent tarball of web_source, which in our case is /cvsweb,
# where the CVS repository controlling all our web sources is located. Since this is
# CVS, you may be wondering why I do not create any #cvs.rfl file locks while performing
# the backup. Well, Civilution is a 24x7 kind of place and I don't want to disturb work,
# whenever it is being done. Besides, inconsistencies in CVS are chicken feed compared
# to the same in Oracle, and can only happen if the CVS repository has to be restored
# while there exists a newer working directory already checked out. And I can fix any
# problems manually.
#####

set web_source "/cvsweb"
set source_dir "/u04/oracle/cvsweb-bak"
set cur_date [exec date +%b-%d-%Y-%H-%M]
set filename "cvsweb-$cur_date.tar.gz"

set evalstring "exec tar -czf $source_dir/$filename $web_source"
eval $evalstring >& /dev/null

append files_to_backup " $source_dir/$filename"

#####
# The last step is to write all this stuff to the tape. I suppose it might be possible
# to get so many characters in files_to_backup that the cOS (crapOS) pukes, but I have
# not encountered that yet. Muchas gracias to his worshipfulness CRM for the eval
# trick here.
#####

set evalstring "exec tar -cf /dev/st0 $files_to_backup"

eval $evalstring
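
#####
# To spot-check a run by hand:
# mt -f /dev/st0 rewind
# tar -tf /dev/nst0        (lists one archive)
# mt -f /dev/nst0 fsf 1    (steps past its filemark; repeat to walk the tape)
#####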