Hello,
I am having difficulty getting one of my job definitions to work right.
I have a setup of daily/weekly jobs which use the same job resource, to
run at most 10 jobs at once. This is working most of the time. I
cut-n-paste that job definition into one called archive, and that runs
monthly. The problem is, the jobs run one at a time, making the backups
take all day. I can't for the life of me get that job to run multiples!
Also, when I use the term "mostly" above, it means sometimes the weekly
jobs will run one at a time as well, for no discernible reason, and the
bacula director needs to be restarted in order for it to work right. I
tried restarting bacula fresh right before the archive backups kicked
off to see if that would work, but it didn't. They are currently running
one at a time.
I've attached all my configurations in hopes someone can look at them
and try to determine my problem. Thanks for any help you can provide,
Tom
# Client (File Services) to backup
Client {
Name = voyager-fd
Address = voyager
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "voyager-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/voyager.bst"
Client = "voyager-fd"
FileSet = "Full Set"
}
Job {
Name = "voyager-archive"
JobDefs = "ArchiveJob"
Write bootstrap = "/var/bacula/voyager.bst"
Client = "voyager-fd"
FileSet = "Full Set"
}
##
# Verify Jobs
##
Job {
Name = voyager-verify
JobDefs = "VerifyJob"
Client = voyager-fd
}
##
# Check Jobs
##
Job {
Name = voyager-check
JobDefs = "CheckJob"
Client = voyager-fd
File Set = "Full Set"
Verify Job = "voyager-archive"
}
Job {
Name = voyager-check-w
JobDefs = "CheckJob"
Client = voyager-fd
File Set = "Full Set"
Verify Job = "voyager-backup"
Schedule = "Weekly Check"
}
#
# Default Bacula Director Configuration file
#
# The only thing that MUST be changed is to add one or more
# file or directory names in the Include directive of the
# FileSet resource.
#
# For Bacula release 1.36.2 (28 February 2005) -- suse 9
#
# You might also want to change the default email address
# from root to your address. See the "mail" and "operator"
# directives in the Messages resource.
#
Director { # define myself
Name = Gimli-dir
DIRport = 9101 # where we listen for UA connections
QueryFile = "/etc/bacula/query.sql"
WorkingDirectory = "/var/bacula/working"
PidDirectory = "/var/run"
Maximum Concurrent Jobs = 10
Password = "XXX"
Messages = Daemon
}
JobDefs {
Name = "DefaultJob"
Type = Backup
Level = Incremental
Client = Gimli-fd
FileSet = "Full Set"
Schedule = "Monthly Archive"
Storage = File
Messages = Standard
Pool = Default
Priority = 10
Maximum Concurrent Jobs = 10
}
# Defaults for the nightly/weekly backups (Priority 10).
# NOTE(review): Bacula only runs jobs simultaneously when they share the
# SAME Priority.  Jobs that override this value (mercuryd-backup = 8,
# mercrd-backup = 9, BackupCatalog = 11) will serialize against the
# Priority-10 jobs -- the likely cause of the weekly runs sometimes
# going one at a time.
JobDefs {
Name = "DailyJob"
Type = "Backup"
Level = "Incremental"
Pool = "Daily"
Full Backup Pool = "Weekly"
FileSet = "Full Set"
Messages = "Standard"
Schedule = "Weekly Full"
Storage = "VXAPL"
Priority = 10
Spool Data = Yes
Maximum Concurrent Jobs = 10
}
# Defaults for the monthly full ("archive") backups (Priority 5).
# NOTE(review): Bacula will not run jobs of different priorities at the
# same time.  Any Job that overrides Priority (mercuryd-archive uses 3,
# mercrd-archive uses 4) forces the scheduler to drain each priority
# level before starting the next, making the whole monthly run
# effectively single-threaded -- this matches the reported symptom of
# archive jobs running one at a time.
JobDefs {
Name = "ArchiveJob"
Type = "Backup"
Level = "Full"
Pool = "Monthly"
Full Backup Pool = "Monthly"
FileSet = "Full Set"
Messages = "Standard"
Schedule = "Monthly Archive"
Storage = "VXAPL"
Priority = 5
Spool Data = Yes
Maximum Concurrent Jobs = 10
}
Jobdefs {
Name = "VerifyJob"
Type = Verify
Level = Catalog
File Set = "Verify Set"
Schedule = "Verify Schedule"
Messages = Standard
Storage = VXAPL
Pool = Default
Maximum Concurrent Jobs = 10
}
# Defaults for VolumeToCatalog verify ("check") jobs.
# Maximum Concurrent Jobs = 1 appears deliberate here: a VolumeToCatalog
# verify re-reads the written volume, so checks run one at a time on the
# drive.  Priority 15 keeps them queued behind all backup jobs -- and,
# because priorities never mix, they will never overlap a backup either.
Jobdefs {
Name = "CheckJob"
Type = Verify
Level = VolumeToCatalog
File Set = "Full Set"
Schedule = "Check Schedule"
Messages = Standard
Storage = VXAPL
Pool = Monthly
Maximum Concurrent Jobs = 1
Priority = 15
}
#
# Define the main nightly save backup job
# By default, this job will back up to disk in /tmp
#Job {
# Name = "Client1"
# JobDefs = "DefaultJob"
# Write Bootstrap = "/var/bacula/working/Client1.bsr"
#}
# Backup the catalog database (after the nightly save)
# NOTE(review): Priority 11 makes this start only after the Priority-10
# backups finish; since Bacula never mixes priorities, it also cannot run
# concurrently with them -- presumably intended for a catalog dump, but it
# contributes to the mixed-priority serialization described above.
Job {
Name = "BackupCatalog"
Client = Gimli-fd
JobDefs = "DailyJob"
Level = Full
FileSet="Catalog"
Schedule = "WeeklyCycleAfterBackup"
# This creates an ASCII copy of the catalog
RunBeforeJob = "/etc/bacula/make_catalog_backup bacula bacula"
# This deletes the copy of the catalog
RunAfterJob = "/etc/bacula/delete_catalog_backup"
Write Bootstrap = "/var/bacula/BackupCatalog.bsr"
Priority = 11 # run after main backup
}
# Standard Restore template, to be changed by Console program
Job {
Name = "RestoreFiles"
Type = Restore
Client=Gimli-fd
FileSet="Full Set"
Storage = File
Pool = Default
Messages = Standard
Where = /tmp/bacula-restores
}
#
# When to do the backups, full backup on first sunday of the month,
# differential (i.e. incremental since full) every other sunday,
# and incremental backups other days
#Schedule {
# Name = "WeeklyCycle"
# Run = Full 1st sun at 1:05
# Run = Differential 2nd-5th sun at 1:05
# Run = Incremental mon-sat at 1:05
#}
# This schedule does the catalog. It starts after the WeeklyCycle
Schedule {
Name = "WeeklyCycleAfterBackup"
Run = Level=Full Pool=Monthly FullPool=Monthly on 1 at 6:05
Run = Level=Full Pool=Weekly on 2-31 sat at 6:05
Run = Level=Incremental Pool=Daily FullPool=Weekly on 2-31 sun-fri at 6:05
}
Schedule {
Name = "Monthly Archive"
Run = on 1 at 5:59
}
Schedule {
Name = "Check Schedule"
Run = on 1 at 10:00
}
Schedule {
Name = "Weekly Check"
Run = sat at 10:00
}
Schedule {
Name = "Weekly Full"
Run = Level=Full Pool=Weekly sat at 6:00
Run = Level=Incremental Pool=Daily FullPool=Weekly sun-fri at 6:00
}
Schedule {
Name = "Verify Schedule"
Run = mon-fri at 12:00
}
# List of files to be backed up
FileSet {
Name = "Full Set"
Include {
Options { signature=MD5; onefs=yes }
File = "\\|bash -c \"df -PklF ext3 | tail +2 | awk '{print \$6}'\""
}
Exclude {
File = /tmp
File = /var/bacula/spool
}
}
#
# Put your list of files here, preceded by 'File =', one per line
# or include an external list with:
#
# File = <file-name
#
# Note: / backs up everything on the root partition.
# if you have other partitons such as /usr or /home
# you will probably want to add them too.
#
# By default this is defined to point to the Bacula build
# directory to give a reasonable FileSet to backup to
# disk storage during initial testing.
#
# File = /home/truth/src/bacula-1.36.2
#
# If you backup the root directory, the following two excluded
# files can be useful
#
# Exclude {
# File = /proc
# File = /tmp
# File = /.journal
# File = /.fsck
# }
# This is the backup of the catalog
FileSet {
Name = "Catalog"
Include {
Options {
signature = MD5
}
File = /var/bacula/working/bacula.sql
}
}
FileSet {
Name = "Verify Set"
Include {
Options { verify=pins1; signature=SHA1; }
File = "/etc"
File = "/boot"
File = "/bin"
File = "/sbin"
File = "/usr/bin"
File = "/usr/sbin"
File = "/lib"
File = "/usr/lib"
File = "/root"
}
Exclude {
File = "/etc/lvm"
File = "/etc/cups"
File = "/etc/mtab"
File = "/root/.viminfo"
File = "/root/.bash_history"
File = "/etc/mail/statistics"
File = "/root/.razor"
File = "/etc/snort"
File = "/etc/ntp"
File = "/root/.Xauthority"
File = "/root/.kde"
File = "/root/.mcop"
File = "/root/Desktop"
File = "/root/KDesktop"
File = "/root/.X.err"
File = "/root/.ICEauthority"
File = "/etc/ntp.drift"
File = "/root/.ncftp"
File = "/root/.fonts.cache-1"
File = "/etc/lvmconf"
File = "/etc/lvmtab.d"
File = "/root/.cpan"
}
}
# Definiton of file storage device
Storage {
Name = File
# Do not use "localhost" here
Address = Gimli # N.B. Use a fully qualified name here
SDPort = 9103
Password = "XXX"
Device = FileStorage
Media Type = File
}
Storage {
Name = "VXAPL"
Address = "Gimli"
SDPort = 9103
Password = "XXX"
Device = VXAPL
Media Type = X23
Autochanger = yes
Maximum Concurrent Jobs = 10
}
# Definition of DDS tape storage device (disabled)
# FIX: the two "daemon" fragments on their own lines were wrap residue
# from the comments above and were NOT commented out -- the director
# would reject the file on the bare word "daemon".  Folded them back
# into the comments they belong to.
#Storage {
# Name = DDS-4
# Do not use "localhost" here
# Address = Gimli # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "XXX"
# Device = DDS-4 # must be same as Device in Storage daemon
# Media Type = DDS-4 # must be same as MediaType in Storage daemon
#}
# Definition of 8mm tape storage device
#Storage {
# Name = "8mmDrive"
# Do not use "localhost" here
# Address = Gimli # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "XXX"
# Device = "Exabyte 8mm"
# MediaType = "8mm"
#}
# Generic catalog service
# FIX: redacted the database password -- every other credential in this
# posting was sanitized to "XXX", but the real catalog password had been
# left in clear text.  Substitute the actual value locally.
# Multiple Connections lets each concurrent job open its own DB session,
# which is required for the intended 10-way concurrency.
Catalog {
Name = MainCatalog
dbname = bacula; user = bacula; password = "XXX"
Multiple Connections = yes
}
# Reasonable message delivery -- send most everything to email address
# and to the console
# FIX: the mailcommand/operatorcommand values had been wrapped onto two
# lines; a quoted value must stay on one line, so the director would fail
# to parse them.  Rejoined each string and restored the escaped closing
# quote (\") after the From address -- TODO confirm the address itself,
# it was masked as [EMAIL PROTECTED] in the posting.
Messages {
Name = Standard
#
# NOTE! If you send to two email or more email addresses, you will need
# to replace the %r in the from field (-f part) with a single valid
# email address in both the mailcommand and the operatorcommand.
#
mailcommand = "/sbin/bsmtp -h mail.united-securities.com -f \"\(Bacula\) [EMAIL PROTECTED]\" -s \"Bacula: %t %e of %c %l\" %r"
operatorcommand = "/sbin/bsmtp -h mail.united-securities.com -f \"\(Bacula\) [EMAIL PROTECTED]\" -s \"Bacula: Intervention needed for %j\" %r"
mail = [EMAIL PROTECTED] = all, !skipped
operator = [EMAIL PROTECTED] = mount
console = all, !skipped, !saved
#
# WARNING! the following will create a file that you must cycle from
# time to time as it will grow indefinitely. However, it will
# also keep all your messages if they scroll off the console.
#
append = "/var/bacula/working/log" = all, !skipped
}
#
# Message delivery for daemon messages (no job).
# FIX: rejoined the wrapped mailcommand string onto a single line and
# restored the escaped closing quote (\") after the From address; a quoted
# value cannot span lines in a Bacula resource file.
Messages {
Name = Daemon
mailcommand = "/sbin/bsmtp -h mail.united-securities.com -f \"\(Bacula\) [EMAIL PROTECTED]\" -s \"Bacula daemon message\" %r"
mail = [EMAIL PROTECTED] = all, !skipped
console = all, !skipped, !saved
append = "/var/bacula/working/log" = all, !skipped
}
# Default pool definition
Pool {
Name = Default
Pool Type = Backup
Recycle = yes # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Accept Any Volume = yes # write on any volume in the pool
Cleaning Prefix = "CLN"
}
Pool {
Name = Monthly
Pool Type = Backup
Volume Use Duration = 1 day
Volume Retention = 7 years
Recycle = no
Auto Prune = yes
Cleaning Prefix = "CLN"
Accept Any Volume = yes
}
Pool {
Name = Weekly
Pool Type = Backup
Volume Use Duration = 1 day
Volume Retention = 20 days
Recycle = yes
Auto Prune = yes
Recycle Oldest Volume = yes
Cleaning Prefix = "CLN"
Accept Any Volume = yes
}
Pool {
Name = Daily
Pool Type = Backup
Volume Use Duration = 6 days
Volume Retention = 7 days
Recycle = yes
Auto Prune = yes
Recycle Oldest Volume = yes
Cleaning Prefix = "CLN"
Accept Any Volume = yes
}
Pool {
Name = Cleaning
Pool Type = Backup
Cleaning Prefix = "CLN"
}
#
# Restricted console used by tray-monitor to get the status of the director
#
Console {
Name = Gimli-mon
Password = "XXX"
CommandACL = status, .status
}
#
# Clients
#
@/etc/bacula/gimli.conf
@/etc/bacula/telstar.conf
@/etc/bacula/voyager.conf
@/etc/bacula/usant.conf
@/etc/bacula/bashful.conf
@/etc/bacula/frodo.conf
@/etc/bacula/samwise.conf
@/etc/bacula/meriadoc.conf
@/etc/bacula/peregrin.conf
#
# Default Bacula File Daemon Configuration file
#
# For Bacula release 1.36.2 (28 February 2005) -- suse 9
#
# There is not much to change here except perhaps the
# File daemon Name to
#
#
# List Directors who are permitted to contact this File daemon
#
Director {
Name = Gimli-dir
Password = "XXX"
}
#
# Restricted Director, used by tray-monitor to get the
# status of the file daemon
#
Director {
Name = Gimli-mon
Password = "XXX"
Monitor = yes
}
#
# "Global" File daemon configuration specifications
#
FileDaemon { # this is me
Name = Gimli-fd
FDport = 9102 # where we listen for the director
WorkingDirectory = /var/bacula/working
Pid Directory = /var/run
Maximum Concurrent Jobs = 20
}
# Send all messages except skipped files back to Director
Messages {
Name = Standard
director = Gimli-dir = all, !skipped
}
#
# Default Bacula Storage Daemon Configuration file
#
# For Bacula release 1.36.2 (28 February 2005) -- suse 9
#
# You may need to change the name of your tape drive
# on the "Archive Device" directive in the Device
# resource. If you change the Name and/or the
# "Media Type" in the Device resource, please ensure
# that dird.conf has corresponding changes.
#
Storage { # definition of myself
Name = Gimli-sd
SDPort = 9103 # Director's port
WorkingDirectory = "/var/bacula/working"
Pid Directory = "/var/run"
Maximum Concurrent Jobs = 20
}
#
# List Directors who are permitted to contact Storage daemon
#
Director {
Name = Gimli-dir
Password = "XXX"
}
#
# Restricted Director, used by tray-monitor to get the
# status of the storage daemon
#
Director {
Name = Gimli-mon
Password = "XXX"
Monitor = yes
}
#
# Devices supported by this Storage daemon
# To connect, the Director's bacula-dir.conf must have the
# same Name and MediaType.
#
Device {
Name = FileStorage
Media Type = File
Archive Device = /tmp
LabelMedia = yes; # lets Bacula label unlabeled media
Random Access = Yes;
AutomaticMount = yes; # when device opened, read it
RemovableMedia = no;
AlwaysOpen = no;
}
Device {
Name = "VXAPL"
Media Type = "X23"
Archive Device = "/dev/nst0"
Autochanger = Yes
Changer Device = "/dev/sg1"
Changer Command = "/etc/bacula/mtx-changer %c %o %S %a %d"
Removable Media = Yes
Random Access = No
# Maximum Concurrent Jobs = 20
Always Open = yes
Automatic Mount = yes
Alert Command = "sh -c '/usr/local/sbin/tapeinfo -f %c |grep TapeAlert|cat'"
Spool Directory = "/var/bacula/spool"
Maximum Job Spool Size = 2000000000
Maximum Spool Size = 15000000000
}
#
# A Linux or Solaris tape drive
#
#Device {
# Name = DDS-4 #
# Media Type = DDS-4
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
## Changer Command = "/etc/bacula/mtx-changer %c %o %S %a %d"
## Changer Device = /dev/sg0
## AutoChanger = yes
## Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
#}
#
# A FreeBSD tape drive
#
#Device {
# Name = DDS-4
# Description = "DDS-4 for FreeBSD"
# Media Type = DDS-4
# Archive Device = /dev/nsa1
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes
# Offline On Unmount = no
# Hardware End of Medium = no
# BSF at EOM = yes
# Backward Space Record = no
# Fast Forward Space File = no
# TWO EOF = yes
#}
#
# A OnStream tape drive.
# You need the kernel osst driver 0.9.14 or later, and
# do "mt -f /dev/nosst0 defblksize 32768" once as root.
#
#Device {
# Name = OnStream
# Description = "OnStream drive on Linux"
# Media Type = OnStream
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes
# Offline On Unmount = no
## The min/max blocksizes of 32768 are *required*
# Minimum Block Size = 32768
# Maximum Block Size = 32768
#}
#
# A very old Exabyte with no end of media detection
#
#Device {
# Name = "Exabyte 8mm"
# Media Type = "8mm"
# Archive Device = /dev/nst0
# Hardware end of medium = No;
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = Yes;
# RemovableMedia = yes;
# RandomAccess = no;
#}
#
# Send all messages to the Director,
# mount messages also are sent to the email address
#
Messages {
Name = Standard
director = Gimli-dir = all
}
# Client (File Services) to backup
Client {
Name = bashful-fd
Address = bashful
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "bashful-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/bashful.bst"
Client = "bashful-fd"
FileSet = "Bashful Full Set"
}
Job {
Name = "bashful-archive"
JobDefs = "ArchiveJob"
Client = "bashful-fd"
Write bootstrap = "/var/bacula/bashful.bst"
FileSet = "Bashful Full Set"
}
FileSet {
Name = "Bashful Full Set"
Include {
Options { signature=MD5; onefs=yes }
File = "/"
File = "/var"
File = "/usr"
File = "/home"
}
Exclude {
File = /tmp
}
}
##
# Verify Jobs
##
Job {
Name = bashful-verify
JobDefs = "VerifyJob"
Client = bashful-fd
}
##
# Check Jobs
##
Job {
Name = bashful-check
JobDefs = "CheckJob"
Client = bashful-fd
File Set = "Bashful Full Set"
Verify Job = "bashful-archive"
}
Job {
Name = bashful-check-w
JobDefs = "CheckJob"
Client = bashful-fd
File Set = "Bashful Full Set"
Verify Job = "bashful-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = Frodo-fd
Address = Frodo
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "Frodo-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/frodo.bst"
Client = "Frodo-fd"
FileSet = "Full Set"
}
Job {
Name = "Frodo-archive"
JobDefs = "ArchiveJob"
Client = "Frodo-fd"
FileSet = "Full Set"
Write bootstrap = "/var/bacula/Frodo.bst"
}
##
# Verify Jobs
##
Job {
Name = Frodo-verify
JobDefs = "VerifyJob"
Client = Frodo-fd
}
##
# Check Jobs
##
Job {
Name = Frodo-check
JobDefs = "CheckJob"
Client = Frodo-fd
File Set = "Full Set"
Verify Job = "Frodo-archive"
}
Job {
Name = Frodo-check-w
JobDefs = "CheckJob"
Client = Frodo-fd
File Set = "Full Set"
Verify Job = "Frodo-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = Gimli-fd
Address = Gimli
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "Gimli-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/gimli.bst"
Client = "Gimli-fd"
FileSet = "Gimli Full Set"
}
Job {
Name = "Gimli-archive"
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
FileSet = "Gimli Full Set"
Write bootstrap = "/var/bacula/gimli.bst"
}
Job {
Name = "mercuryp-backup"
JobDefs = "DailyJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryp.bst"
FileSet = "MercuryP DB2"
Client Run Before Job = "su - mercuryp -c \"/u00/mercuryp/backupdb.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
Client Run After Job = "su - mercuryp -c \"/u00/mercuryp/prunelogs.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
}
Job {
Name = "mercuryp-archive"
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryp.bst"
FileSet = "MercuryP DB2"
Client Run Before Job = "su - mercuryp -c \"/u00/mercuryp/backupdb.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
Client Run After Job = "su - mercuryp -c \"/u00/mercuryp/prunelogs.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
}
Job {
Name = mercuryp-restore-db2
Type = Restore
Client = Gimli-fd
File Set = "MercuryP DB2"
Messages = Standard
Storage = VXAPL
Pool = Default
Client Run Before Job = "su - mercuryp -c \"/u00/mercuryp/backupdb.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
Client Run After Job = "su - mercuryp -c \"/u00/mercuryp/prunelogs.sh '%t'
'%l' 'mercuryp' 'mercuryp'\""
Where = "/"
}
Job {
Name = "mercuryr-backup"
JobDefs = "DailyJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryr.bst"
FileSet = "MercuryR DB2"
Client Run Before Job = "su - mercuryr -c \"/u01/mercuryr/backupdb.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
Client Run After Job = "su - mercuryr -c \"/u01/mercuryr/prunelogs.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
}
Job {
Name = "mercuryr-archive"
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryr.bst"
FileSet = "MercuryR DB2"
Client Run Before Job = "su - mercuryr -c \"/u01/mercuryr/backupdb.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
Client Run After Job = "su - mercuryr -c \"/u01/mercuryr/prunelogs.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
}
Job {
Name = mercuryr-restore-db2
Type = Restore
Client = Gimli-fd
File Set = "MercuryR DB2"
Messages = Standard
Storage = VXAPL
Pool = Default
Client Run Before Job = "su - mercuryr -c \"/u01/mercuryr/backupdb.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
Client Run After Job = "su - mercuryr -c \"/u01/mercuryr/prunelogs.sh '%t'
'%l' 'mercuryr' 'mercuryr'\""
Where = "/"
}
# DB2 (mercuryd) nightly backup.
# FIX: was Priority = 8.  Bacula never runs jobs of different priorities
# concurrently, so this single override forced the whole Priority-10
# nightly run to wait -- matching the intermittent one-at-a-time weekly
# behaviour.  The ClientRunBeforeJob script makes this job self-contained,
# so it no longer needs to be ordered ahead of the others.
# Also rejoined the RunBefore/RunAfter command strings that had been
# wrapped across two lines (a quoted value must stay on one line).
Job {
Name = "mercuryd-backup"
JobDefs = "DailyJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryd.bst"
FileSet = "MercuryD DB2"
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t' '%l' 'mercuryd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t' '%l' 'mercuryd' 'mercuryd'\""
Priority = 10
}
# DB2 (mercuryd) monthly archive.
# FIX: was Priority = 3.  Bacula never runs jobs of different priorities
# concurrently, so this override forced the entire monthly archive run to
# serialize (first all priority-3, then 4, then 5) -- the reported
# "archives run one at a time" symptom.  Aligned with ArchiveJob's
# Priority = 5 so all archives can share the drive concurrently.
# Also rejoined the RunBefore/RunAfter command strings that had been
# wrapped across two lines (a quoted value must stay on one line).
Job {
Name = "mercuryd-archive"
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercuryd.bst"
FileSet = "MercuryD DB2"
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t' '%l' 'mercuryd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t' '%l' 'mercuryd' 'mercuryd'\""
Priority = 5
}
Job {
Name = mercuryd-restore-db2
Type = Restore
Client = Gimli-fd
File Set = "MercuryD DB2"
Messages = Standard
Storage = VXAPL
Pool = Default
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t'
'%l' 'mercuryd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t'
'%l' 'mercuryd' 'mercuryd'\""
Where = "/"
}
# DB2 (mercrd database on the mercuryd instance) nightly backup.
# FIX: was Priority = 9.  Mixed priorities serialize the nightly run
# (Bacula never runs differing priorities concurrently); aligned with
# DailyJob's Priority = 10.
# Also rejoined the RunBefore/RunAfter command strings that had been
# wrapped across two lines (a quoted value must stay on one line).
Job {
Name = "mercrd-backup"
JobDefs = "DailyJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercrd.bst"
FileSet = "MercRD DB2"
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t' '%l' 'mercrd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t' '%l' 'mercrd' 'mercuryd'\""
Priority = 10
}
# DB2 (mercrd database on the mercuryd instance) monthly archive.
# FIX: was Priority = 4.  Mixed priorities serialize the monthly archive
# run (Bacula never runs differing priorities concurrently); aligned with
# ArchiveJob's Priority = 5 so all archives can run together.
# Also rejoined the RunBefore/RunAfter command strings that had been
# wrapped across two lines (a quoted value must stay on one line).
Job {
Name = "mercrd-archive"
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
Write bootstrap = "/var/bacula/mercrd.bst"
FileSet = "MercRD DB2"
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t' '%l' 'mercrd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t' '%l' 'mercrd' 'mercuryd'\""
Priority = 5
}
Job {
Name = mercrd-restore-db2
Type = Restore
Client = Gimli-fd
File Set = "MercRD DB2"
Messages = Standard
Storage = VXAPL
Pool = Default
Client Run Before Job = "su - mercuryd -c \"/u02/mercuryd/backupdb.sh '%t'
'%l' 'mercrd' 'mercuryd'\""
Client Run After Job = "su - mercuryd -c \"/u02/mercuryd/prunelogs.sh '%t'
'%l' 'mercrd' 'mercuryd'\""
Where = "/"
}
Job {
Name = postgresql-backup
JobDefs = "DailyJob"
Client = "Gimli-fd"
File Set = "PostgreSQL Databases"
Client Run Before Job = "su - postgres -c \"/var/lib/pgsql/backupdbs.sh\""
Client Run After Job = "su - postgres -c \"/var/lib/pgsql/cleanup.sh\""
Write Bootstrap = "/var/bacula/postgresqldbs.bst"
}
Job {
Name = postgresql-archive
JobDefs = "ArchiveJob"
Client = "Gimli-fd"
File Set = "PostgreSQL Databases"
Client Run Before Job = "su - postgres -c \"/var/lib/pgsql/backupdbs.sh\""
Client Run After Job = "su - postgres -c \"/var/lib/pgsql/cleanup.sh\""
Write Bootstrap = "/var/bacula/postgresqldbs.bst"
}
Job {
Name = mysql-backup
Jobdefs = "DailyJob"
Client = "Gimli-fd"
File Set = "MySQL Databases"
Client Run Before job = "/root/bin/backupmysqldbs.sh"
Client Run After job = "/root/bin/cleanupmysql.sh"
Write Bootstrap = "/var/bacula/mysqldbs.bst"
}
Job {
Name = mysql-archive
Jobdefs = "ArchiveJob"
Client = "Gimli-fd"
File Set = "MySQL Databases"
Client Run Before job = "/root/bin/backupmysqldbs.sh"
Client Run After job = "/root/bin/cleanupmysql.sh"
Write Bootstrap = "/var/lib/bacula/mysqldbs.bst"
}
FileSet {
Name = "Gimli Full Set"
Ignore FileSet Changes = yes
Include {
Options { signature=MD5; onefs=yes }
File = "\\|bash -c \"df -PklF ext3 | tail +2 | awk '{print \$6}'\""
}
Exclude {
File = /tmp
File = /var/bacula/spool
File = /u00
File = /u01
File = /u02
File = /var/lib/pgsql
File = /var/lib/pgsql/data
File = /var/lib/mysql
File = /shares/SuSE
}
}
FileSet {
Name = "MercuryP DB2"
Include {
Options { signature=MD5; readfifo=Yes }
File = "/u00/mercuryp/dbpipe"
}
Include {
Options { signature=MD5; }
File = "/u00/mercuryp/NODE0000/SQL00001"
File = "/u00/mercuryp/NODE0000/sqldbdir"
File = "/u00/mercuryp/backupdb.sh"
File = "/u00/mercuryp/prunelogs.sh"
}
}
FileSet {
Name = "MercuryR DB2"
Include {
Options { signature=MD5; readfifo=Yes }
File = "/u01/mercuryr/dbpipe"
}
Include {
Options { signature=MD5; }
File = "/u01/mercuryr/NODE0000/SQL00001"
File = "/u01/mercuryr/NODE0000/sqldbdir"
File = "/u01/mercuryr/backupdb.sh"
File = "/u01/mercuryr/prunelogs.sh"
}
}
FileSet {
Name = "MercuryD DB2"
Ignore FileSet Changes = yes
Include {
Options { signature=MD5; readfifo=Yes }
File = "/u02/mercuryd/dbpipe"
}
Include {
Options { signature=MD5; }
File = "/u02/mercuryd/NODE0000/SQL00001"
File = "/u02/mercuryd/NODE0000/sqldbdir"
File = "/u02/mercuryd/backupdb.sh"
File = "/u02/mercuryd/prunelogs.sh"
}
}
FileSet {
Name = "MercRD DB2"
Ignore FileSet Changes = yes
Include {
Options { signature=MD5; readfifo=Yes }
File = "/u02/mercuryd/dbpipe"
}
Include {
Options { signature=MD5; }
File = "/u02/mercuryd/NODE0000/SQL00002"
File = "/u02/mercuryd/NODE0000/sqldbdir"
File = "/u02/mercuryd/backupdb.sh"
File = "/u02/mercuryd/prunelogs.sh"
}
}
FileSet {
Name = "PostgreSQL Databases"
Include {
Options { signature=MD5; }
File = "/tmp/pg_dump"
File = "/var/lib/pgsql"
}
Exclude {
File = /var/lib/pgsql/data/base
File = /var/lib/pgsql/data/global
}
}
FileSet {
Name = "MySQL Databases"
Include {
Options { signature=MD5; }
File = "/tmp/mysql_dump"
}
}
##
# Verify Jobs
##
Job {
Name = Gimli-verify
JobDefs = "VerifyJob"
Client = Gimli-fd
}
##
# Check Jobs
##
Job {
Name = Gimli-check
JobDefs = "CheckJob"
Client = Gimli-fd
File Set = "Gimli Full Set"
Verify Job = "Gimli-archive"
}
Job {
Name = Gimli-check-w
JobDefs = "CheckJob"
Client = Gimli-fd
File Set = "Gimli Full Set"
Verify Job = "Gimli-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = Meriadoc-fd
Address = Meriadoc
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "Meriadoc-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/meriadoc.bst"
Client = "Meriadoc-fd"
FileSet = "Full Set"
}
Job {
Name = "Meriadoc-archive"
JobDefs = "ArchiveJob"
Client = "Meriadoc-fd"
FileSet = "Full Set"
Write bootstrap = "/var/bacula/Meriadoc.bst"
}
##
# Verify Jobs
##
Job {
Name = Meriadoc-verify
JobDefs = "VerifyJob"
Client = Meriadoc-fd
}
##
# Check Jobs
##
Job {
Name = Meriadoc-check
JobDefs = "CheckJob"
Client = Meriadoc-fd
File Set = "Full Set"
Verify Job = "Meriadoc-archive"
}
Job {
Name = Meriadoc-check-w
JobDefs = "CheckJob"
Client = Meriadoc-fd
File Set = "Full Set"
Verify Job = "Meriadoc-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = Peregrin-fd
Address = Peregrin
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "Peregrin-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/peregrin.bst"
Client = "Peregrin-fd"
FileSet = "Full Set"
}
Job {
Name = "Peregrin-archive"
JobDefs = "ArchiveJob"
Client = "Peregrin-fd"
FileSet = "Full Set"
Write bootstrap = "/var/bacula/Peregrin.bst"
}
##
# Verify Jobs
##
Job {
Name = Peregrin-verify
JobDefs = "VerifyJob"
Client = Peregrin-fd
}
##
# Check Jobs
##
Job {
Name = Peregrin-check
JobDefs = "CheckJob"
Client = Peregrin-fd
File Set = "Full Set"
Verify Job = "Peregrin-archive"
}
Job {
Name = Peregrin-check-w
JobDefs = "CheckJob"
Client = Peregrin-fd
File Set = "Full Set"
Verify Job = "Peregrin-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = Samwise-fd
Address = Samwise
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "Samwise-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/samwise.bst"
Client = "Samwise-fd"
FileSet = "Full Set"
}
Job {
Name = "Samwise-archive"
JobDefs = "ArchiveJob"
Client = "Samwise-fd"
FileSet = "Full Set"
Write bootstrap = "/var/bacula/Samwise.bst"
}
##
# Verify Jobs
##
Job {
Name = Samwise-verify
JobDefs = "VerifyJob"
Client = Samwise-fd
}
##
# Check Jobs
##
Job {
Name = Samwise-check
JobDefs = "CheckJob"
Client = Samwise-fd
File Set = "Full Set"
Verify Job = "Samwise-archive"
}
Job {
Name = Samwise-check-w
JobDefs = "CheckJob"
Client = Samwise-fd
File Set = "Full Set"
Verify Job = "Samwise-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = telstar-fd
Address = telstar
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "telstar-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/telstar.bst"
Client = "telstar-fd"
FileSet = "Full Set"
}
Job {
Name = "telstar-archive"
JobDefs = "ArchiveJob"
Client = "telstar-fd"
FileSet = "Full Set"
Write bootstrap = "/var/bacula/telstar.bst"
}
##
# Verify Jobs
##
Job {
Name = telstar-verify
JobDefs = "VerifyJob"
Client = telstar-fd
}
##
# Check Jobs
##
Job {
Name = telstar-check
JobDefs = "CheckJob"
Client = telstar-fd
File Set = "Full Set"
Verify Job = "telstar-archive"
}
Job {
Name = telstar-check-w
JobDefs = "CheckJob"
Client = telstar-fd
File Set = "Full Set"
Verify Job = "telstar-backup"
Schedule = "Weekly Check"
}
# Client (File Services) to backup
Client {
Name = USANT-fd
Address = 10.150.250.21
FDPort = 9102
Catalog = MainCatalog
Password = "XXX"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
Maximum Concurrent Jobs = 5
}
Job {
Name = "USANT-backup"
JobDefs = "DailyJob"
Write bootstrap = "/var/bacula/usant.bst"
Client = "USANT-fd"
FileSet = "USANT Full Set"
}
Job {
Name = "USANT-archive"
JobDefs = "ArchiveJob"
Write bootstrap = "/var/bacula/usant.bst"
Client = "USANT-fd"
FileSet = "USANT Full Set"
}
FileSet {
Name = "USANT Full Set"
Include {
Options {
signature = MD5
}
File = "C:/"
File = "D:/"
}
}
Job {
Name = "USANT-restore"
Type = Restore
Client = USANT-fd
File Set = "USANT Full Set"
Messages = Standard
Storage = VXAPL
Pool = Default
Where = "D:/Restores"
}
##
# Check Jobs
##
Job {
Name = USANT-check
JobDefs = "CheckJob"
Client = Gimli-fd
File Set = "USANT Full Set"
Verify Job = "USANT-archive"
}
Job {
Name = USANT-check-w
JobDefs = "CheckJob"
Client = Gimli-fd
File Set = "USANT Full Set"
Verify Job = "USANT-backup"
Schedule = "Weekly Check"
}