It looks like there is also only one Schedule, named "MonthlyCycle", and it specifies two Run directives that explicitly set the storage device for each level. So the jobs all start together and attempt to use the same storage device, but with different pools and different volume labels. On a tape device it would be impossible to write to multiple volumes concurrently, since you cannot mount multiple tape volumes in the same drive at the same time. That is essentially what you are attempting, except on a disk device. Can Bacula mount multiple volumes on a single device, random access or not? I don't know. I suspect, though, that Bacula treats all devices as streams and can only mount one volume at a time per device.

The simple solution is to configure a separate device for each pool, rather than one device per backup level.
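As a rough illustration of that layout (the File_Weekly_california / Dev_Weekly_california names and the /backup/weekly/california path are made up here; repeat the pattern for each client and level, and point each Job or Run directive at its client's own Storage resource):

bacula-dir.conf (sketch):
-----------
Storage {
  Name = File_Weekly_california       # one Storage resource per pool
  Address = smith.domain.com
  SDPort = 9103
  Device = Dev_Weekly_california      # its own Device in the SD
  Media Type = File
  Maximum Concurrent Jobs = 10
}
-------------------

bacula-sd.conf (sketch):
-----------
Device {
  Name = Dev_Weekly_california        # dedicated device for this pool
  Media Type = File
  Archive Device = /backup/weekly/california
  LabelMedia = yes                    # lets Bacula label unlabelled media
  Random Access = yes
  AutomaticMount = yes                # when device opened, read it
  RemovableMedia = no
  AlwaysOpen = no
}
-------------------

With each pool writing to its own device, concurrent jobs no longer compete to mount different volumes in the same device.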

--- Josh Fisher


Alexander Kolesnik wrote:
Arno,

> have you tried using only one storage device?

There is one storage device for each backup level.

> Apart from that, it might be helpful if you described your problem again
> - not everybody will want to look through the older mail just to find
> out what you refer to...

Sure. The problem is that concurrent backups don't start. The Bacula
version is 1.36.2 with pool.patch.
Here are the configuration files (I have skipped sections such as restore
jobs, filesets, and catalog/messages):

bacula-dir.conf:
-----------
Director {
  Name = smith-dir
  DIRport = 9101
  QueryFile = "/etc/bacula/query.sql"
  WorkingDirectory = "/var/bacula"
  PidDirectory = "/var/run"
  SubSysDirectory = "/var/run/subsys"
  Maximum Concurrent Jobs = 10
  Messages = Standard
}

Schedule {
  Name = "MonthlyCycle"
  Run = Level=Full Storage=File_Weekly tue at 01:35
  Run = Level=Incremental Storage=File_Daily thu-sat at 1:35
}

JobDefs {
  Name = "Backup_defaults"
  Type = Backup
  Schedule = "MonthlyCycle"
  Storage = NullFile
  Messages = Standard
  Pool = NullPool
}

Storage {
  Name = File_Weekly
  Address = smith.domain.com
  SDPort = 9103
  Device = Dev_Weekly
  Media Type = File
  Maximum Concurrent Jobs = 10
}

Storage {
  Name = File_Daily
  Address = smith.domain.com
  SDPort = 9103
  Device = Dev_Daily
  Media Type = File
  Maximum Concurrent Jobs = 10
}

Storage {
  Name = NullFile
  Address = smith.domain.com
  SDPort = 9103
  Device = Dev_Catalog
  Media Type = File
  Maximum Concurrent Jobs = 10
}

Pool {
  Name = NullPool
  Pool Type = Backup
}
-------------------

bacula-sd.conf:
-----------
Storage {                             # definition of myself
  Name = smith-sd
  SDPort = 9103                  # SD's port (the Director connects here)
  WorkingDirectory = "/var/bacula"
  Pid Directory = "/var/run"
  Subsys Directory = "/var/run/subsys"
  Maximum Concurrent Jobs = 10
}

Director {
  Name = smith-dir
}

Device {
  Name = Dev_Weekly
  Media Type = File
  Archive Device = /backup/weekly
  LabelMedia = yes;                   # lets Bacula label unlabelled media
  Random Access = Yes;
  AutomaticMount = yes;               # when device opened, read it
  RemovableMedia = no;
  AlwaysOpen = no;
}

# Daily storage device
Device {
  Name = Dev_Daily
  Media Type = File
  Archive Device = /backup/daily
  LabelMedia = yes;                   # lets Bacula label unlabelled media
  Random Access = Yes;
  AutomaticMount = yes;               # when device opened, read it
  RemovableMedia = no;
  AlwaysOpen = no;
}
-------------------

california.conf:
-----------
Job {
  Name = "Backup_california"
  JobDefs = Backup_defaults
  Client=california-fd
  FileSet="Files_california"
  Pool = Pool_Monthly_california
  Full Backup Pool = Pool_Weekly_california
  Incremental Backup Pool = Pool_Daily_california
  Write Bootstrap = "/var/bacula/california.bsr"
  Priority = 20
}

Client {
  Name = california-fd
  Address = california.domain.com
  FDPort = 9102
  Catalog = MyCatalog
  File Retention = 2w
  Job Retention = 2w
  AutoPrune = yes
}

Pool {
  Name = Pool_Weekly_california
  Volume Use Duration = 6d
  Pool Type = Backup
  Recycle Oldest Volume = yes
  Maximum Volumes = 1
  Recycle = yes
  AutoPrune = yes
  Volume Retention = 6d
  Accept Any Volume = yes
  LabelFormat = week_california_
}

Pool {
  Name = Pool_Daily_california
  Volume Use Duration = 18h
  Pool Type = Backup
  Maximum Volumes = 6
  Recycle = yes
  AutoPrune = yes
  Volume Retention = 6d
  Accept Any Volume = yes
  LabelFormat = day_california_
}
-------------------

clive.conf:
-----------
Job {
  Name = "Backup_clive"
  JobDefs = Backup_defaults
  Client=clive-fd
  FileSet="Files_clive"
  Pool = Pool_Monthly_clive
  Full Backup Pool = Pool_Weekly_clive
  Incremental Backup Pool = Pool_Daily_clive
  Write Bootstrap = "/var/bacula/clive.bsr"
  Priority = 20
}

Client {
  Name = clive-fd
  Address = clive
  FDPort = 9102
  Catalog = MyCatalog
  File Retention = 2w
  Job Retention = 2w
  AutoPrune = yes
}

Pool {
  Name = Pool_Weekly_clive
  Volume Use Duration = 6d
  Pool Type = Backup
  Recycle Oldest Volume = yes
  Maximum Volumes = 1                 # (was 3)
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Volume Retention = 6d               # 1 week (was 4)
  Accept Any Volume = yes             # write on any volume in the pool
  LabelFormat = week_clive_
}

Pool {
  Name = Pool_Daily_clive
  Volume Use Duration = 18h
  Pool Type = Backup
  Maximum Volumes = 6
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Volume Retention = 6d               # 6 days
  Accept Any Volume = yes             # write on any volume in the pool
  LabelFormat = day_clive_
}
-------------------

eye.conf:
-----------
Job {
  Name = "Backup_eye"
  JobDefs = Backup_defaults
  Client=eye-fd
  FileSet="Files_eye"
  Pool = Pool_Monthly_eye
  Full Backup Pool = Pool_Weekly_eye
  Incremental Backup Pool = Pool_Daily_eye
  Write Bootstrap = "/var/bacula/eye.bsr"
  Priority = 20
}

Client {
  Name = eye-fd
  Address = eye.domain.com
  FDPort = 9102
  Catalog = MyCatalog
  File Retention = 2w                 # 2 weeks
  Job Retention = 2w                  # 2 weeks
  AutoPrune = yes                     # Prune expired Jobs/Files
}

Pool {
  Name = Pool_Weekly_eye
  Volume Use Duration = 6d
  Pool Type = Backup
  Recycle Oldest Volume = yes
  Maximum Volumes = 1                 # (was 3)
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Volume Retention = 6d               # 1 week (was 4)
  Accept Any Volume = yes             # write on any volume in the pool
  LabelFormat = week_eye_
}

Pool {
  Name = Pool_Daily_eye
  Volume Use Duration = 18h
  Pool Type = Backup
  Maximum Volumes = 6
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Volume Retention = 6d               # 6 days
  Accept Any Volume = yes             # write on any volume in the pool
  LabelFormat = day_eye_
}
-------------------

Here is what stat dir says:
Running Jobs:
 JobId Level   Name                       Status
======================================================================
    49 Increme  Backup_eye.2005-04-21_16.33.04 is waiting on max Storage jobs
    48 Increme  Backup_clive.2005-04-21_16.33.03 is running

(california's been backed up already)

Here is 'show jobs' for clive:
Job: name=Backup_clive JobType=66 level=0 Priority=20 MaxJobs=1
     Resched=0 Times=0 Interval=1,800 Spool=0
  --> Client: name=clive-fd address=clive FDport=9102 MaxJobs=1
      JobRetention=14 days  FileRetention=14 days  AutoPrune=1
  --> Catalog: name=MyCatalog address=wit-m.omnisp.ru DBport=0 db_name=bacula
      db_user=bacula
  --> FileSet: name=Files_clive
      O Z6M
      N
      I </etc/bacula/filesets/files.clive.inc
      N
      E </etc/bacula/filesets/files.exc
      E </etc/bacula/filesets/files.clive.exc
      N
  --> Schedule: name=MonthlyCycle
  --> Run Level=Incremental
      hour=16
      mday=0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
      month=0 1 2 3 4 5 6 7 8 9 10 11
      wday=4 5 6
      wom=0 1 2 3 4
      woy=0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53
      mins=33
     --> Storage: name=File_Daily address=smith.domain.com SDport=9103 MaxJobs=10
      DeviceName=Dev_Daily MediaType=File
  --> Run Level=Full
      hour=21
      mday=0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
      month=0 1 2 3 4 5 6 7 8 9 10 11
      wday=2
      wom=0 1 2 3 4
      woy=0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53
      mins=30
     --> Storage: name=File_Weekly address=smith.domain.com SDport=9103 MaxJobs=10
      DeviceName=Dev_Weekly MediaType=File
  --> WriteBootstrap=/var/bacula/clive.bsr
  --> --> Pool: name=Pool_Weekly_clive PoolType=Backup
      use_cat=1 use_once=0 acpt_any=1 cat_files=1
      max_vols=1 auto_prune=1 VolRetention=6 days
      VolUse=6 days  recycle=1 LabelFormat=week_clive_
      CleaningPrefix=*None*
      recyleOldest=0 MaxVolJobs=0 MaxVolFiles=0
  --> Pool: name=Pool_Daily_clive PoolType=Backup
      use_cat=1 use_once=0 acpt_any=1 cat_files=1
      max_vols=6 auto_prune=1 VolRetention=6 days
      VolUse=18 hours  recycle=1 LabelFormat=day_clive_
      CleaningPrefix=*None*
      recyleOldest=0 MaxVolJobs=0 MaxVolFiles=0
