Compare commits

...

5 Commits

Author SHA1 Message Date
Joe S 77140c5783 Add YouTube backup job to bacula 2020-11-16 20:26:06 -05:00
Joe S a1bd67de16 Update new users jason and dawn 2020-11-10 02:40:38 -05:00
Joe S 745d907894 Use ls to include directory contents in config
Thanks optizer from IRC for this one.
2020-11-10 02:38:59 -05:00
Joe S 0c4ca88c34 Clean up unused/cache/written by bacula 2020-11-06 22:54:48 -05:00
Joe S 3d1f5e7554 Change into bacula-dir directory
This is to avoid using weird git features when enabling git on the host.
2020-11-06 22:49:22 -05:00
81 changed files with 76 additions and 2726 deletions
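The "Use ls to include directory contents in config" commit above relies on Bacula's ability to run a program and use its output as a FileSet include list. A minimal sketch of that technique follows; it is hedged: the FileSet name is taken from the job logs below, but the path and Options are assumptions, not from this repository.

# Sketch only -- path and Options are illustrative. The leading "|" makes
# Bacula run the command and treat each line of output as a path to include.
FileSet {
  Name = "Mark Backup"
  Include {
    Options {
      signature = MD5
    }
    File = "|sh -c 'ls -d /tank/mark/*'"
  }
}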

.gitignore vendored Normal file (+4)
View File

@@ -0,0 +1,4 @@
scripts/webhooks/*.md
*.db
*.pyc
*.gdb

View File

@@ -1,33 +0,0 @@
print fail_time
print my_name
print exename
print exepath
print assert_msg
print db_engine_name
print version
print host_os
print distname
print distver
print host_name
print dist_name
show env TestName
bt
thread apply all bt
f 0
info locals
f 1
info locals
f 2
info locals
f 3
info locals
f 4
info locals
f 5
info locals
f 6
info locals
f 7
info locals
detach
quit
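The command file above looks like the GDB script Bacula's btraceback helper feeds to gdb when a daemon crashes. A hedged example of running such a command file by hand; the script path, binary path, and daemon name are assumptions:

# Sketch only: attach gdb to a running director and execute the command
# file above in batch mode (all paths are assumptions).
gdb -quiet -batch -x /etc/bacula/scripts/btraceback.gdb \
    /opt/bacula/bin/bacula-dir $(pidof bacula-dir)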

View File

@@ -1,40 +0,0 @@
Connecting to Director 10.85.3.30:9101
1000 OK: 103 bacula-dir Version: 9.4.2 (04 February 2019)
Enter a period to cancel a command.
messages
08-Jun 03:48 temp-iron-sd JobId 97: Elapsed time=16:25:53, Transfer rate=24.92 M Bytes/second
08-Jun 03:48 temp-iron-sd JobId 97: Sending spooled attrs to the Director. Despooling 13,762,121 bytes ...
08-Jun 03:48 bacula-dir JobId 97: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 97
Job: Backup-Mark.2020-06-07_11.15.04_52
Backup Level: Full (upgraded from Incremental)
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Mark Backup" 2020-06-07 02:15:00
Pool: "Full-Pool" (From Job FullPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 07-Jun-2020 11:15:04
Start time: 07-Jun-2020 11:15:06
End time: 08-Jun-2020 03:48:44
Elapsed time: 16 hours 33 mins 38 secs
Priority: 10
FD Files Written: 48,482
SD Files Written: 48,482
FD Bytes Written: 1,474,531,659,803 (1.474 TB)
SD Bytes Written: 1,474,540,412,469 (1.474 TB)
Rate: 24733.0 KB/s
Software Compression: 2.0% 1.0:1
Comm Line Compression: 2.1% 1.0:1
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): A00572|A02057|A01099|A02218
Volume Session Id: 25
Volume Session Time: 1591127400
Last Volume Bytes: 357,543,051,264 (357.5 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK

View File

@@ -1,48 +0,0 @@
01-Nov 07:52 bacula-dir JobId 535: Start Backup JobId 535, Job=Backup-Aidan.2020-11-01_02.15.01_32
01-Nov 07:52 bacula-dir JobId 535: Using Device "Drive-0-LTO-3" to write.
01-Nov 07:52 bacula-iron-sd JobId 535: Spooling data ...
01-Nov 07:52 bacula-iron-sd JobId 535: Committing spooled data to Volume "PE5125". Despooling 430 bytes ...
01-Nov 07:52 bacula-iron-sd JobId 535: Despooling elapsed time = 00:00:01, Transfer rate = 430 Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 535: Elapsed time=00:00:01, Transfer rate=0 Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 535: Sending spooled attrs to the Director. Despooling 0 bytes ...
01-Nov 07:52 bacula-dir JobId 535: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 535
Job: Backup-Aidan.2020-11-01_02.15.01_32
Backup Level: Differential, since=2020-10-04 21:37:11
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Aidan Backup" 2020-09-23 23:43:52
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 07:52:46
End time: 01-Nov-2020 07:52:48
Elapsed time: 2 secs
Priority: 10
FD Files Written: 0
SD Files Written: 0
FD Bytes Written: 0 (0 B)
SD Bytes Written: 0 (0 B)
Rate: 0.0 KB/s
Software Compression: None
Comm Line Compression: None
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s):
Volume Session Id: 89
Volume Session Time: 1601284581
Last Volume Bytes: 194,796,887,040 (194.7 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 07:52 bacula-dir JobId 535: Begin pruning Jobs older than 30 years .
01-Nov 07:52 bacula-dir JobId 535: No Jobs found to prune.
01-Nov 07:52 bacula-dir JobId 535: Begin pruning Files.
01-Nov 07:52 bacula-dir JobId 535: No Files found to prune.
01-Nov 07:52 bacula-dir JobId 535: End auto prune.

View File

@@ -1,64 +0,0 @@
01-Nov 05:26 bacula-dir JobId 531: Start Backup JobId 531, Job=Backup-Chris.2020-11-01_02.15.01_28
01-Nov 05:26 bacula-dir JobId 531: Using Device "Drive-0-LTO-3" to write.
01-Nov 05:26 bacula-iron-sd JobId 531: Spooling data ...
01-Nov 06:06 bacula-iron-sd JobId 531: User specified Job spool size reached: JobSpoolSize=10,737,438,025 MaxJobSpoolSize=10,737,418,240
01-Nov 06:06 bacula-iron-sd JobId 531: Writing spooled data to Volume. Despooling 10,737,438,025 bytes ...
01-Nov 06:14 bacula-iron-sd JobId 531: Despooling elapsed time = 00:08:12, Transfer rate = 21.82 M Bytes/second
01-Nov 06:14 bacula-iron-sd JobId 531: Spooling data again ...
01-Nov 06:22 bacula-iron-sd JobId 531: User specified Job spool size reached: JobSpoolSize=10,737,438,668 MaxJobSpoolSize=10,737,418,240
01-Nov 06:22 bacula-iron-sd JobId 531: Writing spooled data to Volume. Despooling 10,737,438,668 bytes ...
01-Nov 06:29 bacula-iron-sd JobId 531: Despooling elapsed time = 00:06:40, Transfer rate = 26.84 M Bytes/second
01-Nov 06:29 bacula-iron-sd JobId 531: Spooling data again ...
01-Nov 06:37 bacula-iron-sd JobId 531: User specified Job spool size reached: JobSpoolSize=10,737,438,647 MaxJobSpoolSize=10,737,418,240
01-Nov 06:37 bacula-iron-sd JobId 531: Writing spooled data to Volume. Despooling 10,737,438,647 bytes ...
01-Nov 06:42 bacula-iron-sd JobId 531: Despooling elapsed time = 00:05:27, Transfer rate = 32.83 M Bytes/second
01-Nov 06:42 bacula-iron-sd JobId 531: Spooling data again ...
01-Nov 06:49 bacula-iron-sd JobId 531: User specified Job spool size reached: JobSpoolSize=10,737,438,695 MaxJobSpoolSize=10,737,418,240
01-Nov 06:49 bacula-iron-sd JobId 531: Writing spooled data to Volume. Despooling 10,737,438,695 bytes ...
01-Nov 06:54 bacula-iron-sd JobId 531: Despooling elapsed time = 00:05:09, Transfer rate = 34.74 M Bytes/second
01-Nov 06:55 bacula-iron-sd JobId 531: Spooling data again ...
01-Nov 07:08 bacula-iron-sd JobId 531: Committing spooled data to Volume "PE5125". Despooling 5,914,233,985 bytes ...
01-Nov 07:11 bacula-iron-sd JobId 531: Despooling elapsed time = 00:02:40, Transfer rate = 36.96 M Bytes/second
01-Nov 07:11 bacula-iron-sd JobId 531: Elapsed time=01:44:44, Transfer rate=7.766 M Bytes/second
01-Nov 07:11 bacula-iron-sd JobId 531: Sending spooled attrs to the Director. Despooling 83,597,578 bytes ...
01-Nov 07:11 bacula-dir JobId 531: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 531
Job: Backup-Chris.2020-11-01_02.15.01_28
Backup Level: Differential, since=2020-10-04 13:06:17
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Chris Backup" 2020-06-23 16:46:12
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 05:26:44
End time: 01-Nov-2020 07:11:51
Elapsed time: 1 hour 45 mins 7 secs
Priority: 10
FD Files Written: 264,661
SD Files Written: 264,661
FD Bytes Written: 48,751,972,776 (48.75 GB)
SD Bytes Written: 48,807,529,474 (48.80 GB)
Rate: 7729.8 KB/s
Software Compression: 21.3% 1.3:1
Comm Line Compression: 0.9% 1.0:1
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): PE5125
Volume Session Id: 85
Volume Session Time: 1601284581
Last Volume Bytes: 162,657,202,176 (162.6 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 07:11 bacula-dir JobId 531: Begin pruning Jobs older than 30 years .
01-Nov 07:11 bacula-dir JobId 531: No Jobs found to prune.
01-Nov 07:11 bacula-dir JobId 531: Begin pruning Files.
01-Nov 07:11 bacula-dir JobId 531: No Files found to prune.
01-Nov 07:11 bacula-dir JobId 531: End auto prune.
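The repeated "Job spool size reached ... Despooling" lines in the log above come from data spooling: the storage daemon stages job data in its spool directory and despools it to tape in fixed-size chunks. A minimal Director-side sketch of the directives that drive this behaviour; the JobDefs name is hypothetical and the size merely matches the roughly 10 GiB limit visible in the log:

# Sketch only -- not taken from this repository.
JobDefs {
  Name = "SpoolToTape"
  Spool Data = yes                 # stage data on the SD's spool disk first
  Spool Size = 10737418240         # despool to the drive roughly every 10 GiB
}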

View File

@@ -1,56 +0,0 @@
01-Nov 07:11 bacula-dir JobId 533: Start Backup JobId 533, Job=Backup-Christina.2020-11-01_02.15.01_30
01-Nov 07:11 bacula-dir JobId 533: Using Device "Drive-0-LTO-3" to write.
01-Nov 07:11 bacula-iron-sd JobId 533: Spooling data ...
01-Nov 07:23 bacula-iron-sd JobId 533: User specified Job spool size reached: JobSpoolSize=10,737,438,664 MaxJobSpoolSize=10,737,418,240
01-Nov 07:23 bacula-iron-sd JobId 533: Writing spooled data to Volume. Despooling 10,737,438,664 bytes ...
01-Nov 07:27 bacula-iron-sd JobId 533: Despooling elapsed time = 00:04:22, Transfer rate = 40.98 M Bytes/second
01-Nov 07:27 bacula-iron-sd JobId 533: Spooling data again ...
01-Nov 07:34 bacula-iron-sd JobId 533: User specified Job spool size reached: JobSpoolSize=10,737,438,689 MaxJobSpoolSize=10,737,418,240
01-Nov 07:34 bacula-iron-sd JobId 533: Writing spooled data to Volume. Despooling 10,737,438,689 bytes ...
01-Nov 07:39 bacula-iron-sd JobId 533: Despooling elapsed time = 00:05:00, Transfer rate = 35.79 M Bytes/second
01-Nov 07:39 bacula-iron-sd JobId 533: Spooling data again ...
01-Nov 07:48 bacula-iron-sd JobId 533: Committing spooled data to Volume "PE5125". Despooling 10,670,666,561 bytes ...
01-Nov 07:52 bacula-iron-sd JobId 533: Despooling elapsed time = 00:04:28, Transfer rate = 39.81 M Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 533: Elapsed time=00:40:47, Transfer rate=13.12 M Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 533: Sending spooled attrs to the Director. Despooling 1,447 bytes ...
01-Nov 07:52 bacula-dir JobId 533: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 533
Job: Backup-Christina.2020-11-01_02.15.01_30
Backup Level: Differential, since=2020-10-04 13:09:46
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Christina Backup" 2020-08-25 02:15:00
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 07:11:55
End time: 01-Nov-2020 07:52:42
Elapsed time: 40 mins 47 secs
Priority: 10
FD Files Written: 7
SD Files Written: 7
FD Bytes Written: 32,115,622,157 (32.11 GB)
SD Bytes Written: 32,115,623,044 (32.11 GB)
Rate: 13124.5 KB/s
Software Compression: None
Comm Line Compression: None
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): PE5125
Volume Session Id: 87
Volume Session Time: 1601284581
Last Volume Bytes: 194,796,758,016 (194.7 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 07:52 bacula-dir JobId 533: Begin pruning Jobs older than 30 years .
01-Nov 07:52 bacula-dir JobId 533: No Jobs found to prune.
01-Nov 07:52 bacula-dir JobId 533: Begin pruning Files.
01-Nov 07:52 bacula-dir JobId 533: No Files found to prune.
01-Nov 07:52 bacula-dir JobId 533: End auto prune.

View File

@@ -1,48 +0,0 @@
01-Nov 07:52 bacula-dir JobId 536: Start Backup JobId 536, Job=Backup-FireBN.2020-11-01_02.15.01_33
01-Nov 07:52 bacula-dir JobId 536: Using Device "Drive-0-LTO-3" to write.
01-Nov 07:52 bacula-iron-sd JobId 536: Spooling data ...
01-Nov 07:52 bacula-iron-sd JobId 536: Committing spooled data to Volume "PE5125". Despooling 44,033,605 bytes ...
01-Nov 07:52 bacula-iron-sd JobId 536: Despooling elapsed time = 00:00:01, Transfer rate = 44.03 M Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 536: Elapsed time=00:00:03, Transfer rate=14.66 M Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 536: Sending spooled attrs to the Director. Despooling 4,237 bytes ...
01-Nov 07:52 bacula-dir JobId 536: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 536
Job: Backup-FireBN.2020-11-01_02.15.01_33
Backup Level: Differential, since=2020-10-04 21:37:15
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "FireBN Backup" 2020-10-02 00:32:42
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 07:52:49
End time: 01-Nov-2020 07:52:54
Elapsed time: 5 secs
Priority: 10
FD Files Written: 20
SD Files Written: 20
FD Bytes Written: 43,989,160 (43.98 MB)
SD Bytes Written: 43,991,829 (43.99 MB)
Rate: 8797.8 KB/s
Software Compression: 1.9% 1.0:1
Comm Line Compression: None
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): PE5125
Volume Session Id: 90
Volume Session Time: 1601284581
Last Volume Bytes: 194,840,948,736 (194.8 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 07:52 bacula-dir JobId 536: Begin pruning Jobs older than 30 years .
01-Nov 07:52 bacula-dir JobId 536: No Jobs found to prune.
01-Nov 07:52 bacula-dir JobId 536: Begin pruning Files.
01-Nov 07:52 bacula-dir JobId 536: No Files found to prune.
01-Nov 07:52 bacula-dir JobId 536: End auto prune.

View File

@@ -1,48 +0,0 @@
01-Nov 07:52 bacula-dir JobId 534: Start Backup JobId 534, Job=Backup-Julio.2020-11-01_02.15.01_31
01-Nov 07:52 bacula-dir JobId 534: Using Device "Drive-0-LTO-3" to write.
01-Nov 07:52 bacula-iron-sd JobId 534: Spooling data ...
01-Nov 07:52 bacula-iron-sd JobId 534: Committing spooled data to Volume "PE5125". Despooling 14,257 bytes ...
01-Nov 07:52 bacula-iron-sd JobId 534: Despooling elapsed time = 00:00:01, Transfer rate = 14.25 K Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 534: Elapsed time=00:00:01, Transfer rate=13.74 K Bytes/second
01-Nov 07:52 bacula-iron-sd JobId 534: Sending spooled attrs to the Director. Despooling 772 bytes ...
01-Nov 07:52 bacula-dir JobId 534: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 534
Job: Backup-Julio.2020-11-01_02.15.01_31
Backup Level: Differential, since=2020-10-04 21:19:13
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Julio Backup" 2020-09-01 02:15:00
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 07:52:43
End time: 01-Nov-2020 07:52:46
Elapsed time: 3 secs
Priority: 10
FD Files Written: 4
SD Files Written: 4
FD Bytes Written: 13,251 (13.25 KB)
SD Bytes Written: 13,743 (13.74 KB)
Rate: 4.4 KB/s
Software Compression: 81.5% 5.4:1
Comm Line Compression: 1.4% 1.0:1
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): PE5125
Volume Session Id: 88
Volume Session Time: 1601284581
Last Volume Bytes: 194,796,822,528 (194.7 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 07:52 bacula-dir JobId 534: Begin pruning Jobs older than 30 years .
01-Nov 07:52 bacula-dir JobId 534: No Jobs found to prune.
01-Nov 07:52 bacula-dir JobId 534: Begin pruning Files.
01-Nov 07:52 bacula-dir JobId 534: No Files found to prune.
01-Nov 07:52 bacula-dir JobId 534: End auto prune.

View File

@@ -1,52 +0,0 @@
01-Nov 13:20 bacula-dir JobId 538: Start Backup JobId 538, Job=Backup-Robotics.2020-11-01_02.15.01_35
01-Nov 13:20 bacula-dir JobId 538: Using Device "Drive-0-LTO-3" to write.
01-Nov 13:20 bacula-iron-sd JobId 538: Spooling data ...
01-Nov 13:29 bacula-iron-sd JobId 538: User specified Job spool size reached: JobSpoolSize=16,106,158,076 MaxJobSpoolSize=16,106,127,360
01-Nov 13:29 bacula-iron-sd JobId 538: Writing spooled data to Volume. Despooling 16,106,158,076 bytes ...
01-Nov 13:35 bacula-iron-sd JobId 538: Despooling elapsed time = 00:05:38, Transfer rate = 47.65 M Bytes/second
01-Nov 13:35 bacula-iron-sd JobId 538: Spooling data again ...
01-Nov 13:41 bacula-iron-sd JobId 538: Committing spooled data to Volume "A02152". Despooling 14,365,535,210 bytes ...
01-Nov 13:46 bacula-iron-sd JobId 538: Despooling elapsed time = 00:04:41, Transfer rate = 51.12 M Bytes/second
01-Nov 13:46 bacula-iron-sd JobId 538: Elapsed time=00:25:59, Transfer rate=19.52 M Bytes/second
01-Nov 13:46 bacula-iron-sd JobId 538: Sending spooled attrs to the Director. Despooling 1,689,693 bytes ...
01-Nov 13:46 bacula-dir JobId 538: Bacula bacula-dir 9.4.2 (04Feb19):
Build OS: x86_64-pc-linux-gnu ubuntu 20.04
JobId: 538
Job: Backup-Robotics.2020-11-01_02.15.01_35
Backup Level: Differential, since=2020-10-05 03:54:39
Client: "Zinc-Client" 9.6.3 (09Mar20) amd64-portbld-freebsd11.3,freebsd,11.3-RELEASE-p8
FileSet: "Google-Drive Robotics" 2020-08-05 15:36:42
Pool: "Diff-Pool" (From Job DiffPool override)
Catalog: "MyCatalog" (From Client resource)
Storage: "Iron-Autochanger" (From Pool resource)
Scheduled time: 01-Nov-2020 02:15:01
Start time: 01-Nov-2020 13:20:43
End time: 01-Nov-2020 13:46:44
Elapsed time: 26 mins 1 sec
Priority: 10
FD Files Written: 5,930
SD Files Written: 5,930
FD Bytes Written: 30,442,166,309 (30.44 GB)
SD Bytes Written: 30,443,217,714 (30.44 GB)
Rate: 19501.7 KB/s
Software Compression: None
Comm Line Compression: None
Snapshot/VSS: no
Encryption: no
Accurate: no
Volume name(s): A02152
Volume Session Id: 92
Volume Session Time: 1601284581
Last Volume Bytes: 161,025,371,136 (161.0 GB)
Non-fatal FD errors: 0
SD Errors: 0
FD termination status: OK
SD termination status: OK
Termination: Backup OK
01-Nov 13:46 bacula-dir JobId 538: Begin pruning Jobs older than 30 years .
01-Nov 13:46 bacula-dir JobId 538: No Jobs found to prune.
01-Nov 13:46 bacula-dir JobId 538: Begin pruning Files.
01-Nov 13:46 bacula-dir JobId 538: No Files found to prune.
01-Nov 13:46 bacula-dir JobId 538: End auto prune.

View File

@@ -1,78 +0,0 @@
Storage { # definition of myself
Name = bacula-iron-sd
SDPort = 9103 # Director's port
WorkingDirectory = "/var/lib/bacula"
Pid Directory = "/run/bacula"
Plugin Directory = "/usr/lib/bacula"
Maximum Concurrent Jobs = 20
SDAddress = 10.85.3.34
}
Director { # Director who is allowed to contact us
Name = bacula-dir
Password = "iamnotacrook"
}
Autochanger {
Name = "Iron-Autochanger"
Device = Drive-1-LTO-3
Changer Device = /dev/sg3
Changer Command = "/etc/bacula/scripts/mtx-changer %c %o %S %a %d"
}
Device {
Name = Drive-1-LTO-3
Drive Index = 1
Media Type = LTO-3
Archive Device = /dev/st0
AutomaticMount = yes; # when device opened, read it
AlwaysOpen = yes;
RemovableMedia = yes;
RandomAccess = no;
Maximum File Size = 4GB
Spool Directory = "/spool" # Where you store data before writing it to tape
#Changer Command = "/opt/bacula/scripts/mtx-changer %c %o %S %a %d"
Changer Command = "/etc/bacula/scripts/mtx-changer %c %o %S %a %d"
Changer Device = /dev/sg3
AutoChanger = yes
# Enable the Alert command only if you have the mtx package loaded
Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
# If you have smartctl, enable this, it has more info than tapeinfo
# Alert Command = "sh -c 'smartctl -H -l error %c'"
}
#
# A Linux or Solaris LTO-3 tape drive
#
#Device {
# Name = LTO-3
# Media Type = LTO-3
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# Maximum File Size = 4GB
# Changer Command = "/etc/bacula/scripts/mtx-changer %c %o %S %a %d"
# Changer Device = /dev/sg0
# AutoChanger = yes
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/nst0
# Alert Command = "/etc/bacula/scripts/tapealert %l"
#
# # Enable the Alert command only if you have the mtx package loaded
## Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
## If you have smartctl, enable this, it has more info than tapeinfo
## Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
Messages {
Name = Standard
director = bacula-dir = all
}
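The Autochanger defined above is what the job reports reference as Storage: "Iron-Autochanger". For context, a minimal sketch of the matching Director-side Storage resource, assuming the address and password shown in this file; the actual bacula-dir.conf is not part of this diff:

# Sketch only: Director-side counterpart (bacula-dir.conf) of the SD above.
Storage {
  Name = Iron-Autochanger
  Address = 10.85.3.34            # SDAddress from the Storage resource above
  SD Port = 9103
  Password = "iamnotacrook"       # must match the Director password in bacula-sd.conf
  Device = "Iron-Autochanger"     # the Autochanger resource name above
  Media Type = LTO-3
  Autochanger = yes
  Maximum Concurrent Jobs = 20
}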

View File

@@ -1,858 +0,0 @@
#!/bin/sh
#
# baculabackupreport.sh
#
# ------------------------------------------------------------------------------
#
# waa - 20130428 - Initial release.
# Generate basic Bacula backup report.
#
# waa - 20170501 - Change Log moved to bottom of script.
#
# ------------------------------------------------------------------------------
#
# Copyright (c) 2013-2017, William A. Arlofski waa-at-revpol-dot-com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ------------------------------------------------------------------------------
# System variables
# ----------------
server="localhost"
admin="root"
bcbin="/opt/bacula/bin/bconsole"
sendmail="/usr/sbin/sendmail"
bcconfig="/opt/bacula/etc/bconsole.conf"
# Database variables
# ------------------
dbtype="pgsql" # Supported options are pgsql, mysql, mariadb
db="bacula"
dbuser="bacula"
dbbin="/usr/bin/psql"
# dbpass="-pPassword" # Uncomment and set db password if one is used
# Formatting variables
# --------------------
html="yes" # Generate HTML emails instead of plain text emails?
boldstatus="yes" # Set <b> tag on Status field (only if html="yes")
colorstatusbg="yes" # Colorize the Status cell's background? (only if html="yes")
jobtableheadercolor="#b0b0b0" # Background color for the HTML table's header
jobtablejobcolor="#f4f4f4" # Background color for the job rows in the HTML table
runningjobcolor="#4d79ff" # Background color of the Status cell for "Running" jobs
goodjobcolor="#00f000" # Background color of the Status cell for "OK" jobs
warnjobcolor="#ffff00" # Background color of the Status cell for "OK" jobs (with warnings - well, actually with 'joberrors')
badjobcolor="#cc3300" # Background color of the Status cell for "bad" jobs
goodjobwitherrcolor="#cccc00" # Background color of the Status cell for "OK" jobs (with errors) - Not implemented due to request
fontfamily="Verdana, Arial, Helvetica, sans-serif" # Set the font family to use for HTML emails
fontsize="16px" # Set the font size to use for email title and print summaries
fontsizejobinfo="12px" # Set the font size to use for job information inside of table
fontsizesumlog="10px" # Set the font size of bad logs and job summaries
printsummary="yes" # Print a short summary after the job list table? (Total Jobs, Files & Bytes)
emailsummaries="no" # Email all job summaries. Be careful with this, it can generate very large emails
emailbadlogs="yes" # Email logs of bad jobs or jobs with JobErrors -ne 0. Be careful, this can generate very large emails.
addsubjecticon="yes" # Prepend the email Subject with UTF-8 icons (a 'checkmark', 'circle with slash', or a bold 'x')
nojobsicon="=?utf-8?Q?=E2=8A=98?=" # utf-8 subject icon when no jobs have been run
goodjobsicon="=?utf-8?Q?=E2=9C=94?=" # utf-8 subject icon when all jobs were "OK"
badjobsicon="=?utf-8?Q?=E2=9C=96?=" # utf-8 subject icon when there are jobs with errors etc
starbadjobids="yes" # Prepend an asterisk "*" to jobids of "bad" jobs
sortfield="EndTime" # Which catalog db field to sort on? Multiple,fields,work,here
sortorder="DESC" # Which direction to sort?
emailtitle="Jobs Run On ${server} in the Past ${1} Hours" # This is prepended at the top of the email, before the jobs table
# --------------------------------------------------
# Nothing should need to be modified below this line
# --------------------------------------------------
hist=${1}
if [ -z ${hist} ]; then
echo -e "\nUSE:\n$0 <history in hours>\n"
exit 1
fi
if [ ! -e ${bcconfig} ]; then
echo -e "\nThe bconsole configuration file does not seem to be '${bcconfig}'."
echo -e "Please check the setting for the variable 'bcconfig'.\n"
exit 1
fi
if [ ! -x ${bcbin} ]; then
echo -e "\nThe bconsole binary does not seem to be '${bcbin}', or it is not executable."
echo -e "Please check the setting for the variable 'bcbin'.\n"
exit 1
fi
if [ ! -x ${dbbin} ]; then
echo -e "\nThe database client binary does not seem to be '${dbbin}', or it is not executable."
echo -e "Please check the setting for the variable 'dbbin'.\n"
exit 1
fi
if [ ! -x ${sendmail} ]; then
echo -e "\nThe sendmail binary does not seem to be '${sendmail}', or it is not executable."
echo -e "Please check the setting for the variable 'sendmail'.\n"
exit 1
fi
# Build query based on dbtype. Good thing we have "standards" Sigh...
# -------------------------------------------------------------------
case ${dbtype} in
mysql )
queryresult=$(echo "SELECT JobId, Name, StartTime, EndTime, Type, Level, JobStatus, JobFiles, JobBytes, \
TIMEDIFF (EndTime,StartTime) as RunTime, JobErrors \
FROM Job \
WHERE (RealEndTime >= DATE_ADD(NOW(), INTERVAL -${hist} HOUR) OR JobStatus='R') \
ORDER BY ${sortfield} ${sortorder};" \
| ${dbbin} -u ${dbuser} ${dbpass} ${db} \
| sed '/^JobId/d' )
;;
pgsql )
queryresult=$(echo "SELECT JobId, Name, StartTime, EndTime, Type, Level, JobStatus, JobFiles, JobBytes, \
AGE(EndTime, StartTime) as RunTime, JobErrors \
FROM Job \
WHERE (RealEndTime >= CURRENT_TIMESTAMP(2) - cast('${hist} HOUR' as INTERVAL) OR JobStatus='R') \
ORDER BY ${sortfield} ${sortorder};" \
| ${dbbin} -U ${dbuser} ${dbpass} ${db} -0t \
| sed -e 's/|//g' -e '/^$/d' )
;;
mariadb )
queryresult=$(echo "SELECT JobId, Name, StartTime, EndTime, Type, Level, JobStatus, JobFiles, JobBytes, \
TIMEDIFF (EndTime,StartTime) as RunTime, JobErrors \
FROM Job \
WHERE (RealEndTime >= DATE_ADD(NOW(), INTERVAL -${hist} HOUR) OR JobStatus='R') \
ORDER BY ${sortfield} ${sortorder};" \
| ${dbbin} -u ${dbuser} -p${dbpass} ${db} -s -N )
;;
* )
echo "dbtype of '${dbtype}' is invalid. Please set dbtype variable to 'mysql', 'pgsql', or 'mariadb'"
exit 1
;;
esac
# If we have no jobs to report on, then
# we need to skip the entire awk script
# and some bash stuff and jump all the
# way to about line 673
# -------------------------------------
if [ -z "${queryresult}" ]; then
results="0"
else
results="1"
# Now for some fun with awk
# -------------------------
IFS=" "
msg=$(echo ${queryresult} | \
LC_ALL=en_US.UTF-8 \
awk \
-v html="${html}" \
-v boldstatus="${boldstatus}" \
-v colorstatusbg="${colorstatusbg}" \
-v jobtableheadercolor="${jobtableheadercolor}" \
-v jobtablejobcolor="${jobtablejobcolor}" \
-v runningjobcolor="${runningjobcolor}" \
-v goodjobcolor="${goodjobcolor}" \
-v goodjobwitherrcolor="${goodjobwitherrcolor}" \
-v warnjobcolor="${warnjobcolor}" \
-v badjobcolor="${badjobcolor}" \
-v printsummary="${printsummary}" \
-v starbadjobids="${starbadjobids}" \
'BEGIN { awkerr = 0 }
{star = " " }
# List of possible jobstatus codes
# --------------------------------
# Enter SQL query: SELECT * FROM status;
# +-----------+---------------------------------+----------+
# | jobstatus | jobstatuslong | severity |
# +-----------+---------------------------------+----------+
# | C | Created, not yet running | 15 |
# | R | Running | 15 |
# | B | Blocked | 15 |
# | T | Completed successfully | 10 |
# | E | Terminated with errors | 25 |
# | e | Non-fatal error | 20 |
# | f | Fatal error | 100 |
# | D | Verify found differences | 15 |
# | A | Canceled by user | 90 |
# | F | Waiting for Client | 15 |
# | S | Waiting for Storage daemon | 15 |
# | m | Waiting for new media | |
# | M | Waiting for media mount | 15 |
# | s | Waiting for storage resource | 15 |
# | j | Waiting for job resource | 15 |
# | c | Waiting for client resource | 15 |
# | d | Waiting on maximum jobs | 15 |
# | t | Waiting on start time | 15 |
# | p | Waiting on higher priority jobs | 15 |
# | a | SD despooling attributes | 15 |
# | i | Doing batch insert file records | 15 |
# | I | Incomplete Job | 25 |
# +-----------+---------------------------------+----------+
# Is this job still running?
# If a job is still running, then there will be no "Stop Time"
# fields, so $9 (jobstatus) will be shifted left two columns
# to $7, and we will need to test and then reassign these variables
# Note, this seems to be required for PostgreSQL, but MariaDB and
# MySQL return all zeros for the date and time for running jobs
# -----------------------------------------------------------------
{ if ($7 == "R" && $8 ~ /^[0-9]+/)
{
$13 = $10
$11 = $9
$10 = $8
$9 = $7
$8 = $6
$7 = $5
$5 = "--=Still Running=--"
$6 = ""
}
}
# Assign words to job status code characters
# ------------------------------------------
# First, check to see if we need to generate an HTML email
{ if (html == "yes")
{
# Set default opening and closing tags for status cell
# ----------------------------------------------------
tdo = "<td align=\"center\">"
tdc = "</td>"
# Check to see if the job is "OK" then assign
# the "goodjobcolor" to the cell background
# -------------------------------------------
if ($9 ~ /[T]/ && $13 == 0)
{
if (colorstatusbg == "yes")
# Assign jobs that are OK or Running the goodjobcolor
# ---------------------------------------------------
{
tdo = "<td align=\"center\" bgcolor=\"" goodjobcolor "\">"
}
# Should the status be bolded?
# ----------------------------
if (boldstatus == "yes")
{
tdo=tdo"<b>"
tdc="</b>"tdc
}
status["T"]=tdo"-OK-"tdc
# If it is a good job, but with errors or warnings
# then we will assign the warnjobcolor
# ------------------------------------------------
} else if ($9 == "T" && $13 != 0)
{
if (colorstatusbg == "yes")
# Assign OK jobs with errors the warnjobcolor
# -------------------------------------------
{
tdo = "<td align=\"center\" bgcolor=\"" warnjobcolor "\">"
}
# Should the status be bolded?
# ----------------------------
if (boldstatus == "yes")
{
tdo=tdo"<b>"
tdc="</b>"tdc
}
# Since the "W" jobstatus never appears in the DB, we manually
# assign it here so it can be recognized later on in the script
# -------------------------------------------------------------
$9 = "W"
status["W"]=tdo"OK/Warnings"tdc
# If the job is still running we will
# assign it the runningjobcolor
# -----------------------------------
} else if ($9 == "R")
{
if (colorstatusbg == "yes")
# Assign running jobs the runningjobcolor
# ---------------------------------------
{
tdo = "<td align=\"center\" bgcolor=\"" runningjobcolor "\">"
}
# Should the status be bolded?
# ----------------------------
if (boldstatus == "yes")
{
tdo=tdo"<b>"
tdc="</b>"tdc
}
status["R"]=tdo"Running"tdc
# If it is a bad job, then
# we assign the badjobcolor
# -------------------------
} else if ($9 ~ /[ABDef]/)
{
if (colorstatusbg == "yes")
# Assign bad jobs the badjobcolor
# -------------------------------
{
tdo = "<td align=\"center\" bgcolor=\"" badjobcolor "\">"
}
# Should the status be bolded?
# ----------------------------
if (boldstatus == "yes")
{
tdo=tdo"<b>"
tdc="</b>"tdc
}
status["A"]=tdo"Aborted"tdc
status["D"]=tdo"Verify Diffs"tdc
status["f"]=tdo"Failed"tdc
# If it is a job with warnings or errors, assign the job the warnjobcolor
# I have never seen a "W" status in the db. Jobs that are "OK -- with warnings"
# still have a "T" jobstatus, but the joberrors field is incremented in the db
# -----------------------------------------------------------------------------
} else if ($9 ~ /[EI]/)
{
if (colorstatusbg == "yes")
# Assign job the warnjobcolor
# ---------------------------
{
tdo = "<td align=\"center\" bgcolor=\"" warnjobcolor "\">"
}
# Should the status be bolded?
# ----------------------------
if (boldstatus == "yes")
{
tdo=tdo"<b>"
tdc="</b>"tdc
}
status["E"]=tdo"OK, w/Errors"tdc
status["I"]=tdo"Incomplete"tdc
}
} else
# $html is not "yes" so statuses will be normal text
# --------------------------------------------------
{
status["A"]=" Aborted "
status["D"]=" Verify Diffs "
status["E"]=" OK, w/Errors "
status["f"]=" Failed "
status["I"]=" Incomplete "
status["R"]=" Running "
status["T"]=" -OK- "
# Since the "W" jobstatus never appears in the DB, we manually
# assign it here so it can be recognized later on in the script
# -------------------------------------------------------------
if ($9 == "T" && $13 != 0)
{ $9 = "W"
status["W"]=" OK/Warnings "
}
}
}
# These status characters seem to only
# be Director "in memory" statuses. They
# do not get entered into the DB ever so we
# cannot catch them with the db query we use
# I might have to query the DIR as well as
# the DB to be able to capture these
# ------------------------------------------
{
status["C"]=" Created "
status["B"]=" Blocked "
status["F"]=" Wait FD "
status["S"]=" Wait SD "
status["m"]=" Wait New Media"
status["M"]=" Wait Mount "
status["s"]=" Wait Storage"
status["j"]=" Wait Job "
status["c"]=" Wait Client "
status["d"]=" Wait Max Jobs"
status["t"]="Wait Start Time"
status["p"]=" Wait Priority"
status["a"]=" Despool Attrs"
status["i"]=" Batch Insert "
status["L"]="Spool Last Data"
}
# Assign words to job type code characters
# ----------------------------------------
{
jobtype["D"]="Admin"
jobtype["B"]="Backup"
jobtype["C"]="Copy"
jobtype["c"]="Control"
jobtype["R"]="Restore"
jobtype["V"]="Verify"
}
# Assign words to job level code characters
# -----------------------------------------
{
level["F"]="Full"
level["I"]="Incr"
level["D"]="Diff"
level["f"]="VFul"
level["-"]="----"
}
# Assign words to Verify job level code characters
# ------------------------------------------------
{
level["A"]="VVol"
level["C"]="VCat"
level["V"]="Init"
level["O"]="VV2C"
level["d"]="VD2C"
}
# Check to see if the job did not "T"erminate OK then increment $awkerr,
# and prepend the JobId with an asterisk for quick visual identification
# of problem jobs.
# Need to choose between a positive or negative test of the job status code
# -------------------------------------------------------------------------
# Negative check - testing for non existence of all "good" status codes
# $9 !~ /[TRCFSMmsjcdtpai]/ { awkerr++; $1 = "* "$1 }
# Positive check - testing the existence of all "bad" status codes
# good { if ($9 ~ /[ABDEIWef]/ || $13 != 0) { awkerr++; if (starbadjobids == "yes") { star = "*" } } }
{ if ($9 ~ /[ABDEIef]/) { awkerr++; if (starbadjobids == "yes") { star = "*" } } }
# If the job is an Admin, Copy, Control,
# Restore, or Migration job it will have
# no real "Level", so we set it to "----"
# ---------------------------------------
{ if ($7 ~ /[CcDRm]/) { $8 = "-" } }
# Print out each job, formatted with the following fields:
# JobId Name Status Errors Type Level Files Bytes StartTime EndTime RunTime
# -------------------------------------------------------------------------
{ if (html == "yes")
{ printf("<tr bgcolor=\"%s\"> \
<td align=\"center\">%s%s%s</td> \
<td>%s</td> \
%s \
<td align=\"right\">%'"'"'d</td> \
<td align=\"center\">%s</td> \
<td align=\"center\">%s</td> \
<td align=\"right\">%'"'"'d</td> \
<td align=\"right\">%'"'"'9.2f GB</td> \
<td align=\"center\">%s %s</td> \
<td align=\"center\">%s %s</td> \
<td align=\"center\">%s</td> \
</tr>\n", \
jobtablejobcolor, star, $1, star, $2, status[$9], $13, jobtype[$7], level[$8], $10, $11/(1024*1024*1024), $3, $4, $5, $6, $12);
} else
{ printf("%s %-7s %-14s %16s %'"'"'12d %8s %6s %'"'"'9d %'"'"'9.2f GB %11s %-9s %-10s %-9s %-9s\n", \
star, $1, $2, status[$9], $13, jobtype[$7], level[$8], $10, $11/(1024*1024*1024), $3, $4, $5, $6, $12);
}
}
# Count the number of jobs
# ------------------------
{ totaljobs++ }
# Count the number of files and bytes from all jobs
# -------------------------------------------------
{ files += $10 }
{ bytes += $11 }
# Finally, print out the summaries
# --------------------------------
END {
if (printsummary == "yes")
{ if (html == "yes")
{
printf("</table>")
printf("<br>\
<hr align=\"left\" width=\"25%\">\
<table width=\"25%\">\
<tr><td><b>Total Jobs</b></td><td align=\"center\"><b>:</b></td> <td align=\"right\"><b>%'"'"'15d</b></td></tr>\
<tr><td><b>Total Files</b></td><td align=\"center\"><b>:</b></td> <td align=\"right\"><b>%'"'"'15d</b></td></tr>\
<tr><td><b>Total Bytes</b></td><td align=\"center\"><b>:</b></td> <td align=\"right\"><b>%'"'"'15.2f GB</b></td></tr>\
</table>\
<hr align=\"left\" width=\"25%\">",\
totaljobs, files, bytes/(1024*1024*1024));
} else
printf("\
=================================\n\
Total Jobs : %'"'"'15d\n\
Total Files : %'"'"'15d\n\
Total Bytes : %'"'"'15.2f GB\n\
=================================\n",\
totaljobs, files, bytes/(1024*1024*1024));
} exit awkerr }
')
# Any failed jobs, or jobs with errors?
# -------------------------------------
numbadjobs=$?
# Do we email the job summaries?
# ------------------------------
if [ ${emailsummaries} == "yes" ]; then
# Get all of the jobids from the query results, but
# skip any running jobs because they will not have
# a summary in the DB until the job has terminated
# -------------------------------------------------
alljobids=$(echo "${queryresult}" \
| awk '{ if ($7 != "R") printf("%s ", $1) }')
# If no jobids were returned, skip creating
# the header and looping through zero records
# -------------------------------------------
if [ ! -z "${alljobids}" ]; then
# Generate the header
# -------------------
msg="${msg}"$(
if [ ${html} == "yes" ]; then
echo "<pre>====================================="
else
echo -e "\n\n\n====================================="
fi
echo "Job Summaries of All Terminated Jobs:"
echo "====================================="
)
# Get the job logs from all jobs and just grep for the summary
# ------------------------------------------------------------
for jobid in ${alljobids}; do
msg="${msg}"$(
echo -e "\n--------------"
echo "JobId: ${jobid}"
echo "--------------"
echo "llist joblog jobid=${jobid}" | ${bcbin} -c ${bcconfig} | grep -A31 "^ Build OS:"
echo "======================================================================"
)
done
if [ ${html} == "yes" ]; then
msg=${msg}$(echo "</pre>")
fi
fi
fi
# Do we email the bad job logs with the report?
# ---------------------------------------------
if [ ${emailbadlogs} == "yes" ]; then
# Get the badjobs, or the good jobs with
# JobErrors != 0 from the query results
# --------------------------------------
badjobids=$(echo "${queryresult}" \
| awk '{ if ($9 ~ /[ABDEIef]/ || ($9 == "T" && $13 != 0)) printf("%s ", $1) }')
# If no jobids were returned, skip creating
# the header and looping through zero records
# -------------------------------------------
if [ ! -z "${badjobids}" ]; then
# Generate the header
# -------------------
msg="${msg}"$(
if [ ${html} == "yes" ]; then
echo "<pre>=========================================================="
else
echo -e "\n\n\n=========================================================="
fi
echo "Job logs of failed jobs, or good jobs with JobErrors != 0:"
echo "=========================================================="
)
# Get the bad job's log from the Director via bconsole
# ----------------------------------------------------
for jobid in ${badjobids}; do
msg="${msg}"$(
echo -e "\n--------------"
echo "JobId: ${jobid}"
echo "--------------"
echo "llist joblog jobid=${jobid}" | ${bcbin} -c ${bcconfig}
echo "======================================================================"
)
done
if [ ${html} == "yes" ]; then
msg=${msg}$(echo "</pre>")
fi
fi
fi
# Prepend the header to the $msg output
# -------------------------------------
if [ ${html} == "yes" ]; then
msg="<html>
<head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">
<style>
body {font-family:$fontfamily; font-size:$fontsize;} td {font-size:$fontsizejobinfo;} pre {font-size:$fontsizesumlog;}
</style>
</head>
<body>
<p><u><b>${emailtitle}</b></u></p>
<table width=\"98%\" align=\"center\" border=\"1\" cellpadding=\"2\" cellspacing=\"0\">
<tr bgcolor=\"${jobtableheadercolor}\">
<td align=\"center\"><b>Job ID</b></td>
<td align=\"center\"><b>Job Name</b></td>
<td align=\"center\"><b>Status</b></td>
<td align=\"center\"><b>Errors</b></td>
<td align=\"center\"><b>Type</b></td>
<td align=\"center\"><b>Level</b></td>
<td align=\"center\"><b>Files</b></td>
<td align=\"center\"><b>Bytes</b></td>
<td align=\"center\"><b>Start Time</b></td>
<td align=\"center\"><b>End Time</b></td>
<td align=\"center\"><b>Run Time</b></td>
</tr>
${msg}
</body></html>"
else
msg="
${emailtitle}
------------------------------------------
JobId Job Name Status Errors Type Level Files Bytes Start Time End Time Run Time
----- -------------- --------------- ---------- ------- ----- -------- ----------- ------------------- ------------------- --------
${msg}"
fi
fi # If there were zero results returned from the
# SQL query, we skip the entire awk script,
# and a lot of other bash stuff that generates
# the email body and we end up here
# -------------------------------------------------
if [ ${results} -eq 0 ]; then
status="No Jobs Have Been Run"
subjecticon="${nojobsicon}"
msg="Nothing to see here..."
else
# Totally unnecessary, but, well... OCD... :)
# --------------------------------------------
if [ ${numbadjobs} -ne 0 ]; then
if [ ${numbadjobs} -eq 1 ]; then
job="Job"
else
job="Jobs"
fi
status="(${numbadjobs}) ${job} with Errors"
subjecticon="${badjobsicon}"
else
status="All Jobs OK"
subjecticon="${goodjobsicon}"
fi
fi
# More silliness
# --------------
if [ ${hist} -eq 1 ]; then
hour="Hour"
else
hour="Hours"
fi
# Email the report
# ----------------
(
echo "To: ${admin}"
echo "From: ${admin}"
if [ ${addsubjecticon} == "yes" ]; then
echo "Subject: ${subjecticon} ${server} - ${status} in the Past ${hist} ${hour}"
else
echo "Subject: ${server} - ${status} in the Past ${hist} ${hour}"
fi
if [ ${html} == "yes" ] && [ ${results} -ne 0 ]; then
echo "Content-Type: text/html"
echo "MIME-Version: 1.0"
fi
echo ""
echo "${msg}"
) | /usr/sbin/sendmail -t
# -------------
# End of script
# -------------
# ----------
# Change Log
# ----------
# ----------------------------
# William A. Arlofski
# Reverse Polarity, LLC
# helpdesk@revpol.com
# http://www.revpol.com/bacula
# ----------------------------
#
#
# 20130428 - Initial release
# Generate and email a basic Bacula backup report
# 1st command line parameter is expected to be a
# number of hours. No real error checking is done
#
# 20131224 - Removed "AND JobStatus='T'" to get all backup jobs
# whether running, or completed with errors etc.
# - Added Several fields "StartTime", "EndTime",
# "JobFiles"
# - Removed "JobType" because we are only selecting
# jobs of type "Backup" (AND Type='B')
# - Modified header lines and printf lines for better
# formatting
#
# 20140107 - Modified script to include more information and cleaned
# up the output formatting
#
# 20150704 - Added ability to work with MySQL or Postgresql
#
# 20150723 - Modified query, removed "Type='B'" clause to catch all jobs,
# including Copy jobs, Admin jobs etc. Modified header, and
# output string to match new query and include job's "Type"
# column.
#
# 20170225 - Rewrote awk script so that a status/summary could be set in
# the email report's subject. eg:
# Subject: "serverName - All Jobs OK in the past x hours"
# Subject: "serverName - x Jobs FAILED in the past y hours"
#
# 20170303 - Fixed output in cases where there are jobs running and there
# is no "Stop Time" for a job.
#
# 20170406 - Some major modifications:
# - Added feature to spell out words instead of using the
# single character codes for Job type, Job Status, and
# Job Level - Including the different levels for Verify
# jobs
# - If a job terminates with an error or warning, then the
# job's line in the output is prepended with an asterisk
# "*" for quick visual identification
# - Modified the outputs of the files and bytes fields to
# include commas when the number is > 999
# - Added totals to the end of the report for Jobs, Files,
# and Bytes
# - Added $sortfield and $sortorder variables to allow output
# to be sorted as desired
# - Set the level of a job to "----" when the level is not
# applicable as in Restore jobs, Admin jobs etc.
#
# 20170408 - Some minor cleanup, and moving things around
# - Added $emailsummaries variable to append the job summaries
# to the end of the report.
# - Added $emailbadlogs variable to append full joblogs of jobs
# which have failed or jobs with errors to the end of the report
# for quick access to investigate failed jobs.
#
# 20170417 - Added some tests for binaries and the bconsole config file
#
# 20170429 - Thanks to Chris Couture for contacting me and submitting a
# working query for MariaDB. I have added 'mariadb' as a new
# dbtype option.
# - Thanks to Chris Couture for the ideas and some code examples
# to create an HTML email.
# - Added $html variable to enable HTML emails.
# - Added $boldstatus variable to make the Status <b>bold</b>
# in HTML emails.
# - Added $colorstatusbg variable to color the background of
# the Status cell in HTML emails.
# - Thanks to Chris Couture for the idea of adding RunTime
# to the email output.
# - Thanks to Chris Couture for the idea of using some unicode
# characters (a 'checkmark'or a bold 'x') in the Subject:
# to quickly see if everything ran OK.
# - Added $addsubjecticon variable to enable/disable the
# prepending of this icon to the Subject.
# - Added $printsummary variable to give the option to print the
# total Jobs, Files, and Bytes after the job listing table.
# - Added $starbadjobids variable to enable/disable prepending
# the bad jobids with an asterisk "*".
# - Modified the way the email is built at the end. Thanks to
# Chris Courture again for this nice idea.
# - Added $jobtableheadercolor, $jobtablejobcolor, $goodjobcolor,
# $goodjobwitherrcolor, $runningjobcolor, $warnjobcolor, and
# $badjobcolor variables to colorize HTML emails
# - Added $emailtitle variable for the title at the top
# - Added $fontfamily, $fontsize, $fontsizejobinfo, and $fontsizesumlog
# variables to allow styling of the HTML output (Thanks again Chris)
# - Added $nojobsicon, $goodjobsicon, and $badjobsicon variables to
# allow setting the prepended utf-8 subject icon character
# - Reformatted things so that if there are no jobs returned by the
# SQL query, the email message sent is nice and short
# - Modified the license to allow for inclusion into Bacula Community,
# and possibly the Enterprise Edition releases
#
# 20170430 - Modified the order of the fields to make more sense
# - Re-aligned the text email so that when an asterisk is pre-pended it
# does not shift the whole line
#
# 20170508 - Re-worked some of the logic so that good jobs (JobStatus="T") which
# have errors will have their status listed as "OK/Warnings", and it
# will not trigger as a "bad job" on the JobErrors, so it will not
# have an asterisk prepended to the JobId in the job listing. I think
# this fix is more of a temporary change in the hopes that a "W"
# status to represent "good jobs with warnings" is implemented in the
# db in the future.
# - Added an "Errors" column to the table to show "JobErrors" from the
# db.
# - Some minor variable name changes and other minor changes
#
# 20170511 - Minor adjustments to the alignment formatting of the text email
# - Minor 'case' changes to a couple levels (Init & VCat)
#
# ------------------------------------------------------------------------------
# I like small tabs. Use :set list in vim to see tabbing etc
# vim: set tabstop=2:softtabstop=2:shiftwidth=2 #
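A hedged usage note for the report script above: it takes a single argument, the history window in hours, and mails the result to ${admin}. A hypothetical /etc/cron.d entry; the install path and schedule are assumptions, not from this repository.

# Sketch only: mail a 24-hour report every morning at 07:00.
0 7 * * *  root  /opt/bacula/scripts/baculabackupreport.sh 24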

View File

@@ -1,33 +0,0 @@
print fail_time
print my_name
print exename
print exepath
print assert_msg
print db_engine_name
print version
print host_os
print distname
print distver
print host_name
print dist_name
show env TestName
bt
thread apply all bt
f 0
info locals
f 1
info locals
f 2
info locals
f 3
info locals
f 4
info locals
f 5
info locals
f 6
info locals
f 7
info locals
detach
quit

View File

@@ -1,397 +0,0 @@
#!/bin/sh
#
# Bacula interface to virtual autoloader using disk storage
#
# Written by Kern Sibbald
#
# Bacula(R) - The Network Backup Solution
#
# Copyright (C) 2000-2016 Kern Sibbald
#
# The original author of Bacula is Kern Sibbald, with contributions
# from many others, a complete list can be found in the file AUTHORS.
#
# You may use this file and others of this release according to the
# license defined in the LICENSE file, which includes the Affero General
# Public License, v3.0 ("AGPLv3") and some additional permissions and
# terms pursuant to its AGPLv3 Section 7.
#
# This notice must be preserved when any source code is
# conveyed and/or propagated.
#
# Bacula(R) is a registered trademark of Kern Sibbald.
# If you set in your Device resource
#
# Changer Command = "path-to-this-script/disk-changer %c %o %S %a %d"
# you will have the following input to this script:
#
# So Bacula will always call with all the following arguments, even though
# in some cases, not all are used. Note, the Volume name is not always
# included.
#
# disk-changer "changer-device" "command" "slot" "archive-device" "drive-index" "volume"
# $1 $2 $3 $4 $5 $6
#
# By default the autochanger has 10 Volumes and 1 Drive.
#
# Note: For this script to work, you *must* specify
# Device Type = File
# in each of the Devices associated with your AutoChanger resource.
#
# changer-device is the name of a file that overrides the default
# volumes and drives. It may have:
# maxslot=n where n is one based (default 10)
# maxdrive=m where m is zero based (default 1 -- i.e. 2 drives)
#
# This code can also simulate barcodes. You simply put
# a list of the slots and barcodes in the "base" directory/barcodes.
# See below for the base directory definition. Example of a
# barcodes file:
# /var/bacula/barcodes
# 1:Vol001
# 2:Vol002
# ...
#
# archive-device is the name of the base directory where you want the
# Volumes stored appended with /drive0 for the first drive; /drive1
# for the second drive, ... For example, you might use
# /var/bacula/drive0 Note: you must not have a trailing slash, and
# the string (e.g. /drive0) must be unique, and it must not match
# any other part of the directory name. These restrictions could be
# easily removed by any clever script jockey.
#
# Full example: disk-changer /var/bacula/conf load 1 /var/bacula/drive0 0 TestVol001
#
# The Volumes will be created with names slot1, slot2, slot3, ... maxslot in the
# base directory. In the above example the base directory is /var/bacula.
# However, as with tapes, their Bacula Volume names will be stored inside the
# Volume label. In addition to the Volumes (e.g. /var/bacula/slot1,
# /var/bacula/slot3, ...) this script will create a /var/bacula/loadedn
# file to keep track of what Slot is loaded. You should not change this file.
#
# Modified 8 June 2010 to accept Volume names from the calling program as arg 6.
# In this case, rather than storing the data in slotn, it is stored in the
# Volume name. Note: for this to work, Volume names may not include spaces.
#
wd=/var/lib/bacula
#
# log whats done
#
# to turn on logging, uncomment the following line
#touch $wd/disk-changer.log
#
dbgfile="$wd/disk-changer.log"
debug() {
if test -f $dbgfile; then
echo "`date +\"%Y%m%d-%H:%M:%S\"` $*" >> $dbgfile
fi
}
#
# Create a temporary file
#
make_temp_file() {
TMPFILE=`mktemp -t mtx.XXXXXXXXXX`
if test x${TMPFILE} = x; then
TMPFILE="$wd/disk-changer.$$"
if test -f ${TMPFILE}; then
echo "Temp file security problem on: ${TMPFILE}"
exit 1
fi
fi
}
# check parameter count on commandline
#
check_parm_count() {
pCount=$1
pCountNeed=$2
if test $pCount -lt $pCountNeed; then
echo "usage: disk-changer ctl-device command [slot archive-device drive-index]"
echo " Insufficient number of arguments arguments given."
if test $pCount -lt 2; then
echo " Mimimum usage is first two arguments ..."
else
echo " Command expected $pCountNeed arguments"
fi
exit 1
fi
}
#
# Strip off the final name in order to get the Directory ($dir)
# that we are dealing with.
#
get_dir() {
bn=`basename $device`
dir=`echo "$device" | sed -e s%/$bn%%g`
if [ ! -d $dir ]; then
echo "ERROR: Autochanger directory \"$dir\" does not exist."
echo " You must create it."
exit 1
fi
}
#
# Get the Volume name from the call line, or directly from
# the volslotn information.
#
get_vol() {
havevol=0
debug "vol=$volume"
if test "x$volume" != x && test "x$volume" != "x*NONE*" ; then
debug "touching $dir/$volume"
touch $dir/$volume
echo "$volume" >$dir/volslot${slot}
havevol=1
elif [ -f $dir/volslot${slot} ]; then
volume=`cat $dir/volslot${slot}`
havevol=1
fi
}
# Setup arguments
ctl=$1
cmd="$2"
slot=$3
device=$4
drive=$5
volume=$6
# set defaults
maxdrive=1
maxslot=10
# Pull in conf file
if [ -f $ctl ]; then
. $ctl
fi
# Check for special cases where only 2 arguments are needed,
# all others are a minimum of 5
#
case $2 in
list|listall)
check_parm_count $# 2
;;
slots)
check_parm_count $# 2
;;
transfer)
check_parm_count $# 4
if [ $slot -gt $maxslot ]; then
echo "Slot ($slot) out of range (1-$maxslot)"
debug "Error: Slot ($slot) out of range (1-$maxslot)"
exit 1
fi
;;
*)
check_parm_count $# 5
if [ $drive -gt $maxdrive ]; then
echo "Drive ($drive) out of range (0-$maxdrive)"
debug "Error: Drive ($drive) out of range (0-$maxdrive)"
exit 1
fi
if [ $slot -gt $maxslot ]; then
echo "Slot ($slot) out of range (1-$maxslot)"
debug "Error: Slot ($slot) out of range (1-$maxslot)"
exit 1
fi
;;
esac
debug "Parms: $ctl $cmd $slot $device $drive $volume $havevol"
case $cmd in
unload)
debug "Doing disk -f $ctl unload $slot $device $drive $volume"
get_dir
if [ -f $dir/loaded${drive} ]; then
ld=`cat $dir/loaded${drive}`
else
echo "Storage Element $slot is Already Full"
debug "Unload error: $dir/loaded${drive} is already unloaded"
exit 1
fi
if [ $slot -eq $ld ]; then
echo "0" >$dir/loaded${drive}
unlink $device 2>/dev/null >/dev/null
unlink ${device}.add 2>/dev/null >/dev/null
rm -f ${device} ${device}.add
else
echo "Storage Element $slot is Already Full"
debug "Unload error: $dir/loaded${drive} slot=$ld is already unloaded"
exit 1
fi
;;
load)
debug "Doing disk $ctl load $slot $device $drive $volume"
get_dir
i=0
# Check if slot already in a drive
while [ $i -le $maxdrive ]; do
if [ -f $dir/loaded${i} ]; then
ld=`cat $dir/loaded${i}`
else
ld=0
fi
if [ $ld -eq $slot ]; then
echo "Drive ${i} Full (Storage element ${ld} loaded)"
debug "Load error: Cannot load Slot=${ld} in drive=$drive. Already in drive=${i}"
exit 1
fi
i=`expr $i + 1`
done
# Check if we have a Volume name
get_vol
if [ $havevol -eq 0 ]; then
# check if slot exists
if [ ! -f $dir/slot${slot} ] ; then
echo "source Element Address $slot is Empty"
debug "Load error: source Element Address $slot is Empty"
exit 1
fi
fi
if [ -f $dir/loaded${drive} ]; then
ld=`cat $dir/loaded${drive}`
else
ld=0
fi
if [ $ld -ne 0 ]; then
echo "Drive ${drive} Full (Storage element ${ld} loaded)"
echo "Load error: Drive ${drive} Full (Storage element ${ld} loaded)"
exit 1
fi
echo "0" >$dir/loaded${drive}
unlink $device 2>/dev/null >/dev/null
unlink ${device}.add 2>/dev/null >/dev/null
rm -f ${device} ${device}.add
if [ $havevol -ne 0 ]; then
ln -s $dir/$volume $device
ln -s $dir/${volume}.add ${device}.add
rtn=$?
else
ln -s $dir/slot${slot} $device
ln -s $dir/slot${slot}.add ${device}.add
rtn=$?
fi
if [ $rtn -eq 0 ]; then
echo $slot >$dir/loaded${drive}
fi
exit $rtn
;;
list)
debug "Doing disk -f $ctl -- to list volumes"
get_dir
if [ -f $dir/barcodes ]; then
cat $dir/barcodes
else
i=1
while [ $i -le $maxslot ]; do
slot=$i
volume=
get_vol
if [ $havevol -eq 0 ]; then
echo "$i:"
else
echo "$i:$volume"
fi
i=`expr $i + 1`
done
fi
exit 0
;;
listall)
# ***FIXME*** must add new Volume stuff
make_temp_file
debug "Doing disk -f $ctl -- to list volumes"
get_dir
if [ ! -f $dir/barcodes ]; then
exit 0
fi
# We print the drive content as seen by the autochanger
# and we also remove loaded media from the barcode list
i=0
while [ $i -le $maxdrive ]; do
if [ -f $dir/loaded${i} ]; then
ld=`cat $dir/loaded${i}`
v=`awk -F: "/^$ld:/"' { print $2 }' $dir/barcodes`
echo "D:$i:F:$ld:$v"
echo "^$ld:" >> $TMPFILE
fi
i=`expr $i + 1`
done
# Empty slots are not in barcodes file
# When we detect a gap, we print missing rows as empty
# At the end, we fill the gap between the last entry and maxslot
grep -v -f $TMPFILE $dir/barcodes | sort -n | \
perl -ne 'BEGIN { $cur=1 }
if (/(\d+):(.+)?/) {
if ($cur == $1) {
print "S:$1:F:$2\n"
} else {
while ($cur < $1) {
print "S:$cur:E\n";
$cur++;
}
}
$cur++;
}
END { while ($cur < '"$maxslot"') { print "S:$cur:E\n"; $cur++; } } '
rm -f $TMPFILE
exit 0
;;
transfer)
# ***FIXME*** must add new Volume stuff
get_dir
make_temp_file
slotdest=$device
if [ -f $dir/slot${slotdest} ]; then
echo "destination Element Address $slotdest is Full"
exit 1
fi
if [ ! -f $dir/slot${slot} ] ; then
echo "source Element Address $slot is Empty"
exit 1
fi
echo "Transfering $slot to $slotdest"
mv $dir/slot${slot} $dir/slot{$slotdest}
mv $dir/slot${slot}.add $dir/slot{$slotdest}.add
if [ -f $dir/barcodes ]; then
sed "s/^$slot:/$slotdest:/" > $TMPFILE
sort -n $TMPFILE > $dir/barcodes
fi
exit 0
;;
loaded)
debug "Doing disk -f $ctl $drive -- to find what is loaded"
get_dir
if [ -f $dir/loaded${drive} ]; then
a=`cat $dir/loaded${drive}`
else
a="0"
fi
debug "Loaded: drive=$drive is $a"
echo $a
exit
;;
slots)
debug "Doing disk -f $ctl -- to get count of slots"
echo $maxslot
;;
esac
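#
# Minimal manual smoke test of this changer interface (script name, conf
# path and volume name are hypothetical; the conf file passed as $1 is
# expected to set dir, maxdrive and maxslot):
#   ./disk-changer /etc/bacula/scripts/disk-changer.conf slots
#   ./disk-changer /etc/bacula/scripts/disk-changer.conf list
#   ./disk-changer /etc/bacula/scripts/disk-changer.conf load 1 /var/lib/bacula/drive0 0 TestVol001
#   ./disk-changer /etc/bacula/scripts/disk-changer.conf loaded 1 /var/lib/bacula/drive0 0
#   ./disk-changer /etc/bacula/scripts/disk-changer.conf unload 1 /var/lib/bacula/drive0 0 TestVol001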

View File

@ -1,84 +0,0 @@
#!/bin/sh
#
# Copyright (C) 2000-2018 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
# Bacula interface to get worm status of tape
#
# isworm %l (control device name)
#
# Typical output:
# sdparm --page=0x1D -f /dev/sg0
# /dev/st0: HP Ultrium 5-SCSI I5AW [tape]
# Medium configuration (SSC) mode page:
# WORMM 1 [cha: n, def: 1, sav: 1]
# WMLR 1 [cha: n, def: 1, sav: 1]
# WMFR 2 [cha: n, def: 2, sav: 2]
#
# Where WORMM is worm mode
# WMLR is worm mode label restrictions
# 0 - No blocks can be overwritten
# 1 - Some types of format labels may not be overwritten
# 2 - All format labels can be overwritten
# WMFR is worm mode filemark restrictions
# 0-1 - Reserved
# 2 - Any number of filemarks immediately preceding EOD can be
# overwritten except file mark closest to BOP (beginning of
# partition).
# 3 - Any number of filemarks immediately preceding the EOD
# can be overwritten
# 4-FF - Reserved
#
if [ x$1 = x ] ; then
echo "First argument missing. Must be device control name."
exit 1
fi
sdparm=`which sdparm`
if [ x${sdparm} = x ] ; then
echo "sdparm program not found, but is required."
exit 0
fi
#
# This should be the correct way to determine if the tape is WORM
# but it does not work for mhvtl. Comment out the next 5 lines
# and the code that follows will detect correctly on mhvtl.
#
worm=`$sdparm --page=0x1D -f $1 |grep " *WORMM"|cut -b12-16|sed "s:^ *::"`
if [ $? = 0 ] ; then
echo $worm
exit 0
fi
tapeinfo=`which tapeinfo`
if [ x${tapeinfo} = x ] ; then
echo "tapeinfo program not found, but is required."
exit 1
fi
#
# Unfortunately IBM and HP handle the Medium Type differently,
# so we detect the vendor and get the appropriate Worm flag.
#
vendor=`$tapeinfo -f $1|grep "^Vendor ID:"|cut -b13-15`
if [ x$vendor = xHP ] ; then
worm=`$tapeinfo -f $1|grep "^Medium Type: 0x"|cut -b16-16`
echo $worm
exit 0
fi
if [ x$vendor = xIBM ] ; then
worm=`$tapeinfo -f $1|grep "^Medium Type: 0x"|cut -b17-17`
if [ x$worm = xc ]; then
echo "1"
exit 0
fi
if [ x$worm = xC ]; then
echo "1"
exit 0
fi
fi
echo "0"
exit 0
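#
# Example manual check (assumes /dev/sg0 is the drive's SCSI control device
# and this script is installed as "isworm"; Bacula normally calls it with
# the control device name, i.e. "isworm %l"):
#   ./isworm /dev/sg0
# Output is 1 for a WORM cartridge, 0 otherwise.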

View File

@ -1,111 +0,0 @@
#!/bin/sh
#
# Copyright (C) 2000-2017 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
# This script dumps your Bacula catalog in ASCII format
# It works for MySQL, SQLite, and PostgreSQL
#
# $1 is the name of the database to be backed up and the name
# of the output file (default = bacula).
# $2 is the user name with which to access the database
# (default = bacula).
# $3 is the password with which to access the database or "" if no password
# (default ""). WARNING!!! Passing the password via the command line is
# insecure and should not be used since any user can display the command
# line arguments and the environment using ps. Please consult your
# MySQL or PostgreSQL manual for secure methods of specifying the
# password.
# $4 is the host on which the database is located
# (default "")
# $5 is the type of database
#
#
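# Example (hypothetical values): dump the "bacula" catalog as DB user
# "bacula" from the local PostgreSQL server; the script cd's to
# /var/lib/bacula and writes the dump to bacula.sql there:
#   make_catalog_backup bacula bacula "" "" postgresql
#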
default_db_type=postgresql
user=${2:-XXX_DBUSER_XXX}
#
# See if the fifth argument is a valid backend name.
# If so the user overrides the default database backend.
#
if [ $# -ge 5 ]; then
case $5 in
sqlite3)
db_type=$5
;;
mysql)
db_type=$5
;;
postgresql)
db_type=$5
;;
ingres)
db_type=$5
;;
*)
;;
esac
fi
#
# If no new db_type is given, use the default db_type.
#
if [ -z "${db_type}" ]; then
db_type="${default_db_type}"
fi
cd /var/lib/bacula
rm -f $1.sql
case ${db_type} in
sqlite3)
BINDIR=/usr/bin
echo ".dump" | ${BINDIR}/sqlite3 $1.db >$1.sql
;;
mysql)
BINDIR=/usr/bin
if test $# -gt 2; then
MYSQLPASSWORD=" --password=$3"
else
MYSQLPASSWORD=""
fi
if test $# -gt 3; then
MYSQLHOST=" --host=$4"
else
MYSQLHOST=""
fi
${BINDIR}/mysqldump -u ${user}${MYSQLPASSWORD}${MYSQLHOST} -f --opt $1 >$1.sql
;;
postgresql)
BINDIR=/usr/bin
if test $# -gt 2; then
PGPASSWORD=$3
export PGPASSWORD
fi
if test $# -gt 3; then
PGHOST=" --host=$4"
else
PGHOST=""
fi
# you could also add --compress for compression. See man pg_dump
exec ${BINDIR}/pg_dump -c $PGHOST -U $user $1 >$1.sql
;;
esac
#
# To read back a MySQL database use:
# cd /var/lib/bacula
# rm -f ${BINDIR}/../var/bacula/*
# mysql <bacula.sql
#
# To read back a SQLite database use:
# cd /var/lib/bacula
# rm -f bacula.db
# sqlite bacula.db <bacula.sql
#
# To read back a PostgreSQL database use:
# cd /var/lib/bacula
# dropdb bacula
# createdb bacula -T template0 -E SQL_ASCII
# psql bacula <bacula.sql
#

View File

@ -1,195 +0,0 @@
#!/usr/bin/perl
#
# Author: Eric Bollengier, Copyright, 2006-2017
# License: BSD 2-Clause; see file LICENSE-FOSS
use strict;
=head1 SCRIPT
This script dumps your Bacula catalog in ASCII format
It works for MySQL, SQLite, and PostgreSQL
=head1 USAGE
make_catalog_backup.pl [-m] MyCatalog
=head1 LICENSE
Author: Eric Bollengier, 2010
License: BSD 2-Clause; see file LICENSE-FOSS
=cut
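#
# Typical use is from the Director's BackupCatalog job as a RunBeforeJob
# (path and catalog name are examples; the argument must match a Catalog
# resource defined in bacula-dir.conf):
#   make_catalog_backup.pl MyCatalog        # dump the catalog to /var/lib/bacula
#   make_catalog_backup.pl -m MyCatalog     # analyse/vacuum instead of dump
#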
my $cat = shift or die "Usage: $0 [-m] catalogname";
my $mode = "dump";
if ($cat eq '-m') {
$mode = "analyse";
$cat = shift or die "Usage: $0 [-m] catalogname";
}
my $dir_conf='/usr/sbin/dbcheck -B -c /etc/bacula/bacula-dir.conf';
my $wd = "/var/lib/bacula";
sub dump_sqlite3
{
my %args = @_;
exec("echo .dump | sqlite3 '$wd/$args{db_name}.db' > '$wd/$args{db_name}.sql'");
print "Error while executing sqlite dump $!\n";
return 1;
}
# TODO: use just ENV and drop the pg_service.conf file
sub setup_env_pgsql
{
my %args = @_;
my $username = getpwuid $ENV{'UID'};
umask(0077);
if ($args{db_address}) {
$ENV{PGHOST}=$args{db_address};
}
if ($args{db_socket}) {
$ENV{PGHOST}=$args{db_socket};
}
if ($args{db_port}) {
$ENV{PGPORT}=$args{db_port};
}
if ($args{db_user}) {
$ENV{PGUSER}=$args{db_user};
}
if ($args{db_password}) {
$ENV{PGPASSWORD}=$args{db_password};
}
$ENV{PGDATABASE}=$args{db_name};
system("echo '\\q' | HOME='$wd' psql") == 0 or die "$username doesn't have access to the catalog database\n";
}
sub dump_pgsql
{
my %args = @_;
setup_env_pgsql(%args);
exec("HOME='$wd' pg_dump -c > '$wd/$args{db_name}.sql'");
print "Error while executing postgres dump $!\n";
return 1; # in case of error
}
sub analyse_pgsql
{
my %args = @_;
setup_env_pgsql(%args);
my @output =`LANG=C HOME='$wd' vacuumdb -z 2>&1`;
my $exitcode = $? >> 8;
print grep { !/^WARNING:\s+skipping\s\"(pg_|sql_)/ } @output;
if ($exitcode != 0) {
print "Error while executing postgres analyse. Exitcode=$exitcode\n";
}
return $exitcode;
}
sub setup_env_mysql
{
my %args = @_;
umask(0077);
unlink("$wd/.my.cnf");
open(MY, ">$wd/.my.cnf")
or die "Can't open $wd/.my.cnf for writing $@";
$args{db_address} = $args{db_address} || "localhost";
my $addr = "host=$args{db_address}";
if ($args{db_socket}) { # a unix socket is faster than a net socket
$addr = "socket=\"$args{db_socket}\"";
}
my $mode = $args{mode} || 'client';
print MY "[$mode]
$addr
user=\"$args{db_user}\"
password=\"$args{db_password}\"
";
if ($args{db_port}) {
print MY "port=$args{db_port}\n";
}
close(MY);
}
sub dump_mysql
{
my %args = @_;
setup_env_mysql(%args);
exec("HOME='$wd' mysqldump -f --opt $args{db_name} > '$wd/$args{db_name}.sql'");
print "Error while executing mysql dump $!\n";
return 1;
}
sub analyse_mysql
{
my %args = @_;
$args{mode} = 'mysqlcheck';
setup_env_mysql(%args);
exec("HOME='$wd' mysqlcheck -a $args{db_name}");
print "Error while executing mysql analyse $!\n";
return 1;
}
sub handle_catalog
{
my ($mode, %args) = @_;
if ($args{db_type} eq 'SQLite3') {
$ENV{PATH}="/usr/bin:$ENV{PATH}";
if ($mode eq 'dump') {
dump_sqlite3(%args);
}
} elsif ($args{db_type} eq 'PostgreSQL') {
$ENV{PATH}="/usr/bin:$ENV{PATH}";
if ($mode eq 'dump') {
dump_pgsql(%args);
} else {
analyse_pgsql(%args);
}
} elsif ($args{db_type} eq 'MySQL') {
$ENV{PATH}="/usr/bin:$ENV{PATH}";
if ($mode eq 'dump') {
dump_mysql(%args);
} else {
analyse_mysql(%args);
}
} else {
die "This database type isn't supported";
}
}
open(FP, "$dir_conf -C '$cat'|") or die "Can't get catalog information $@";
# catalog=MyCatalog
# db_type=SQLite
# db_name=regress
# db_driver=
# db_user=regress
# db_password=
# db_address=
# db_port=0
# db_socket=
my %cfg;
while(my $l = <FP>)
{
if ($l =~ /catalog=(.+)/) {
if (exists $cfg{catalog} and $cfg{catalog} eq $cat) {
exit handle_catalog($mode, %cfg);
}
%cfg = (); # reset
}
if ($l =~ /(\w+)=(.+)/) {
$cfg{$1}=$2;
}
}
if (exists $cfg{catalog} and $cfg{catalog} eq $cat) {
exit handle_catalog($mode, %cfg);
}
print "Can't find your catalog ($cat) in director configuration\n";
exit 1;

View File

@ -1,51 +0,0 @@
#!/usr/bin/gawk -f
# extract.awk script expects Catalog definition in a form of:
# Catalog {
# Name = NameOfCatalog
# dbname = ""; DB Address = ""; user = ""; password = ""; DB Socket = ""; DB Port = ""
# }
#
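# Example invocation (hedged; the file names are placeholders and cat1..cat4
# are the awk variables compared below against each Catalog's Name):
#   gawk -v cat1=MyCatalog -f make_catalog_backup.awk /etc/bacula/bacula-dir.conf
# The matching Catalog's connection settings are written to
# /var/lib/bacula/pg_service.conf and the database is dumped to
# /var/lib/bacula/bacula.sql with pg_dump.
#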
BEGIN { RS= "}" ; FS="[;\n]+"}
function trim(v) {
## Remove leading and trailing spaces
sub(/^ */,"",v)
sub(/ *$/,"",v)
return v
}
$0 ~ /Catalog[[:space:]]*{/ {
for ( i = 1; i <= NF ; i++)
{
split($i,a,"=")
if (a[1] ~ /dbname/)
dbname = trim(gensub("\"","","g",a[2])) # remove " char
if (a[1] ~ /user/)
user = trim(gensub("\"","","g",a[2]))
if (a[1] ~ /Name/)
catname = trim(gensub("\"","","g",a[2]))
if (a[1] ~ /password/)
password = trim(gensub("\"","","g",a[2]))
if (a[1] ~ /DB Address/)
dbaddress = trim(gensub("\"","","g",a[2]))
if (a[1] ~ /DB Socket/)
dbsocket = trim(gensub("\"","","g",a[2]))
if (a[1] ~ /DB Port/)
dbport = trim(gensub("\"","","g",a[2]))
}
if (catname == cat1 || catname == cat2 || catname == cat3 || catname == cat4) {
if (dbaddress == "") #Not optional in the case of MySQL
dbaddress = "localhost"
system("rm -rf /var/lib/bacula/pg_service.conf")
system("touch /var/lib/bacula/pg_service.conf")
system("chmod 600 /var/lib/bacula/pg_service.conf")
printf "[bacula]\n dbname=%s\n user=%s\n password=%s\n",dbname,user,password >> "/var/lib/bacula/pg_service.conf"
if (dbport != "")
printf " port=%s\n",dbport >> "/var/lib/bacula/pg_service.conf"
system("HOME=/var/lib/bacula PGSERVICE=bacula PGSYSCONFDIR=/var/lib/bacula pg_dump > /var/lib/bacula/bacula.sql")
}
}

View File

@ -1,353 +0,0 @@
#!/bin/sh
#
# Bacula(R) - The Network Backup Solution
#
# Copyright (C) 2000-2016 Kern Sibbald
#
# The original author of Bacula is Kern Sibbald, with contributions
# from many others, a complete list can be found in the file AUTHORS.
#
# You may use this file and others of this release according to the
# license defined in the LICENSE file, which includes the Affero General
# Public License, v3.0 ("AGPLv3") and some additional permissions and
# terms pursuant to its AGPLv3 Section 7.
#
# This notice must be preserved when any source code is
# conveyed and/or propagated.
#
# Bacula(R) is a registered trademark of Kern Sibbald.
#
# If you set in your Device resource
#
# Changer Command = "path-to-this-script/mtx-changer %c %o %S %a %d"
# you will have the following input to this script:
#
# So Bacula will always call with all the following arguments, even though
# in some cases, not all are used.
#
# mtx-changer "changer-device" "command" "slot" "archive-device" "drive-index"
# $1 $2 $3 $4 $5
#
# for example:
#
# mtx-changer /dev/sg0 load 1 /dev/nst0 0 (on a Linux system)
#
# will request to load the first cartridge into drive 0, where
# the SCSI control channel is /dev/sg0, and the read/write device
# is /dev/nst0.
#
# The commands are:
# Command Function
# unload unload a given slot
# load load a given slot
# loaded which slot is loaded?
# list list Volume names (requires barcode reader)
# slots how many slots total?
# listall list all info
# transfer
#
# Slots are numbered from 1 ...
# Drives are numbered from 0 ...
#
#
# If you need to do an offline, refer to the drive as $4
# e.g. mt -f $4 offline
#
# Many changers need an offline after the unload. Also many
# changers need a sleep 60 after the mtx load.
#
# N.B. If you change the script, take care to return either
# the mtx exit code or a 0. If the script exits with a non-zero
# exit code, Bacula will assume the request failed.
#
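# Quick manual checks (assumes /dev/sg0 is the changer control device and
# /dev/nst0 the tape drive; adjust both to your hardware):
#   ./mtx-changer /dev/sg0 slots
#   ./mtx-changer /dev/sg0 list
#   ./mtx-changer /dev/sg0 loaded 0 /dev/nst0 0
#   ./mtx-changer /dev/sg0 load 1 /dev/nst0 0
#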
# myversion must be the same as version in mtx-changer.conf
myversion=2
# source our conf file
if test ! -f /etc/bacula/scripts/mtx-changer.conf ; then
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
echo "ERROR: /etc/bacula/scripts/mtx-changer.conf file not found!!!!"
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
exit 1
fi
. /etc/bacula/scripts/mtx-changer.conf
if test "${version}" != "${myversion}" ; then
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
echo "ERROR: /etc/bacula/scripts/mtx-changer.conf has wrong version. Wanted ${myversion}, got ${version} !!!"
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
exit 1
fi
MTX=/usr/sbin/mtx
if test ${debug_log} -ne 0 ; then
touch /var/lib/bacula/mtx.log
fi
dbgfile="/var/lib/bacula/mtx.log"
debug() {
if test -f $dbgfile -a ${debug_level} -ge $1; then
echo "`date +%m%d-%H:%M:%S.%N|cut -c1-16` ${chgr_id} $2" >> $dbgfile
fi
}
#
# Create a temporary file
#
make_temp_file() {
TMPFILE=`mktemp /var/lib/bacula/mtx.XXXXXXXXXX`
if test x${TMPFILE} = x; then
TMPFILE="/var/lib/bacula/mtx.$$"
if test -f ${TMPFILE}; then
echo "ERROR: Temp file security problem on: ${TMPFILE}"
exit 1
fi
fi
}
#
# Create a temporary file for stderr
#
# Note, this file is used because mtx sometimes emits
# unexpected error messages followed by the output
# expected during success.
# So we separate STDOUT and STDERR in
# certain of the mtx commands. The contents of STDERR
# are then printed after the STDOUT produced by mtx,
# which sometimes gives better changer results.
#
make_err_file() {
ERRFILE=`mktemp /var/lib/bacula/mtx.err.XXXXXXXXXX`
if test x${ERRFILE} = x; then
ERRFILE="/var/lib/bacula/mtx.err.$$"
if test -f ${ERRFILE}; then
echo "ERROR: Temp file security problem on: ${ERRFILE}"
exit 1
fi
fi
}
#
# The purpose of this function is to wait a maximum
# time for the drive. It will
# return as soon as the drive is ready, or after
# waiting a maximum of 300 seconds.
# Note, this is very system dependent, so if you are
# not running on Linux, you will probably need to
# re-write it, or at least change the grep target.
# We've attempted to get the appropriate OS grep targets
# in the code at the top of this script.
#
wait_for_drive() {
i=0
while [ $i -le 300 ]; do # Wait max 300 seconds
if mt -f $1 status 2>&1 | grep "${ready}" >/dev/null 2>&1; then
break
fi
debug $dbglvl "Device $1 - not ready, retrying..."
sleep 1
i=`expr $i + 1`
done
}
# check parameter count on commandline
#
check_parm_count() {
pCount=$1
pCountNeed=$2
if test $pCount -lt $pCountNeed; then
echo "ERROR: usage: mtx-changer ctl-device command [slot archive-device drive-index]"
echo " Insufficient number of arguments given."
if test $pCount -lt 2; then
echo " Mimimum usage is first two arguments ..."
else
echo " Command expected $pCountNeed arguments"
fi
exit 1
fi
}
# Check for special cases where only 2 arguments are needed,
# all others are a minimum of 5
#
case $2 in
list|listall)
check_parm_count $# 2
;;
slots)
check_parm_count $# 2
;;
transfer)
check_parm_count $# 4
;;
*)
check_parm_count $# 5
;;
esac
# Setup arguments
ctl=$1
cmd="$2"
slot=$3
device=$4
drive=$5
debug $dbglvl "Parms: $ctl $cmd $slot $device $drive"
case $cmd in
unload)
if test ${offline} -eq 1 ; then
mt -f $device offline
fi
if test ${offline_sleep} -ne 0 ; then
sleep ${offline_sleep}
fi
make_err_file
for i in 1 2 3 4 5 ; do
debug $idbglvl "Doing mtx -f $ctl unload slot=$slot drv=$drive"
${MTX} -f $ctl unload $slot $drive 2>${ERRFILE}
rtn=$?
if test $rtn -eq 0 ; then
break
fi
grep "Error Code=" ${ERRFILE} 2>/dev/null 1>/dev/null
if test $? -ne 0 ; then
break
fi
sleep $i
done
cat ${ERRFILE}
rm -f ${ERRFILE} >/dev/null 2>&1
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl unload slot=$slot drv=$drive"
fi
exit $rtn
;;
load)
make_err_file
for i in 1 2 3 4 5 ; do
debug $idbglvl "Doing mtx -f $ctl load slot=$slot drv=$drive"
${MTX} -f $ctl load $slot $drive 2>${ERRFILE}
rtn=$?
if test $rtn -eq 0 ; then
break
fi
grep "Error Code=" ${ERRFILE} 2>/dev/null 1>/dev/null
if test $? -ne 0 ; then
break
fi
sleep $i
done
if test ${load_sleep} -ne 0 ; then
sleep ${load_sleep}
fi
wait_for_drive $device
cat ${ERRFILE}
rm -f ${ERRFILE} >/dev/null 2>&1
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl load slot=$slot drv=$drive"
fi
exit $rtn
;;
list)
make_temp_file
if test ${inventory} -ne 0 ; then
${MTX} -f $ctl inventory
fi
debug $dbglvl "Doing mtx -f $ctl list"
${MTX} -f $ctl status >${TMPFILE}
rtn=$?
if test ${vxa_packetloader} -ne 0 ; then
cat ${TMPFILE} | grep " *Storage Element [0-9]*:.*Full" | sed "s/ Storage Element //" | sed "s/Full :VolumeTag=//"
else
cat ${TMPFILE} | grep " Storage Element [0-9]*:.*Full" | awk "{print \$3 \$4}" | sed "s/Full *\(:VolumeTag=\)*//"
fi
cat ${TMPFILE} | grep "^Data Transfer Element [0-9]*:Full (Storage Element [0-9]" | awk '{printf "%s:%s\n",$7,$10}'
rm -f ${TMPFILE} >/dev/null 2>&1
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl list"
fi
exit $rtn
;;
listall)
# Drive content: D:Drive num:F:Slot loaded:Volume Name
# D:0:F:2:vol2 or D:Drive num:E
# D:1:F:42:vol42
# D:3:E
#
# Slot content:
# S:1:F:vol1 S:Slot num:F:Volume Name
# S:2:E or S:Slot num:E
# S:3:F:vol4
#
# Import/Export tray slots:
# I:10:F:vol10 I:Slot num:F:Volume Name
# I:11:E or I:Slot num:E
# I:12:F:vol40
make_temp_file
if test ${inventory} -ne 0 ; then
${MTX} -f $ctl inventory
fi
debug $dbglvl "Doing mtx -f $ctl -- to list all"
${MTX} -f $ctl status >${TMPFILE}
rtn=$?
# can be converted to awk+sed+cut, see below
perl -ne '
/Data Transfer Element (\d+):Empty/ && print "D:$1:E\n";
/Data Transfer Element (\d+):Full \(Storage Element (\d+) Loaded\)(:VolumeTag =\s*(.+))?/ && print "D:$1:F:$2:$4\n";
/Storage Element (\d+):Empty/ && print "S:$1:E\n";
/Storage Element (\d+):Full( :VolumeTag=(.+))?/ && print "S:$1:F:$3\n";
/Storage Element (\d+) IMPORT.EXPORT:Empty/ && print "I:$1:E\n";
/Storage Element (\d+) IMPORT.EXPORT:Full( :VolumeTag=(.+))?/ && print "I:$1:F:$3\n";' ${TMPFILE}
# If perl isn't installed, you can use these commands instead:
#cat ${TMPFILE} | grep "Data Transfer Element" | awk "{print \"D:\"\$4 \$7 \$9 \$10}" | sed "s/=/:/" | sed "s/Full/F:/" | sed "s/Empty/E/"
#cat ${TMPFILE} | grep -v "Data Transfer Element" | grep "Storage Element" | grep -v "IMPORT/EXPORT" | awk "{print \"S:\"\$3 \$4 \$5}" | sed "s/IMPORT\/EXPORT//" | sed "s/Full *:VolumeTag=/F:/" | sed "s/Empty/E/"
#cat ${TMPFILE} | grep -v "Data Transfer Element" | grep "Storage Element" | grep "IMPORT/EXPORT" | awk "{print \"I:\"\$3 \$4 \$5}" | sed "s/IMPORT\/EXPORT//" | sed "s/Full *:VolumeTag=/F:/" | sed "s/Empty/E/"
rm -f ${TMPFILE} >/dev/null 2>&1
exit $rtn
;;
transfer)
slotdest=$device
debug $dbglvl "Doing transfer from $slot to $slotdest"
${MTX} -f $ctl transfer $slot $slotdest
rtn=$?
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl transfer from=$slot to=$slotdest"
fi
exit $rtn
;;
loaded)
make_temp_file
debug $idbglvl "Doing mtx -f $ctl $drive -- to find what is loaded"
${MTX} -f $ctl status >${TMPFILE}
rtn=$?
cat ${TMPFILE} | grep "^Data Transfer Element $drive:Full" | awk "{print \$7}"
cat ${TMPFILE} | grep "^Data Transfer Element $drive:Empty" | awk "{print 0}"
rm -f ${TMPFILE} >/dev/null 2>&1
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl loaded drv=$drive"
fi
exit $rtn
;;
slots)
debug $dbglvl "Doing mtx -f $ctl -- to get count of slots"
${MTX} -f $ctl status | grep " *Storage Changer" | awk "{print \$5}"
rtn=$?
if test $rtn -ne 0 ; then
debug $idbglvl "FAIL: mtx -f $ctl slots"
fi
;;
esac

View File

@ -1,89 +0,0 @@
#
# Copyright (C) 2000-2015 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
#
# This file is sourced by the mtx-changer script every time it runs.
# You can put your site customization here, and when you do an
# upgrade, the process should not modify this file. Thus you
# preserve your mtx-changer configuration.
#
# We update the version when an incompatible change
# to mtx-changer or this conf file is made, such as
# adding a new required variable.
version=2
# Set to 1 if you want to do offline before unload
offline=0
# Set to amount of time in seconds to wait after an offline
offline_sleep=0
# Set to amount of time in seconds to wait after a load
load_sleep=0
# Set to 1 to do an inventory before a status. Not normally needed.
inventory=0
# If you have a VXA PacketLoader, it might display a different
# Storage Element line, so try setting the following to 1
vxa_packetloader=0
#
# Debug logging
#
# If you have multiple SD's, set this differently for each one
# so you know which message comes from which one. This can
# be any string, and will appear in each debug message just
# after the time stamp.
chgr_id=0
# Set to 1 if you want debug info written to a log
debug_log=0
# Set to debug level you want to see
# 0 is off
# 10 is important events (load, unload, loaded)
# 100 is everything
# Note debug_log must be set to 1 for anything to be generated
#
debug_level=10
# Debug levels by importance
# Normally you do not need to change this
dbglvl=100
# More important messages
idbglvl=10
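#
# Example of an enabled debug setup (illustrative values): messages whose
# level is at or below debug_level are appended to /var/lib/bacula/mtx.log,
# tagged with chgr_id:
#   debug_log=1
#   debug_level=100
#   chgr_id=sd-iron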
#
# mt status output
# SunOS No Additional Sense
# FreeBSD Current Driver State: at rest.
# Linux ONLINE
# Note Debian has a different mt than the standard Linux version.
# When no tape is in the drive it waits 2 minutes.
# When a tape is in the drive, it prints user-unfriendly output.
# Note, with Ubuntu Gutsy (8.04), there are two versions of mt,
# so we attempt to figure out which one.
#
OS=`uname`
case ${OS} in
SunOS)
ready="No Additional Sense"
;;
FreeBSD)
ready="Current Driver State: at rest."
;;
Linux)
ready="ONLINE"
if test -f /etc/debian_version ; then
mt --version|grep "mt-st" >/dev/null 2>&1
if test $? -eq 1 ; then
ready="drive status"
fi
fi
;;
esac

View File

@ -1,7 +0,0 @@
#
# See the file <bacula-source>/examples/sample-query.sql
# for some sample queries.
#
# 1
:The default file is empty, see sample-query.sql (in /opt/bacula/scripts or <bacula-source>/examples) for samples
SELECT 'See sample-query.sql (in /opt/bacula/scripts or <bacula-source>/examples) for samples' AS Info;

View File

@ -1,68 +0,0 @@
#!/bin/sh
#
# Copyright (C) 2000-2016 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
# Bacula interface to tapeinfo to get tape alerts
#
# tapealert %l (control device name)
#
# Note: you must have in your SD Device resource:
# Alert Command = /full-path/tapealert %l
# Control Device = /dev/sg0n (where this is the scsi control
# device for the device you are using).
#
# Note: to test
# 1. Uncomment the DEBUG=1 line below.
# 2. Possibly remove or add the TapeAlert[nn]: lines that you want to test.
# Note, the message following the : is not used.
# 3. Run Bacula
#
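# With DEBUG unset, a drive reporting alerts produces output like:
#   TapeAlert[3]:
#   TapeAlert[5]:
# (cut -b1-13 keeps only the alert tag; the text after the colon is dropped)
#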
#DEBUG=1
tapeinfo=`which tapeinfo`
if [ x${tapeinfo} = x ] ; then
echo "tapeinfo program not found, but is required."
exit 1
fi
if [ x$1 = x ] ; then
echo "First argument missing. Must be device control name."
exit 1
fi
if [ x$DEBUG = x ] ; then
$tapeinfo -f $1 |grep "^TapeAlert" - |cut -b1-13
exit $?
else
# For testing only
cat <<EOF |grep "^TapeAlert" - |cut -b1-13
Product Type: Tape Drive
Vendor ID: 'IBM '
Product ID: 'ULTRIUM-TD6 '
Revision: 'G350'
Attached Changer API: No
SerialNumber: 'F3A2930090'
TapeAlert[3]: Hard Error: Uncorrectable read/write error.
TapeAlert[5]: Read Failure: Tape faulty or tape drive broken.
TapeAlert[39]: Undefined.
MinBlock: 1
MaxBlock: 8388608
SCSI ID: 9
SCSI LUN: 0
Ready: yes
BufferedMode: yes
Medium Type: 0x58
Density Code: 0x58
BlockSize: 0
DataCompEnabled: yes
DataCompCapable: yes
DataDeCompEnabled: yes
CompType: 0xff
DeCompType: 0xff
EOF
fi

View File

@ -1,19 +1,7 @@
@/etc/bacula/clients/director-client.conf
@/etc/bacula/clients/zinc-client.conf
@/etc/bacula/users/caleb.user.conf
@/etc/bacula/users/evelynn.user.conf
@/etc/bacula/users/mark.user.conf
@/etc/bacula/users/joe.user.conf
@/etc/bacula/users/michael.user.conf
@/etc/bacula/users/matthew.user.conf
@/etc/bacula/users/chris.user.conf
@/etc/bacula/users/christina.user.conf
@/etc/bacula/users/julio.user.conf
@/etc/bacula/users/aidan.user.conf
@/etc/bacula/users/firebn.user.conf
@/etc/bacula/users/travis.user.conf
@/etc/bacula/users/zach.user.conf
@|"/bin/sh -c 'ls /etc/bacula/users/*.conf >/dev/null 2>&1 && cat /etc/bacula/users/*.conf'"
@/etc/bacula/other/system.conf
@/etc/bacula/other/archive.conf

View File

@ -63,3 +63,20 @@ FileSet {
File = "/mnt/Git/"
}
}
Job { # Backup Everything in YouTube
Name = "Backup-YouTube"
JobDefs = "Default-System-Job"
FileSet = "YouTube"
}
# YouTube storage location on zinc
FileSet {
Name = "YouTube"
Include {
Options {
signature = MD5
}
File = "/mnt/YouTube/"
}
}

View File

Binary image file. Before: 468 KiB  |  After: 468 KiB

27
users/dawn.user.conf Normal file
View File

@ -0,0 +1,27 @@
# Backup Mom
Job {
Name = "Backup-Dawn"
JobDefs = "Default-User-Job"
FileSet = "Dawn Backup"
Messages = MomMail
}
FileSet { # Where mom's data is
Name = "Dawn Backup"
Include {
Options {
signature = MD5
compression=GZIP
}
File = "/mnt/NextCloud/data/dawn/"
}
}
messages { # Send mail to me and mom
name = MomMail
mail = dawnsedutto@gmail.com = all, !skipped
mail = kenwood364@gmail.com = all, !skipped
console = all, !skipped, !saved
catalog = all
}

27
users/jason.user.conf Normal file
View File

@ -0,0 +1,27 @@
# Backup Jason
Job {
Name = "Backup-Jason"
JobDefs = "Default-User-Job"
FileSet = "Jason Backup"
Messages = JasonMail
}
FileSet { # Where jason's data is
Name = "Jason Backup"
Include {
Options {
signature = MD5
compression=GZIP
}
File = "/mnt/NextCloud/data/jason/"
}
}
messages { # Send mail to me and jason
name = JasonMail
mail = 346wizard@gmail.com = all, !skipped
mail = kenwood364@gmail.com = all, !skipped
console = all, !skipped, !saved
catalog = all
}