From 911e2c66c4affdbeb576215696d1339a33010f53 Mon Sep 17 00:00:00 2001
From: Dobrica Pavlinusic
Date: Fri, 28 Jan 2011 14:59:45 +0100
Subject: [PATCH] document new archive host ASA extension

---
 README.ASA | 81 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 81 insertions(+)
 create mode 100644 README.ASA

diff --git a/README.ASA b/README.ASA
new file mode 100644
index 0000000..5134cc3
--- /dev/null
+++ b/README.ASA
@@ -0,0 +1,81 @@
+This document describes the ASA extensions for BackupPC 3.2.0.
+
+Written by Dobrica Pavlinusic, 2011-01-27
+
+Search and archive maintain their data in PostgreSQL and a full-text index. Since the
+full-text index is single-writer, requests to update it have to be serialized.
+
+This is implemented using the archive host feature, configured through the
+/etc/BackupPC/pc/_search_archive.pl file.
+
+
+You can manually trigger archiving of all pending backups using:
+
+ BackupPC_ASA_ArchiveStart _search_archive backuppc
+
+This will start the archive host _search_archive, which will run with its configuration:
+
+
+#
+# /etc/BackupPC/pc/_search_archive.pl
+#
+
+# Set this client's XferMethod to archive to make it an archive host:
+$Conf{XferMethod} = 'archive';
+
+# The path on the local file system where archives will be written:
+$Conf{ArchiveDest} = '/data/BackupPC/_search_archive';
+
+# The type and level of compression used on the archive:
+$Conf{ArchiveComp} = 'gzip';
+$Conf{CompressLevel} = 9;
+
+# Dump only incremental changes in tars, not the whole content - ASA extension
+# XXX this option must be global in /etc/BackupPC/config.pl
+$Conf{TarCreateIncremental} = 1;
+
+# Archive media size (in bytes); 4.2 GB for a DVD
+#$Conf{ArchiveMediaSize} = 4200 * 1024 * 1024;
+$Conf{ArchiveMediaSize} = 1440 * 1024; # FIXME floppy
+
+# Size of one chunk burned to the archive medium,
+# useful for transfer to smaller media or limited filesystems
+#$Conf{ArchiveChunkSize} = (2048 - 2) * 1024 * 1024; # 2 GB filesystem limit
+$Conf{ArchiveChunkSize} = 100 * 1024 * 1024; # FIXME zipdrive
+
+
+# The size in megabytes at which to split the archive into parts.
+# This is useful where the file size of the archive might exceed the
+# capacity of the removable media. For example, specify 700 if you are using CDs.
+#$Conf{ArchiveSplit} = 650;
+$Conf{ArchiveSplit} = 300 * 1024; # FIXME small testing chunks
+
+
+# The amount of parity data to create for the archive using the par2 utility.
+# In some cases, corrupted archives can be recovered from parity data.
+$Conf{ArchivePar} = 0;
+$Conf{ParPath} = undef;
+
+
+# Use parallel gzip (speedup on multi-core machines)
+$Conf{GzipPath} = '/usr/bin/pigz';
+
+
+# The full command to run to create archives:
+$Conf{ArchiveClientCmd} = '$Installdir/bin/BackupPC_archiveHost'
+. ' $tarCreatePath $splitpath $parpath $host $backupnumber'
+. ' $compression $compext $splitsize $archiveloc $parfile *';
+
+# The archive host provides serialization, so we can safely update the full-text index
+$Conf{ArchivePreUserCmd} = '/srv/BackupPC/bin/BackupPC_ASA_SearchUpdate -h$HostList';
+
+$Conf{Md5sumPath} = '/usr/bin/md5sum';
+
+# After archives are created, pull data back into the database - ASA extension
+$Conf{ArchivePostUserCmd} = '/srv/BackupPC/bin/BackupPC_ASA_PostArchive_Update -h$HostList -n$BackupList';
+
+# Logging verbosity:
+$Conf{XferLogLevel} = 1;
+
+
+
-- 
2.20.1
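
The BackupPC_ASA_ArchiveStart call shown in the README can also be run from cron so that
pending archive requests are processed automatically. A minimal sketch, assuming the ASA
tools are installed under /srv/BackupPC/bin and BackupPC runs as the backuppc user (both
are assumptions, adjust them to your installation):

 # /etc/cron.d/backuppc-asa (sketch): process pending archive requests every night at 03:30
 30 3 * * *  backuppc  /srv/BackupPC/bin/BackupPC_ASA_ArchiveStart _search_archive backuppc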
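
To check what actually ended up in $Conf{ArchiveDest}, the split pieces of one archive can
be concatenated and listed with tar. The host name, backup number and suffixes below are
hypothetical; check /data/BackupPC/_search_archive for the real file names, and keep in
mind that with $Conf{TarCreateIncremental} enabled each tar holds only that backup's
changes:

 cd /data/BackupPC/_search_archive
 cat somehost.123.tar.gz.* | tar -tzf - | less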