# Default configuration structure for replicate
# This file is loaded by the replicate script to provide default configuration values
# Variables from $programDefinition hashref (scriptDirectory, scriptFullPath) will be interpolated at runtime
{
  'dryrun' => 1,    # if set, will not actually run the replicate command(s)
  'verbosity' => 3, # 0 is no output, 1 is normal, >1 is for debugging
  'displayLogsOnConsole' => 1, # if set, will display logs on the current console
  'displayLogsOnTTY' => '',    # if set to a tty, will display logs there; the /dev/ prefix is optional
  'historyFile' => '/history.tsv', # tab-delimited file recording previous replication stream size estimates for each dataset/command; used for historical validation
  'runningAverageCount' => 6,      # number of previous history-file entries used when calculating the running average for size-estimate validation
  'report' => {
    'from'    => 'root@backup.example.org', # From: header on e-mail reports
    'email'   => 'user@example.com',        # who to send the report to (one address/alias only)
    'subject' => 'Replicate Report',        # base subject line of the e-mail
  },
  'source' => {             # define the source machine
    'hostname' => 'dd-125', # DNS name or IP of the machine
    'remote'   => 1,        # this machine is remote, so we will access it via ssh
  },
  'target' => {             # define the target machine
    'hostname' => 'dd-050',
    'remote'   => 0,        # this machine is local, so we'll just run commands locally
  },
  'datasets' => { # define each dataset to be replicated
    # these are global values for all datasets; a value defined in a dataset overrides them
    'recurse'  => 0,     # recurse (1) or don't recurse (0) into child datasets
    'bwlimit'  => '50M', # bandwidth limit in megabits (not bytes) for the transfer; requires the command 'pv' to be installed
    'maxDelta' => 0.25,  # maximum factor by which a new size estimate can exceed the running average of previous estimates for this dataset; for example, 0.25 allows an estimate up to 25% above the average
    'ComputerFiles' => { # our first dataset definition; you must have at least one
      'recurse' => 1,     # override global: do recurse into child datasets
      'bwlimit' => '20M', # override global: slow this transfer down to 20 Mb/s
      'source' => { # define the source for this dataset (on the source machine above)
        'pool'    => 'storage',                    # zpool we are pulling from
        'dataset' => 'files_share/computer_files', # name of the dataset, i.e. storage/files_share/computer_files
      },
      'target' => { # define where to put the stream
        'pool'    => 'backup',        # we will put it in pool 'backup'
        'dataset' => 'ComputerFiles', # in dataset ComputerFiles, i.e. backup/ComputerFiles
      },
    },
  },
}
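__END__

Usage sketch: a minimal, hypothetical illustration of how a consumer of this
file might load it and apply these settings. (Everything after __END__ is
ignored when the file is loaded, so this note does not affect the returned
hashref.) The helper names (dataset_option, estimate_ok) and the
'./replicate.conf' path are assumptions, not the replicate script's actual
API, and the check assumes maxDelta is fractional headroom over the running
average (0.25 => up to 25% above); the real script may differ.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use List::Util qw(sum);

    # 'do' compiles the file and returns its last expression -- here, the
    # config hashref. (The path is illustrative.)
    my $config = do './replicate.conf'
        or die "could not load config: $@ $!";

    # Hypothetical helper: per-dataset values override the globals that sit
    # alongside the dataset definitions inside $config->{datasets}.
    sub dataset_option {
        my ($config, $dataset, $key) = @_;
        my $ds = $config->{datasets}{$dataset};
        return exists $ds->{$key} ? $ds->{$key} : $config->{datasets}{$key};
    }

    # Hypothetical sketch of the size-estimate validation: compare a new
    # estimate against the running average of the last 'runningAverageCount'
    # history entries, allowing 'maxDelta' fractional headroom.
    sub estimate_ok {
        my ($config, $dataset, $newEstimate, @history) = @_;
        my $count = $config->{runningAverageCount};
        @history = @history[-$count .. -1] if @history > $count;
        return 1 unless @history; # nothing recorded yet; accept the estimate
        my $average  = sum(@history) / @history;
        my $maxDelta = dataset_option($config, $dataset, 'maxDelta');
        return $newEstimate <= $average * (1 + $maxDelta);
    }

    # 'ComputerFiles' overrides bwlimit (20M) but inherits maxDelta (0.25):
    printf "bwlimit: %s\n", dataset_option($config, 'ComputerFiles', 'bwlimit');
    print estimate_ok($config, 'ComputerFiles', 120, 100, 95, 105)
        ? "estimate within bounds\n" : "estimate too large\n";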