Commit 041b5f88 authored by guyzmo, committed by Joshua Tauberer

Support for rsync+ssh backup target (#678)

* Added support for backup to a remote server using rsync

* Updated the web interface to get the target details from the user
* Added a way to list files on the remote server

It does not use the “username” field of the YAML configuration
file, to minimise the number of patches needed; the username is
instead stored within the rsync URL.
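
For illustration, a backup target saved by the web UI ends up looking like
this (user, host, and path below are placeholders):

    rsync://backupuser@backup.example.com//backups/box.example.com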
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Added SSH key generation for the root user at installation time.
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Removed stale blank lines and fixed a typo
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Fixed the backup-location lines by switching them from an id to a class

* Various web UI fixes

- fixed the user field being shadowed;
- fixed the settings-reading comparison;
- fixed the forgotten min-age field.
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Added display of the SSH public key to the web interface UI
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>
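
The key shown there is the one to authorize on the backup host: append it to the
target user's ~/.ssh/authorized_keys. One way to do that, assuming password login
to the backup host still works (user and host below are placeholders):

    ssh-copy-id -i /root/.ssh/id_rsa_miab.pub backupuser@backup.example.com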

* Removed trailing spaces.
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Fixed the extraneous environment
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Updated key setup

- made the key smaller in bits, but stronger (using the -a option; see the
  command sketch below),
- made ssh-keygen run in the background using nohup,
- added an independent key file, id_rsa_miab,
- added ssh-options to all duplicity calls so they use the id_rsa_miab key file,
- changed the path used to display the public key.
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>
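
For reference, the key generation boils down to the same command the setup
script below adds:

    ssh-keygen -t rsa -b 2048 -a 100 -f /root/.ssh/id_rsa_miab -N '' -q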

* Added rsync options for SSH identity support
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Removed strict host key checking for all backup operations
Signed-off-by: Bernard `Guyzmo` Pratz <guyzmo+github@m0g.net>

* Removed nohup from ssh-keygen so errors aren't hidden. Also, only generate a key if none exists yet.

* Added a trailing slash when checking a remote backup. Also check whether we can actually read the remote size.
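
The remote check effectively runs an rsync listing along these lines (a sketch
assembled from the options added in the diff; user, host, and path are placeholders):

    rsync -e "/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes" \
        --list-only -r backupuser@backup.example.com:/backups/box.example.com/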

* Factored out the repeated rsync/ssh options (see the quoted list below)

cf https://github.com/mail-in-a-box/mailinabox/pull/678#discussion_r81478919
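
The shared options now live in a single list, quoted here from the diff below,
which is appended to every duplicity invocation:

    rsync_ssh_options = [
        "--ssh-options='-i /root/.ssh/id_rsa_miab'",
        "--rsync-options=-e \"/usr/bin/ssh -oStrictHostKeyChecking=no -oBatchMode=yes -p 22 -i /root/.ssh/id_rsa_miab\"",
    ]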

* Updated the SSH key creation message

https://github.com/mail-in-a-box/mailinabox/pull/678#discussion_r81478886
parent 3b78a8d9
@@ -30,6 +30,11 @@ def backup_status(env):
backups = { }
backup_cache_dir = os.path.join(backup_root, 'cache')
rsync_ssh_options = [
"--ssh-options='-i /root/.ssh/id_rsa_miab'",
"--rsync-options=-e \"/usr/bin/ssh -oStrictHostKeyChecking=no -oBatchMode=yes -p 22 -i /root/.ssh/id_rsa_miab\"",
]
def reldate(date, ref, clip):
if ref < date: return clip
rd = dateutil.relativedelta.relativedelta(ref, date)
@@ -52,6 +57,7 @@ def backup_status(env):
"size": 0, # collection-status doesn't give us the size
"volumes": keys[2], # number of archive volumes for this backup (not really helpful)
}
code, collection_status = shell('check_output', [
"/usr/bin/duplicity",
"collection-status",
@@ -59,7 +65,7 @@ def backup_status(env):
"--gpg-options", "--cipher-algo=AES256",
"--log-fd", "1",
config["target"],
] + rsync_ssh_options,
get_env(env),
trap=True)
if code != 0:
@@ -177,24 +183,24 @@ def get_passphrase(env):
with open(os.path.join(backup_root, 'secret_key.txt')) as f:
passphrase = f.readline().strip()
if len(passphrase) < 43: raise Exception("secret_key.txt's first line is too short!")
return passphrase
def get_env(env):
config = get_backup_config(env)
env = { "PASSPHRASE" : get_passphrase(env) }
if get_target_type(config) == 's3':
env["AWS_ACCESS_KEY_ID"] = config["target_user"]
env["AWS_SECRET_ACCESS_KEY"] = config["target_pass"]
return env
def get_target_type(config):
protocol = config["target"].split(":")[0]
return protocol
def perform_backup(full_backup):
env = load_environment()
@@ -204,7 +210,7 @@ def perform_backup(full_backup):
backup_cache_dir = os.path.join(backup_root, 'cache')
backup_dir = os.path.join(backup_root, 'encrypted')
# Are backups disabled?
if config["target"] == "off":
return
@@ -283,7 +289,7 @@ def perform_backup(full_backup):
env["STORAGE_ROOT"],
config["target"],
"--allow-source-mismatch"
] + rsync_ssh_options,
get_env(env))
finally:
# Start services again.
@@ -305,7 +311,7 @@ def perform_backup(full_backup):
"--archive-dir", backup_cache_dir,
"--force",
config["target"]
] + rsync_ssh_options,
get_env(env))
# From duplicity's manual:
@@ -320,7 +326,7 @@ def perform_backup(full_backup):
"--archive-dir", backup_cache_dir,
"--force",
config["target"]
] + rsync_ssh_options,
get_env(env))
# Change ownership of backups to the user-data user, so that the after-backup
@@ -359,7 +365,7 @@ def run_duplicity_verification():
"--exclude", backup_root,
config["target"],
env["STORAGE_ROOT"],
] + rsync_ssh_options, get_env(env))
def run_duplicity_restore(args):
env = load_environment()
@@ -370,7 +376,7 @@ def run_duplicity_restore(args):
"restore",
"--archive-dir", backup_cache_dir,
config["target"],
] + rsync_ssh_options + args,
get_env(env))
def list_target_files(config):
@@ -383,6 +389,34 @@ def list_target_files(config):
if p.scheme == "file":
return [(fn, os.path.getsize(os.path.join(p.path, fn))) for fn in os.listdir(p.path)]
elif p.scheme == "rsync":
rsync_fn_size_re = re.compile(r'.* ([^ ]*) [^ ]* [^ ]* (.*)')
rsync_target = '{host}:{path}'
_, target_host, target_path = config['target'].split('//')
target_path = '/' + target_path
if not target_path.endswith('/'):
target_path += '/'
rsync_command = [ 'rsync',
'-e',
'/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
'--list-only',
'-r',
rsync_target.format(
host=target_host,
path=target_path)
]
code, listing = shell('check_output', rsync_command, trap=True)
if code == 0:
for l in listing.split('\n'):
match = rsync_fn_size_re.match(l)
if match:
yield (match.groups()[1], int(match.groups()[0].replace(',','')))
else:
raise ValueError("Connection to rsync host failed")
elif p.scheme == "s3": elif p.scheme == "s3":
# match to a Region # match to a Region
fix_boto() # must call prior to importing boto fix_boto() # must call prior to importing boto
...@@ -425,7 +459,7 @@ def list_target_files(config): ...@@ -425,7 +459,7 @@ def list_target_files(config):
def backup_set_custom(env, target, target_user, target_pass, min_age): def backup_set_custom(env, target, target_user, target_pass, min_age):
config = get_backup_config(env, for_save=True) config = get_backup_config(env, for_save=True)
# min_age must be an int # min_age must be an int
if isinstance(min_age, str): if isinstance(min_age, str):
min_age = int(min_age) min_age = int(min_age)
...@@ -443,11 +477,11 @@ def backup_set_custom(env, target, target_user, target_pass, min_age): ...@@ -443,11 +477,11 @@ def backup_set_custom(env, target, target_user, target_pass, min_age):
list_target_files(config) list_target_files(config)
except ValueError as e: except ValueError as e:
return str(e) return str(e)
write_backup_config(env, config) write_backup_config(env, config)
return "OK" return "OK"
def get_backup_config(env, for_save=False, for_ui=False): def get_backup_config(env, for_save=False, for_ui=False):
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup') backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
...@@ -482,6 +516,9 @@ def get_backup_config(env, for_save=False, for_ui=False): ...@@ -482,6 +516,9 @@ def get_backup_config(env, for_save=False, for_ui=False):
if config["target"] == "local": if config["target"] == "local":
# Expand to the full URL. # Expand to the full URL.
config["target"] = "file://" + config["file_target_directory"] config["target"] = "file://" + config["file_target_directory"]
ssh_pub_key = os.path.join('/root', '.ssh', 'id_rsa_miab.pub')
if os.path.exists(ssh_pub_key):
config["ssh_pub_key"] = open(ssh_pub_key, 'r').read()
return config
...
@@ -16,16 +16,60 @@
<select class="form-control" rows="1" id="backup-target-type" onchange="toggle_form()">
<option value="off">Nowhere (Disable Backups)</option>
<option value="local">{{hostname}}</option>
<option value="rsync">rsync</option>
<option value="s3">Amazon S3</option> <option value="s3">Amazon S3</option>
</select> </select>
</div> </div>
</div> </div>
<!-- LOCAL BACKUP -->
<div class="form-group backup-target-local"> <div class="form-group backup-target-local">
<div class="col-sm-10 col-sm-offset-2"> <div class="col-sm-10 col-sm-offset-2">
<p>Backups are stored on this machine&rsquo;s own hard disk. You are responsible for periodically using SFTP (FTP over SSH) to copy the backup files from <tt id="backup-location"></tt> to a safe location. These files are encrypted, so they are safe to store anywhere.</p> <p>Backups are stored on this machine&rsquo;s own hard disk. You are responsible for periodically using SFTP (FTP over SSH) to copy the backup files from <tt class="backup-location"></tt> to a safe location. These files are encrypted, so they are safe to store anywhere.</p>
<p>Separately copy the encryption password from <tt class="backup-encpassword-file"></tt> to a safe and secure location. You will need this file to decrypt backup files.</p> <p>Separately copy the encryption password from <tt class="backup-encpassword-file"></tt> to a safe and secure location. You will need this file to decrypt backup files.</p>
</div> </div>
</div> </div>
<!-- RSYNC BACKUP -->
<div class="form-group backup-target-rsync">
<div class="col-sm-10 col-sm-offset-2">
<p>Backups are synced to a remote machine using rsync over SSH, with local
copies in <tt class="backup-location"></tt>. These files are encrypted, so
they are safe to store anywhere.</p>
<p>Separately copy the encryption password from <tt class="backup-encpassword-file"></tt>
to a safe and secure location. You will need this file to decrypt backup files.</p>
</div>
</div>
<div class="form-group backup-target-rsync">
<label for="backup-target-rsync-host" class="col-sm-2 control-label">Hostname</label>
<div class="col-sm-8">
<input type="text" placeholder="hostname.local" class="form-control" rows="1" id="backup-target-rsync-host">
</div>
</div>
<div class="form-group backup-target-rsync">
<label for="backup-target-rsync-path" class="col-sm-2 control-label">Path</label>
<div class="col-sm-8">
<input type="text" placeholder="/backups/{{hostname}}" class="form-control" rows="1" id="backup-target-rsync-path">
</div>
</div>
<div class="form-group backup-target-rsync">
<label for="backup-target-rsync-user" class="col-sm-2 control-label">Username</label>
<div class="col-sm-8">
<input type="text" class="form-control" rows="1" id="backup-target-rsync-user">
</div>
</div>
<div class="form-group backup-target-rsync">
<label for="ssh-pub-key" class="col-sm-2 control-label">Public SSH Key</label>
<div class="col-sm-8">
<input type="text" class="form-control" rows="1" id="ssh-pub-key" readonly>
<div class="small" style="margin-top: 2px">
Copy the public SSH key above and paste it into the <tt>~/.ssh/authorized_keys</tt>
file of the target user on the backup server specified above. This enables secure,
passwordless authentication from your Mail-in-a-Box server to your backup server.
</div>
</div>
</div>
<!-- S3 BACKUP -->
<div class="form-group backup-target-s3"> <div class="form-group backup-target-s3">
<div class="col-sm-10 col-sm-offset-2"> <div class="col-sm-10 col-sm-offset-2">
<p>Backups are stored in an Amazon Web Services S3 bucket. You must have an AWS account already.</p> <p>Backups are stored in an Amazon Web Services S3 bucket. You must have an AWS account already.</p>
...@@ -60,7 +104,8 @@ ...@@ -60,7 +104,8 @@
<input type="text" class="form-control" rows="1" id="backup-target-pass"> <input type="text" class="form-control" rows="1" id="backup-target-pass">
</div> </div>
</div> </div>
<div class="form-group backup-target-local backup-target-s3"> <!-- Common -->
<div class="form-group backup-target-local backup-target-rsync backup-target-s3">
<label for="min-age" class="col-sm-2 control-label">Days:</label> <label for="min-age" class="col-sm-2 control-label">Days:</label>
<div class="col-sm-8"> <div class="col-sm-8">
<input type="number" class="form-control" rows="1" id="min-age"> <input type="number" class="form-control" rows="1" id="min-age">
...@@ -92,7 +137,7 @@ ...@@ -92,7 +137,7 @@
function toggle_form() { function toggle_form() {
var target_type = $("#backup-target-type").val(); var target_type = $("#backup-target-type").val();
$(".backup-target-local, .backup-target-s3").hide(); $(".backup-target-local, .backup-target-rsync, .backup-target-s3").hide();
$(".backup-target-" + target_type).show(); $(".backup-target-" + target_type).show();
} }
@@ -114,7 +159,7 @@ function nice_size(bytes) {
function show_system_backup() {
show_custom_backup()
$('#backup-status tbody').html("<tr><td colspan='2' class='text-muted'>Loading...</td></tr>")
api(
"/system/backup/status",
@@ -160,28 +205,37 @@ function show_system_backup() {
}
function show_custom_backup() {
$(".backup-target-local, .backup-target-rsync, .backup-target-s3").hide();
api(
"/system/backup/config",
"GET",
{ },
function(r) {
$("#backup-target-user").val(r.target_user);
$("#backup-target-pass").val(r.target_pass);
$("#min-age").val(r.min_age_in_days);
$(".backup-location").text(r.file_target_directory);
$(".backup-encpassword-file").text(r.enc_pw_file);
$("#ssh-pub-key").val(r.ssh_pub_key);
if (r.target == "file://" + r.file_target_directory) {
$("#backup-target-type").val("local");
} else if (r.target == "off") {
$("#backup-target-type").val("off");
} else if (r.target.substring(0, 8) == "rsync://") {
$("#backup-target-type").val("rsync");
var path = r.target.substring(8).split('//');
var [ user, host ] = path.shift().split('@');
$("#backup-target-rsync-user").val(user);
$("#backup-target-rsync-host").val(host);
$("#backup-target-rsync-path").val('/'+path);
} else if (r.target.substring(0, 5) == "s3://") {
$("#backup-target-type").val("s3");
var hostpath = r.target.substring(5).split('/');
var host = hostpath.shift();
$("#backup-target-s3-host").val(host);
$("#backup-target-s3-path").val(hostpath.join('/'));
}
toggle_form()
})
}
@@ -190,12 +244,18 @@ function set_custom_backup() {
var target_type = $("#backup-target-type").val();
var target_user = $("#backup-target-user").val();
var target_pass = $("#backup-target-pass").val();
var target;
if (target_type == "local" || target_type == "off")
target = target_type;
else if (target_type == "s3")
target = "s3://" + $("#backup-target-s3-host").val() + "/" + $("#backup-target-s3-path").val();
else if (target_type == "rsync") {
target = "rsync://" + $("#backup-target-rsync-user").val() + "@" + $("#backup-target-rsync-host").val()
+ "/" + $("#backup-target-rsync-path").val();
target_user = '';
}
var min_age = $("#min-age").val();
api(
...
@@ -214,6 +214,12 @@ pollinate -q -r
# Between these two, we really ought to be all set.
# We need an ssh key to store backups via rsync, if it doesn't exist create one
if [ ! -f /root/.ssh/id_rsa_miab ]; then
echo 'Creating SSH key for backup…'
ssh-keygen -t rsa -b 2048 -a 100 -f /root/.ssh/id_rsa_miab -N '' -q
fi
# ### Package maintenance
#
# Allow apt to install system updates automatically every day.
...