#!/usr/bin/python3

#
# babackup
#
# Copyright (C) 2024-2025 by John Heidemann <johnh@isi.edu>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#

# pylint: disable=line-too-long, trailing-whitespace, trailing-newlines, no-else-return
 
import argparse
import sys
import os
import os.path
import platform
import subprocess
import io
import tempfile
import datetime
import time
import logging
import re
import hashlib
import atexit
import base64
import psutil
# from systemd import journal
import pdb
# pdb.set_trace()

import yaml


def subprocess_run_capture_output(cmd):
    """backwards compatible subprocess.run for capture_output (new in 3.7)

    CMD is an argv list.  Output is captured as utf-8 text in
    result.stdout/result.stderr; callers check result.returncode
    themselves (check=False)."""
    # sys.version_info compares correctly as a tuple; the old
    # platform.python_version_tuple() string parse required major == 3
    # exactly and so would wrongly take the legacy path on any later major.
    if sys.version_info >= (3, 7):
        return subprocess.run(cmd, capture_output = True, encoding = 'utf-8', check = False)
    # pre-3.7: capture_output did not exist, wire up the pipes by hand
    return subprocess.run(cmd, encoding = 'utf-8', stdout = subprocess.PIPE, stderr = subprocess.PIPE, check = False)


class Program:
    __version__ = '1.41'
    
    def __init__(self):
        """babackup: client-side backup to drive rsync"""
        self.parse_args()
        # dispatch on the action derived from the arguments
        handlers = {
            'reconfigure': 'reconfigure_old_backups',
            'new': 'configure_new_backup',
            'run': 'run_backups',
        }
        method_name = handlers.get(self.action)
        if method_name is None:
            sys.exit(f"babackup: unknown action {self.action}")
        getattr(self, method_name)()


    def verbose_log(self, s, verbosity = 1):
        """our common logging function, both to stdout and the log"""
        if self.verbose >= verbosity:
            print(s)
        if verbosity == 1:
            logging.info(s)
        elif verbosity >= 2:
            logging.debug(s)


    def configure_logging(self):
        """set up formal logging, as given in the config"""
        logging_path = None
        logging_conf = self.conf.get('logging')
        if logging_conf is not None and logging_conf.get("filename") is not None:
            logging_path = self.conf['logging']['filename']
        if logging_path is None:
            logging_path = self.var_dir + "/client.log"
        logging_dirname = os.path.dirname(logging_path)
        if not os.path.isdir(logging_dirname):
            os.makedirs(logging_dirname)
        logging.basicConfig(filename  = logging_path, level = 'INFO', format="%(asctime)s: %(message)s", datefmt='%Y-%m-%d %H:%M:%S')


    def parse_args(self):
        """handle arguments

        Parses the command line, copies the options onto self attributes,
        and derives self.action ('run', 'new', or 'reconfigure').
        Returns the parsed argparse namespace."""
        parser = argparse.ArgumentParser(description = 'backup things via rsync to a remote server from a client', epilog="""
babackup

Primary use case:

        babackup

will read the configuration file and backup each memorized tree.

To select a specific tree, use -N name.

To begin backing up a new tree, do:

        babackup --new-path=/local/path/to/tree/root/ --new-server=user@server:server/full/or/user/path/to/destination

which will generate a new ssh key and show the command to run on the server
to set it up.

The --new-path follows rsync's path rules,
so /new/path (no trailing slash) backs up path and its children
(if path is a directory),
and /new/path/ (trailing slash) backs up the contents of path.
(Also, absence of a leading / makes paths relative to
where babackup is run from, which would be ones home directory
if run from cron.)

One can have multiple --new-paths to backup multiple directories.
In that case they probably should not have trailing slashes.

Babackup avoids concurrent runs on backups with the name name.
However, it will override apparently stale runs, or force it with -f -f.

""")
        # see https://docs.python.org/2/library/argparse.html
        #  ArgumentParser.add_argument(name or flags...[, action][, nargs][, const][, default][, type][, choices][, required][, help][, metavar][, dest])

        #  parser.add_argument('--focus', help='focus on a given TARGET', choices=['us', 'nynj', 'coverage'], default='us')
        #  parser.add_argument('--output', '-o', help='output FILE')
        #  parser.add_argument('--duty-cycle', help='duty cycle (a float)', type=float)
        #  parser.add_argument('--type', '-t', choices=['pdf', 'png'], help='type of output (pdf or png)', default = 'pdf')
        #  parser.add_argument('--day', type=int, help='day to plot', default = None)
        parser.add_argument('--name', '-N', help='use backup NAME, or define new backup NAME')
        parser.add_argument('--conf', '-c', help='use configuration FILE.yaml (default: ~/.config/babackup/client.yaml or /etc/babackup/client.yaml)', default = None)
        parser.add_argument('--new-path', help='create a new backup configuration for PATH', action='append')
        parser.add_argument('--new-mode', help='a new backup should use mode rrsync or ssh', default='unspecified')
        parser.add_argument('--new-server', help='a new backup will go to USER@SERVER:PATH')
        parser.add_argument('--new-secondary-path', help='a PATH (implicitly on the server) for a secondary backup', action='append')
        parser.add_argument('--new-interval', help='the desired minimum backup interval, in seconds (in minutes, hours or days with m, h d)')
        parser.add_argument('--new-exclude-from', help='a new backup will exclude from FILE')
        parser.add_argument('--new-filter', help='a new backup will use the following FILTER (suggest "merge /path/to/file.filter")')
        parser.add_argument('--new-keyfile', help='a new backup will use this ssh keyfile (default: generate one)')
        parser.add_argument('--new-pass', help='the passphrase for the keyfile (default: generate one)')
        parser.add_argument('--new-relative', help='use --relative in rsync, presereving the full path on the server (defaults on with mulitple --new-path) ', action='store_true', default=None)
        parser.add_argument('--new-condition-v4router', help='this backup runs conditionally on the router (an IPv4 address) being present')
        parser.add_argument('--new-condition-v6router', help='this backup runs conditionally on the router (an IPv6 address) being present')
        parser.add_argument('--reconfigure-old', help='reconfigure the backup given by name, from the .config', action='store_true', default=False)
        parser.add_argument('--force', '-f', help='force a backup run, ignoring any minimum interval', action='count', default=0)
        parser.add_argument('--automatic', help='automate any setup (cron, ssh keys) for a new backup', action='store_true', default=True)
        parser.add_argument('--status', help='show status', action='store_true', default=False)
        parser.add_argument('--debug', '-d', help='debugging mode', action='store_true', default=False)
        parser.add_argument('--verbose', '-v', action='count', default=0)
        args = parser.parse_args()
        # derive the action: default is 'run'; --new-path implies 'new',
        # and --reconfigure-old overrides both
        self.action = 'run'
        if args.new_path is not None:
            self.action = 'new'
        if args.reconfigure_old:
            self.action = 'reconfigure'
        # copy the options onto self for the rest of the program
        self.debug = args.debug
        self.verbose = args.verbose
        self.conf_path = args.conf
        self.new_path = args.new_path
        self.new_userserverpath = args.new_server
        self.new_secondary_path = args.new_secondary_path
        self.new_interval = args.new_interval
        self.name = args.name
        self.force = args.force
        self.status = args.status
        self.new_exclude_from = args.new_exclude_from
        self.new_filter = args.new_filter
        self.new_keyfile = args.new_keyfile
        self.new_pass = args.new_pass
        self.new_relative = args.new_relative
        self.new_condition_v4router = args.new_condition_v4router
        self.new_condition_v6router = args.new_condition_v6router
        self.new_mode = args.new_mode
        self.automatic = args.automatic
        # lazily-created temp dir (see write_temp_file)
        self.temp_dir_td = None
        # in-progress backups, name -> atexit registration
        self.running = {}
        # --status implies debug (do no real work) and extra verbosity
        if self.status:
            self.debug = True
            if self.verbose < 2:
                self.verbose = 2
        return args

    
    def read_remember(self):
        """read our other config file remembering when we backed stuff up"""
        #
        # and load our memory
        #
        self.remember_path = f"{self.var_dir}/client_remember.yaml"
        try:
            with open(self.remember_path, 'r', encoding = 'utf-8') as remember_stream:
                self.remember = yaml.safe_load(remember_stream)
        except IOError:
            self.remember = {}
        if self.remember.get('backup_times') is None:
            self.remember['backup_times'] = {}
        self.remember_changed = False


    def write_remember(self):
        """write a changed remember file in the var_dir"""
        if not self.remember_changed:
            return
        if not os.path.isdir(self.var_dir):
            os.makedirs(self.var_dir, mode=0o755)
        with open(self.remember_path, 'w+', encoding='utf-8') as remember_stream:
            yaml.dump(self.remember, remember_stream)
        self.remember_changed = False


    def read_conf(self):
        """figure out what configuration file we're using, then read and return it
Also sets up logging."""
        # where?
        if self.conf_path is None:
            self.conf_dir = "/etc/babackup"
            self.var_dir = "/var/lib/babackup"
            if os.getuid() != 0:
                self.conf_dir = self.var_dir = os.path.expanduser("~") + "/.config/babackup"
            else:
                # disambiguate
                pass
        else:
            self.conf_dir = self.var_dir = os.path.dirname(self.conf_path)
        # what?
        if self.conf_path is None:
            self.conf_path = self.conf_dir + "/client.yaml"
        # read it
        try:
            with open(self.conf_path, 'r', encoding='utf-8') as conf_stream:
                self.conf = yaml.safe_load(conf_stream)
        except IOError:
            self.conf = {}
        if self.conf.get('backups') is None:
            self.conf['backups'] = []
        self.read_remember()
        # also set up logging
        self.configure_logging()


    def write_conf(self):
        """store a presumably changed client.conf"""
        if not os.path.isdir(self.conf_dir):
            os.makedirs(self.conf_dir, mode=0o755)

        def _restrictive_open(path, flags):
            # the conf may hold an encoded passphrase, so keep it private
            return os.open(path, os.O_WRONLY | os.O_CREAT, mode=0o600)

        with open(self.conf_path, 'w+', encoding='utf-8', opener=_restrictive_open) as stream:
            yaml.dump(self.conf, stream)


    def create_ssh_keyfile(self, path, comment, passwd):
        """generate a new ssh keyfile at PATH, with COMMENT and optional PASSWD, then return the new random BPASS

        PASSWD is a str (or None/'' to generate a random passphrase).
        Sets self.new_keyfile to PATH as a side effect.
        The returned "bpass" is the passphrase, base64 encoded so it's
        not in cleartext in the conf."""

        #
        # generate a pass, if necessary
        #
        if passwd is None or passwd == '':
            with open("/dev/urandom", 'rb') as ur_stream:
                ur = ur_stream.read(24)
            # decode to str so generated and user-supplied passphrases are
            # handled identically below
            # (bug fix: the generated passphrase was left as bytes, which
            # made the later base64.b64encode fail for str --new-pass input)
            passwd = base64.b64encode(ur).decode('ascii')

        # now the key
        result = subprocess.run(['/usr/bin/ssh-keygen', '-t', 'ed25519', '-C', comment, '-N', passwd, '-f', path], check = True)
        # remember what key used
        self.new_keyfile = path
        # check=True already raises on failure; this is belt-and-suspenders
        if result.returncode != 0:
            sys.exit("babackup: ssh-keygen failed with code " + str(result.returncode))
        #
        # finally compute the bpass
        # (which is just the pass-phrase, base64 encoded so it's not in cleartext in the conf)
        #
        bpass_bytes = base64.b64encode(passwd.encode('utf-8'))
        bpass = bpass_bytes.decode('utf-8')
        return bpass


    def check_crontab(self, program, location, suggestion):
        """see if the user has a cron for PROGRAM
        If not, remind them to do SUGGESTION at LOCATION
        (and, with --automatic, install SUGGESTION for the user)."""
        result = subprocess_run_capture_output(['/usr/bin/crontab', '-l'])
        show_message = False
        crontab_output = ''
        found_it = False
        if result.returncode == 1:
            # error 1 is no crontab
            show_message = True
        elif result.returncode == 0:
            with io.StringIO(result.stdout) as crontab_stream:
                for line in crontab_stream:
                    crontab_output += line
                    if line.startswith("#"):
                        continue
                    fields = line.split()
                    # an entry is 5 time fields then the command in fields[5]
                    # (bug fix: was len(fields) >= 5, which let a 5-field
                    # line raise IndexError on fields[5])
                    if len(fields) >= 6 and fields[5].endswith(program):
                        # hit, but keep going so we read the whole thing
                        found_it = True
            show_message = not found_it
        else:
            # ignore other errors; we could not read the crontab at all
            pass
        if show_message:
            print(f"To automate {program}, add this crontab entry (crontab -e)\non the {location}:\n\n\t{suggestion}\n\n")
        # only install when we know the entry is missing; installing after an
        # unreadable crontab (other errors above) could wipe existing entries
        if show_message and self.automatic:
            print("since --automatic; installing crontab entry now, for you\n")
            proc = subprocess.Popen(['/usr/bin/crontab', '-'], stdin = subprocess.PIPE, encoding='utf-8')
            proc.stdin.write(crontab_output + suggestion + "\n")
            proc.stdin.close()
            returncode = proc.wait()
            if returncode != 0:
                print(f"crontab update FAILED with error {returncode}")



            
    def check_server_known_hosts(self, userserverpath):
        """check that the server in USERSERVERPATH is in ~/.ssh/known_hosts
        If not, tell the user how to add it (or, with --automatic, add it)."""
        if userserverpath is None:
            return
        #
        # figure out the server
        # USERSERVERPATH is [user@]server:path
        # (bug fix: server and path were taken from the wrong regex groups,
        # so we keyscanned the path instead of the server)
        #
        m = re.match(r'^(?:[^:@]+@)?([^:]*):(.*)$', userserverpath)
        if m is None:
            return
        server = m.group(1)
        if not server:
            return
        #
        # and its key
        #
        result = subprocess_run_capture_output(['ssh-keyscan', server])
        if result.returncode != 0:
            print(f"Please make sure you can ssh to {server}.  (ssh-keyscan {server} failed)\n")
            return
        needed_keys = []
        ssh_keyscan_output = ""
        with io.StringIO(result.stdout) as keyscan_stream:
            for line in keyscan_stream:
                ssh_keyscan_output += line
                if line.startswith("#"):
                    continue
                # keyscan lines are "host keytype key"
                # (bug fix: was fields[2]/fields[3], an IndexError on a
                # 3-field line; we want "keytype key" to match known_hosts)
                fields = line.split()
                if len(fields) == 3:
                    needed_keys.append(f"{fields[1]} {fields[2]}")
        if len(needed_keys) == 0:
            print(f"Before running babackup you must make sure the server's host key is\nin ~/.ssh/known hosts.  Please run\n\tssh-keyscan {server}\nand put the output there.\n(Trying it now gave no output.)\n")
            return
        #
        # do we know it?
        # (bug fix: show_message previously started False, so a known_hosts
        # file that lacked the key never produced a warning)
        #
        show_message = True
        known_hosts_path = os.path.expanduser("~") + "/.ssh/known_hosts"
        if os.path.exists(known_hosts_path):
            with open(known_hosts_path, "r", encoding='utf-8') as known_hosts_stream:
                for line in known_hosts_stream:
                    if line.startswith("#"):
                        continue
                    fields = line.split()
                    if len(fields) < 2:
                        continue
                    possible_match = f"{fields[-2]} {fields[-1]}"
                    if possible_match in needed_keys:
                        show_message = False
                        break
        if show_message:
            print(f"Before running babackup, you MUST know the server's host key.\nLearn it with:\t\tssh-keyscan {server} >>~/.ssh/known_hosts\n")
            # only append when the key is actually missing
            # (bug fix: previously we appended on every --automatic run,
            # accumulating duplicate known_hosts entries)
            if self.automatic:
                print("since --automatic; installing host key now, for you\n")
                with open(known_hosts_path, "a", encoding='utf-8') as known_hosts_stream:
                    known_hosts_stream.write(ssh_keyscan_output)

    def install_client_crontab(self):
        """ensure this client has an hourly babackup crontab entry"""
        entry = "5 * * * * /usr/bin/babackup"
        self.check_crontab("babackup", "client (this computer)", entry)

    def read_public_key_first_line(self, keyfile):
        """extract the key from KEYFILE"""
        with open(keyfile, "r", encoding='utf-8') as keyfile_stream:
            lines = keyfile_stream.readlines()
            if len(lines) != 1:
                sys.exit(f"babackup: cannot parse public key in {keyfile}")
        return lines[0].rstrip()


    def configure_new_backup(self):
        """configure a new backup
Update configuration files, generate keys, say what to do on the server, etc.
        """
        self.read_conf()

        if self.new_path is None:
            sys.exit("babackup: attempt to add new backup without specifying --new-path=/client/new/path")
        if type(self.new_path) is not list:
            sys.exit("babackup: internal error, self.new_path is not a list")
            
        if self.name is None:
            m = hashlib.sha256()
            m.update(" ".join(self.new_path).encode('utf-8'))
            self.name = m.hexdigest()[0:16]
        name = self.name
        if len(list(filter(lambda backup: backup.get('name') == name, self.conf['backups']))) > 0:
            sys.exit(f"babackup: attempt to add new backup named {name} that already exists")

        mode = self.new_mode
        if not mode in ('rrsync', 'ssh', 'local'):
            sys.exit("babackup: must select --new-mode=ssh or rrsync or local for a new backup")

        if self.new_userserverpath is None:
            sys.exit("babackup: must select a destintation with  --new-server=server.example.com:/path/to/destination")
            

        # our new baby
        backup = {}
        backup["name"] = name
        backup["path"] = self.new_path
        backup["mode"] = mode

        if mode == 'rrsync':
            # because we're going to be using rrsync, only the server knows the servers_side_path
            m = re.match(r'^([^:]*:)(.*)$', self.new_userserverpath)
            userserver_only = m.group(1)
            server_side_path = m.group(2)
            if userserver_only is None or server_side_path is None:
                sys.exit(f"babackup: rrsync mode requires --new-server server:path/on/server, not '{self.new_userserverpath}'\n")
            if server_side_path[0] != '/':
                server_side_path = "~/" + server_side_path
            backup["userserverpath"] = userserver_only
            backup["server_side_path"] = server_side_path
        elif mode in ('ssh', 'local'):
            if self.new_userserverpath is None:
                sys.exit("babackup: a new backup requires some --new-server=server:path")
            server_side_path = backup["userserverpath"] = self.new_userserverpath
        else:
            sys.exit(f"babackup: interal error, bad mode {mode}")

        if self.new_secondary_path is not None:
            backup["secondary"] = self.new_secondary_path
            
        if self.new_exclude_from is not None:
            backup["exclude_from"] = self.new_exclude_from

        if self.new_filter is not None:
            backup["filter"] = self.new_filter
            if os.path.exists(backup["filter"]):
                sys.exit(f"babackup: your --new-filter is a path to a file ({backup['filter']}), but it should probably have the \"merge\" command before the filename.")


        if self.new_relative is None:
            backup['relative'] = (len(self.new_path) > 1)
        else:
            backup['relative'] = self.new_relative
            
        if self.new_interval is not None:
            m = re.match(r'^(\d+)([a-z]?)$', self.new_interval)
            if m is None:
                sys.exit(f"babackup: cannot parse --new-interval={self.new_interval}")
            v = int(m.group(1))
            if m.group(2) == 's':
                pass
            elif m.group(2) == 'm':
                v *= 60
            elif m.group(2) == 'h':
                v *= 60*60
            elif m.group(2) == 'd':
                v *= 24*60*60
            else:
                sys.exit(f"babackup: cannot parse scale on --new-interval={self.new_interval} (I accpet s/m/h/d)")
            backup["interval"] = v

        if self.new_condition_v4router is not None:
            if backup.get('condition') is None:
                backup['condition'] = {}
            backup['condition']['v4router'] = self.new_condition_v4router
        if self.new_condition_v6router is not None:
            if backup.get('condition') is None:
                backup['condition'] = {}
            backup['condition']['v6router'] = self.new_condition_v6router

        public_key_info = ''
        if self.new_keyfile is None and mode == 'rrsync':
            # no keyfile, so make one
            # ssh-keygen -t ed25519 -C 'for babackup-{name}' -N '' -f path
            now_isodate = datetime.date.fromtimestamp(time.time()).isoformat()
            keyfilename = f"babackup-{name}-{now_isodate}"
            keyfile_path = self.conf_dir + f"/{keyfilename}"
            if os.path.exists(keyfile_path):
                self.verbose_log(f"babackup: reusing existing ssh key in {keyfile_path}", 1)
            else:
                self.verbose_log(f"babackup: generating new public key to {keyfile_path}", 1)
                bpass = self.create_ssh_keyfile(keyfile_path, keyfilename, self.new_pass)
                backup['bpass'] = bpass
                # Note that we store bpass locally, but the user never "sees" it on the terminal.
            self.new_keyfile = keyfile_path
        if self.new_keyfile is not None:
            backup['keyfile'] = self.new_keyfile

            # and read the public side
            public_key_first_line = self.read_public_key_first_line(f"{self.new_keyfile}.pub")
            public_key_info = " --new-pub-key='" + public_key_first_line + "'"
            self.check_server_known_hosts(backup.get('userserverpath'))

        #
        # inform the user what to do on the server
        #
        print("\nTo complete configuration of babackup, run this command on the server:\n")
        secondary_option = ""
        if "secondary" in backup:
            for secondary in backup["secondary"]:
                secondary_option += f" --new-secondary-path={secondary}"
        print(f"\tbabackup_server --name={name} --new-mode={mode} --new-server-path={server_side_path}{public_key_info}{secondary_option}\n")

        self.install_client_crontab()

        self.conf['backups'].append(backup)
        if self.debug:
            return
        self.write_conf()
        
        
    def write_temp_file(self, filename, contents):
        """write CONTENTS to FILENAME in a (possibly new) temp directory"""
        if self.temp_dir_td is None:
            self.temp_dir_td = tempfile.TemporaryDirectory()
        temp_file_path = self.temp_dir_td.name + "/" + filename
        with open(temp_file_path , "w+") as tf:
            tf.write(contents)
        # let IOErrors propagate
        return temp_file_path
    
        
    def run_rsync(self, args):
        """run rsync, successfully, with ARGLINE.
We now require the user to insert the /usr/bin/rsync"""
        self.verbose_log(" ".join(args), 2)
        if self.debug:
            return
        result = subprocess.run(args, check=False)
        if result.returncode != 0:
            sys.exit("rsync with " + " ".join(args) + " failed with code " + str(result.returncode))

 
    def backup_run_complete(self, backup):
        """remove a sentinel file to indicate we're no longer running"""
        if 'sentinel_path' in backup:
            for sentinel_path in backup['sentinel_path']:
                if os.path.exists(sentinel_path):
                    os.unlink(sentinel_path)
        if not backup['name'] in self.running:
            # maybe we already aborted it
            return
        del self.running[backup['name']]


    def backup_run_abort(self, backup):
        """abort a running BACKUP that has started"""
        if not backup['name'] in self.running:
            return
        atexit.unregister(self.running[backup['name']])
        self.backup_run_complete(backup)

            
    def backup_run_running(self, backup, suffix = ""):
        """check if a BACKUP for SUFFIX is already running concurrently
(close with backup_run_abort or backup_run_complete)"""
        sanitized_name = backup['name']
        sanitized_name = re.sub(r'[^\w\s-]', '', sanitized_name).strip("-_ \t\n")
        # sigh race
        sentinel_path = f"{self.var_dir}/on{suffix}.{sanitized_name}"
        if 'sentiniel_path' not in backup:
            backup['sentinel_path'] = []
        backup['sentinel_path'].append(sentinel_path)
        if os.path.exists(sentinel_path):
            # give old program 23h to run
            if time.time() - os.path.getmtime(sentinel_path) < 23*60*60:
                return True
            #
            # Now require that the pid not be running
            # (for when you have a really long backup job going!)
            with open(sentinel_path, "r", encoding='utf-8') as sen_stream:
                for ln in sen_stream.readlines():
                    if ln.startswith("pid: "):
                        old_pid = int(ln[5:])
                        if psutil.pid_exists(old_pid):
                            self.verbose_log(f"babackup: old sentinel {sentinel_path}, but pid {old_pid} is active", 2)
                            return True
            self.verbose_log(f"babackup: overriding old sentinel {sentinel_path}", 2)
        now_str = datetime.datetime.now(tz = datetime.timezone.utc).isoformat(timespec = 'seconds')
        my_pid = os.getpid()
        with open(sentinel_path, "a+", encoding='utf-8') as sen_stream:
            sen_stream.write(f"start: {now_str}\npid: {my_pid}\n")
        self.running[backup['name']] = atexit.register(lambda: self.backup_run_complete(backup))
        return False


    def duration_to_human(self, duration):
        """convert a DURATION in seconds to a human-sensible string"""
        duration = int(duration)
        # walk the scales finest-first; stay in a unit until we would
        # show at least 2 of the next-coarser one
        scales = (
            (2*60, 1, 's'),
            (2*60*60, 60, 'm'),
            (48*60*60, 60*60, 'h'),
        )
        for limit, divisor, unit in scales:
            if duration < limit:
                return f"{duration // divisor}{unit}"
        return f"{duration // (24*60*60)}d"
    

    def command_startswith(self, command, startswith):
        """run COMMAND and see if the output starts with STARTSWITH

        Returns True when any stdout line begins with STARTSWITH,
        otherwise False.  Command failures (like a missing crontab)
        count as not-found."""
        result = subprocess_run_capture_output(command)
        if result.returncode != 0:
            # ignore errors, like no crontab
            # (bug fix: previously a bare `return` (None); give callers a
            # consistent bool like the other paths)
            return False
        with io.StringIO(result.stdout) as command_stream:
            for line in command_stream:
                if line.startswith(startswith):
                    return True
        return False


    def run_backup(self, backup):
        """run one backups with configuration BACKUP"""

        name = backup.get("name")
        if name is None:
            sys.exit("babackup: backup is missing 'name:'")
        if backup.get('path') is None:
            sys.exit(f"babackup: backup {name} is missing path")
        if type(backup['path']) != list:
            sys.exit(f"babackup: backup {name} internal error, path is not a list")
        mode = backup.get('mode', "ssh")

        #
        # are we already running it (concurrently)?
        #
        if self.backup_run_running(backup):
            if self.force <= 1:
                self.verbose_log(f"babackup: backup {name} is active; skipping", 1)
                self.backup_run_abort(backup)
                return
            self.verbose_log(f"babackup: backup {name} is active, but overriding to continue", 1)

        #
        # do we need to run it?
        #
        now_timestamp = datetime.datetime.now(datetime.timezone.utc).timestamp()
        if self.force > 0:
            self.verbose_log(f"babackup: backup {name} forced, even though it was done recently", 1)
        elif self.remember.get('backup_times') is None or self.remember['backup_times'].get(name) is None:
            self.verbose_log(f"babackup: backup {name} needs first backup", 1)
        else:
            freshness = now_timestamp - float(self.remember['backup_times'][name])
            freshness_human = self.duration_to_human(freshness)
            if freshness > backup.get("new_interval", 23*60*60):
                self.verbose_log(f"babackup: backup {name} needs to run (prior was {freshness_human} ago)", 1)
            else:
                self.verbose_log(f"babackup: backup {name} not needed yet (prior was {freshness_human} ago)", 2)
                self.backup_run_abort(backup)
                return

        #
        # do we meet the conditions to run it?
        #
        for version in ['4', '6']:
            if backup.get('condition') is not None:
                if backup['condition'].get(f"v{version}router") is not None:
                    #
                    # conditional on a router
                    #
                    required_router = backup['condition'][f"v{version}router"]
                    if not self.command_startswith(['/usr/sbin/ip', f"-{version}", 'route', 'show'], f"default via {required_router}"):
                        if self.force:
                            self.verbose_log(f"babackup: {name} overriding not at v{version}router {required_router}")
                        else:
                            self.verbose_log(f"babackup: avoiding backup {name}, not at v{version}router {required_router}")
                            self.backup_run_abort(backup)
                            return

        if self.status:
            self.backup_run_abort(backup)
            return
                    

        #
        # figure out ssh
        #
        rsync_rsh = ''
        rsync_preamble = ['/usr/bin/rsync']
        userserver_sep = ''
        server_path = '.'
            
        if backup.get('userserverpath') is not None:
            if mode == 'local':
                userserver = None
                server_path = backup['userserverpath'] + '/current'
            if mode != 'local':
                (userserver, server_path) = backup['userserverpath'].split(":")
                userserver_sep = userserver + ":"
                #(user, server) = userserver.split("@")
                rsync_rsh = "ssh"
                if backup.get('keyfile'):
                    rsync_rsh += f" -i {backup['keyfile']} "
                if backup.get('bpass'):
                    # Propagate the password, painfully.
                    # The ssh people ONLY want humans.
                    # Supposedly you can make SSH_ASKPATH a program
                    # that just prints the password, but Doesn't Work For Me.
                    # sshpass is designed for exactly this problem,
                    # AND WORKS, but is tricky to use.
                    bpass = backup['bpass']
                    # don't have to convert to bytes, since b64decode takes strings
                    passwd_bytes = base64.b64decode(bpass)
                    passwd = passwd_bytes.decode('utf-8')  # but we do have to decode bytes back to string
                    #####
                    #
                    # Here begins the new hotness: sshpass.
                    #
                    # Please note we hard-code the prompt.
                    # It must match the local ssh client.
                    #
                    # The current prompt is Fedora 39's openssh-9.3p1.
                    # CentOS8 is openssh 8.0.
                    #
                    sshpass_passwd_path = self.write_temp_file('passwd', f"{passwd}\n")
                    # with bpass, we ONLY use our password.  Defeat any agent.
                    for e in ['SSH_AUTH_SOCK', 'SSH_ASKPASS']:
                        if os.environ.get(e):
                            del os.environ[e]
                    rsync_preamble = ['/usr/bin/sshpass', '-f', sshpass_passwd_path, '-P', f"Enter passphrase for key '{backup['keyfile']}': "] + rsync_preamble
                    self.verbose_log("babackup: propagating bpass", 2)
                os.environ['RSYNC_RSH'] = rsync_rsh
            else:
                server_path = os.path.expanduser(server_path)
        if rsync_rsh is not None:
            self.verbose_log(f"babackup: RSYNC_RSH={rsync_rsh}", 2)

        #
        # we're going to begin
        #
        if self.debug:
            self.verbose_log(f"babackup: skipping backup due to debug", 1)
            self.backup_run_abort(backup)
            return
        # Sync the begin so we all know we're going.
        begin_time = datetime.datetime.now(datetime.timezone.utc)
        begin_file_path = self.write_temp_file('begin', begin_time.isoformat(timespec='minutes') + "\n")
        self.run_rsync(rsync_preamble + [begin_file_path, f"{userserver_sep}{server_path}"])
        self.verbose_log(f"babackup: {name} beginning backup", 1)

        #
        # first, create the "data" dir (so we know it exists)
        #
        data_prototype_path = f"{self.temp_dir_td.name}/data"
        if not os.path.isdir(data_prototype_path):
            os.mkdir(data_prototype_path)
        self.run_rsync(rsync_preamble + ["-r", data_prototype_path, f"{userserver_sep}{server_path}"])

        #
        # now the real work
        # 
        rsync_args = ['-aHbx']
        if backup.get('relative') is not None and backup['relative']:
            rsync_args.append('--relative')
        if backup.get('exclude_from') is not None:
            rsync_args.append("--exclude-from=" + backup['exclude_from'])
        if backup.get('filter') is not None:
            rsync_args.append("--filter=" + backup['filter'])
        # link-dest is actually a relative path to the actual destination
        if mode  == 'rrsync':
            # as a special case, if we're running under rrsync, we have to anchor the link-dest.  rrsync unanchors it for us on the server side
            rsync_args.append("--link-dest=/last/data")
        elif mode in ('ssh', 'local'):
            # not under rrsync, so link dest is above the user destination
            rsync_args.append("--link-dest=../last/data")
        else:
            sys.exit(f"babackup: unknown mode {mode}")
        # using --delete causes rsync-3.2.3 to throw the error "ERROR: rejecting excluded file-list name: .jpeganno.keywords~"
        # omitting it avoids this problem.
        # See discussion at https://askubuntu.com/questions/1458655/rsync-error-rejecting-excluded-file-list and https://github.com/WayneD/rsync/issues/375
        # (for backups to a new directory, --delete is not necessary);
        # this hack only affects a backup that overwrites another.
        # rsync_args = ['--delete']  + rsync_args
        self.run_rsync(rsync_preamble + rsync_args + backup['path'] + [f"{userserver_sep}{server_path}/data"])

        #
        # and we're done
        #
        end_time = datetime.datetime.now(datetime.timezone.utc)
        end_time_str = end_time.isoformat(timespec='minutes')
        end_time_str_as_path = end_time_str.replace(":", "_")
        end_file_path = self.write_temp_file('end', end_time_str + "\n")
        # before we're done, save a copy in a file with the name as the date, so we can remember the date
        self.run_rsync(rsync_preamble + [end_file_path, f"{userserver_sep}{server_path}/{end_time_str_as_path}"])
        # And write the committing "end"
        self.run_rsync(rsync_preamble + [end_file_path, f"{userserver_sep}{server_path}"])
        # Track track begin (not end) time, so we shouldn't progress forward by backup runtime.
        self.verbose_log(f"babackup: {name} ended backup", 1)
        self.remember['backup_times'][name] = str(begin_time.timestamp())
        self.remember_changed = True
        self.write_remember()  # sync after every backup, in case of failure

        
    def run_backups(self):
        """run all backups (or whatever was specified with -N)"""
        self.read_conf()
        for backup in self.conf['backups']:
            if (self.name is None or self.name == backup['name']) and backup.get("enabled", True):
                self.run_backup(backup)
        self.write_remember()

    def reconfigure_old_backup(self, backup):
        """show the configuration an old BACKUP, extracted from .config"""

        public_key_info = ''
        if backup['mode'] == 'rrsync':
            public_key_first_line = self.read_public_key_first_line(f"{backup['keyfile']}.pub")
            public_key_info = " --new-pub-key='" + public_key_first_line + "'"

        server_side_path = '/dev/null'
        if backup['mode'] == 'rrsync':
            server_side_path = backup['server_side_path']
        elif backup['mode'] == 'local':
            server_side_path = backup['userserverpath']
        else:
            sys.exit(f"babackup: --reconfigure-old on {backup['name']} has unknown mode: {backup['mode']}")

        secondary_option = ""
        if "secondary" in backup:
            for secondary in backup["secondary"]:
                secondary_option += f" --new-secondary-path={secondary}"
            
        print("\nTo restore service on the server, run this command on the server:\n")
        print(f"\tbabackup_server --name={backup['name']} --new-mode={backup['mode']} --new-server-path={server_side_path}{public_key_info}{secondary_option}\n")
        self.install_client_crontab()

    def reconfigure_old_backups(self):
        """show the configuration of old backups, extracted from .config"""
        self.read_conf()
        for backup in self.conf['backups']:
            if (self.name is None or self.name == backup['name']):
                self.reconfigure_old_backup(backup)


if __name__ == '__main__':
    # Program.__init__ parses arguments and dispatches the whole run,
    # so constructing the object performs all the work.
    Program()
    # Reaching here means no sys.exit() fired along the way: success.
    sys.exit(0)

