import paramiko
import sys
import json
import logging
import unicodedata

logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    level=logging.INFO,
    datefmt='%Y-%m-%d %H:%M:%S'
)


def create_remote(config, settings):
    """Instantiate the backend named by the remote config's remote_type."""
    type_r = config["remote_type"]
    if type_r == "sftp":
        import remote_sftp
        remote = remote_sftp.sftp_remote(config, settings)
    elif type_r == "local_dir":
        import remote_local_dir
        remote = remote_local_dir.local_dir_remote(config)
    else:
        logging.warning('unknown remote_type: %s', type_r)
        sys.exit(1)
    return remote


def distribute_files(path_list, dest):
    """Upload every .dem file in path_list to the destination remote."""
    for pathfile in path_list:
        if not str(pathfile).endswith('.dem'):
            continue
        logging.info('uploading demo: %s', pathfile)
        if not dest.put(pathfile, dest.path):
            logging.warning('failed putting file: %s', pathfile)
            sys.exit(1)


def load_config(config_file):
    """Read the JSON config file and return its remotes, jobs and settings."""
    try:
        with open(config_file) as infile:
            try:
                json_dict = json.load(infile)
                return json_dict["remotes"], json_dict["jobs"], json_dict["settings"]
            except json.JSONDecodeError:
                logging.warning('exception caught:', exc_info=True)
                sys.exit(1)
    except FileNotFoundError:
        logging.warning('exception caught:', exc_info=True)
        sys.exit(1)


def main():
    # The config path defaults to config.json and can be overridden by the
    # first command line argument.
    config_file = 'config.json'
    if sys.argv[1:]:
        config_file = sys.argv[1]

    remotes, jobs, settings = load_config(config_file)

    for job in jobs:
        src = create_remote(remotes[job["src"]], settings)
        dest = create_remote(remotes[job["dest"]], settings)

        source_files = src.list_dir()
        distribute_files(source_files, dest)

        # Clean up after the transfer: drop the local copies via the source
        # remote, then the remote entries via the destination remote.
        for pathfile in source_files:
            src.delete_local(pathfile)
        dest.delete_remote(source_files)

        logging.info('finished job: %s', job)


if __name__ == '__main__':
    main()
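# ---------------------------------------------------------------------------
# Example inputs (a sketch, not used at runtime).
#
# Based on the keys accessed above, config.json is expected to have three
# top-level sections: "remotes", "jobs" and "settings". Each job names a
# source and a destination remote, and each remote declares a "remote_type"
# of either "sftp" or "local_dir". The remote names below are placeholders,
# and any further per-remote keys (host, credentials, paths, ...) are
# assumptions that depend on what remote_sftp.sftp_remote and
# remote_local_dir.local_dir_remote actually read:
#
#   {
#       "remotes": {
#           "gameserver": {"remote_type": "sftp"},
#           "archive": {"remote_type": "local_dir"}
#       },
#       "jobs": [
#           {"src": "gameserver", "dest": "archive"}
#       ],
#       "settings": {}
#   }
#
# Likewise, judging only by the calls made in main() and distribute_files(),
# a remote object is assumed to expose at least a `path` attribute plus
# list_dir(), put(local_path, remote_path) returning a truthy value on
# success, delete_local(path) and delete_remote(paths).
# ---------------------------------------------------------------------------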