import paramiko
from paramiko.ssh_exception import SSHException, NoValidConnectionsError
import time
import hashlib
import stat
import sys
from datetime import datetime, timedelta
import os


class sftp_remote:
    def __init__(self, config, settings):
        self.config = config
        self.path = config['path']
        self.description = config['description']
        self.hostname = config['hostname']
        self.username = config['username']
        self.port = config['port']
        self.remote_type = config['remote_type']
        self.remote_attempts = settings[self.remote_type]['remote_attempts']
        self.remote_delay = settings[self.remote_type]['remote_delay']
        self.remote_error = None
        self.password = settings[self.remote_type][self.hostname][self.username]['password']
        self.ssh = None

    def connect(self):
        # open an SFTP session over a fresh transport
        self.transport = paramiko.Transport((self.hostname, int(self.port)))
        self.transport.connect(username=self.username, password=self.password)
        self.sftp = paramiko.SFTPClient.from_transport(self.transport)

    def disconnect(self):
        self.sftp.close()
        self.transport.close()

    def __close__(self):
        self.disconnect()

    def ssh_connect(self):
        self.ssh = paramiko.SSHClient()
        # I trust my VMs, so auto-accept unknown host keys
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect(self.hostname, username=self.username, password=self.password)

    def ssh_disconnect(self):
        self.ssh.close()

    def remote_channel_command(self, job, command_state):
        # archive name is suffixed with today's date, e.g. name-2024-01-31.tar.xz
        zipname = f'{job["zipname"]}-{str(datetime.now()).split(" ")[0]}.tar.xz'
        self.ssh_connect()
        channel = self.ssh.get_transport().open_session(timeout=120)
        # -T3 = 3 xz threads, --memlimit-compress caps xz at 70% of available RAM
        if command_state == 'tar_recursive':
            exec_command_state = (
                f"XZ_OPT='-T3 --memlimit-compress=70%' tar "
                f"--exclude={zipname} "
                "--exclude=/home/nonroot/backups "
                "--exclude=/home/nonroot/demos "
                "--exclude=/home/nonroot/snap "
                "--exclude=logs/ "
                "--exclude=/home/autismbot1 "
                "--exclude=/home/autismbot2 "
                "--exclude=/home/autismbot3 "
                "--exclude=/home/autismbot4 "
                "--exclude=/home/nonroot2 "
                "--exclude=/home/fastdl "
                f"-cJf {zipname} {self.path}"
            )
        elif command_state == 'mysqldump':
            # check README for my.cnf regarding how this works
            exec_command_state = 'mysqldump -u backups_db --all-databases > mysqldump.sql'
        else:  # tar_mysqldump
            exec_command_state = (
                f"XZ_OPT='-T3 --memlimit-compress=70%' tar -cJf {zipname} "
                f"/home/{self.username}/mysqldump.sql"
            )
        exec_cmd = f'cd /home/{self.username}/; {exec_command_state}'
        keyboard_interrupt = False
        try:
            channel.exec_command(exec_cmd)
            # drain the channel: the buffers fill up after ~2^21 (2,097,152) bytes
            # of unread output, which would stall exec_command
            while True:
                buf = channel.recv(1024)
                if not buf:
                    break
                # printing the output for sure causes it to run a bit slower
                # print('buffer: ', buf)
            channel.recv_exit_status()
        except KeyboardInterrupt:
            keyboard_interrupt = True
        finally:
            channel.close()
        if command_state == 'tar_mysqldump':
            # the dump has been archived, remove the plain .sql file
            file_path = f'/home/{self.username}/mysqldump.sql'
            self.connect()
            self.sftp.remove(file_path)
            self.disconnect()
        self.ssh_disconnect()
        if keyboard_interrupt:
            print("manually interrupted")
            sys.exit(1)
        return zipname

    def get_remote_files(self, job, zipname):
        total_attempts = int(self.remote_attempts)
        while total_attempts > 0:
            print(f'job: {job}')
            self.connect()
            job_path = job["download_dir"]
            # print(f'/home/{self.username}/{zipname}')
            # print(f'{job_path}{zipname}')
            self.sftp.get(f'/home/{self.username}/{zipname}', f'{job_path}{zipname}')
            sha256_first = self.digest(f'{job_path}{zipname}')
            os.remove(f'{job_path}{zipname}')
            self.disconnect()
            # redownloading to validate the SHA sum
            self.connect()
            self.sftp.get(f'/home/{self.username}/{zipname}', f'{job_path}{zipname}')
            sha256_second = self.digest(f'{job_path}{zipname}')
            self.disconnect()
            if sha256_first == sha256_second:
                return f'{job_path}{zipname}'
            # checksums differ: drop the download, wait, and retry
            os.remove(f'{job_path}{zipname}')
            total_attempts = self.subtract_remote_attempts(total_attempts)
        if total_attempts == 0:
            return None

    def subtract_remote_attempts(self, total_attempts):
        time.sleep(self.remote_delay)
        return total_attempts - 1

    def digest(self, file_path):
        # sha256 of a local file, read in block_size (64 byte) chunks
        hashvalue = hashlib.sha256()
        with open(file_path, 'rb') as file_:
            while True:
                chunk = file_.read(hashvalue.block_size)
                if not chunk:
                    break
                hashvalue.update(chunk)
        return hashvalue.hexdigest()

    def put(self, local_path, remote_path, files_bytes=None):
        sha256 = self.digest(local_path)  # sha value at source
        total_attempts = int(self.remote_attempts)
        local_path_str = str(local_path)
        local_path_get = local_path_str[:local_path_str.rindex('/')]  # parent directory of the local file
        local_temp_folder = local_path_get + "/tempfolder/"
        try:
            self.connect()
            print('remote_path chdir: ', remote_path)
            self.sftp.chdir(remote_path)  # test if remote_path exists
        except IOError:
            self.sftp.mkdir(remote_path)
        finally:
            self.disconnect()
        if not os.path.isdir(local_temp_folder):
            original_umask = os.umask(0)
            # create local temp folder for redownloaded files
            os.makedirs(local_temp_folder, 0o755)
            os.umask(original_umask)
        filename = local_path_str.split("/")[-1]
        remote_path = remote_path + filename  # remote_path is expected to end with '/'
        local_temp_folder = local_temp_folder + filename
        while total_attempts > 0:
            self.connect()
            self.sftp.put(local_path, remote_path)
            self.disconnect()
            # redownload the file and compare checksums against the source
            self.connect()
            self.sftp.get(remote_path, local_temp_folder)
            self.disconnect()
            local_sha256 = self.digest(local_temp_folder)
            os.remove(local_temp_folder)
            if local_sha256 == sha256:
                # print('sha confirmed')
                return True
            total_attempts = self.subtract_remote_attempts(total_attempts)
        if total_attempts == 0:
            return False

    def change_path(self, pathfile):
        # map a local file name onto the remote path and fetch its mtime
        filename = pathfile.split("/")[-1]
        pathfile = self.path + filename
        utime = self.sftp.stat(pathfile).st_mtime
        last_modified = datetime.fromtimestamp(utime)
        return pathfile, last_modified

    def delete_file(self, last_modified, pathfile, delete_time):
        if (datetime.now() - last_modified) > timedelta(days=delete_time):
            print('deleting file remotely: ', pathfile)
            self.sftp.remove(pathfile)

    def delete_remote_demos(self, source_files):
        # remove .dem files older than 45 days from the remote host
        self.connect()
        for pathfile in source_files:
            pathfile = str(pathfile)
            if not pathfile.endswith('.dem'):
                continue
            pathfile, last_modified = self.change_path(pathfile)
            self.delete_file(last_modified, pathfile, 45)
        self.disconnect()

    def delete_remote_zips(self, backups_dir):
        # remove .xz archives older than 40 days from the remote backups directory
        self.connect()
        for files in self.sftp.listdir(f'{backups_dir}'):
            pathfile = f'{backups_dir}{files}'
            if not pathfile.endswith(".xz"):
                continue
            utime = self.sftp.stat(pathfile).st_mtime
            last_modified = datetime.fromtimestamp(utime)
            self.delete_file(last_modified, pathfile, 40)
        self.disconnect()

    def delete_remote_zip_temp(self, zipname):
        self.connect()
        self.sftp.remove(f'/home/{self.username}/{zipname}')
        self.disconnect()

    def delete_local_zip(self, local_zip_path_name):
        os.remove(local_zip_path_name)
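

# --- Usage sketch (illustrative only) ---
# A minimal sketch of how this class might be driven end to end, assuming config
# and settings dictionaries shaped like the keys read in __init__ and a job dict
# with the 'zipname' and 'download_dir' keys used above. Every value below
# (hostname, username, password, paths, the 'sftp' remote_type key) is a
# hypothetical placeholder, not something defined by this module.
if __name__ == '__main__':
    example_config = {
        'path': '/home/nonroot/',          # remote directory to archive
        'description': 'example backup',
        'hostname': 'backup.example.org',
        'username': 'nonroot',
        'port': 22,
        'remote_type': 'sftp',
    }
    example_settings = {
        'sftp': {
            'remote_attempts': 3,
            'remote_delay': 10,
            'backup.example.org': {
                'nonroot': {'password': 'changeme'},
            },
        },
    }
    example_job = {'zipname': 'example', 'download_dir': '/tmp/backups/'}

    remote = sftp_remote(example_config, example_settings)
    # build the archive remotely, download it twice and compare checksums,
    # then clean up the temporary archive on the remote host
    zipname = remote.remote_channel_command(example_job, 'tar_recursive')
    local_zip = remote.get_remote_files(example_job, zipname)
    if local_zip is not None:
        remote.delete_remote_zip_temp(zipname)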