# Database backup code
# Generated: Mon Nov 27 2023 11:17:11 GMT+0000 (Coordinated Universal Time)
def generate_database_backup():
    """Dump the Postgres database, zip it, upload it to S3, prune old
    backups, and email the S3 download link via SES.

    Connection details come from environment variables
    (DATABASE_HOST/PORT, POSTGRES_DB/USER/PASSWORD); AWS credentials and
    email settings come from Django ``settings``. Intended to run as a
    scheduled job; returns None.
    """
    # --- Database connection details (from environment) ---
    host = os.environ.get("DATABASE_HOST")
    port = os.environ.get("DATABASE_PORT")
    db_name = os.environ.get("POSTGRES_DB")
    db_username = os.environ.get("POSTGRES_USER")
    db_password = os.environ.get("POSTGRES_PASSWORD")

    current_datetime = datetime.now()
    date_folder = current_datetime.strftime("%Y-%m-%d")
    # BUG FIX: ':' is not a legal filename character on several
    # filesystems (and breaks Windows entirely) — use '-' in the time part.
    download_sql_file = (
        f'db_backup_{db_name}_{current_datetime.strftime("%Y-%m-%d_%H-%M")}.sql'
    )
    compressed_sql_file = f"{download_sql_file}.zip"

    s3_bucket_name = settings.AWS_STORAGE_BUCKET_NAME
    # Key (path) under which the file will be stored in S3.
    s3_key = f"dailyDataBaseBackup/{date_folder}/{compressed_sql_file}"

    s3 = boto3.client(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )

    # --- Prune backups older than ~1 month (30 days, UTC) ---
    one_month_ago = datetime.now(timezone.utc) - timedelta(days=30)
    # BUG FIX: .get() guards against a KeyError when the bucket is empty
    # ("Contents" is absent from an empty listing).
    objects = s3.list_objects_v2(Bucket=s3_bucket_name).get("Contents", [])
    for obj in objects:
        key = obj["Key"]
        # BUG FIX: backups are written as *.sql.zip, but the old check
        # matched *.sql.gz only — it never fired, so stale backups were
        # never deleted. Accept both so legacy .gz files are cleaned too.
        if (
            key.startswith("dailyDataBaseBackup/")
            and key.endswith((".sql.zip", ".sql.gz"))
            and obj["LastModified"] < one_month_ago
        ):
            s3.delete_object(Bucket=s3_bucket_name, Key=key)

    # --- Create the database dump ---
    # SECURITY FIX: pass the password via the environment and use a list
    # argv (shell=False) instead of interpolating credentials into a
    # `shell=True` string — avoids exposing the password in `ps` output
    # and eliminates shell-injection risk from env-derived values.
    dump_env = {**os.environ, "PGPASSWORD": db_password or ""}
    backup_command = [
        "pg_dump",
        "-h", host,
        "-p", port,
        "-U", db_username,
        db_name,
        "-w",
        "--clean",
        "-f", download_sql_file,  # -f replaces the shell `>` redirect
    ]
    print(f"Running command: {' '.join(backup_command)}")
    exit_code = subprocess.call(backup_command, env=dump_env)
    if exit_code != 0:
        # Guard clause: nothing to upload if the dump failed.
        logger.error(f"pg_dump failed with exit code {exit_code}")
        return

    # --- Compress the dump ---
    # BUG FIX: the original wrote `compressed_sql_file` (the empty,
    # just-created zip) into itself and never archived the .sql dump,
    # producing a useless backup. Archive the dump file instead.
    with zipfile.ZipFile(compressed_sql_file, "w", zipfile.ZIP_DEFLATED) as zipf:
        zipf.write(download_sql_file, os.path.basename(download_sql_file))

    # --- Upload the new backup to S3 ---
    with open(compressed_sql_file, "rb") as file:
        s3.upload_fileobj(file, s3_bucket_name, s3_key)

    # Remove the local working files now that the upload succeeded.
    os.remove(compressed_sql_file)
    os.remove(download_sql_file)

    # --- Email the S3 link via SES ---
    s3_file_path = f"https://{s3_bucket_name}.s3.amazonaws.com/{s3_key}"
    sender_email = settings.DEFAULT_FROM_EMAIL
    receiver_email = "devteam@hikartech.in"

    msg = MIMEMultipart("alternative")
    msg["Subject"] = "Database Backup For Hikemm"
    msg["From"] = sender_email
    msg["To"] = receiver_email
    body = f"""
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Link to S3 File</title>
</head>
<body>
<h1>Link to S3 File</h1>
<p>Dear all,</p>
<p>I hope this email finds you well. I wanted to share a link to an S3 file that you might find useful:</p>
<p><a href="{s3_file_path}"> S3 File URL</a> </p>
<p>You can click on the link above to access the file. If you have any trouble accessing it or need further assistance, please don't hesitate to reach out to me.</p>
<p>Thank you, and have a great day!</p>
<p>Best regards</p>
</body>
</html>
"""
    msg.attach(MIMEText(body, "html"))

    email_client = boto3.client(
        "ses",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name="ap-south-1",
    )
    response = email_client.send_raw_email(
        Source=sender_email,
        Destinations=[receiver_email],
        RawMessage={"Data": msg.as_bytes()},
    )
    # BUG FIX: a successful send was logged at ERROR level; use INFO.
    logger.info(f"Email sent successfully {response['MessageId']}")


# Dockerfile dependency for pg_dump:
# RUN apt-get update && apt-get install -y postgresql-client
# Comments