Snippets Collections
def generate_database_backup():
    """Dump the Postgres DB, zip it, upload to S3, prune old backups, email the link.

    Connection details come from environment variables; AWS credentials and
    email settings come from Django ``settings``. Side effects: runs ``pg_dump``,
    creates and removes local files, deletes/uploads S3 objects, sends one SES
    email. Returns nothing; on a failed dump it logs an error and aborts.
    """
    host = os.environ.get("DATABASE_HOST")  # Database host
    port = os.environ.get("DATABASE_PORT")  # Database port
    db_name = os.environ.get("POSTGRES_DB")  # Database name
    db_username = os.environ.get("POSTGRES_USER")  # Database username
    db_password = os.environ.get("POSTGRES_PASSWORD")  # Database password

    current_datetime = datetime.now()
    date_folder = current_datetime.strftime("%Y-%m-%d")  # Date folder
    # NOTE(review): the ':' in the timestamp is kept for continuity with
    # existing S3 keys, but it is not a portable filename on Windows.
    download_sql_file = f'db_backup_{db_name}_{current_datetime.strftime("%Y-%m-%d_%H:%M")}.sql'
    compressed_sql_file = f"{download_sql_file}.zip"  # Compressed SQL file name
    s3_bucket_name = settings.AWS_STORAGE_BUCKET_NAME
    # Key (path) under which the file will be stored in S3
    s3_key = f"dailyDataBaseBackup/{date_folder}/{compressed_sql_file}"

    s3 = boto3.client(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )

    # Prune backups older than ~1 month (UTC). Use .get("Contents", []) so an
    # empty bucket does not raise KeyError.
    one_month_ago = datetime.now(timezone.utc) - timedelta(days=30)
    for obj in s3.list_objects_v2(Bucket=s3_bucket_name).get("Contents", []):
        key = obj["Key"]
        # FIX: uploads are named *.sql.zip; the old filter matched ".sql.gz",
        # which never matched, so stale backups were never deleted.
        if (
            key.startswith("dailyDataBaseBackup/")
            and key.endswith(".sql.zip")
            and obj["LastModified"] < one_month_ago
        ):
            s3.delete_object(Bucket=s3_bucket_name, Key=key)

    # FIX: run pg_dump without a shell and pass the password via the PGPASSWORD
    # environment variable, so it never appears in the process command line
    # (previously visible to anyone running `ps`).
    env = {**os.environ, "PGPASSWORD": db_password or ""}
    print(f"Running pg_dump for {db_name} -> {download_sql_file}")
    with open(download_sql_file, "w") as sql_out:
        exit_code = subprocess.call(
            [
                "pg_dump",
                "-h", host or "",
                "-p", port or "",
                "-U", db_username or "",
                db_name or "",
                "-w",
                "--clean",
            ],
            stdout=sql_out,
            env=env,
        )

    if exit_code != 0:
        logger.error(f"pg_dump failed with exit code {exit_code}")
        return

    # FIX: zip the SQL dump itself. The original added the (still empty)
    # archive file into itself, producing a corrupt backup and leaving the
    # actual .sql dump out of the upload.
    with zipfile.ZipFile(compressed_sql_file, "w", zipfile.ZIP_DEFLATED) as zipf:
        zipf.write(download_sql_file, os.path.basename(download_sql_file))

    # Upload the new backup file to S3.
    with open(compressed_sql_file, "rb") as file:
        s3.upload_fileobj(file, s3_bucket_name, s3_key)

    # Remove the working files from the project folder.
    os.remove(compressed_sql_file)
    os.remove(download_sql_file)

    s3_file_path = f"https://{s3_bucket_name}.s3.amazonaws.com/{s3_key}"
    _send_backup_email(s3_file_path)


def _send_backup_email(s3_file_path):
    """Email the S3 download link for today's backup to the dev team via SES."""
    sender_email = settings.DEFAULT_FROM_EMAIL
    receiver_email = "devteam@hikartech.in"
    msg = MIMEMultipart("alternative")
    msg["Subject"] = "Database Backup For Hikemm"
    msg["From"] = sender_email
    msg["To"] = receiver_email
    body = f"""

        <!DOCTYPE html>
        <html>
        <head>
            <meta charset="UTF-8">
            <title>Link to S3 File</title>
        </head>
        <body>
            <h1>Link to S3 File</h1>
            <p>Dear all,</p>
            <p>I hope this email finds you well. I wanted to share a link to an S3 file that you might find useful:</p>
            <p><a href="{s3_file_path}"> S3 File URL</a> </p>
            <p>You can click on the link above to access the file. If you have any trouble accessing it or need further assistance, please don't hesitate to reach out to me.</p>
            <p>Thank you, and have a great day!</p>
            <p>Best regards</p>
        </body>
        </html>
"""
    msg.attach(MIMEText(body, "html"))
    email_client = boto3.client(
        "ses",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name="ap-south-1",
    )
    response = email_client.send_raw_email(
        Source=sender_email,
        Destinations=[receiver_email],
        RawMessage={"Data": msg.as_bytes()},
    )
    # FIX: a successful send is an info-level event, not an error.
    logger.info(f"Email sent successfully {response['MessageId']}")




RUN apt-get update && apt-get install -y postgresql-client  # Add this line to the Dockerfile so pg_dump is available in the container
using System.Linq;
using Microsoft.EntityFrameworkCore;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.AspNetCore;


namespace TDD.Tests
{
    /// <summary>
    /// WebApplicationFactory that swaps the application's real
    /// <c>DataContext</c> registration for an EF Core in-memory database,
    /// so integration tests never touch the real database.
    /// </summary>
    public class PatientTestsDbWAF<TStartup> : WebApplicationFactory<TStartup> where TStartup : class
    {
        protected override IWebHostBuilder CreateWebHostBuilder()
        {
            return WebHost.CreateDefaultBuilder()
                .UseStartup<TStartup>();
        }

        protected override void ConfigureWebHost(IWebHostBuilder builder)
        {
            // FIX: the lambda was marked `async` but awaited nothing.
            // ConfigureServices takes a synchronous Action<IServiceCollection>;
            // an async-void lambda would silently swallow any exception thrown
            // during service configuration.
            builder.ConfigureServices(services =>
            {
                // Remove the app's real DbContext registration.
                var descriptor = services.SingleOrDefault(
                    d => d.ServiceType == typeof(DbContextOptions<DataContext>));

                if (descriptor != null)
                {
                    services.Remove(descriptor);
                }

                // Add DbContext using an in-memory database for testing,
                // so tests do not interfere with the original db.
                services.AddDbContext<DataContext>(options =>
                {
                    options.UseInMemoryDatabase("PatientTestsTDD.db");
                });
            });
        }
    }
}
Db::getInstance()->executeS("DELETE FROM product_images WHERE id_product NOT IN (" . join($allProductIds,',') . ")")
star

Mon Nov 27 2023 11:17:11 GMT+0000 (Coordinated Universal Time)

#db
star

Tue Jun 06 2023 08:05:54 GMT+0000 (Coordinated Universal Time) https://www.freecodecamp.org/news/learn-tdd-with-integration-tests-in-net-5-0/?utm_source

#tdd #test #in-memory #db
star

Tue Mar 02 2021 10:31:38 GMT+0000 (Coordinated Universal Time)

#prestashop #delete #mysql #db #database #ids

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension