Snippets Collections
A crypto exchange clone script is a software solution modeled on successful crypto exchanges like Binance, Coinbase, or Kraken. These scripts serve as pre-built templates containing the core functionalities needed for launching a trading platform.

Hivelance has earned its reputation as a premier crypto exchange clone script development company by delivering a large number of high-quality blockchain projects. With a decade of experience and a deep understanding of market dynamics, we build platforms that combine performance, security, and innovation.



Know More:

Visit - https://www.hivelance.com/crypto-exchange-clone-script
WhatsApp - +918438595928, +971505249877
Telegram - Hivelance
Mail - sales@hivelance.com
// Styles Gravity Forms checkbox inputs as custom controls: the native input
// is made invisible and the label's ::before/::after pseudo-elements draw a
// rounded box and a checkmark. $md / $elgray / $lwhite are defined elsewhere
// in the stylesheet.
.ginput_container_checkbox {
    .gchoice {
      margin-bottom: 1rem;

      @media (min-width: $md) {
        margin-bottom: 0;
      }

      label {
        position: relative;
        right: -4px;

        @media (min-width: $md) {
          right: -8px;
        }


        // Checkbox box: 16px rounded square (20px from the $md breakpoint),
        // positioned in the label's left gutter.
        &::before {
          position: absolute;
          content: ' ';
          width: 16px;
          height: 16px;
          border: 1px solid $elgray;
          background: $lwhite;
          top: 3px;
          left: -20px;
          border-radius: 5px;

          @media (min-width: $md) {
            width: 20px;
            height: 20px;
            top: 3px;
            left: -26px;
          }
        }

        // Checkmark: an L-shaped border rotated -45deg; hidden until checked.
        &::after {
          content: "";
          position: absolute;
          border-bottom: 2px solid $elgray;
          border-left: 2px solid $elgray;
          height: 6px;
          left: -16px;
          opacity: 0;
          top: 7px;
          transform: rotate(-45deg);
          transition: all .3s linear;
          width: 9px;

          @media (min-width: $md) {
            top: 9px;
            width: 10px;
            left: -21px;
          }
        }
      }

      // Hide the native checkbox but keep it in the layout/tab order.
      input {
        opacity: 0;
      }

      // Reveal the checkmark when the sibling input is checked.
      input[type="checkbox"]:checked~label:after {
        opacity: 1 !important;
      }
    }
  }

  // Styles Gravity Forms radio inputs as custom circular controls: the native
  // input is made invisible and the label's ::before/::after pseudo-elements
  // draw the ring and the selected dot. $md / $primary / $white are defined
  // elsewhere in the stylesheet.
  .ginput_container_radio {
    .gchoice {
      margin-bottom: 1rem;

      @media (min-width: $md) {
        margin-bottom: 0;
      }

      label {
        position: relative;
        right: -4px;

        @media (min-width: $md) {
          right: -8px;
        }

        // Outer ring: 16px circle (20px from the $md breakpoint).
        &::before {
          position: absolute;
          content: ' ';
          width: 16px;
          height: 16px;
          border: 2px solid $primary;
          background: $white;
          top: 3px; // FIX: was `top: 3;` — missing unit makes the declaration invalid and browsers drop it
          left: -20px;
          border-radius: 50%;

          @media (min-width: $md) {
            width: 20px;
            height: 20px;
            top: 3px;
            left: -26px;
          }
        }

        // Inner dot: hidden until the radio is checked.
        &::after {
          content: "";
          position: absolute;
          width: 8px;
          height: 8px;
          left: -16px;
          opacity: 0;
          top: 7px;
          border-radius: 50%;
          background: $primary;
          transition: all .3s linear;

          @media (min-width: $md) {
            width: 10px;
            height: 10px;
            top: 8px;
            left: -21px;
          }
        }
      }

      // Hide the native radio but keep it in the layout/tab order.
      input {
        opacity: 0;
      }

      // Reveal the dot when the sibling input is checked.
      input[type="radio"]:checked~label:after {
        opacity: 1 !important;
      }
    }
  }
-- TPAP FILL RATE
-- Column-level data-quality report for three TPAP source tables over the
-- trailing 7 days (by dl_last_updated): for each audited column, the
-- percentage of rows whose value is non-null and non-blank.
-- NOTE(review): each UNION ALL arm rescans its CTE; engines that do not
-- materialize CTEs will scan the base table once per audited column.
CREATE TABLE team_kingkong.tpap_fill_rate AS
WITH

-- Total row count per table to avoid window function overhead
txn_info_total AS (
  SELECT COUNT(*) AS total_rows
  FROM switch.txn_info_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),
txn_info AS (
  SELECT *
  FROM switch.txn_info_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),

txn_participants_total AS (
  SELECT COUNT(*) AS total_rows
  FROM switch.txn_participants_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),
txn_participants AS (
  SELECT *
  FROM switch.txn_participants_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),

risk_data_total AS (
  SELECT COUNT(*) AS total_rows
  FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),
risk_data AS (
  SELECT request, response
  FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
  WHERE dl_last_updated > DATE(CURRENT_DATE - INTERVAL '7' DAY)
),

-- One row per risk_data row: scalar fields pulled out of the JSON request /
-- response payloads, so fill rates below can divide by risk_data_total.
risk_parsed AS (
  SELECT
    json_extract_scalar(request, '$.evaluationType') AS evaluation_type,
    json_extract_scalar(request, '$.requestPayload.latitude') AS latitude,
    json_extract_scalar(request, '$.requestPayload.longitude') AS longitude,
    json_extract_scalar(request, '$.requestPayload.osVersion') AS os_version,
    json_extract_scalar(request, '$.requestPayload.payeeType') AS payee_type,
    json_extract_scalar(request, '$.requestPayload.payeeVpa') AS payee_vpa,
    json_extract_scalar(request, '$.requestPayload.payerType') AS payer_type,
    json_extract_scalar(request, '$.requestPayload.payerVpa') AS payer_vpa,
    json_extract_scalar(response, '$.messages.cst[0]') AS cst_risk_code,
    json_extract_scalar(response, '$.action_recommended') AS action_recommended
  FROM risk_data
)

-- Final column-level fill rates: one output row per (table, column).
-- "Filled" means NOT NULL and not blank after TRIM (amount is numeric, so
-- it only gets the NULL check). The payer scope_cust_id arm divides by the
-- PAYER row count instead of the table total (NULLIF guards divide-by-zero).
SELECT 'switch' AS db_name, 'txn_info_snapshot_v3' AS table_name, 'category' AS column_name,
       100.0 * COUNT_IF(category IS NOT NULL AND TRIM(category) != '') / (SELECT total_rows FROM txn_info_total) AS fill_rate_pct
FROM txn_info

UNION ALL
SELECT 'switch', 'txn_info_snapshot_v3', 'status',
       100.0 * COUNT_IF(status IS NOT NULL AND TRIM(status) != '') / (SELECT total_rows FROM txn_info_total)
FROM txn_info

UNION ALL
SELECT 'switch', 'txn_info_snapshot_v3', 'txn_id',
       100.0 * COUNT_IF(txn_id IS NOT NULL AND TRIM(txn_id) != '') / (SELECT total_rows FROM txn_info_total)
FROM txn_info

UNION ALL
SELECT 'switch', 'txn_participants_snapshot_v3', 'amount',
       100.0 * COUNT_IF(amount IS NOT NULL) / (SELECT total_rows FROM txn_participants_total)
FROM txn_participants

UNION ALL
SELECT 'switch', 'txn_participants_snapshot_v3', 'participant_type',
       100.0 * COUNT_IF(participant_type IS NOT NULL AND TRIM(participant_type) != '') / (SELECT total_rows FROM txn_participants_total)
FROM txn_participants

UNION ALL
SELECT 'switch', 'txn_participants_snapshot_v3', 'scope_cust_id (payer)',
       100.0 * COUNT_IF(scope_cust_id IS NOT NULL AND participant_type = 'PAYER') /
       NULLIF(COUNT_IF(participant_type = 'PAYER'), 0)
FROM txn_participants

UNION ALL
SELECT 'switch', 'txn_participants_snapshot_v3', 'txn_id',
       100.0 * COUNT_IF(txn_id IS NOT NULL AND TRIM(txn_id) != '') / (SELECT total_rows FROM txn_participants_total)
FROM txn_participants

UNION ALL
SELECT 'switch', 'txn_participants_snapshot_v3', 'vpa',
       100.0 * COUNT_IF(vpa IS NOT NULL AND TRIM(vpa) != '') / (SELECT total_rows FROM txn_participants_total)
FROM txn_participants

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'evaluation_type',
       100.0 * COUNT_IF(evaluation_type IS NOT NULL AND TRIM(evaluation_type) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'latitude',
       100.0 * COUNT_IF(latitude IS NOT NULL AND TRIM(latitude) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'longitude',
       100.0 * COUNT_IF(longitude IS NOT NULL AND TRIM(longitude) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'os_version',
       100.0 * COUNT_IF(os_version IS NOT NULL AND TRIM(os_version) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'payee_type',
       100.0 * COUNT_IF(payee_type IS NOT NULL AND TRIM(payee_type) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'payee_vpa',
       100.0 * COUNT_IF(payee_vpa IS NOT NULL AND TRIM(payee_vpa) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'payer_type',
       100.0 * COUNT_IF(payer_type IS NOT NULL AND TRIM(payer_type) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'payer_vpa',
       100.0 * COUNT_IF(payer_vpa IS NOT NULL AND TRIM(payer_vpa) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'cst_risk_code',
       100.0 * COUNT_IF(cst_risk_code IS NOT NULL AND TRIM(cst_risk_code) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed

UNION ALL
SELECT 'tpap_hss', 'upi_switchv2_dwh_risk_data_snapshot_v3', 'action_recommended',
       100.0 * COUNT_IF(action_recommended IS NOT NULL AND TRIM(action_recommended) != '') / (SELECT total_rows FROM risk_data_total)
FROM risk_parsed;
-- Sanity check: inspect the attendance-log row(s) for employee 275 before
-- renaming, so the change can be verified against the old value.
Select * from attlog where employeeID = 275;


-- Correct the stored person name for employee 275.
-- Scoped by employeeID, so only that employee's rows are touched.
UPDATE attlog
SET personName = 'Parveen Naik'
WHERE employeeID = 275;

# Apple Shop demo: computes the bill for a number of apples, applies a flat
# $5 discount on orders over $20, and reports whether the shopper can afford
# the purchase.

# Number of apples to buy.
Apples = 7
# Cost per apple (dollars).
Cost = 3
# Money available to spend (dollars).
Money = 50

# Base bill before any discount.
MoneyNeeded = Apples * Cost

# NEW DEAL: spend more than $20 and get $5 off.
# (The original recomputed Apples * Cost in an else branch; that branch was
# redundant because MoneyNeeded already held exactly that value.)
if MoneyNeeded > 20:
    MoneyNeeded -= 5

print("                                        ____________")
print("                                       |            |")
print("                                       | Apple Shop |")
print("                                       |____________|")
print()
print("                                         NEW DEAL:")
print("                                SPEND > $20 WE TAKE $5 OFF")
print("")
print("                                MoneyNeeded & Apples Bought")
print("                                      ---------------")
print("                                           $",MoneyNeeded)
print("                                        ", Apples, "Apples")

print("                                      ---------------")
print("")

# Affordability check against the (possibly discounted) bill.
if MoneyNeeded > Money:
    print("                                       Not Enough Money")
else:
    print("                                       Enough Money ")
// Remove tabs and sub-menu from ASE Pro for non superadmin.
// Injects inline CSS into wp-admin's <head> that hides selected ASE Pro
// settings tabs and feature toggles for every user except the
// 'mastaklance' account.
// NOTE(review): this is CSS-only hiding — the underlying settings remain
// reachable by direct request/DOM inspection; confirm that is acceptable.
add_action('admin_head', function() {
    $current_user = wp_get_current_user();
    
    // Only add CSS if the current user is NOT 'mastaklance'
    if ($current_user->user_login !== 'mastaklance') {
        echo '<style>label[for="tab-content-management"], label[for="tab-admin-interface"], label[for="tab-login-logout"], label[for="tab-disable-components"], label[for="tab-security"], label[for="tab-optimizations"], .asenha-toggle.utilities.local-user-avatar, .asenha-toggle.utilities.multiple-user-roles, .asenha-toggle.utilities.image-sizes-panel, .asenha-toggle.utilities.view-admin-as-role, .asenha-toggle.utilities.enable-password-protection, .asenha-toggle.utilities.maintenance-mode, .asenha-toggle.utilities.redirect-404-to-homepage, .asenha-toggle.utilities.display-system-summary, .asenha-toggle.utilities.search-engine-visibility-status, .asenha-toggle.custom-code.enable-code-snippets-manager, .asenha-toggle.custom-code.enable-custom-admin-css, .asenha-toggle.custom-code.enable-custom-frontend-css, .asenha-toggle.custom-code.enable-custom-body-class, .asenha-toggle.custom-code.manage-robots-txt { display: none; }</style>';
    }
});
Mạng dân đen VN thì rẻ bèo ấy mà, bao nhiêu vụ rồi có gì mới đâu? Sống ở VN thì phải xác định bản thân chỉ là con kiến, bọn cai trị nó đạp chết thì phải chịu.

Lúc này thì không thấy thằng nào vào bảo VN đáng sống nữa nhỉ? Với tôi, 1 đất nước đáng sống hay không trước tiên phải nhìn vào mạng người ở nước đó đáng giá bao nhiêu, nhớ cái vụ 2 anh dân đen bị thằng chó vàng xua ra làm lá chắn thịt để cản tội phạm không? Được đền bù tổng cộng 8 củ thôi nhé, nên cái mạng dân VN đáng giá 4 củ/người. Tất nhiên cái mạng của tôi và gia đình tôi cao hơn thế nhiều, nên đéo thèm ở VN nữa, hehe.

Ở VN bị conan đập chết cũng đéo kêu được, như vụ bạn Mỹ Hằng rơi từ tầng 9 chung cư Centana đấy. Thằng chồng làm conan đánh chết bạn này, xong thả từ tầng 9 xuống, nguyên 1 đám súc vật bao che cho nhau, thi thể bầm dập nát nhừ không nhận ra nổi. 2 ông bà già đeo bảng đi kêu oan khắp nơi, mà không có kết quả. Tôi hỏi mấy fence: Gọi VN là cái xứ rác rưởi có xứng đáng không? Chửi chính quyền VN như thế nào mới đủ? Trời xanh ở đâu? Nhân quả ở đâu?

Thế nên mấy thằng bò đỏ và ngạo nghễ nên câm mẹ mõm lại, chúng mày còn không xứng đáng được so sánh với súc vật. Bọn súc vật còn có tình yêu thương đồng loại, bọn mày là thứ quỷ dữ được tạo ra bởi 1 XH quá tàn bạo, nói thật cảm thấy quá sức ghê tởm khi trên đời tồn tại những loại người như này.
-- Fastag_Trusted_VRN_CCDC_Weekly_Monthly_limitCheck
-- Flags successful, non-blocked CC/DC payments at the Fastag merchant whose
-- cumulative GMV per VRN would cross the rolling 7-day or
-- calendar-month-to-date thresholds.
-- NOTE(review): DROP has no IF EXISTS and is immediately followed by INSERT
-- INTO the same table — this only works if the commented-out CREATE is run
-- in between; confirm the intended run order.
DROP TABLE team_kingkong.onus_Fastag_Trusted_VRN_CCDC_Weekly_Monthly_limitCheck_breaches;
 
-- CREATE TABLE team_kingkong.onus_Fastag_Trusted_VRN_CCDC_Weekly_Monthly_limitCheck_breaches AS
INSERT INTO team_kingkong.onus_Fastag_Trusted_VRN_CCDC_Weekly_Monthly_limitCheck_breaches
-- Base: Dec-2024..Jan-2025 (30-day lookback feeds the rolling windows),
-- enriched with a business category from the MID lookup.
with onus_txn_base as
    (SELECT DISTINCT A.*, case when m1.mid is not null then category else 'Others' end as business_category FROM 
        (select userid, transactionid as txn_id,
        cast(eventAmount as double) / 100 as txn_amount,
        dateinserted as txn_date,
        substr(cast(dateinserted as varchar(30)), 1, 7) as yearMonth,
        paymethod, paytmmerchantid, responsestatus, actionrecommended, velocitytimestamp
        , subscriberid as vrn
        FROM cdp_risk_transform.maquette_flattened_onus_snapshot_v3
        WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND SOURCE = 'PG'
        AND responsestatus IN ('SUCCESS') AND actionrecommended <> 'BLOCK'
        AND paytmmerchantid IN ('PTMFVT32998068120662') AND paymethod IN ('DEBIT_CARD', 'CREDIT_CARD')
        AND eventid IN (SELECT eventlinkid
        FROM risk_maquette_data_async.pplus_payment_result_prod_async_snapshot_v3
        WHERE dl_last_updated BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND payresult = 'payment_success')) a
    left join
        (select * from team_kingkong.voc_mid_categorization where mid != '') m1
    on a.paytmmerchantid = m1.mid)
 
SELECT *, CASE
  WHEN (txn_amount + week_txn_amount) >= week_threshold
       AND (txn_amount + month_txn_amount) >= month_threshold THEN 'week+month_limit_breached'
  WHEN (txn_amount + week_txn_amount) >= week_threshold THEN 'week_limit_breached'
  WHEN (txn_amount + month_txn_amount) >= month_threshold THEN 'month_limit_breached'
  ELSE NULL END AS breach_reason FROM 
    (SELECT A.*
    -- Rolling 7-day GMV per VRN from strictly-earlier txns.
    -- NOTE(review): IF(..., B.txn_amount, NULL) makes the SUM NULL when no
    -- earlier txn falls in the window, so (txn_amount + NULL) is NULL and a
    -- VRN's first txn can never breach on its own — confirm intended.
    , SUM(IF(DATE(B.txn_date) BETWEEN DATE(DATE(A.txn_date) - INTERVAL '7' DAY) AND DATE(A.txn_date), B.txn_amount, NULL)) AS week_txn_amount
    , 25295 AS week_threshold
    -- Calendar-month-to-date GMV per VRN (amount sum, not a txn count).
    , SUM(IF(DATE(B.txn_date) BETWEEN date_trunc('month', DATE(A.txn_date)) AND DATE(A.txn_date), B.txn_amount, NULL)) AS month_txn_amount
    , 50590 AS month_threshold
    FROM
        (SELECT * FROM onus_txn_base
        WHERE DATE(txn_date) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
        )A
    INNER JOIN
        -- Self-join: B supplies each txn's earlier (by velocitytimestamp)
        -- siblings on the same VRN within the trailing 30 days.
        (SELECT * FROM onus_txn_base)B
    ON A.vrn = B.vrn AND A.txn_id <> B.txn_id AND B.velocitytimestamp < A.velocitytimestamp
    AND DATE(B.txn_date) BETWEEN DATE(A.txn_date - INTERVAL '30' DAY) AND DATE(A.txn_date)
    GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12) -- all 12 columns of A.*
WHERE ((txn_amount + week_txn_amount) >= week_threshold) OR ((txn_amount + month_txn_amount) >= month_threshold);



--- on_us_loan_repayments_user_limits
-- Flags loan-repayment txns (vertical 56 / category 37217) where the same
-- (userid, subscriberid) pair has already made >= 20 distinct successful
-- txns in the trailing 30 days.
-- NOTE(review): DROP has no IF EXISTS and is immediately followed by INSERT
-- INTO the same table — confirm the commented CREATE runs in between.
DROP TABLE team_kingkong.onus_on_us_loan_repayments_user_limits_breaches;
 
-- CREATE TABLE team_kingkong.onus_on_us_loan_repayments_user_limits_breaches AS
INSERT INTO team_kingkong.onus_on_us_loan_repayments_user_limits_breaches
-- Base: successful, non-blocked loan-repayment txns over Dec-2024..Jan-2025
-- (30-day lookback feeds the rolling window), with business category lookup.
with onus_txn_base as
    (
        SELECT DISTINCT A.*, case when m1.mid is not null then category else 'Others' end as business_category FROM 
        (select userid, subscriberid, transactionid as txn_id,
        cast(eventAmount as double) / 100 as txn_amount,
        dateinserted as txn_date,
        substr(cast(dateinserted as varchar(30)), 1, 7) as yearMonth,
        paymethod, paytmmerchantid, responsestatus, actionrecommended, velocitytimestamp
        FROM cdp_risk_transform.maquette_flattened_onus_snapshot_v3
        WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND SOURCE = 'PG'
        AND verticalid = '56' AND categoryid = '37217' AND responsestatus = 'SUCCESS' AND actionrecommended <> 'BLOCK'
        AND eventid IN (SELECT eventlinkid
        FROM risk_maquette_data_async.pplus_payment_result_prod_async_snapshot_v3
        WHERE dl_last_updated BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND payresult = 'payment_success')) a
    left join
        (select * from team_kingkong.voc_mid_categorization where mid != '') m1
    on a.paytmmerchantid = m1.mid
    )
 
SELECT *, 'monthly threshold breached' as breach_reason FROM 
    (SELECT A.*
    -- Distinct earlier txns in the trailing 30 days
    -- (2592000 seconds = 30 days).
    , COUNT(DISTINCT IF(DATE(B.txn_date) BETWEEN DATE(DATE(A.txn_date) - INTERVAL '2592000' SECOND) AND DATE(A.txn_date), B.txn_id, NULL)) AS txn_succ_month
    , 20 AS txn_succ_month_threshold
    FROM
        (SELECT * FROM onus_txn_base
        WHERE DATE(txn_date) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31')A
    INNER JOIN
        -- Self-join: earlier txns (by velocitytimestamp) for the same user
        -- and subscriber within the trailing 30 days.
        (SELECT * FROM onus_txn_base)B
    ON A.userid = B.userid AND A.subscriberid = B.subscriberid AND A.txn_id <> B.txn_id AND B.velocitytimestamp < A.velocitytimestamp
    AND DATE(B.txn_date) BETWEEN DATE(A.txn_date - INTERVAL '30' DAY) AND DATE(A.txn_date)
    GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12) -- all 12 columns of A.*
WHERE txn_succ_month >= txn_succ_month_threshold;


-- Fastag_NonTrustedUser_CCDC_Monthly_limitCheck
-- Same structure as the trusted-VRN check above but with higher thresholds;
-- despite the "Monthly" name it checks both the rolling 7-day and the
-- calendar-month-to-date GMV per VRN.
-- NOTE(review): DROP has no IF EXISTS and is immediately followed by INSERT
-- INTO the same table — confirm the commented CREATE runs in between.
DROP TABLE team_kingkong.onus_Fastag_NonTrustedUser_CCDC_Monthly_limitCheck_breaches;

-- CREATE TABLE team_kingkong.onus_Fastag_NonTrustedUser_CCDC_Monthly_limitCheck_breaches AS
INSERT INTO team_kingkong.onus_Fastag_NonTrustedUser_CCDC_Monthly_limitCheck_breaches
-- Base: successful, non-blocked CC/DC Fastag txns over Dec-2024..Jan-2025.
with onus_txn_base as
    (SELECT DISTINCT A.*, case when m1.mid is not null then category else 'Others' end as business_category FROM 
        (select userid, transactionid as txn_id,
        cast(eventAmount as double) / 100 as txn_amount,
        dateinserted as txn_date,
        substr(cast(dateinserted as varchar(30)), 1, 7) as yearMonth,
        paymethod, paytmmerchantid, responsestatus, actionrecommended, velocitytimestamp
        , subscriberid as vrn
        FROM cdp_risk_transform.maquette_flattened_onus_snapshot_v3
        WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND SOURCE = 'PG'
        AND responsestatus IN ('SUCCESS') AND actionrecommended <> 'BLOCK'
        AND paytmmerchantid IN ('PTMFVT32998068120662') AND paymethod IN ('DEBIT_CARD', 'CREDIT_CARD')
        AND eventid IN (SELECT eventlinkid
        FROM risk_maquette_data_async.pplus_payment_result_prod_async_snapshot_v3
        WHERE dl_last_updated BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND payresult = 'payment_success')) a
    left join
        (select * from team_kingkong.voc_mid_categorization where mid != '') m1
    on a.paytmmerchantid = m1.mid)
 
SELECT *, CASE
    WHEN (txn_amount + week_txn_amount) >= week_threshold AND (txn_amount + month_txn_amount) >= month_threshold THEN 'week and month threshold breached'
    WHEN (txn_amount + week_txn_amount) >= week_threshold THEN 'week threshold breached'
    WHEN (txn_amount + month_txn_amount) >= month_threshold THEN 'month threshold breached'
    ELSE NULL END AS breach_reason FROM 
    (SELECT A.*
    -- Rolling 7-day GMV per VRN; NULL when no earlier txn in window (see
    -- trusted-VRN query: NULL propagates through the breach comparison).
    , SUM(IF(DATE(B.txn_date) BETWEEN DATE(DATE(A.txn_date) - INTERVAL '7' DAY) AND DATE(A.txn_date), B.txn_amount, NULL)) AS week_txn_amount
    , 1011800 AS week_threshold
    -- Calendar-month-to-date GMV per VRN.
    , SUM(IF(DATE(B.txn_date) BETWEEN date_trunc('month', DATE(A.txn_date)) AND DATE(A.txn_date), B.txn_amount, NULL)) AS month_txn_amount
    , 2023600 AS month_threshold
    FROM
        (SELECT * FROM onus_txn_base
        WHERE DATE(txn_date) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
        )A
    INNER JOIN
        -- Self-join: earlier txns on the same VRN within the trailing 30 days.
        (SELECT * FROM onus_txn_base)B
    ON A.vrn = B.vrn AND A.txn_id <> B.txn_id AND B.velocitytimestamp < A.velocitytimestamp
    AND DATE(B.txn_date) BETWEEN DATE(A.txn_date - INTERVAL '30' DAY) AND DATE(A.txn_date)
    GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12) -- all 12 columns of A.*
WHERE ((txn_amount + week_txn_amount) >= week_threshold) OR ((txn_amount + month_txn_amount) >= month_threshold);



-- CCBP GMV per user: flags credit-card-bill-payment txns where a user's
-- cumulative GMV would cross the same-day, rolling-7-day, or
-- calendar-month-to-date amount thresholds.
-- NOTE(review): DROP has no IF EXISTS and is immediately followed by INSERT
-- INTO the same table — confirm the commented CREATE runs in between.
DROP TABLE team_kingkong.onus_CCBP_GMV_per_user_1d_7d_30d_breaches;

-- CREATE TABLE team_kingkong.onus_CCBP_GMV_per_user_1d_7d_30d_breaches AS
INSERT INTO team_kingkong.onus_CCBP_GMV_per_user_1d_7d_30d_breaches
-- Base: successful, non-blocked txns at the three CCBP merchants over
-- Dec-2024..Jan-2025 (30-day lookback feeds the rolling windows).
with onus_txn_base as
    (SELECT DISTINCT A.*, case when m1.mid is not null then category else 'Others' end as business_category FROM 
        (select userid, transactionid as txn_id,
        cast(eventAmount as double) / 100 as txn_amount,
        dateinserted,
        substr(cast(dateinserted as varchar(30)), 1, 7) as yearMonth,
        paymethod, paytmmerchantid, velocitytimestamp
        FROM cdp_risk_transform.maquette_flattened_onus_snapshot_v3
        WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND SOURCE = 'PG'
        AND paytmmerchantid IN ('PTMCBP84799392178473','PTMVIS48435535949128','PTMCBP11428987150800')
        AND eventid IN (SELECT eventlinkid
        FROM risk_maquette_data_async.pplus_payment_result_prod_async_snapshot_v3
        WHERE dl_last_updated BETWEEN DATE(DATE'2025-01-01' - INTERVAL '30' DAY) AND DATE'2025-01-31'
        AND payresult = 'payment_success')
        AND responsestatus ='SUCCESS' AND actionrecommended <> 'BLOCK') a
    left join
        (select * from team_kingkong.voc_mid_categorization where mid != '') m1
    on a.paytmmerchantid = m1.mid)

-- Label each breach with every window whose threshold it crossed.
SELECT *, CASE
  WHEN (txn_amount + txn_txn_amount_same_day) > txn_txn_amount_same_day_threshold
       AND (txn_amount + txn_txn_amount_7_day) > txn_txn_amount_7_day_threshold
       AND (txn_amount + txn_txn_amount_cal_month) > txn_txn_amount_cal_month_threshold THEN 'same day, 7-day and month threshold breached'

  WHEN (txn_amount + txn_txn_amount_same_day) > txn_txn_amount_same_day_threshold
       AND (txn_amount + txn_txn_amount_7_day) > txn_txn_amount_7_day_threshold THEN 'same day and 7-day threshold breached'

  WHEN (txn_amount + txn_txn_amount_same_day) > txn_txn_amount_same_day_threshold
       AND (txn_amount + txn_txn_amount_cal_month) > txn_txn_amount_cal_month_threshold THEN 'same day and month threshold breached'

  WHEN (txn_amount + txn_txn_amount_7_day) > txn_txn_amount_7_day_threshold
       AND (txn_amount + txn_txn_amount_cal_month) > txn_txn_amount_cal_month_threshold THEN '7-day and month threshold breached'

  WHEN (txn_amount + txn_txn_amount_same_day) > txn_txn_amount_same_day_threshold THEN 'same day threshold breached'
  WHEN (txn_amount + txn_txn_amount_7_day) > txn_txn_amount_7_day_threshold THEN '7-day threshold breached'
  WHEN (txn_amount + txn_txn_amount_cal_month) > txn_txn_amount_cal_month_threshold THEN 'month threshold breached'
  ELSE NULL END AS breach_reason FROM 
    (SELECT A.*
    -- Same-calendar-day GMV per user (amount sum; thresholds below are
    -- stated in paise and divided by 100 into rupees).
    , SUM(IF(DATE(B.dateinserted) = DATE(A.dateinserted), B.txn_amount, 0)) AS txn_txn_amount_same_day
    , 100000000/100 AS txn_txn_amount_same_day_threshold
    -- Rolling 7-day GMV per user (604800 seconds = 7 days).
    , SUM(IF(B.dateinserted BETWEEN (DATE(A.dateinserted) - INTERVAL '604800' SECOND) AND A.dateinserted, B.txn_amount, 0)) AS txn_txn_amount_7_day
    , 250000000/100 AS txn_txn_amount_7_day_threshold
    -- Calendar-month-to-date GMV per user.
    , SUM(IF(B.dateinserted BETWEEN date_trunc('month', DATE(A.dateinserted)) AND A.dateinserted, B.txn_amount, 0)) AS txn_txn_amount_cal_month
    , 500000000/100 AS txn_txn_amount_cal_month_threshold
    FROM
        (SELECT * FROM onus_txn_base
        WHERE DATE(dateinserted) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31')A
    INNER JOIN
        -- Self-join: earlier txns (by velocitytimestamp) for the same user
        -- within the trailing 30 days.
        (SELECT * FROM onus_txn_base)B
    ON A.userid = B.userid AND A.txn_id <> B.txn_id AND B.velocitytimestamp < A.velocitytimestamp
    AND DATE(B.dateinserted) BETWEEN DATE(A.dateinserted - INTERVAL '30' DAY) AND DATE(A.dateinserted)
    GROUP BY 1,2,3,4,5,6,7,8,9) -- all 9 columns of A.*
WHERE ((txn_amount + txn_txn_amount_same_day) > txn_txn_amount_same_day_threshold) OR ((txn_amount + txn_txn_amount_7_day) > txn_txn_amount_7_day_threshold) OR ((txn_amount + txn_txn_amount_cal_month) > txn_txn_amount_cal_month_threshold)
;
-- RISK235 (NO BREACHES)
-- Rule: flag a P2P UPI txn (> Rs 5000) whose payee VPA saw txns from more
-- than 10 distinct (lat, long) pairs in the previous 30 minutes.
DROP TABLE team_kingkong.tpap_risk235_breaches;

-- CREATE TABLE team_kingkong.tpap_risk235_breaches AS
INSERT INTO team_kingkong.tpap_risk235_breaches
-- Base: successful Paytm-handle P2P txns for Jan-2025, joined to the risk
-- request payload to pick up each txn's device latitude/longitude.
with tpap_base as
(
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.latitude, D.longitude
FROM
    -- Pivot participants into one row per txn with payer/payee VPAs.
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    GROUP BY 1)B
inner join
    -- Keep only successful P2P transfers.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and upper(status) = 'SUCCESS' AND category IN ('VPA2VPA', 'VPA2ACCOUNT')) C
on B.txn_id = C.txn_id
INNER JOIN
    -- Risk request payload: person-to-person, non-blocked, both handles
    -- on Paytm; supplies the lat/long used for the distinct-location count.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , regexp_replace(cast(json_extract(request, '$.requestPayload.latitude') as varchar), '"', '') as latitude
    , regexp_replace(cast(json_extract(request, '$.requestPayload.longitude') as varchar), '"', '') as longitude
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payeeVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payeeVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
WHERE ((payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%'))
AND payee_vpa LIKE '%@%'
)
 
SELECT *, 'upi_p2p_multiple_locations_30min' AS rule_name, 'Distinct lat long threshold breach' as breach_reason FROM
    -- Self-join: t2 supplies txns to the same payee in the 30 minutes before
    -- t1; count distinct locations among them, excluding t1's own location.
    -- NOTE(review): t1.txn_date upper bound 2025-07-23 exceeds the Jan-2025
    -- base window, so it is effectively unbounded here — confirm intended.
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.latitude,
      t1.longitude,
      t1.txn_date AS txn_date,
      COUNT(DISTINCT CONCAT(t2.latitude, '_', t2.longitude)) AS distinct_lat_lon_count,
      10 AS lat_long_cnt_threshold
    FROM tpap_base t1
    INNER JOIN tpap_base t2
    ON t1.payee_vpa = t2.payee_vpa
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '1800' SECOND) AND t1.txn_time -- 30 MIN
      AND t1.txn_id <> t2.txn_id AND t1.txn_amount > 5000 
      AND t1.txn_date BETWEEN DATE'2025-01-01' AND DATE'2025-07-23'
      AND NOT (t1.latitude = t2.latitude AND t1.longitude = t2.longitude)
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, t1.txn_date, t1.latitude, t1.longitude)
WHERE distinct_lat_lon_count > lat_long_cnt_threshold
;
-- RISK235
-- if in previous 30 minutes distinct( lat,long)>=10 then block (Paytm specific)
-- Mar-2025 run of the RISK235 rule: flag a P2P UPI txn (> Rs 5000) whose
-- payee VPA saw txns from >= 10 distinct (lat, long) pairs in the previous
-- 30 minutes.

-- CREATE TABLE team_kingkong.tpap_risk235_breaches AS
INSERT INTO team_kingkong.tpap_risk235_breaches
-- Base: successful Paytm-handle UPI txns for Mar-2025, joined to the risk
-- request payload to pick up each txn's device latitude/longitude.
with tpap_base as
(
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.latitude, D.longitude
FROM
    -- Pivot participants into one row per (txn, customer, created_on).
    (SELECT txn_id, scope_cust_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(created_on) as txn_date,
    MAX(amount) AS txn_amount,
    created_on AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    AND vpa IS NOT NULL
    GROUP BY 1,2,7)B
inner join
    -- Keep only successful txns.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    and upper(status) in ('SUCCESS')) C
on B.txn_id = C.txn_id
INNER JOIN
    -- Risk request payload: person-to-person, non-blocked, Paytm payer
    -- handle; supplies the lat/long used for the distinct-location count.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , regexp_replace(cast(json_extract(request, '$.requestPayload.latitude') as varchar), '"', '') as latitude
    , regexp_replace(cast(json_extract(request, '$.requestPayload.longitude') as varchar), '"', '') as longitude
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'PERSON')D
ON B.txn_id = D.txnid
-- FIX: the original predicate was
--   payer_vpa LIKE '%@paytm%' OR payer_vpa LIKE '%@pt%' AND payee_vpa LIKE '%@%'
-- AND binds tighter than OR, so rows with an '@paytm' payer bypassed the
-- payee check entirely. Parenthesized to match the sibling RISK235 query.
WHERE ((payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%'))
AND payee_vpa LIKE '%@%'
)
 
SELECT * FROM
    -- Self-join: t2 supplies txns to the same payee in the 30 minutes before
    -- t1; count distinct locations among them, excluding t1's own location.
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.latitude,
      t1.longitude,
      DATE(t1.txn_time) AS txn_date,
      COUNT(DISTINCT CONCAT(t2.latitude, '_', t2.longitude)) AS distinct_lat_lon_count,
      10 AS lat_long_cnt_threshold
    FROM tpap_base t1
    INNER JOIN tpap_base t2
    ON t1.payee_vpa = t2.payee_vpa
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '1800' SECOND) AND t1.txn_time -- 30 MIN
      AND t1.txn_id <> t2.txn_id AND t1.txn_amount > 5000
      AND NOT (t1.latitude = t2.latitude AND t1.longitude = t2.longitude)
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, DATE(t1.txn_time), t1.latitude, t1.longitude)
WHERE distinct_lat_lon_count >= lat_long_cnt_threshold
;
-- RISK236
-- DROP TABLE team_kingkong.tpap_risk236_breaches;

-- CREATE TABLE team_kingkong.tpap_risk236_breaches AS
-- Rule: flag a txn when its payee received money from >= 15 distinct
-- lat/long pairs within the trailing 60 minutes, restricted to successful
-- Paytm-handle P2P transfers (VPA2VPA / VPA2ACCOUNT) with amount > 5000.
INSERT INTO team_kingkong.tpap_risk236_breaches
with tpap_base as
(
-- One row per qualifying txn, enriched with category, UPI subtype and the
-- lat/long the client reported to the risk engine.
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.latitude, D.longitude
, 'upi_p2p_multiple_locations_60min' as rule_name
-- NOTE(review): text says ">15" but the final filter below is >= 15 — confirm wording.
, 'Txns from >15 locations in 60 mins' as breach_reason
FROM
    -- B: pivot participant rows into one row per txn (payer VPA, payee VPA,
    -- amount, created time).
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    GROUP BY 1)B
inner join
    -- C: keep only successful P2P transfer categories.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and upper(status) = 'SUCCESS' AND category IN ('VPA2VPA', 'VPA2ACCOUNT')) C
on B.txn_id = C.txn_id
INNER JOIN
    -- D: risk-engine request payload for person-to-person, not-blocked
    -- UPI_TRANSACTION evaluations involving a Paytm handle on either leg;
    -- lat/long come back as strings (JSON quotes stripped).
    -- NOTE(review): empty-string lat/long would still count as a "location"
    -- in the distinct count below — confirm intended.
    (
        SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , regexp_replace(cast(json_extract(request, '$.requestPayload.latitude') as varchar), '"', '') as latitude
    , regexp_replace(cast(json_extract(request, '$.requestPayload.longitude') as varchar), '"', '') as longitude
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payeeVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payeeVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
WHERE (payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%') OR (payee_vpa LIKE '%@paytm%') OR (payee_vpa LIKE '%@pt%'))
 
-- Self-join the base on payee: t2 ranges over other txns to the same payee in
-- the 60 minutes ending at t1's time; count distinct lat_long pairs that
-- differ from t1's own location.  t1 rows with amount <= 5000 get no match
-- and are dropped by the INNER JOIN.
SELECT * FROM
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.latitude,
      t1.longitude,
      DATE(t1.txn_time) AS txn_date,
      COUNT(DISTINCT CONCAT(t2.latitude, '_', t2.longitude)) AS distinct_lat_lon_count,
      15 AS lat_long_cnt_threshold
    FROM tpap_base t1
    INNER JOIN tpap_base t2
    ON t1.payee_vpa = t2.payee_vpa
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '3600' SECOND) AND t1.txn_time -- 60 MIN
      AND t1.txn_id <> t2.txn_id AND t1.txn_amount > 5000
      AND NOT (t1.latitude = t2.latitude AND t1.longitude = t2.longitude)
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, DATE(t1.txn_time), t1.latitude, t1.longitude)
WHERE distinct_lat_lon_count >= lat_long_cnt_threshold
;
-- RISK 318
-- Kept commented out, consistent with the other RISK sections in this file.
-- A live DROP here would delete the table that the INSERT statement below
-- writes into (the CREATE TABLE ... AS line is commented out), so the whole
-- section would fail on the missing table.
-- DROP TABLE IF EXISTS team_kingkong.tpap_risk318_breaches;

-- CREATE TABLE team_kingkong.tpap_risk318_breaches AS
-- Rule: flag successful Paytm-handle payer txns whose risk payload reports
-- an iOS version below 17 for UPI / UPI Lite transactions — i.e. traffic the
-- ios_version_block rule should have stopped.
INSERT INTO team_kingkong.tpap_risk318_breaches
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.os, D.ios_version
, 'ios_version_block' AS rule_name
, 'Txn on ios < v17' AS breach_reason
FROM
    -- B: pivot participant rows into one row per txn (payer/payee VPA,
    -- amount, created time).
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    GROUP BY 1)B
inner join
    -- C: successful txns only (any category).
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and upper(status) = 'SUCCESS') C
on B.txn_id = C.txn_id
INNER JOIN
    -- D: risk payload rows where osVersion starts with 'iOS' and the parsed
    -- version number is below 17.
    -- NOTE(review): SUBSTRING(osVersion, 4, 3) assumes the version digits
    -- start right at character 4 (e.g. 'iOS16.2' -> '16.'); confirm the
    -- payload format — a space after 'iOS' would shift what gets extracted.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , regexp_replace(cast(json_extract(request, '$.requestPayload.osVersion') as varchar), '"', '') AS os
    , SUBSTRING(REGEXP_REPLACE(CAST(JSON_EXTRACT(request, '$.requestPayload.osVersion') AS VARCHAR), '"', ''),4,3) as ios_version
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.osVersion') as varchar), '"', '') LIKE 'iOS%'
    AND SUBSTRING(REGEXP_REPLACE(CAST(JSON_EXTRACT(request, '$.requestPayload.osVersion') AS VARCHAR), '"', ''),4,3) <> ''
    AND CAST(SUBSTRING(REGEXP_REPLACE(CAST(JSON_EXTRACT(request, '$.requestPayload.osVersion') AS VARCHAR), '"', ''),4,3) AS DOUBLE) < 17
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') IN ('UPI_TRANSACTION', 'UPI_LITE_TRANSACTION'))D
ON B.txn_id = D.txnid
WHERE (payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%');
-- RISK 152
-- DROP TABLE team_kingkong.tpap_risk152_breaches;

-- CREATE TABLE team_kingkong.tpap_risk152_breaches AS
-- Rule: flag a txn when its payee already received, in the trailing 24 hours,
-- at least 70 other txns AND money from at least 50 distinct other payers
-- (successful Paytm-handle VPA2VPA, person-to-person).
INSERT INTO team_kingkong.tpap_risk152_breaches
with tpap_base as
(
-- One row per qualifying txn with category and UPI subtype attached.
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
FROM
    -- B: pivot participant rows into one row per txn (payer/payee VPA,
    -- amount, created time).
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    GROUP BY 1)B
inner join
    -- C: successful VPA2VPA txns only.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    and upper(status) = 'SUCCESS' AND category = 'VPA2VPA') C
on B.txn_id = C.txn_id
INNER JOIN
    -- D: risk payload rows for person-to-person, not-blocked UPI_TRANSACTION
    -- evaluations from a Paytm-handle payer, excluding a fixed allowlist of
    -- internal/operational payer VPAs.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-01-01' - INTERVAL '1' DAY) AND DATE'2025-01-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'PERSON'
    AND lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) NOT IN ('7068069067@ptyes', 'onpaytmgas@paytm', '7068069067@ptsbi', '7068069067@pthdfc', '7068069067@paytm', '7068069067@ptaxis', 'jio@citibank')
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
WHERE ((payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%'))
AND payee_vpa LIKE '%@%'
)
 
-- Self-join the base on payee: count prior txns and distinct other payers to
-- the same payee in the 24 hours ending at t1's time; breach when both the
-- txn-count and distinct-payer thresholds are met.
SELECT *, 'upi_p2p_multiple_senders' AS rule_name, 'payer cnt & txn threshold breach' as breach_reason FROM
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.txn_date,
      COUNT(t2.txn_id) AS prior_txns_last_24h,
      70 as txn24hr_threshold,
      COUNT(DISTINCT IF(t1.payer_vpa <> t2.payer_vpa, t2.payer_vpa, NULL)) AS prior_payers_last_24h,
      50 AS payer24hr_threshold
    FROM tpap_base t1
    INNER JOIN tpap_base t2
    ON t1.payee_vpa = t2.payee_vpa
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '86400' SECOND) AND t1.txn_time -- 24 hrs
      AND t1.txn_id <> t2.txn_id
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, t1.txn_date)
WHERE (prior_txns_last_24h >= txn24hr_threshold) AND (prior_payers_last_24h >= payer24hr_threshold);
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":x-connect: Boost Days - What's on for this week :x-connect:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n\n Good morning Melbourne, hope you all had a wonderful long weekend :smile: See below for what's in store this week: "
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Xero Café :coffee:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n :new-thing: *This week we are bringing back the classic Old school slices. * \n\n :caramel-slice: Lemon, Mint, Caramel and Hedgehog \n\n :coffee: *Weekly Café Special:* _:rainbow: Rainbow Hot Chocolate_"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": " Wednesday, 11th June :calendar-date-11:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": " \n\n :lunch: *Mexican themed Lunch*: Lunch is from *12pm* in the L3 Kitchen & Wominjeka Breakout Space. "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Thursday, 12th June :calendar-date-12:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":eggs: *Breakfast*: from *8:30am-10:30am* in the Wominjeka Breakout Space. See menu in the :thread:  \n\n   \n\n "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Friday, 13th June :calendar-date-13:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Social Happy Hour :rainbow-x: :rupaul: Don't miss out on our fabulous Happy Hour Collaboration from 4.00pm- 5.30pm. Drag Trivia with Ms Carmel Latte, party pies and plenty of sparkle :pink-heart:   "
			}
		},
		{
			"type": "divider"
		}
	]
}
<?php // Footer "Services" menu column: rendered only when its widget area has active widgets. ?>
<?php if (is_active_sidebar( 'footer-menu-services-widget-area' )) : ?>     
            <div class="grid-25 tablet-grid-50 mobile-grid-100">
                <ul class="sidebar footer-n-menu">
                    <?php dynamic_sidebar( 'footer-menu-services-widget-area' ); ?>
                </ul>
            </div>
            <?php endif;?>
            
            <?php // Footer "About" menu column: narrower (grid-15) than the services column. ?>
            <?php if (is_active_sidebar( 'footer-menu-about-widget-area' )) :?>
            
            <div class="grid-15 tablet-grid-50 mobile-grid-100">
                <ul class="sidebar footer-n-menu">
                    <?php dynamic_sidebar( 'footer-menu-about-widget-area' ); ?>
                </ul>
            </div>
            
            <?php endif;?>
@app.route('/access_logs_data')
def access_logs_data():
    """Serve aggregate access-log statistics as JSON.

    Response body:
        all_time_stats: totals over every log row — entrances/exits,
            granted/denied counts, unique plates per outcome, peak hour
            ("HH:00"), and average daily traffic.
        report_data: chart series for 'day', 'week' and 'month', built by
            process_period_data().

    Returns HTTP 500 with {'error': ...} on a MySQL failure.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connector.connect(
            host=MYSQL_HOST,
            user=MYSQL_USER,
            password=MYSQL_PASSWORD,
            database=MYSQL_DATABASE
        )
        cursor = conn.cursor(dictionary=True)

        # Create access_logs table if it doesn't exist, so a fresh install
        # returns empty stats instead of erroring.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS access_logs (
                id INT AUTO_INCREMENT PRIMARY KEY,
                license_plate VARCHAR(255) NOT NULL,
                feed_type VARCHAR(50) NOT NULL,
                action VARCHAR(50) NOT NULL,
                timestamp DATETIME NOT NULL
            )
        ''')

        # Fetch all logs, newest first
        cursor.execute("SELECT * FROM access_logs ORDER BY timestamp DESC")
        logs = cursor.fetchall()

        # Partition rows for the all-time stats; action 'auto' means access
        # was granted automatically, anything else counts as denied.
        entrances = [log for log in logs if log['feed_type'].lower() == 'entrance']
        exits = [log for log in logs if log['feed_type'].lower() == 'exit']
        granted = [log for log in logs if log['action'].lower() == 'auto']
        denied = [log for log in logs if log['action'].lower() != 'auto']

        # Unique plates on each side of the granted/denied split
        registered_plates = {log['license_plate'] for log in granted}
        unregistered_plates = {log['license_plate'] for log in denied}

        # Peak hour: the hour of day with the most log rows.  DATETIME
        # columns normally arrive as datetime objects; fall back to parsing
        # strings, bucketing unparseable rows at hour 0 (previous behavior).
        hour_counts = Counter()
        for log in logs:
            timestamp = log['timestamp']
            if hasattr(timestamp, 'hour'):
                hour = timestamp.hour
            else:
                try:
                    hour = datetime.fromisoformat(str(timestamp)).hour
                except (ValueError, TypeError):  # was a bare except: too broad
                    hour = 0
            hour_counts[hour] += 1

        peak_hour = max(hour_counts.items(), key=lambda x: x[1])[0] if hour_counts else 0

        # Average daily traffic = total rows / number of distinct dates seen
        if logs:
            dates = set()
            for log in logs:
                timestamp = log['timestamp']
                if hasattr(timestamp, 'date'):
                    dates.add(timestamp.date())
                else:
                    try:
                        dates.add(datetime.fromisoformat(str(timestamp)).date())
                    except (ValueError, TypeError):
                        pass  # skip rows whose timestamp can't be parsed

            avg_traffic = round(len(logs) / max(1, len(dates)))
        else:
            avg_traffic = 0

        all_time_stats = {
            'total_entrances': len(entrances),
            'total_exits': len(exits),
            'granted_access': len(granted),
            'denied_access': len(denied),
            'registered_vehicles': len(registered_plates),
            'unregistered_vehicles': len(unregistered_plates),
            'peak_hour': f"{peak_hour:02d}:00",
            'avg_traffic': avg_traffic
        }

        # Chart series for each reporting period
        now = datetime.now()
        report_data = {
            'day': process_period_data(logs, now, 'day'),
            'week': process_period_data(logs, now, 'week'),
            'month': process_period_data(logs, now, 'month')
        }

        return jsonify({
            'all_time_stats': all_time_stats,
            'report_data': report_data
        })

    except mysql.connector.Error as err:
        logging.error(f"MySQL Error fetching reports data: {err}")
        return jsonify({'error': 'Error fetching reports data'}), 500
    finally:
        # Always release the cursor/connection, even on error
        if cursor:
            cursor.close()
        if conn and conn.is_connected():
            conn.close()
def save_vehicle_owner(license_plate, owner_name, owner_contact, owner_address):
    """Insert or update the owner record for a license plate.

    Creates the `avbs` table on first use, then upserts the row keyed by
    license_plate (ON DUPLICATE KEY refreshes the details and timestamp).

    Returns:
        True on success, False if a MySQL error occurred.
    """
    connection = None
    cur = None
    try:
        connection = mysql.connector.connect(
            host=MYSQL_HOST,
            user=MYSQL_USER,
            password=MYSQL_PASSWORD,
            database=MYSQL_DATABASE
        )
        cur = connection.cursor()
        # Make sure the target table exists before writing to it.
        cur.execute('''
            CREATE TABLE IF NOT EXISTS avbs (
                license_plate VARCHAR(255) PRIMARY KEY,
                owner_name VARCHAR(255) NOT NULL,
                owner_contact VARCHAR(255),
                owner_address TEXT,
                registration_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        # Parameterized upsert: the trailing three values feed the UPDATE branch.
        upsert_sql = "INSERT INTO avbs (license_plate, owner_name, owner_contact, owner_address) VALUES (%s, %s, %s, %s) ON DUPLICATE KEY UPDATE owner_name=%s, owner_contact=%s, owner_address=%s, registration_timestamp=CURRENT_TIMESTAMP"
        params = (license_plate, owner_name, owner_contact, owner_address, owner_name, owner_contact, owner_address)
        cur.execute(upsert_sql, params)
        connection.commit()
        logging.info(f"Saved/Updated owner details for license plate: {license_plate}")
        return True
    except mysql.connector.Error as err:
        logging.error(f"MySQL Error saving owner details: {err}")
        return False
    finally:
        # Release resources whether or not the write succeeded.
        if cur:
            cur.close()
        if connection and connection.is_connected():
            connection.close()

# Camera input configuration — values are device indices passed to the
# capture backend.
CAMERA_CONFIG = {
    'entrance': 0,  # First USB camera index for entrance
    'exit': 1,      # Second USB camera index for exit
    'single_camera_mode': False  # Set to False to use two separate cameras
}

# MySQL configuration
MYSQL_HOST = 'localhost'
MYSQL_USER = 'root'
MYSQL_PASSWORD = ''  # NOTE(review): empty root password — local dev only; confirm before deploying
MYSQL_DATABASE = 'avbs' 

# Arduino configuration
ARDUINO_PORT = 'COM5'  # Change this to match your Arduino's COM port
ARDUINO_BAUD_RATE = 9600
arduino_connected = False  # mutated at runtime once the serial link is established
arduino_serial = None  # presumably set to the opened serial handle at runtime — confirm against caller

# Detection and output tuning
YOLO_CONF_THRESHOLD = 0.25  # Confidence threshold for YOLO detection
PADDLE_OCR_CONF_THRESHOLD = 0.65  # Confidence threshold for OCR
SAVE_INTERVAL_SECONDS = 60  # Interval for saving JSON data
JSON_OUTPUT_DIR = "output_json"  # Directory for JSON output
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":xeros-connect: Boost Days - What's on this week! :xeros-connect:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Mōrena Ahuriri :wave: Happy Monday, let's get ready to dive into another week with our Xeros Connect Boost Day programme! See below for what's in store :eyes:"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-11: Wednesday, 11th June :camel:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n:coffee: *Café Partnership*: Enjoy coffee and café-style beverages from our cafe partner, *Adoro*, located in our office building *8:00AM - 11:30AM*.\n:muffin: *Breakfast*: Provided by *Design Cuisine* from *9:30AM-10:30AM* in the Kitchen."
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-12: Thursday, 12th June :duck:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n:coffee: *Café Partnership*: Enjoy coffee and café-style beverages from our cafe partner, *Adoro*, located in our office building *8:00AM - 11:30AM*.\n:sandwich: *Lunch*: Provided by *Roam* from *12:30PM-1:30PM* in the Kitchen."
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "*What else?* :party: \nWhat would you like from our future socials? \nMore food, drinks, or entertainment? \nWe'd love to hear feedback and ideas from you! \nDM your local WX coordinator or leave any suggestions in the thread :comment: \n*Keep up with us* :eyes: \nStay tuned to this channel for more details, check out the <https://calendar.google.com/calendar/u/0?cid=eGVyby5jb21fbXRhc2ZucThjaTl1b3BpY284dXN0OWlhdDRAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ|*Hawkes Bay Social Calendar*>, and get ready to Boost your workdays!\n\nWX Team :party-wx:"
			}
		}
	]
}
# Sentiment analysis of short feedback texts using the Bing lexicon,
# with a trend bar chart over time and an overall distribution pie chart.

# Load necessary libraries
library(tidyverse)
library(tidytext)
library(lubridate)

# Sample text data with dates
feedback <- data.frame(
  text = c("I love this product!", "Terrible service.", "Okay experience.",
           "Wonderful!", "Worst support ever."),
  date = as.Date(c("2024-01-10", "2024-01-12", "2024-01-15", "2024-01-18", "2024-01-20"))
)

# Tokenize, clean, and assign sentiment: one word per row, drop stop words,
# keep only words present in the Bing lexicon, then score per date as
# (positive count - negative count).
# NOTE(review): get_sentiments("bing") may prompt to download the lexicon
# on first use — confirm it is available in the target environment.
data("stop_words")
sentiment_data <- feedback %>%
  unnest_tokens(word, text) %>%
  anti_join(stop_words, by = "word") %>%
  inner_join(get_sentiments("bing"), by = "word") %>%
  count(date, sentiment) %>%
  pivot_wider(names_from = sentiment, values_from = n, values_fill = 0) %>%
  mutate(score = positive - negative,
         sentiment_label = case_when(
           score > 0 ~ "Positive",
           score < 0 ~ "Negative",
           TRUE ~ "Neutral"
         ))

# Trend visualization (bar plot over time)
ggplot(sentiment_data, aes(x = date, y = score, fill = sentiment_label)) +
  geom_col() +
  scale_fill_manual(values = c("Positive" = "green", "Negative" = "red", "Neutral" = "gray")) +
  labs(title = "Sentiment Trend Over Time", x = "Date", y = "Sentiment Score") +
  theme_minimal()

# Distribution visualization (pie chart, one slice per labelled date)
ggplot(sentiment_data, aes(x = "", fill = sentiment_label)) +
  geom_bar(width = 1) +
  coord_polar("y") +
  theme_void() +
  labs(title = "Overall Sentiment Distribution")
# Market-basket analysis with the Apriori algorithm (arules package).

# Install and load the required package
install.packages("arules")
library(arules)

# Built-in grocery transaction dataset
data("Groceries")

# Mine frequent itemsets at 1% minimum support
itemsets <- apriori(Groceries, parameter = list(supp = 0.01, target = "frequent itemsets"))

# Mine association rules (1% support, 50% confidence)
basket_rules <- apriori(Groceries, parameter = list(supp = 0.01, confidence = 0.5))

# Rank rules from strongest to weakest lift
rules_by_lift <- sort(basket_rules, by = "lift", decreasing = TRUE)

# Show the ten most frequent itemsets and the ten strongest rules
inspect(head(itemsets, 10))
inspect(head(rules_by_lift, 10))
# Logistic regression: setosa vs non-setosa on the iris data (glm + caret).

# Install the 'caret' package (only run once; comment out if already installed)
install.packages("caret")

# caret supplies createDataPartition() and confusionMatrix()
library(caret)

# Built-in iris dataset
data(iris)

# Binary target: 1 for setosa, 0 for everything else
iris$Label <- ifelse(iris$Species == "setosa", 1, 0)

# Drop the multiclass Species column now that Label encodes the target
iris <- iris[, -5]

# Reproducible partitioning
set.seed(123)

# Stratified 80/20 train/test split on the label
part_idx <- createDataPartition(iris$Label, p = 0.8, list = FALSE)
train_set <- iris[part_idx, ]
test_set <- iris[-part_idx, ]

# Fit a binomial GLM (logistic regression) on the training partition
fit <- glm(Label ~ ., data = train_set, family = "binomial")

# Classify test rows: predicted probability above 0.5 maps to class 1
test_pred <- ifelse(predict(fit, test_set, type = "response") > 0.5, 1, 0)

# Confusion matrix over the held-out data
cm <- confusionMatrix(factor(test_pred), factor(test_set$Label))

# Report precision, recall and F1 rounded to two decimals
cat("Precision:", round(cm$byClass["Precision"], 2), "\n")
cat("Recall:", round(cm$byClass["Recall"], 2), "\n")
cat("F1-score:", round(cm$byClass["F1"], 2), "\n")
# K mean clustering
# K-means on the four iris measurements (Species held out), with an elbow
# plot to pick k and a silhouette plot to assess cluster quality.
install.packages(c("ggplot2", "factoextra", "cluster"))

library(ggplot2)
library(factoextra)
library(cluster)

data("iris")
# Standardize the numeric columns; column 5 (Species) is excluded
irisdata <- scale(iris[, -5])

# Reproducible centroid initialization
set.seed(123)

# Elbow plot: total within-cluster sum of squares vs. number of clusters
fviz_nbclust(irisdata, kmeans, method = "wss")

# Final model: k = 3 clusters, best of 25 random starts
model <- kmeans(irisdata, centers = 3, nstart = 25)

# Attach cluster assignments back onto the original data frame
iris$Cluster <- as.factor(model$cluster)

# Cluster centers (in standardized units)
print(model$centers)

# Cluster sizes
table(model$cluster)

# 2-D cluster visualization (fviz_cluster projects onto principal components
# when the data has more than two dimensions)
fviz_cluster(model, data = irisdata)

# Silhouette widths: how well each point fits its assigned cluster
sil <- silhouette(model$cluster, dist(irisdata))

fviz_silhouette(sil)
# SMS spam classification with Naive Bayes over a bag-of-words
# document-term matrix (tm for preprocessing, e1071 for the model).

# Load libraries
library(tm)
library(SnowballC)
library(caret)
library(e1071)

# Load and prepare data
# NOTE(review): this remote CSV URL may no longer be available — verify the
# dataset location before running.
sms_data <- read.csv("https://raw.githubusercontent.com/jbrownlee/Datasets/master/sms_spam.csv", stringsAsFactors = FALSE)
colnames(sms_data) <- c("Label", "Message")
sms_data$Label <- factor(sms_data$Label, levels = c("ham", "spam"))

# Clean and preprocess text: lowercase, strip punctuation/numbers/stop words,
# stem, and collapse whitespace
corpus <- VCorpus(VectorSource(sms_data$Message))
corpus <- tm_map(corpus, content_transformer(tolower))
corpus <- tm_map(corpus, removePunctuation)
corpus <- tm_map(corpus, removeNumbers)
corpus <- tm_map(corpus, removeWords, stopwords("english"))
corpus <- tm_map(corpus, stemDocument)
corpus <- tm_map(corpus, stripWhitespace)

# Create Document-Term Matrix (one row per message, one column per term)
dtm <- DocumentTermMatrix(corpus)
dtm_df <- as.data.frame(as.matrix(dtm))
dtm_df$Label <- sms_data$Label

# Split into training and testing sets (stratified 80/20)
set.seed(123)
split_index <- createDataPartition(dtm_df$Label, p = 0.8, list = FALSE)
train_data <- dtm_df[split_index, ]
test_data <- dtm_df[-split_index, ]

# Separate features and labels (Label is the last column)
x_train <- train_data[, -ncol(train_data)]
y_train <- train_data$Label
x_test <- test_data[, -ncol(test_data)]
y_test <- test_data$Label

# Train Naive Bayes model and predict
nb_model <- naiveBayes(x_train, y_train)
predictions <- predict(nb_model, x_test)

# Evaluate performance
conf_mat <- confusionMatrix(predictions, y_test)
print(conf_mat)
cat("Accuracy:", round(conf_mat$overall["Accuracy"] * 100, 2), "%\n")
# KNN on iris: min-max scale the features, sweep odd k values, plot the
# accuracy curve, then fit a final model at k = 5.

# Load packages
library(class)
library(ggplot2)
library(caret)

# Min-max scale each feature to [0, 1] so no dimension dominates the distance
rescale <- function(x) (x - min(x)) / (max(x) - min(x))
iris_norm <- as.data.frame(lapply(iris[1:4], rescale))
iris_norm$Species <- iris$Species

# Reproducible, stratified 80/20 split
set.seed(123)
split_idx <- createDataPartition(iris_norm$Species, p = 0.8, list = FALSE)
train_X <- iris_norm[split_idx, 1:4]
test_X <- iris_norm[-split_idx, 1:4]
train_Y <- iris_norm[split_idx, 5]
test_Y <- iris_norm[-split_idx, 5]

# Accuracy (%) of a single KNN fit at a given k
knn_accuracy <- function(k) mean(knn(train_X, test_X, train_Y, k) == test_Y) * 100

# Sweep odd k values from 1 to 19
k_grid <- seq(1, 20, 2)
accuracies <- sapply(k_grid, knn_accuracy)
acc_table <- data.frame(K = k_grid, Accuracy = accuracies)
print(acc_table)

# Plot accuracy vs. K
ggplot(acc_table, aes(K, Accuracy)) +
  geom_line(color = "blue") + geom_point(color = "red") +
  labs(title = "KNN Accuracy vs. K", x = "K", y = "Accuracy (%)") +
  theme_minimal()

# Final model with optimal K
final_pred <- knn(train_X, test_X, train_Y, k = 5)
print(confusionMatrix(final_pred, test_Y))
# Decision tree on iris (rpart) with evaluation and a decision-boundary plot
# over the Sepal plane.

# Load required packages
library(rpart)
library(rpart.plot)
library(ggplot2)
library(caret)

# Prepare data: stratified 80/20 split
data(iris)
set.seed(123)
index <- createDataPartition(iris$Species, p = 0.8, list = FALSE)
train <- iris[index, ]; test <- iris[-index, ]

# Train decision tree on all four predictors
model <- rpart(Species ~ ., data = train, method = "class")
rpart.plot(model, main = "Decision Tree", extra = 104)

# Predict and evaluate on the held-out set
pred <- predict(model, test, type = "class")
print(confusionMatrix(pred, test$Species))

# Visualize decision boundaries (for Sepal features)
grid <- expand.grid(
  Sepal.Length = seq(min(iris$Sepal.Length), max(iris$Sepal.Length), 0.1),
  Sepal.Width = seq(min(iris$Sepal.Width), max(iris$Sepal.Width), 0.1)
)
# BUG FIX: predict.rpart requires every variable the fitted tree splits on to
# be present in newdata; the iris tree splits on the Petal measurements, so a
# Sepal-only grid raised an error.  Hold the Petal features at their training
# medians so the grid shows the boundary over the Sepal plane at typical
# Petal values.
grid$Petal.Length <- median(train$Petal.Length)
grid$Petal.Width <- median(train$Petal.Width)
grid$Species <- predict(model, newdata = grid, type = "class")

ggplot(iris, aes(Sepal.Length, Sepal.Width, color = Species)) +
  geom_point() +
  geom_tile(data = grid, aes(fill = Species), alpha = 0.2) +
  labs(title = "Decision Tree Boundaries (Sepal Features)") +
  theme_minimal()
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":xero_pride::house_cupcake::rainbow::pink-heart: What's On!  :xero_pride::house_cupcake::rainbow::pink-heart:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Good morning Brisbane! Please see below for what's on this week."
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-9: Monday, 9th June",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n:coffee: *Café Partnership*: Enjoy free coffee and café-style beverages from our partner, *Edward*. \n\n :lunch: *Lunch*: from *12pm* in the kitchen."
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-11: Wednesday, 11th June",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":coffee: *Café Partnership*: Enjoy coffee and café-style beverages from our partner, *Edward*. \n\n :late-cake: *Morning Tea*: from *10am* in the kitchen."
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-13: Friday, 13th June",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":rainbow: :pink-heart: #rainbow-x and the WX Team are gearing up for our *Pride Social* on *Friday 13th June!* Join us for a colourful evening filled with delicious food and drinks. Make sure you wear lots of colour to celebrate with us! :pink-heart::rainbow:"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "*LATER THIS MONTH:*"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":blob-party: *27th June:* Social Happy Hour: Wind down over some drinks & nibbles with your work pals!"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Stay tuned to this channel for more details, check out the <https://calendar.google.com/calendar/u/0?cid=Y19uY2M4cDN1NDRsdTdhczE0MDhvYjZhNnRjb0Bncm91cC5jYWxlbmRhci5nb29nbGUuY29t|*Brisbane Social Calendar*>, and get ready to Boost your workdays!\n\nLove,\nWX Team :party-wx:"
			}
		}
	]
}
The landscape of iOS app development is continually evolving, driven by Apple's consistent innovations in hardware and software. Developers are constantly adopting new tools and paradigms to build more sophisticated, intuitive, and secure applications.

Here are some of the latest trends and technologies shaping the iOS app development process:

◦ SwiftUI's Continued Dominance: Apple's declarative UI framework, SwiftUI, is maturing rapidly. It allows developers to build user interfaces across all Apple platforms (iOS, iPadOS, macOS, watchOS, tvOS, and even visionOS) with less code and in a more intuitive way. Its integration with Xcode previews streamlines the design process.

◦ Artificial Intelligence (AI) and Machine Learning (ML) Integration: Core ML, Create ML, and advancements in the Neural Engine allow developers to embed powerful AI/ML capabilities directly into apps. This enables features like intelligent recommendations, advanced image recognition, natural language processing, and smarter personal assistants, often with enhanced privacy as processing occurs on-device.

◦ Augmented Reality (AR) with ARKit: AR experiences continue to become more immersive and integrated into various app categories, from gaming and retail (virtual try-ons) to education and healthcare. ARKit's ongoing enhancements provide developers with robust tools to create compelling AR content.

◦ Enhanced Privacy and Security: Apple's strong emphasis on user privacy remains a core trend. Features like App Tracking Transparency (ATT) and Passkeys are pushing developers to build apps with privacy-by-design, focusing on transparent data handling and secure authentication.

◦ Spatial Computing and VisionOS: With the advent of Apple Vision Pro, spatial computing is becoming a significant area for developers. While still nascent, creating apps that seamlessly blend digital content with the real world or offer fully immersive experiences represents a new frontier for iOS developers.

◦ Swift Concurrency and Performance Optimization: The adoption of Swift's structured concurrency features (async/await, Actors) is improving the performance and reliability of complex iOS applications by simplifying asynchronous code execution and preventing common concurrency bugs.

◦ Widgets, Live Activities, and App Clips: Expanding beyond the main app, developers are leveraging widgets for at-a-glance information, Live Activities for real-time updates directly on the Lock Screen or Dynamic Island, and App Clips for lightweight, on-demand app experiences without full downloads.

These trends collectively aim to deliver more personal, intelligent, and interconnected user experiences across the Apple ecosystem.

Looking to incorporate these cutting-edge technologies into your next project? Appticz is an innovative iOS app development company for entrepreneurs, applying the latest trends and technologies to build responsive solutions that drive exceptional user engagement and business growth.
SELECT Id,
       PermissionsRead,
       PermissionsEdit,
       SobjectType,
       Field,
       Parent.Type,
       Parent.Name,
       Parent.PermissionSetGroup.DeveloperName,
       Parent.Profile.Name
FROM FieldPermissions
WHERE Field = 'Object__c.Field__c'
-- Per-merchant (MID) monthly velocity limits, pivoted by payment instrument
-- and tagged with the merchant's channel (Online / EDC / QR).
create table team_kingkong.mid_limits_shivam as (
WITH latest_limits AS (
    -- Most recent limit per (merchant, instrument) across both limit sources.
    SELECT
        merchantid,
        identifier,
        maxamtpermonth,
        ROW_NUMBER() OVER (PARTITION BY merchantid, identifier ORDER BY modifieddate DESC) AS rn
    FROM (
        SELECT
            merchantid,
            identifier,
            maxamtpermonth,
            -- Normalise the wall-clock modifieddate to epoch milliseconds
            -- (offset by 05:30 — presumably IST; confirm) so rows from both
            -- sources order consistently in the window function above.
            (DATE_DIFF('millisecond', TIMESTAMP '1970-01-01 5:30:00', CAST(modifieddate AS TIMESTAMP))) AS modifieddate
        FROM merchant_velocity.instrument_historic_data_snapshot_v3
        WHERE dl_last_updated >= DATE '2010-01-01'

        UNION ALL

        SELECT
            merchantid,
            identifier,
            maxamtpermonth,
            modifieddate
        FROM TP_S_2022_MD_EVENTLOG_001.TP_S_2022_MD_EVENTLOG_001_snapshot_v3
        WHERE dl_last_updated >= DATE '2010-01-01'
    )
),

pivoted_limits AS (
    -- One row per merchant, one column per instrument-level monthly limit.
    -- 'PER_MID' is the merchant-wide (overall) cap.
    SELECT
        merchantid,
        MAX(CASE WHEN identifier = 'UPI_CC' THEN maxamtpermonth END) AS UPI_CC_limit,
        MAX(CASE WHEN identifier = 'UPI' THEN maxamtpermonth END) AS UPI_limit,
        MAX(CASE WHEN identifier = 'CC' THEN maxamtpermonth END) AS CC_limit,
        MAX(CASE WHEN identifier = 'DC' THEN maxamtpermonth END) AS DC_limit,
        MAX(CASE WHEN identifier = 'UPI_CREDITLINE' THEN maxamtpermonth END) AS UPI_CREDITLINE_limit,
        MAX(CASE WHEN identifier = 'PER_MID' THEN maxamtpermonth END) AS overall_limit
    FROM latest_limits
    WHERE rn = 1
    GROUP BY merchantid
),

merchant_types AS (
    -- Channel classification: Online if present in online_payment_merchants,
    -- else EDC if it has an ACTIVE EDC terminal, else QR.
    SELECT
        v1.merchantid,
        CASE
            WHEN o_mid IS NOT NULL THEN 'Online'
            WHEN e_mid IS NOT NULL THEN 'EDC'
            ELSE 'QR'
        END AS EDC_QR
    FROM pivoted_limits v1
    LEFT JOIN (
        SELECT DISTINCT merchant_id AS o_mid
        FROM datalake.online_payment_merchants
    ) m_3 ON v1.merchantid = m_3.o_mid
    LEFT JOIN (
        SELECT DISTINCT mid AS e_mid
        FROM paytmpgdb.entity_edc_info_snapshot_v3
        WHERE terminal_status = 'ACTIVE'
        AND dl_last_updated >= DATE '2010-01-01'
    ) m_4 ON v1.merchantid = m_4.e_mid
)

SELECT
    p.merchantid,
    m.EDC_QR,
    -- Limits appear to be stored in minor currency units (paise); /100
    -- converts to major units — confirm against the source table's units.
    CAST(p.UPI_CC_limit AS double)/100 AS UPI_CC_limit,
    CAST(p.UPI_limit AS double)/100 AS UPI_limit,
    CAST(p.CC_limit AS double)/100 AS CC_limit,
    CAST(p.DC_limit AS double)/100 AS DC_limit,
    CAST(p.UPI_CREDITLINE_limit AS double)/100 AS UPI_CREDITLINE_limit,
    CAST(p.overall_limit AS double)/100 AS overall_limit
FROM pivoted_limits p
INNER JOIN merchant_types m ON p.merchantid = m.merchantid);
function updateSelect(e) {
    // Show/hide the cadence and elevation form rows according to the
    // workout type chosen in the <select> that fired this change event.
    const { target } = e;
    const value = target.value;

    const cadenceRow = inputCadence.closest(".form__row");
    const elevationRow = inputElevation.closest(".form__row");

    // Reset: make both rows visible before hiding the irrelevant one.
    cadenceRow.classList.remove("form__row--hidden");
    elevationRow.classList.remove("form__row--hidden");

    // Maps the selected value to the row that gets hidden.
    // NOTE(review): "running" hides the cadence row and "cycling" hides the
    // elevation row — confirm this mapping is intended and not inverted.
    const rowToHide = {
      cycling: elevationRow,
      running: cadenceRow,
    };

    // Guard: an unrecognised option value previously threw a TypeError
    // (reading 'classList' of undefined); now both rows simply stay visible.
    if (rowToHide[value]) {
      rowToHide[value].classList.add("form__row--hidden");
    }
  }
-- RISK 306
-- If payer account and payee vpa count of p2p transactions in previous 24 hours is more than equal to 10 then BLOCK
-- NOTE(review): the header above says "p2p" and a threshold of 10, but the
-- code below filters category = 'VPA2MERCHANT' (P2M), uses threshold = 15 and
-- a strict ">" comparison — confirm which specification is current.
DROP TABLE team_kingkong.tpap_risk306_breaches;

CREATE TABLE team_kingkong.tpap_risk306_breaches AS
-- INSERT INTO team_kingkong.tpap_risk306_breaches
-- tpap_base: successful Paytm-handle P2M UPI transactions in the window, one
-- row per txn with payer/payee VPA, amount, time, category and UPI subtype.
with tpap_base as
(SELECT DISTINCT B.*, C.category
-- Subtype: prefer the risk engine's evaluationType; fall back to
-- 'UPI_LITE_MANDATE' for LITE_MANDATE category, else empty string.
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
FROM
    -- B: pivot participant rows into payer/payee columns, one row per txn.
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    -- Window starts one day early so the 24h lookback is complete on day one.
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-07-01' - INTERVAL '1' DAY) AND DATE'2025-08-05'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-07-01' - INTERVAL '1' DAY) AND DATE'2025-08-05'
    GROUP BY 1)B
inner join
    -- C: keep only successful merchant-directed (VPA2MERCHANT) transactions.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-07-01' - INTERVAL '1' DAY) AND DATE'2025-08-05'
    and DATE(created_on) BETWEEN DATE(DATE'2025-07-01' - INTERVAL '1' DAY) AND DATE'2025-08-05'
    and upper(status) = 'SUCCESS' AND category = 'VPA2MERCHANT') C
on B.txn_id = C.txn_id
INNER JOIN
    -- D: risk-engine request log — Paytm-handle PERSON->ENTITY UPI
    -- transactions that the engine did NOT already recommend to BLOCK.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(date'2025-07-01' - INTERVAL '1' DAY) AND DATE'2025-08-05'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'ENTITY'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
WHERE ((payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%'))
-- Excludes one specific high-volume collection VPA.
AND payee_vpa LIKE '%@%' AND payee_vpa <> 'onpaytmgas@paytm')
 
-- Breach detection: for each txn, count DISTINCT other txns by the same
-- payer/payee pair in the preceding 24 hours; flag when count > threshold.
SELECT *, 'upi_payer_payee_combination_txn_count_p2m' AS rule_name, '24hr txn cnt threshold breach' as breach_reason FROM
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.txn_date,
      COUNT(DISTINCT t2.txn_id) AS prior_txns_last_24h,
      15 as threshold
    FROM tpap_base t1
    -- Self-join: t2 ranges over the same pair's txns in (t1.txn_time - 24h, t1.txn_time].
    INNER JOIN tpap_base t2
      ON t1.payer_vpa = t2.payer_vpa
      AND t1.payee_vpa = t2.payee_vpa
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '86400' SECOND) AND t1.txn_time
      AND t1.txn_id <> t2.txn_id AND t1.txn_date BETWEEN DATE'2025-07-01' AND DATE'2025-08-05'
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, t1.txn_date)
WHERE prior_txns_last_24h > threshold
;
-- RISK 005: flag transactions whose payer mobile number made >= 50 prior
-- successful txns in the preceding 24 hours (March 2025 backfill window).
INSERT INTO team_kingkong.tpap_risk005_breaches
-- tpap_base: successful Paytm-handle UPI transactions in the window, one row
-- per txn with payer/payee VPA, payer mobile, amount, time and subtype.
with tpap_base as
(SELECT DISTINCT B.*, C.category
-- Subtype: prefer the risk engine's evaluationType; fall back to
-- 'UPI_LITE_MANDATE' for LITE_MANDATE category, else empty string.
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
FROM
    -- B: pivot participant rows into payer/payee columns, one row per txn.
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(CASE WHEN participant_type = 'PAYER' THEN mobile_no END) AS payer_mobile_no,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    -- Window starts one day early so the 24h lookback is complete on day one.
    WHERE DATE(dl_last_updated) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    AND DATE(created_on) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    GROUP BY 1)B
inner join
    -- C: successful transactions only (all categories, unlike RISK 306).
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    and DATE(created_on) BETWEEN DATE(DATE'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    and upper(status) = 'SUCCESS') C
on B.txn_id = C.txn_id
INNER JOIN
    -- D: risk-engine request log — Paytm-handle PERSON payers whose txns the
    -- engine did NOT already recommend to BLOCK.
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE(date'2025-03-01' - INTERVAL '1' DAY) AND DATE'2025-03-31'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
WHERE (payer_vpa LIKE '%@paytm%') OR (payer_vpa LIKE '%@pt%'))
 
-- Breach detection: per txn, count other txns by the SAME payer mobile number
-- in the preceding 24 hours; flag when count >= threshold (50).
-- NOTE(review): COUNT here is not DISTINCT, unlike the RISK 306 script —
-- confirm whether duplicate join matches should be counted once.
SELECT * FROM
    (SELECT t1.payer_vpa,
      t1.payee_vpa,
      t1.payer_mobile_no,
      t1.txn_id,
      t1.txn_amount,
      t1.category,
      t1.upi_subtype,
      t1.txn_time,
      t1.txn_date,
      COUNT(t2.txn_id) AS prior_txns_last_24h,
      50 as threshold
    FROM tpap_base t1
    -- Self-join: t2 ranges over the same mobile's txns in (t1.txn_time - 24h, t1.txn_time].
    INNER JOIN tpap_base t2
      ON t1.payer_mobile_no = t2.payer_mobile_no
      AND t2.txn_time BETWEEN (t1.txn_time - INTERVAL '86400' SECOND) AND t1.txn_time
      AND t1.txn_id <> t2.txn_id
      AND t1.txn_date BETWEEN DATE'2025-03-01' AND DATE'2025-03-31'
    GROUP BY t1.payer_vpa, t1.payee_vpa, t1.payer_mobile_no, t1.txn_id, t1.txn_amount, t1.category, t1.upi_subtype, t1.txn_time, t1.txn_date)
WHERE prior_txns_last_24h >= threshold
;
-- TPAP: RSIK_127
-- RISK 127: flag successful credit-account UPI txns by PERSON payers where
-- the payer name equals the payee name (apparent self-payment on credit).
-- DROP TABLE team_kingkong.tpap_risk127_breaches;
 
-- CREATE TABLE team_kingkong.tpap_risk127_breaches AS
INSERT INTO team_kingkong.tpap_risk127_breaches
SELECT B.*, C.category
-- Subtype: prefer the risk engine's evaluationType; fall back to
-- 'UPI_LITE_MANDATE' for LITE_MANDATE category, else empty string.
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, payerAccountType
, payerType
, payerName
, payeeName
, 'payerAccountName == payeeAccountName' AS breach_reason FROM
    -- B: pivot participant rows into payer/payee columns, one row per txn.
    -- NOTE(review): txn_date here is MAX(created_on) — a full timestamp,
    -- unlike sibling scripts that use MAX(DATE(created_on)); confirm intended.
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(created_on) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-07-16' AND DATE'2025-07-20'
    AND DATE(created_on) BETWEEN DATE'2025-07-16' AND DATE'2025-07-20'
    AND vpa IS NOT NULL
    GROUP BY 1)B
inner join
    -- C: successful transactions only.
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-07-16' AND DATE'2025-07-20'
    and DATE(created_on) BETWEEN DATE'2025-07-16' AND DATE'2025-07-20'
    and upper(status) = 'SUCCESS') C
on B.txn_id = C.txn_id
inner JOIN
    -- D: risk-engine request log — Paytm-handle credit-account PERSON payers
    -- whose payee name matches the payer name (case-insensitive) and whose
    -- txn the engine did NOT already recommend to BLOCK.
    (SELECT DISTINCT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerAccountType') as varchar), '"', '')) as payerAccountType
    , lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') as varchar), '"', '')) as payerType
    , regexp_replace(cast(json_extract(request, '$.requestPayload.payerName') as varchar), '"', '') as payerName
    , regexp_replace(cast(json_extract(request, '$.requestPayload.payeeName') as varchar), '"', '') as payeeName
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN date'2025-07-16' AND DATE'2025-07-20'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerAccountType') as varchar), '"', '')) = 'credit'
    AND lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') as varchar), '"', '')) = 'person'
    AND lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payeeName') as varchar), '"', '')) = lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerName') as varchar), '"', ''))
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK')D
ON B.txn_id = D.txnid;
import os
from PIL import Image
from tkinter import Tk, filedialog

def process_image(input_path, output_folder, quality=75):
    """Convert one image to a compressed JPEG in ``output_folder``.

    Args:
        input_path: Path of the source image file.
        output_folder: Directory where the compressed ``.jpg`` is written.
        quality: JPEG quality (1-95); passed straight to Pillow.
    """
    filename = os.path.basename(input_path)
    name, ext = os.path.splitext(filename)
    ext = ext.lower()

    # Output is always .jpg, regardless of the source format.
    output_path = os.path.join(output_folder, f"{name}.jpg")

    with Image.open(input_path) as img:
        if ext not in ('.jpg', '.jpeg'):
            # Non-JPEG sources (e.g. RGBA PNGs) must be RGB before JPEG save.
            img = img.convert("RGB")
        img.save(output_path, format='JPEG', quality=quality, optimize=True)

    # Bug fix: the original printed a literal "(unknown)" placeholder instead
    # of the source filename.
    print(f"Processed: {filename} → {output_path}")

def compress_images_in_folder(folder_path, quality=75):
    """Convert and compress every supported image in ``folder_path``.

    Results are written as JPEGs into a ``compressed_jpeg`` subfolder,
    which is created if it does not exist yet.
    """
    output_folder = os.path.join(folder_path, "compressed_jpeg")
    os.makedirs(output_folder, exist_ok=True)

    supported = ('.jpg', '.jpeg', '.png', '.bmp', '.tiff', '.webp')
    for entry in os.listdir(folder_path):
        if not entry.lower().endswith(supported):
            continue  # skip non-image files
        process_image(os.path.join(folder_path, entry), output_folder, quality)

    print(f"\n✅ Finished. Compressed images saved in: {output_folder}")

def choose_folder():
    """Open a directory-picker dialog and return the chosen path.

    Returns an empty string if the user cancels the dialog.
    """
    picker_root = Tk()
    picker_root.withdraw()  # hide the empty Tk root window behind the dialog
    chosen = filedialog.askdirectory(title="Select folder with images to convert & compress")
    return chosen

if __name__ == "__main__":
    # Entry point: ask the user for a folder, then compress its images.
    selected_folder = choose_folder()
    if not selected_folder:
        print("No folder selected.")
    else:
        compress_images_in_folder(selected_folder, quality=75)
<input type="text" placeholder="Search..."
value="<?php echo isset($_GET['s_q_fulltext']) ? esc_attr($_GET['s_q_fulltext']) : ''; ?>"
name="s_q_fulltext">

  <?php
// Full-text search across post/page titles, content and post-meta values.
$keyword = isset($_GET['s_q_fulltext']) ? sanitize_text_field($_GET['s_q_fulltext']) : '';

global $wpdb;
// Security fix: bind the user-supplied keyword via $wpdb->prepare() instead of
// interpolating it into the SQL string; esc_like() also neutralises any %/_
// wildcards the user typed so they match literally.
$like = '%' . $wpdb->esc_like($keyword) . '%';
$sql = $wpdb->prepare(
  "
  SELECT DISTINCT p.ID
  FROM {$wpdb->posts} p
  LEFT JOIN {$wpdb->postmeta} pm ON p.ID = pm.post_id
  WHERE p.post_type IN ('post', 'page')
  AND p.post_status = 'publish'
  AND (
    p.post_title LIKE %s OR
    p.post_content LIKE %s OR
    pm.meta_value LIKE %s
  )
  ",
  $like,
  $like,
  $like
);

$post_ids = $wpdb->get_col($sql);

// Bug fix: WP_Query silently IGNORES an empty post__in array and would return
// ALL posts when the search matched nothing; force an impossible ID instead.
$args = array(
  'post_type' => ['post', 'page'],
  'post__in' => !empty($post_ids) ? $post_ids : array(0),
);

  $search_query = new WP_Query($args);

?>

  <?php if (!empty($keyword)): ?>
    <?php if ($search_query->have_posts()): ?>


      <div data-pad="2" class="me-block me-PanelCol search-height me-max-width"
id="UwYEobJ5xSOSFJC5JLMkxXA" data-mod="MEBuild2.ParallaxScroll" data-opt="{}">
  <div class="underlay"></div>
<div class="overlay"></div>
<div class="row me-max-width collapse">
  <div class="column me-iwrap small-12 medium-order-1">
    <div class="overlay"></div>
<div class="underlay"></div>
<div data-pad="0" class="me-block me-SearchSolrFilterResults"
id="U53w2SU9WSFSjovg3pydCww">
  <div class="row collapse me-max-width small-up-1">

    <?php while ($search_query->have_posts()):
    $search_query->the_post(); ?>
      <?php get_template_part('template-parts/content', 'search'); ?>
        <?php endwhile; ?>
          <?php the_posts_navigation(); ?>
            <?php wp_reset_postdata(); ?>

              </div>
</div>
</div>
</div>
</div>
<?php else: ?>
  <?php get_template_part('template-parts/content', 'none'); ?>
    <?php endif;
star

Tue Jun 10 2025 06:31:24 GMT+0000 (Coordinated Universal Time) https://www.hivelance.com/pump-fun-clone-script

@stevejohnson #pumpfun clone #pumpfun clone script #pumpfun clone script development #pumpfun platform clone

star

Tue Jun 10 2025 06:30:32 GMT+0000 (Coordinated Universal Time) https://www.hivelance.com/paypal-clone-script

@stevejohnson #paypalclone script #paypalapp clone #paypalclone software

star

Tue Jun 10 2025 05:49:41 GMT+0000 (Coordinated Universal Time)

@divyasoni23 #css

star

Tue Jun 10 2025 05:49:11 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Tue Jun 10 2025 05:44:50 GMT+0000 (Coordinated Universal Time)

@Taimoor

star

Tue Jun 10 2025 01:01:30 GMT+0000 (Coordinated Universal Time)

@bobby #python

star

Mon Jun 09 2025 20:28:40 GMT+0000 (Coordinated Universal Time)

@mastaklance

star

Mon Jun 09 2025 18:48:20 GMT+0000 (Coordinated Universal Time) https://voz.vn/t/nu-sinh-bi-đut-lia-1-chan-sau-va-cham-voi-xe-sang-bmw.1106546/page-53

@abcabcabc

star

Mon Jun 09 2025 13:26:25 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 13:00:00 GMT+0000 (Coordinated Universal Time) https://appticz.com/taxi-booking-app-development

@davidscott

star

Mon Jun 09 2025 11:37:00 GMT+0000 (Coordinated Universal Time) https://myrosmol.ru/participants

@tone3

star

Mon Jun 09 2025 09:40:25 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 09:40:24 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 08:44:45 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 07:10:36 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 07:09:52 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Mon Jun 09 2025 02:31:59 GMT+0000 (Coordinated Universal Time)

@FOHWellington

star

Mon Jun 09 2025 02:06:33 GMT+0000 (Coordinated Universal Time)

@mamba

star

Sun Jun 08 2025 21:14:44 GMT+0000 (Coordinated Universal Time)

@P1827056G

star

Sun Jun 08 2025 21:08:03 GMT+0000 (Coordinated Universal Time)

@P1827056G

star

Sun Jun 08 2025 21:04:30 GMT+0000 (Coordinated Universal Time)

@P1827056G

star

Sun Jun 08 2025 20:18:54 GMT+0000 (Coordinated Universal Time)

@FOHWellington

star

Sun Jun 08 2025 18:03:57 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 18:02:32 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 18:01:43 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 18:00:37 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 17:59:31 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 17:54:22 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sun Jun 08 2025 17:53:10 GMT+0000 (Coordinated Universal Time)

@wayneinvein

star

Sat Jun 07 2025 06:14:23 GMT+0000 (Coordinated Universal Time) https://www.beleaftechnologies.com/mev-bot-development-company

@steeve #mev

star

Fri Jun 06 2025 13:59:32 GMT+0000 (Coordinated Universal Time) https://www.roblox.com/users/3503843893/profile

@Gay

star

Fri Jun 06 2025 13:58:21 GMT+0000 (Coordinated Universal Time) https://www.roblox.com/users/3196979185/profile

@Gay

star

Fri Jun 06 2025 13:55:48 GMT+0000 (Coordinated Universal Time) https://www.roblox.com/users/3196979185/profile

@Gay

star

Fri Jun 06 2025 13:03:13 GMT+0000 (Coordinated Universal Time) https://cryptocurrency-exchange-development-company.com/

@raydensmith

star

Fri Jun 06 2025 02:09:50 GMT+0000 (Coordinated Universal Time)

@FOHWellington

star

Thu Jun 05 2025 12:18:14 GMT+0000 (Coordinated Universal Time) https://www.rankup365.com/home-services-seo/painters

@whites9

star

Thu Jun 05 2025 12:04:42 GMT+0000 (Coordinated Universal Time) https://www.coinsclone.com/business-benefits-of-starting-a-crypto-exchange/

@CharleenStewar #businessbenefits of cryptocurrency exchange #benefits of cryptocurrency exchange

star

Thu Jun 05 2025 12:04:35 GMT+0000 (Coordinated Universal Time) https://www.addustechnologies.com/p2p-crypto-exchange-software

@Seraphina

star

Thu Jun 05 2025 11:34:06 GMT+0000 (Coordinated Universal Time) https://wisewaytec.com/blockchain-development-company/

@snehawt15

star

Thu Jun 05 2025 10:46:27 GMT+0000 (Coordinated Universal Time) https://appticz.com/ios-app-development-company

@aditi_sharma_

star

Thu Jun 05 2025 10:02:48 GMT+0000 (Coordinated Universal Time)

@dannygelf #salesforce #permissions #soql

star

Thu Jun 05 2025 09:21:49 GMT+0000 (Coordinated Universal Time)

@Shivam3.tyagi

star

Thu Jun 05 2025 08:02:15 GMT+0000 (Coordinated Universal Time)

@davidmchale #mapping #select

star

Thu Jun 05 2025 07:41:07 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Thu Jun 05 2025 07:40:25 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Thu Jun 05 2025 06:02:06 GMT+0000 (Coordinated Universal Time)

@Pulak

star

Thu Jun 05 2025 05:29:42 GMT+0000 (Coordinated Universal Time)

@shubhangi.b

star

Wed Jun 04 2025 22:14:56 GMT+0000 (Coordinated Universal Time)

@vjg #python

star

Wed Jun 04 2025 10:58:48 GMT+0000 (Coordinated Universal Time)

@chitss2610

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension