Snippets Collections
#Setting the working directory, loading the RNA-Seq counts, and filtering low-count genes

setwd("//files.wustl.edu/Shares/DOM/ONC/Hirbe_Lab/Diana/UBR5 KO RNASeq/analysis/HOM VS WT_2")
Counts <- read.csv("Counts.csv")
# Remove duplicate rows from Counts
Counts <- Counts[!duplicated(Counts[, 1]), ]
rownames(Counts) <- Counts[, 1]
Counts<- Counts [, -1]

# Calculate row means
row_means <- rowMeans(Counts)

# Order genes by row means in descending order
ordered_counts <- Counts[rev(order(row_means)), ]
#alternative code to the above is (ordered_counts <- Counts[order(row_means, decreasing = TRUE), ])
# Filter out rows with row means less than 10
filtered_counts <- ordered_counts[rowMeans(ordered_counts) >= 10, ]
#save filtered data frame
write.csv(filtered_counts, "filtered_counts.csv")
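
#Optional sanity check (a minimal sketch using only the objects created above, and assuming all remaining columns are numeric counts)
dim(Counts)               # genes x samples before filtering
dim(filtered_counts)      # genes x samples after filtering
colSums(filtered_counts)  # per-sample library size after filtering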

#Prepare metadata describing the experimental condition for each sample (column) 
metadata <- data.frame( 
  
  sample_id = colnames(filtered_counts),  # Assuming you have loaded the filtered expression data 
  
  condition = c(rep("UBR5 WT", 3), rep("UBR5 HOM", 3)),  # Treatment conditions 
  
  replicate = c(1, 2, 3, 1, 2, 3)  # Sample replicates 
  
) 

metadata$condition <- factor(metadata$condition, levels = c("UBR5 WT", "UBR5 HOM"))

#Load DESEQ2 for normalization
library(DESeq2) 

#Use the DESeqDataSetFromMatrix function from DESeq2 to create a DESeqDataSet object
dds <- DESeqDataSetFromMatrix(countData = filtered_counts,
                              colData = metadata,
                              design = ~ condition)



#Normalization of RNA-Seq data
#Use the DESeq() function to perform normalization and estimate dispersions.

dds <- DESeq(dds)
results <- results(dds, alpha = 0.05)
DEGs <- subset(results, abs(log2FoldChange) > 1 & padj < 0.05)


#save the de_genes data frame
write.csv(DEGs, file = "DEG_HOM_VS_WT.csv")
write.csv(results, file = "DeseqResults_HOM_VS_WT.csv")
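
#Optional quick overview before moving on (a minimal sketch using the objects created above): summary() on a DESeqResults object tabulates up- and down-regulated genes at the chosen alpha
summary(results)
nrow(as.data.frame(DEGs))  # number of genes passing |log2FC| > 1 and padj < 0.05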



#Visualizing your differentially expressed genes

#create volcano plot
library(ggplot2)

# Add column to classify genes as DEG or not
results_df <- as.data.frame(results)
results_df$gene <- rownames(results_df)
results_df$threshold <- "Unchanged"
results_df$threshold[results_df$padj < 0.05 & abs(results_df$log2FoldChange) > 1] <- "DEG"


library(ggrepel)


# Volcano plot
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black")


# Select top 100 significant genes by padj
top100 <- results_df[order(results_df$padj), ][1:100, ]

# Volcano plot
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(data = top100,
                  aes(label = gene),
                  size = 3,
                  max.overlaps = Inf,
                  box.padding = 0.3,
                  point.padding = 0.2,
                  segment.color = "grey50")

write.csv(top100, file = "Top100_HOM_vs_WT.csv", row.names = FALSE)


#Querying genes of interest, i.e. UBR5 substrates as determined from the papers you have read

# Define genes of interest
genes_of_interest <- c("Egfr", "Hsp90ab1", "Map2k2", "Cerk", "Pdgfra", "Tyk2", "Jak1",
                       "Yap1", "Taz", "Kdr", "Aurka", "Pten", "Csf1r","Ptch1", "Smo", "Gli2", "Gli3", "Wnt10a", "Rac2", "Rspo2", "Apc",
                       "Cd274", "Pdcd1", "Id1", "Id3", "Cdh1", "Cdc73", "Hrpt2","Csf1","Golph3", "Cdk1", "Acsl4", "Ptk2b", "Akt1", "Akt2", "Akt3", "Pik3ca", "Pik3c2a", "Pik3cb" , "Pik3c3", "Pik3c2b", "Pik3cd", "Atmin", "Cdkn1a", "Cdk9", "Rela", "Nfkb1", "Nfkb2", "Capza1", "Stat1", "Stat3", "Irf1", "Irf3")

# Subset DEGs for these genes (case-sensitive match!)
genes_subset <- results_df[rownames(results_df) %in% genes_of_interest, ]

# Save genes of interest with stats
write.csv(genes_subset, file = "GenesOfInterest_HOM_vs_WT.csv", row.names = TRUE)
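
#Because the match above is case-sensitive, it can help to list any genes of interest that were not found among the result rownames (a minimal sketch using the objects above; symbols returned here may simply be spelled or capitalized differently in the count matrix)
missing_genes <- setdiff(genes_of_interest, rownames(results_df))
print(missing_genes)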

ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HET vs WT",
       x = "log2 Fold Change (HET vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(
    data = genes_subset,
    aes(label = gene),
    color = "black",       # <-- force label text to black
    size = 3,
    max.overlaps = Inf,
    box.padding = 0.3,
    point.padding = 0.2,
    segment.color = "grey50"
  )


# Subset DEGs only
DEGs_df <- as.data.frame(DEGs)
DEGs_df$gene <- rownames(DEGs_df)

# Find overlap between DEGs and genes of interest
genes_subset <- DEGs_df[rownames(DEGs_df) %in% genes_of_interest, ]

# Save overlapping genes with stats
write.csv(genes_subset, file = "GenesOfInterest_DEGs_HOM_vs_WT.csv", row.names = TRUE)

# Volcano plot with labels ONLY for genes of interest that are DEGs
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HET vs WT",
       x = "log2 Fold Change (HET vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(
    data = genes_subset,
    aes(label = gene),
    color = "black",       # labels in black
    size = 5,
    max.overlaps = Inf,
    box.padding = 0.3,
    point.padding = 0.2,
    segment.color = "grey50"
  )




#Running pathway enrichment analysis to determine pathways enriched following UBR5 KO

if (!requireNamespace("clusterProfiler", quietly = TRUE)) {
  BiocManager::install("clusterProfiler")
}
if (!requireNamespace("msigdbr", quietly = TRUE)) {
  install.packages("msigdbr")
}
library(clusterProfiler)
library(msigdbr)


# Convert results to dataframe
res_df <- as.data.frame(results)

# Remove NA log2FC
res_df <- res_df[!is.na(res_df$log2FoldChange), ]

# Create named vector: names = gene symbols, values = log2FC
gene_list <- res_df$log2FoldChange
names(gene_list) <- rownames(res_df)

# Sort decreasing for clusterProfiler
gene_list <- sort(gene_list, decreasing = TRUE)
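
#GSEA expects unique gene names; if the count matrix could still contain duplicate symbols, keeping only the first occurrence is one simple option (a minimal sketch; counts were already de-duplicated above, so this is usually a no-op)
gene_list <- gene_list[!duplicated(names(gene_list))]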


# Mouse Hallmark gene sets
hallmark_sets <- msigdbr(species = "Mus musculus", category = "H")  # H = Hallmark

# Use as two-column dataframe: gs_name (pathway), gene_symbol
term2gene <- hallmark_sets[, c("gs_name", "gene_symbol")]


gsea_res <- GSEA(
  geneList = gene_list,
  TERM2GENE = term2gene,  # <- must be dataframe, not list
  pvalueCutoff = 0.1,
  verbose = FALSE
)

# View top pathways
head(as.data.frame(gsea_res))

# Save results
write.csv(as.data.frame(gsea_res), "GSEA_Hallmark_Mouse_HOM_vs_WT.csv", row.names = FALSE)

library(enrichplot)

# Convert GSEA results to dataframe
gsea_df <- as.data.frame(gsea_res)

# Pick one enriched pathway to plot (here the 5th row of the results table; change the index as needed)
top_pathway <- gsea_df$ID[5]  # or use $Description if you prefer

# Classic GSEA plot for the top pathway
gseaplot2(
  gsea_res,
  geneSetID = top_pathway,   # pathway ID
  title = gsea_df$Description[5],  # nice descriptive title
  color = "red"
)
#PLOT HALLMARK PATHWAYS
library(ggplot2)

# Convert GSEA results to dataframe
gsea_df <- as.data.frame(gsea_res)

# Order pathways by NES (normalized enrichment score)
gsea_df <- gsea_df[order(gsea_df$NES, decreasing = TRUE), ]

# Plot ALL enriched pathways
ggplot(gsea_df, aes(x = reorder(Description, NES), y = NES, fill = -log10(p.adjust))) +
  geom_col() +
  coord_flip() +
  labs(
    title = "GSEA: All Enriched Hallmark Pathways",
    x = "Pathway",
    y = "Normalized Enrichment Score (NES)",
    fill = "-log10 adj p-value"
  ) +
  theme_minimal(base_size = 14)
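
#To keep a copy of the barplot, ggsave() writes the most recently displayed ggplot to disk (a minimal sketch; the file name and dimensions are just examples)
ggsave("GSEA_Hallmark_NES_barplot.png", width = 10, height = 8, dpi = 300)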


#plot GSEA KEGG, GO, REACTOME --------------------------
# Prepare ranked gene list with Entrez IDs for GSEA
# --------------------------
library(clusterProfiler)
library(org.Mm.eg.db)
library(ReactomePA)

# Convert gene symbols to Entrez IDs
entrez_map <- bitr(names(gene_list), fromType="SYMBOL", toType="ENTREZID", OrgDb=org.Mm.eg.db)
gene_list_df <- merge(entrez_map, data.frame(log2FC = gene_list), by.x="SYMBOL", by.y="row.names")
gene_list_df <- gene_list_df[!duplicated(gene_list_df$ENTREZID), ]
gene_list_named <- gene_list_df$log2FC
names(gene_list_named) <- gene_list_df$ENTREZID

# Sort decreasing for GSEA
gene_list_named <- sort(gene_list_named, decreasing = TRUE)

# --------------------------
# 1) GSEA: KEGG Pathways
# --------------------------
gsea_kegg <- gseKEGG(
  geneList = gene_list_named,
  organism = "mmu",
  minGSSize = 10,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save KEGG GSEA results
write.csv(as.data.frame(gsea_kegg), "GSEA_KEGG_HOM_vs_WT.csv", row.names = FALSE)

# Visualize the top 30 KEGG pathways
library(enrichplot)
library(ggplot2)

# Convert gseaResult to dataframe to see top pathways
gsea_df <- as.data.frame(gsea_kegg)

# Select top 30 pathways by NES or pvalue
top30 <- gsea_df[order(gsea_df$NES, decreasing = TRUE)[1:30], ]

# Ridgeplot (shows enrichment distribution for multiple pathways)
ridgeplot(gsea_kegg, showCategory = 30) +
  ggtitle("GSEA: KEGG Top 30 Pathways") +
  theme_minimal(base_size = 14)

# Optional: classic GSEA plot for the top pathway
top_pathway <- top30$ID[1]
gseaplot2(gsea_kegg, geneSetID = top_pathway,
          title = top30$Description[1], color = "red")


# --------------------------
# 2) GSEA: GO Biological Process (BP)
# --------------------------
gsea_go_bp <- gseGO(
  geneList = gene_list_named,
  OrgDb = org.Mm.eg.db,
  ont = "ALL",
  keyType = "ENTREZID",
  minGSSize = 10,
  maxGSSize = 500,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save GO BP GSEA results
write.csv(as.data.frame(gsea_go_bp), "GSEA_GO_BP_HOM_vs_WT.csv", row.names = FALSE)

# Top 30 GO BP pathways (dotplot; barplot() does not accept GSEA result objects)
dotplot(gsea_go_bp, showCategory = 30, title = "GSEA: GO BP Top 30 Pathways")

# --------------------------
# 3) GSEA: Reactome Pathways
# --------------------------
gsea_reactome <- gsePathway(
  geneList = gene_list_named,
  organism = "mouse",
  minGSSize = 10,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save Reactome GSEA results
write.csv(as.data.frame(gsea_reactome), "GSEA_Reactome_HOM_vs_WT.csv", row.names = FALSE)

# Top 30 Reactome pathways (dotplot)
dotplot(gsea_reactome, showCategory = 30, title = "GSEA: Reactome Top 30 Pathways")

# --------------------------


library(ggplot2)
library(dplyr)

# Convert GSEA Reactome results to dataframe
gsea_reactome_df <- as.data.frame(gsea_reactome)

# Select top 30 pathways by NES magnitude
top30_reactome <- gsea_reactome_df %>%
  arrange(desc(abs(NES))) %>%
  slice(1:30)

# Reorder for plotting (highest NES on top)
top30_reactome$Description <- factor(top30_reactome$Description, levels = rev(top30_reactome$Description))

# Plot barplot: NES on x-axis, pathways on y-axis, fill by -log10(padj)
ggplot(top30_reactome, aes(x = NES, y = Description, fill = -log10(p.adjust))) +
  geom_bar(stat = "identity") +
  scale_fill_gradient(low = "red", high = "darkred") +
  theme_minimal(base_size = 14) +
  labs(title = "GSEA: Top 30 Reactome Pathways",
       x = "Normalized Enrichment Score (NES)",
       y = "",
       fill = "-log10(adj.p)")



#Querying the expression of genes within a particular pathway you may be interested in, e.g. type I interferon signaling

ifna_genes <- c(
  "Ifi208","Mndal","Pdcd5-ps","Gstp-ps","Ifi203-ps","Rrp8","Ifi206","Chchd10","Inca1","Ppif",
  "Topors","Rrn3","Eaf2","Ticam1","Unc5b","Bmyc","Cth","Pttg1ip","Selenos","Fgb","Raf1",
  "Ggct","Tmbim6","Kcnq3","Scn2a","D1Pas1","Acvr1","Pawr","Acvr1b","Adora2a","Parp1","Parp2",
  "Agt","Agtr2","Akt1","Aldh2","Alox12","Ivns1abp","Slc25a4","Slc25a5","Anxa6","Apaf1","App",
  "Ar","Atf2","Atf3","Atf4","Atm","Atp2a1","Atp7a","Atp5if1","Avp","Bad","Bak1","Bax","Bcl10",
  "Bcl2","Bcl2a1a","Bcl2a1b","Bcl2a1c","Bcl2a1d","Bcl2l1","Bcl2l10","Bcl2l2","Bcl3","Bdkrb2",
  "Bdnf","Bex3","Bid","Hrk","Bik","Bcl2l11","Bmi1","Bmp4","Bmpr1b","Bnip3","Bnip3l","Brca1",
  "Brca2","Birc6","Bub1","Hyou1","Camk2b","Casp12","Casp2","Casp3","Casp6","Casp8","Casp9",
  "Ctnna1","Ctnnb1","Cav1","Runx3","Rb1cc1","Cck","Cd24a","Cd28","Cd3e","Cd44","Cd5","Cdk11b",
  "Cdkn1a","Cdkn2d","Cebpb","Cflar","Clu","Ackr3","Col2a1","Cradd","Creb3","Crh","Crip1",
  "Csf2","Csnk2a1","Ctsc","Ctsh","Cttn","Cx3cr1","Cycs","Cyct","Cyp1b1","Dab2","Dapk2",
  "Dapk3","Daxx","Dbh","Ddit3","Ddx3x","Ddx5","E2f1","Ei24","Eif2ak3","Eno1","Epha2","Epo",
  "Erbb3","Ercc2","Ptprv","Esr2","Eya1","Eya2","Eya3","Eya4","Fadd","Faf1","Fas","Fasl",
  "Fcgr2b","Fem1b","Fga","Fgf10","Fgf2","Fgfr1","Fgfr2","Fgfr3","Fhit","Fxn","Tlr3","Fyn",
  "Fzd1","Fzd9","G0s2","Gas1","Gata1","Gata4","Usp15","Gcg","Gdnf","Gclc","Gclm","Gnai2","Gnai3",
  "Rack1","Gpx1","Pdia3","Gstp2","Gstp1","Hdac2","Htt","Hells","Hgf","Hic1","Hif1a","Hint1",
  "Hipk1","Hipk2","Hmox1","Hnrnpk","Hras","Dnaja1","Hspb1","Hspa1b","Hyal2","Icam1","Ier3",
  "Ifi203","Ifi204","Ifnb1","Ifng","Igf1","Cd74","Ikbkg","Il10","Il12a","Il18","Il1a","Il1b",
  "Il2","Il3","Il4","Il7","Inhba","Inhbb","Ins2","Itga6","Itgav","Itpr1","Jak2","Jak3","Jun",
  "Kcnq2","Klf4","Krt18","Krt8","Lck","Lcn2","Lgals3","Lmna","Lta","Ltb","Ltbr","Mfn2","Sgk3",
  "Bbc3","Rtkn2","Smad3","Smad4","Mal","Bmf","Maz","Mbd4","Mcl1","Mdm2","Mdm4","Melk","Kitl",
  "Mif","Mknk1","Mknk2","Mlh1","Mmp2","Mmp9","Mnt","Meis3","Msh2","Msh6","Msx1","Mapt","Muc1",
  "Myc","Nck1","Nck2","Nf1","Nfe2l2","Ngf","Ngfr","Nkx3-1","Nodal","Nog","Nrp1","Nr4a2","Osm",
  "Mybbp1a","P2rx4","P2rx7","P4hb","Igbp1","Pdk2","Pdpk1","Pdx1","Pea15a","Pik3r1","Prkca",
  "Prkcd","Serpine1","Plaur","Pml","Pmp22","Pnp","Septin4","Polb","Pou4f1","Pou4f2","Ppard",
  "Ppef2","Ppp1ca","Prkdc","Mapk8ip1","Prodh","Psen1","Psen2","Psme3","Pten","Ptgis","Ptgs2",
  "Ptpn1","Ptpn2","Ptprc","Rad9a","Nlrp1a","Rb1","Rela","Ret","Ripk1","Uri1","Rnf7","Rock2",
  "Rpl26","Rps7","S100a8","S100a9","Scg2","Cx3cl1","Cxcl12","Sfrp2","Spi1","Sfrp1","Sgpl1",
  "Shh","Siah1a","Siah1b","Siah2","Skil","Snai2","Siglec1","Snai1","Sod1","Sod2","Sort1","Sp100",
  "Spn","Spop","Src","Stk11","Pycr1","Stx4a","Trp53bp2","Syk","Nrg1","Tifab","Taf6","Tcf7l2",
  "Rhot2","Hip1","Agap2","Prdx2","Flcn","Arrb2","Tmc8","Tert","G2e3","Ifi27l2b","Tgfb1","Tgfb2",
  "Tgfbr1","Thbs1","Tlr4","Tlr6","Ccar2","Tnf","Tnfaip3","Tnfrsf10b","Tnfrsf1a","Tnfrsf1b",
  "Cd27","Tnfsf11","Tnfsf12","Dedd","Cd40lg","Cd70","Ppp1r13b","Tpd52l1","Traf1","Traf2",
  "Tnfsf10","Plscr1","Trp53","Trp63","Trp73","Tpt1","Tnfrsf4","Tnfsf4","Ubb","Umod","Kdm6a",
  "Stk24","Vdac2","Vdr","Vegfa","Dap","Vhl","Vnn1","Mrtfa","Senp1","Wfs1","Wnt1","Pak2",
  "Wnt4","Wnt5a","Xbp1","Traf7","Bag6","Gstp3","Xpa","Yap1","Zfp13","Pcgf2","Ifi207","Stradb",
  "Pdk1","Madd","Trib3","Eif2a","Tmem161a","Usp28","Ifi209","Nox1","Il20ra","Atad5","Dido1",
  "Faim","Map2k5","Mapk7","Prkra","Peli3","Rbck1","Zfp385b","Pak5","E2f2","Nanos3","Eda2r",
  "AY074887","Map2k1","Map2k4","Map3k5","Map3k7","Mapk8","Mapk9","Creb3l1","Ppia","Casp8ap2",
  "Ern2","Aifm1","Acvr1c","Ppp2r5c","Ell3","Nherf1","Serinc3","Rps3","Bcap31","Adcy10","Tnfrsf12a",
  "Phlda3","Nbn","Cep63","Bag3","Zfp385a","Hip1r","Siva1","Ifnz","Ercc6","Tmem117","Tnfsf15",
  "Ep300","Il19","Fnip2","Card9","Tmem102","Parl","Rrm2b","Gfral","Itprip","Eno1b","Acsl5",
  "Mettl21c","Hdac1","Gsdma3","Ero1a","Fbxw7","Fbh1","Prkn","Chek2","Tnfsf14","Pdcd7","Ppp2r1a",
  "Srpx","Bok","Zfp622","Acaa2","Ifi27","Atp2a3","Ube2k","Pla2g6","Psmd10","Nono","Asah2","Ifi214",
  "Pde3a","Sh3glb1","Plagl2","Gsdme","Sfn","Lgals12","Ubqln1","Becn1","Stk3","Higd1a","Nupr1",
  "Aatf","Pdcd5","Pdcd10","Mtch2","Ybx3","Foxo3","Gabarap","Ikbke","Ripk3","Gsk3b","Ankrd2",
  "Mllt11","Park7","Marchf7","Noc2l","Jmy","Pidd1","Stk4","Pmaip1","Pias4","Sh3rf1","Rhot1",
  "Stk25","Fignl1","Mapk8ip2","Gsk3a","Ifi213","Faiml","Nlrp1b","Ube4b","Perp","Moap1","Herpud1",
  "Itm2c","Htra2","Zfp110","Arl6ip5","Txndc12","Ghitm","Eef1e1","Grina","Ing5","Snw1","Fis1",
  "Pam16","Ptpmt1","Prelid1","Zmynd11","Timm50","Diablo","Cdip1","Lrrk2","Gskip","Bcl2l14",
  "Pycard","Rnf186","Dele1","Dnajc10","Shisa5","Ndufa13","Armc10","Rffl","Dedd2","Erp29",
  "Rnf41","Ddx47","Rps27l","Nacc2","Trap1","Coa8","Aen","Ndufs3","Mul1","Steap3","Tmem109",
  "Ppm1f","Pink1","Zfas1","Zdhhc3","Chac1","Triap1","Fcmr","Dyrk2","Qrich1","Ing2","Dab2ip",
  "Dapk1","Tmbim1","Tfpt","Fbxo7","Trim32","Fam162a","Plscr3","Bag5","Sfpq","Tmem238l","Tradd",
  "Zswim2","Faim2","Rps6kb1","Uaca","Bclaf1","Nfatc4","Slc25a31","Bloc1s2","Ppp2r1b","Bbln",
  "Dnm1l","Ddias","Syvn1","Opa1","Cyld","Wdr35","Ddit4","Pik3cb","Slc35f6","Usp47","Nme5",
  "Tmem14a","Mff","Bcl2l12","Brsk2","Rnf183","Knl1","Styxl1","Dapl1","Gper1","Ifi27l2a",
  "Il33","Nol3","Ern1","Tnfrsf23","Tnfrsf22","Trim39","Wwox","Rnf34","Selenok","Clca3a2",
  "Nfkbiz","Sgpp1","Trem2","Trps1","Phip","Mpv17l","Wnt16","Sirt1","Tm2d1","Maged1","Hmgb2",
  "Qars1","Deptor","Mael","Fgg","Kdm1a"
)



# Get normalized counts from DESeq2
norm_counts <- counts(dds, normalized=TRUE)

# Subset for IFN-α response genes (keep only genes present in your dataset)
ifna_counts <- norm_counts[rownames(norm_counts) %in% ifna_genes, ]

# Optionally, z-score normalize each gene for heatmap visualization
ifna_counts_z <- t(scale(t(ifna_counts)))
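
#Genes with zero variance across samples become NaN after scaling and can break row clustering; dropping such rows first is a safe precaution (a minimal sketch using the object above)
ifna_counts_z <- ifna_counts_z[complete.cases(ifna_counts_z), ]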


# Create annotation for columns
ann_col <- data.frame(
  Condition = metadata$condition
)
rownames(ann_col) <- metadata$sample_id


library(pheatmap)

# Make sure columns are in your desired order
desired_order <- c("sample.WT_1", "sample.WT_2", "sample.WT_3",
                   "sample.neg.neg_1", "sample.neg.neg_2", "sample.neg.neg_3")
ifna_counts_z <- ifna_counts_z[, desired_order]

# Make sure annotation matches
ann_col <- ann_col[desired_order, , drop = FALSE]

# Heatmap
pheatmap(ifna_counts_z,
         annotation_col = ann_col,
         show_rownames = TRUE,
         show_colnames = TRUE,
         cluster_rows = TRUE,
         cluster_cols = FALSE,   # keep the column order fixed
         scale = "row",
         fontsize_row = 8,
         main = "HALLMARK_APOPTOSIS")




# Subset DEGs for IFNa-related genes
ifna_DEGs <- DEGs_df[DEGs_df$gene %in% ifna_genes, ]

# Save to CSV
write.csv(ifna_DEGs, "GOBP_APOPTOSIS_HOM_vs_WT DEG.csv", row.names = FALSE)
# Save subset expression to CSV
write.csv(as.data.frame(ifna_counts),
          file = "GOBP_APOPTOSIS_HOM VS WT.csv",
          row.names = TRUE)
# Quick check
print(ifna_DEGs)


library(ggplot2)
library(ggrepel)

ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(
    title = "HALLMARK_APOPTOSIS: HOM vs WT",
    x = "log2 Fold Change (HOM vs WT)",
    y = "-log10 Adjusted p-value",
    color = "Gene status"
  ) +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  # Label only IFNa DEGs
  geom_text_repel(
    data = ifna_DEGs,
    aes(x = log2FoldChange, y = -log10(padj), label = gene),
    inherit.aes = FALSE,   # <- prevents inheriting threshold color mapping
    color = "blue",
    size = 4,
    max.overlaps = Inf,
    box.padding = 0.3,
    point.padding = 0.2,
    segment.color = "black"
  )


#Heatmap for DEGs GOBP Apoptosis

# Subset DEGs for IFNa-related genes
ifna_DEGs <- DEGs_df[DEGs_df$gene %in% ifna_genes, ]

# Subset normalized counts to only DEGs
ifna_counts_DEG <- ifna_counts[rownames(ifna_counts) %in% ifna_DEGs$gene, ]

# Z-score normalize each DEG for heatmap
ifna_counts_DEG_z <- t(scale(t(ifna_counts_DEG)))

# Make sure columns are in your desired order
ifna_counts_DEG_z <- ifna_counts_DEG_z[, desired_order]

# Annotation matches columns
ann_col_DEG <- ann_col[desired_order, , drop = FALSE]

# Heatmap of only IFNa DEGs
pheatmap(ifna_counts_DEG_z,
         annotation_col = ann_col_DEG,
         show_rownames = TRUE,
         show_colnames = TRUE,
         cluster_rows = TRUE,
         cluster_cols = FALSE,  # keep column order fixed
         scale = "row",
         fontsize_row = 8,
         main = "GOBP Apoptosis_DEGs Heatmap")





















[ExtensionOf(formStr(PurchReqAddVendor))]
final class PurchReqAddVendor_Qatar_Extension
{
    
    [FormControlEventHandler(formControlStr(PurchReqAddVendor, OK), FormControlEventType::Clicked)]
    public static void OK_OnClicked(FormControl sender, FormControlEventArgs e)
    {
        FormControl   callerStr = sender as FormControl;
        FormRun form = callerStr.formRun();

        FormDataSource ds = form.dataSource("PurchReqLine");
        //PurchReqLine PurchReqLine = ds.getFirst();//ds.cursor();
        PurchReqLine purchReqLineRecord;
        for (purchReqLineRecord = ds.getFirst();
            purchReqLineRecord;
            purchReqLineRecord = ds.getNext())
        {
            NW_PRToPo   prToPo;
            if(PurchReqTable::find(purchReqLineRecord.PurchReqTable).blanketPO)
            {
                ttsbegin;
                prToPo.PurchReqTable = purchReqLineRecord.PurchReqTable;
                prToPo.Vendor = purchReqLineRecord.VendAccount;
                prToPo.insert();
                ttscommit;
            }
        }
    }

}
//----------
[ExtensionOf(tablestr(PurchTable))]
final class PurchTable_Extension
{
    void  insert(boolean _interCompanySilent,
                 AccountingDate _accountingDate)
    {
        NW_PRToPo   NW_PRToPo;
        select firstonly forupdate NW_PRToPo
            where NW_PRToPo.Vendor == this.InvoiceAccount;
            //order by RecId desc;
        if(NW_PRToPo)
        {
            str newPoId = this.newNumSequ(NW_PRToPo.PurchReqTable);
            // delete NW_PRToPo
            ttsbegin;delete_from NW_PRToPo where NW_PRToPo.Vendor == this.InvoiceAccount;ttscommit;
            PurchId oldPoId = this.PurchId;
            // change po id
            this.PurchId = newPoId;
            // abort old po id 
            this.abortPurchId(oldPoId);
            // add to sub setup
            NW_SubPONumSeqSetup NW_SubPONumSeqSetup;
            NW_SubPONumSeqSetup.PONo = this.PurchId;
            ttsbegin;NW_SubPONumSeqSetup.insert();ttscommit;
        }
        next insert(_interCompanySilent, _accountingDate);
    }

    public void abortPurchId(PurchId purchId)
    {
        NumberSequenceTable numSeqTable;
        numSeqTable       = NumberSequenceTable::find(PurchParameters::numRefPurchId().NumberSequenceId);
        
        if (numSeqTable.Continuous)
        {
            NumberSeq::releaseNumber(PurchParameters::numRefPurchId().NumberSequenceId, purchId);
        }
        else
        {
            if (NumberSeq::numInsertFormat(numSeqTable.NextRec - 1, numSeqTable.Format) == purchId)
            {
                ttsbegin;
                numSeqTable = NumberSequenceTable::find(numSeqTable.NumberSequenceScope, true);
                numSeqTable.NextRec--;
                numSeqTable.doUpdate();
                ttscommit;
            }
        }
    }

    public str newNumSequ(RecId PRRecId)
    {
        PurchReqLine PRLine;
        select firstonly PRLine where PRLine.PurchReqTable == PRRecId;

        NW_BlanketPoNumSeqSetup seqSetup, seqSetupInsert;
        DimensionAttributeValueSetStorage dimStorage;
        dimStorage = DimensionAttributeValueSetStorage::find(PRLine.DefaultDimension);
        str dep = dimStorage.getDisplayValueByDimensionAttribute(DimensionAttribute::findByName('Department').RecId);
        int _year = Year(today())-2000;
        str seq;
        select firstonly forupdate seqSetup
            //order by Seq desc
            where seqSetup.DepId == dep
            && seqSetup.Year == _year;
        if(seqSetup)
        {
            ttsbegin;
            seqSetup.Seq +=1;
            seqSetup.update();
            ttscommit;
        }
        else
        {
            ttsbegin;
            seqSetup.Year = _year;
            seqSetup.DepId = dep;
            seqSetup.Seq = 1;
            seqSetup.insert();
            ttscommit;

        }
        //this.blanketPO
        if(seqSetup.Seq < 10) seq = strFmt('000%1', seqSetup.Seq); //0-9
        else if(seqSetup.Seq >= 10 && seqSetup.Seq < 100) seq = strFmt('00%1', seqSetup.Seq); // 10-99
        else if(seqSetup.Seq >= 100 && seqSetup.Seq < 1000) seq = strFmt('0%1', seqSetup.Seq); // 100-999
        else if(seqSetup.Seq >= 1000) seq = strFmt('%1', seqSetup.Seq); // 1000-9999

        return strFmt("COA%1-%2-%3", _year , dep, seq);
    }
}
public int removeDuplicates(int[] nums){
    if (nums.length == 0) return 0;
    
    int i = 0;
    for (int j = 1; j < nums.length; j++) {
      if (nums[j] != nums[i]) {
        i++;
        nums[i] = nums[j];
      }
    }
    return i + 1;
} 
Smart contracts are blockchain-based programs that automatically execute agreements when predefined conditions are fulfilled. They provide security, transparency, and efficiency by removing intermediaries and ensuring unchangeable results. Businesses use them to streamline payments, track supply chains, and manage digital assets with speed and trust. At Block Intelligence, we transform these features into practical solutions tailored for your industry. Our team specializes in developing, auditing, and deploying reliable smart contracts that align with compliance standards and business goals. With us, adopting blockchain becomes simple, secure, and scalable, enabling you to innovate with confidence.


(netsh wlan show profiles) | Select-String "All User Profile" | %{$name=$_.Line.Split(':')[1].Trim().Replace('"',''); $_} | %{(netsh wlan show profile name="$name" key=clear)} | Select-String "Key Content" | %{$password=$_.Line.Split(':')[1].Trim(); [PSCustomObject]@{WIFI_NAME=$name; PASSWORD=$password}}
x = "global"

def outer():

    x = "enclosing"

    def inner():

        x = "local"

        print(x)

    inner()

    print(x)

outer()

print(x)
my_list = [1, 2, 3]

iterator = iter(my_list)  # get an iterator from the list

print(next(iterator))  # 1
print(next(iterator))  # 2
print(next(iterator))  # 3

class Animal:
    def speak(self):
        pass  # placeholder

class Dog(Animal):
    def speak(self):
        return "Woof!"

class Cat(Animal):
    def speak(self):
        return "Meow!"

class Parrot(Animal):
    def speak(self):
        return "Squawk!"

# Polymorphism in action
animals = [Dog(), Cat(), Parrot()]

for animal in animals:
    print(animal.speak())

class Car:
    def __init__(self, color):
        self.color = color

    def drive(self):
        return f"The {self.color} car is driving!"

# ElectricCar inherits from Car
class ElectricCar(Car):
    def charge(self):
        return f"The {self.color} electric car is charging."

my_tesla = ElectricCar("blue")

print(my_tesla.drive())   # inherited from Car
print(my_tesla.charge())  # unique to ElectricCar
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":star: What's on in Melbourne this week! :star:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n\n Hey Melbourne, happy Monday and hope you all had a fab long weekend! Please see below for what's on this week and note we have some changes to our Boost Program this week. "
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Xero Café :coffee:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n :new-thing: *This week we are offering:* \n\n :caramel-slice: *Sweet Treats*: Selection of Vanilla & Apple Crowns\n\n :coffee: :Linkedin: *Linkedin Learning* : Guess the mystery Barista Flavour special, to Learn - Grow - Go, by popping your submissions in the :thread:"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": " Wednesday, 1st October :calendar-date-1:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": " \n\n:eggs: *Breakfast*: Join us at 8.30am -10.30am for the Buffet Breakfast. Menu is in the :thread: "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Thursday, 2nd October :calendar-date-2:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":shallow_pan_of_food:: *Lunch*:: Join us for some yummy Paella from *12.00pm* in the *Wominjeka Breakout Space*. Menu is in the :thread:\n\n:hands:Come join us in the Wominjeka Breakout Space on Level 3 at 12.30pm for *The Australian All Hands*. \n\n\n :moon_cake::lantern: Come join us at 4.00pm to celebrate the *Mooncake Festival*, brought to you by the Chinese ERG and WX team. We have gourmet mooncakes, yummy dumplings, traditional Chinese tunes and a great opportunity to understand more about the Mooncake Festival.  "
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": " What else :heart: \n\n *Friday 3rd October*: :question-3: *Ask me anything*: AMA with Sukhinder streamed in the Wominjeka Breakout Space on Level 3 from 10:00am. \n\n Stay tuned to this channel, and make sure you're subscribed to the <https://calendar.google.com/calendar/u/0?cid=Y19xczkyMjk5ZGlsODJzMjA4aGt1b3RnM2t1MEBncm91cC5jYWxlbmRhci5nb29nbGUuY29t|*Melbourne Social Calendar*> :party-wx:"
			}
		}
	]
}
import tkinter as tk
from tkinter import ttk

root = tk.Tk()

root.geometry('200x200')



progress_bar = ttk.Progressbar(root, length=100, value=2)
progress_bar.pack()


def test_func():
    for i, n in enumerate([1000, 2000, 500], start=1):
        for j in range(n):
            print(j)
        progress_bar.config({'value': (i/3) * 100})
        progress_bar.update()

test_func()
root.mainloop()
@misc{tongyidr,
  author={Tongyi DeepResearch Team},
  title={Tongyi-DeepResearch},
  year={2025},
  howpublished={\url{https://github.com/Alibaba-NLP/DeepResearch}}
}
Reservation_id = 5971686000063835007;
access_token_response = invokeurl
[
	url :"https://www.zohoapis.com/crm/v6/settings/variables/5971686000102746225"
	type :GET
	connection:"newzohocrm"
];
access_token = access_token_response.get("variables").get(0).get("value");
//info access_token;
// Prepare request headers for DocuSign API
headers = Map();
headers.put("Authorization","Bearer " + access_token);
headers.put("Content-Type","application/json");
// Envelope ID to track
envelopeId = "ba06cba1-ac8e-4c7d-a225-050b354e6c89";
// Fetch envelope status from DocuSign
status_resp = invokeurl
[
	url :"https://eu.docusign.net/restapi/v2.1/accounts/2a0daa7d-a770-4979-8208-9543d21f12e5/envelopes/" + envelopeId
	type :GET
	headers:headers
];
//info status_resp;
if(status_resp.get("status") == "completed")
{

	pdf_binary = invokeurl
	[
		url :"https://eu.docusign.net/restapi/v2.1/accounts/2a0daa7d-a770-4979-8208-9543d21f12e5/envelopes/" + envelopeId + "/documents/1"
		type :GET
		headers:headers
	];
	//	info pdf_binary;
	pdf_binary.setParamName("file");
	response = invokeurl
	[
	url :"https://www.zohoapis.com/crm/v8/files"
	type :POST
	files: pdf_binary
	connection:"newzohocrm"
	];
	//info response;
	// Attach the signed document to the CRM record only when the envelope is completed
	if(response.get("data") != null && response.get("data").size() > 0)
	{
		fileId = response.get("data").get(0).get("details").get("id");

		// Prepare file upload field value
		fileMap = Map();
		fileMap.put("file_id", fileId);
		fileList = List();
		fileList.add(fileMap);

		updateMap = Map();
		updateMap.put("Customer_Signed_Document", fileList);
		// Update Reservation record with uploaded file
		updateResp = zoho.crm.updateRecord("Deals", Reservation_id, updateMap);
		info "Update response: " + updateResp;
	}
	else
	{
		info "File upload failed. Response: " + response.toString();
	}
}
void Creation_of_file_Zoho_wd_Files_on_Crm_Unit(string res_id, string P_Folder_ID, String WD_File_name)
{
info WD_File_name;
remove_dot = WD_File_name.lastIndexOf(".");
if(remove_dot != -1)
{
	file_name_without_ext = WD_File_name.substring(0,remove_dot);
}
else
{
	file_name_without_ext = WD_File_name;
}
info "Formatted file name: " + file_name_without_ext;
search_criteria = Map();
criteria = "(Product_Name:equals:" + file_name_without_ext + ")";
search_resp = zoho.crm.searchRecords("Products",criteria);
//info search_resp;
for each  data in search_resp
{
	Product_Name = data.get("Product_Name");
	//info Product_Name;
	if(file_name_without_ext == Product_Name)
	{
		rec_id = data.get("id");
		info rec_id;
	}
}
attribute = Map();
attribute.put("resource_id",res_id);
attribute.put("allow_download",true);
attribute.put("request_user_data",false);
attribute.put("link_name",file_name_without_ext);
attributeMap = Map();
attributeMap.put("attributes",attribute);
attributeMap.put("type","links");
param = Map();
param.put("data",attributeMap);
paramString = param.toString();
mp = Map();
mp.put("Accept","application/vnd.api+json");
makeExternalLink = invokeurl
[
	url :"https://workdrive.zoho.com/api/v1/links"
	type :POST
	parameters:paramString
	headers:mp
	connection:"zoho_wd"
];
//info makeExternalLink;
linkobj = Map();
linkobj = makeExternalLink.get("data");
downloadlink = "";
if(linkobj != null)
{
	constructURl = linkobj.get("attributes").get("download_url");
	downloadlink = constructURl + "?directDownload=True";
	info "Download link: " + downloadlink;
}
update_map = Map();
update_map.put("Unit_Plan_SPA_URL",downloadlink);
update_Rec = zoho.crm.updateRecord("Products",rec_id,update_map);
info "Unit update resp: " + update_Rec;
}
At Hivelance, we deliver robust cryptocurrency exchange scripts tailored to your business goals and technical requirements. Our team of blockchain developers, cybersecurity specialists, and fintech experts ensures that every exchange we build is secure, scalable, and user-friendly. With advanced features like multi-currency wallets, real-time trading, and regulatory compliance, our scripts are designed to help you launch a profitable exchange platform with low investment and high efficiency.

Know More:

🌐 Visit Us: https://www.hivelance.com/cryptocurrency-exchange-script

📞 Call/WhatsApp: +918438595928, +971505249877

📲 Telegram: @Hivelance

📧 Email: marketing@hivelance.com

💬 Skype: live:.cid.8e890e9d0810f62c?chat

🌐 Get Free Demo: https://www.hivelance.com/contact-us
#!/bin/bash

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Function to display error messages
error() {
    echo -e "${RED}Error: $1${NC}"
}

# Function to display success messages
success() {
    echo -e "${GREEN}$1${NC}"
}

# Function to display information
info() {
    echo -e "${BLUE}$1${NC}"
}

# Function to display warnings
warning() {
    echo -e "${YELLOW}$1${NC}"
}

# Function to display process steps
process() {
    echo -e "${CYAN}$1${NC}"
}

# Function to check for the Docker structure and create it if missing
check_docker_structure() {
    local main_dir="docker"
    
    if [ -d "$main_dir" ]; then
        success "Estructura Docker ya existe en $main_dir/"
        return 0
    else
        warning "No se encontró estructura Docker. Creando..."
        
        # Create main directory
        mkdir -p "$main_dir"
        cd "$main_dir"
        
        # Create subdirectories
        mkdir -p apache-logs postgres-backups src
        
        # Create .env file with default ports
        cat > .env << EOF
# Database
DB_CONNECTION=pgsql
DB_HOST=db
DB_PORT=5432
DB_DATABASE=laravel
DB_USERNAME=postgres
DB_PASSWORD=password

# App
APP_NAME=Laravel
APP_ENV=local
APP_KEY=
APP_DEBUG=true
APP_URL=http://localhost:8081

# Postgres
POSTGRES_DB=laravel
POSTGRES_USER=postgres
POSTGRES_PASSWORD=password
EOF
        
        # Create Apache configuration (delimiter quoted so ${APACHE_LOG_DIR} is written literally for Apache to expand)
        cat > 000-default.conf << 'EOF'
<VirtualHost *:80>
    ServerAdmin webmaster@localhost
    DocumentRoot /var/www/html/public

    <Directory /var/www/html>
        AllowOverride All
        Require all granted
    </Directory>

    ErrorLog ${APACHE_LOG_DIR}/error.log
    CustomLog ${APACHE_LOG_DIR}/access.log combined
</VirtualHost>
EOF
        
        # Create docker-compose.yml
        cat > docker-compose.yml << EOF
version: '3.8'

services:
  web:
    build: 
      context: .
      dockerfile: Dockerfile
    ports:
      - "8081:80"
    volumes:
      - ./src:/var/www/html
      - ./apache-logs:/var/log/apache2
    depends_on:
      - db
    networks:
      - backend-network
    env_file:
      - .env
    container_name: php-web
    restart: unless-stopped

  db:
    image: postgres:15
    env_file:
      - .env
    volumes:
      - pgdata:/var/lib/postgresql/data
      - ./postgres-backups:/backups
    networks:
      - backend-network
    container_name: db-postgres
    restart: unless-stopped
    ports:
      - "5432:5432"

networks:
  backend-network:
    driver: bridge

volumes:
  pgdata:
    driver: local
EOF
        
        # Create Dockerfile
        cat > Dockerfile << EOF
FROM php:8.1-apache

# Install system dependencies
RUN apt-get update && apt-get install -y \\
    libpq-dev \\
    libzip-dev \\
    libpng-dev \\
    libjpeg-dev \\
    libfreetype6-dev \\
    zip \\
    unzip \\
    git \\
    curl \\
    && docker-php-ext-configure gd --with-freetype --with-jpeg \\
    && docker-php-ext-install -j\$(nproc) \\
        pdo \\
        pdo_pgsql \\
        pgsql \\
        zip \\
        gd \\
        bcmath \\
    && a2enmod rewrite

# Install Composer
RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer

# Configure Apache
COPY 000-default.conf /etc/apache2/sites-available/000-default.conf

# Set permissions and working directory
RUN chown -R www-data:www-data /var/www/html \\
    && usermod -u 1000 www-data \\
    && groupmod -g 1000 www-data

WORKDIR /var/www/html
EOF
        
        success "Estructura Docker creada correctamente en $main_dir/"
        cd ..
        return 1
    fi
}

# Function to select the framework type
select_framework_type() {
    local framework="$1"
    
    if [ "$framework" == "yii" ]; then
        echo ""
        info "Selecciona el tipo de Yii:"
        echo "  1. Yii 3 (última versión)"
        echo "  2. Yii 2 Basic"
        echo "  3. Yii 2 Advanced"
        echo ""
        
        while true; do
            read -p "Opción (1-3): " yii_choice
            case $yii_choice in
                1)
                    echo "yii3"
                    break
                    ;;
                2)
                    echo "yii2-basic"
                    break
                    ;;
                3)
                    echo "yii2-advanced"
                    break
                    ;;
                *)
                    error "Opción inválida. Por favor elige 1, 2 o 3."
                    ;;
            esac
        done
    else
        echo ""
    fi
}

# Function to create a Laravel project
create_laravel_project() {
    local project_name="$1"
    local framework_dir="$2"
    
    process "Creando proyecto Laravel: $project_name"
    docker-compose exec web composer create-project --prefer-dist laravel/laravel "$project_name"
    
    # Set permissions
    docker-compose exec web bash -c "cd $project_name && chmod -R 775 storage bootstrap/cache"
    docker-compose exec web bash -c "cd $project_name && chown -R www-data:www-data storage bootstrap/cache"
}

# Function to create a Yii3 project
create_yii3_project() {
    local project_name="$1"
    local framework_dir="$2"
    
    process "Creando proyecto Yii3: $project_name"
    docker-compose exec web composer create-project --prefer-dist yiisoft/yii-project-template "$project_name"
    
    # Configure database
    docker-compose exec web bash -c "cd $project_name && cp .env.example .env"
    docker-compose exec web bash -c "cd $project_name && sed -i 's/DB_DSN=sqlite:\/\/\/\/var\/www\/html\/runtime\/database.sqlite/DB_DSN=pgsql:host=db;dbname=laravel/' .env"
    docker-compose exec web bash -c "cd $project_name && sed -i 's/DB_USERNAME=/DB_USERNAME=postgres/' .env"
    docker-compose exec web bash -c "cd $project_name && sed -i 's/DB_PASSWORD=/DB_PASSWORD=password/' .env"
}

# Function to create a Yii2 Basic project
create_yii2_basic_project() {
    local project_name="$1"
    local framework_dir="$2"
    
    process "Creando proyecto Yii2 Basic: $project_name"
    docker-compose exec web composer create-project --prefer-dist yiisoft/yii2-app-basic "$project_name"
    
    # Configure database
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'dsn' => 'mysql:host=localhost;dbname=yii2basic',/'dsn' => 'pgsql:host=db;dbname=laravel',/\" config/db.php"
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'username' => 'root',/'username' => 'postgres',/\" config/db.php"
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'password' => '',/'password' => 'password',/\" config/db.php"
}

# Function to create a Yii2 Advanced project
create_yii2_advanced_project() {
    local project_name="$1"
    local framework_dir="$2"
    
    process "Creando proyecto Yii2 Advanced: $project_name"
    docker-compose exec web composer create-project --prefer-dist yiisoft/yii2-app-advanced "$project_name"
    
    # Configure environment
    docker-compose exec web bash -c "cd $project_name && php init --env=Development --overwrite=All"
    
    # Configure database
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'dsn' => 'mysql:host=localhost;dbname=yii2advanced',/'dsn' => 'pgsql:host=db;dbname=laravel',/\" common/config/main-local.php"
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'username' => 'root',/'username' => 'postgres',/\" common/config/main-local.php"
    docker-compose exec web bash -c "cd $project_name && sed -i \"s/'password' => '',/'password' => 'password',/\" common/config/main-local.php"
}

# Function to create a new project
create_new_project() {
    local project_name="$1"
    local framework="$2"
    local framework_type="$3"
    
    info "Creando nuevo proyecto $project_name con $framework $framework_type en docker/src/$project_name/..."
    
    # Start Docker containers if they are not already running
    if ! docker-compose ps | grep -q "Up"; then
        process "Iniciando contenedores Docker..."
        docker-compose up -d
    fi
    
    # Install Composer in the container if needed
    if ! docker-compose exec web composer --version > /dev/null 2>&1; then
        process "Instalando Composer en el contenedor..."
        docker-compose exec web bash -c "curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer"
    fi
    
    # Create the project according to framework and type
    case $framework in
        "laravel")
            create_laravel_project "$project_name"
            ;;
        "yii")
            case $framework_type in
                "yii3")
                    create_yii3_project "$project_name"
                    ;;
                "yii2-basic")
                    create_yii2_basic_project "$project_name"
                    ;;
                "yii2-advanced")
                    create_yii2_advanced_project "$project_name"
                    ;;
            esac
            ;;
    esac
    
    if [ $? -ne 0 ]; then
        error "Error al crear el proyecto."
        return 1
    fi
    
    success "Proyecto $project_name ($framework $framework_type) creado correctamente en docker/src/$project_name/"
    return 0
}

# Function to show usage instructions
show_instructions() {
    local project_name="$1"
    local framework="$2"
    local framework_type="$3"
    
    echo ""
    success "¡Proceso completado!"
    echo ""
    
    info "Estructura del proyecto:"
    echo "  - docker/ (configuración Docker)"
    echo "  - docker/src/$project_name/ (proyecto $framework $framework_type)"
    echo ""
    
    info "Para iniciar el proyecto:"
    echo "  1. cd docker"
    echo "  2. docker-compose up -d"
    echo ""
    info "Accesos:"
    echo "  - Aplicación web: http://localhost:8081/"
    
    if [ "$framework" == "yii" ] && [ "$framework_type" == "yii2-advanced" ]; then
        echo "  - Frontend: http://localhost:8081/$project_name/frontend/web/"
        echo "  - Backend: http://localhost:8081/$project_name/backend/web/"
    elif [ "$framework" == "yii" ] && [ "$framework_type" == "yii2-basic" ]; then
        echo "  - Aplicación: http://localhost:8081/$project_name/web/"
    fi
    
    echo "  - PostgreSQL: localhost:5432"
    echo "  - Usuario BD: postgres"
    echo "  - Password BD: password"
    echo ""
    
    info "Comandos útiles:"
    echo "  - Ver logs: docker-compose logs"
    echo "  - Detener contenedores: docker-compose down"
    echo "  - Ejecutar comandos en el contenedor: docker-compose exec web [comando]"
    echo ""
    
    if [ "$framework" == "laravel" ]; then
        info "Para Laravel:"
        echo "  - Ejecutar migraciones: docker-compose exec web bash -c 'cd $project_name && php artisan migrate'"
    elif [ "$framework" == "yii" ]; then
        info "Para Yii:"
        echo "  - Ejecutar migraciones: docker-compose exec web bash -c 'cd $project_name && php yii migrate'"
    fi
}

# Main function
main() {
    echo "=========================================="
    echo " GENERADOR DE PROYECTOS CON DOCKER"
    echo "=========================================="
    echo ""
    
    # Check for and create the Docker structure if needed
    check_docker_structure
    
    # Change to the docker directory
    cd docker
    
    # Ask for the project type
    while true; do
        read -p "¿Qué tipo de proyecto quieres crear? (laravel/yii): " framework
        framework=$(echo "$framework" | tr '[:upper:]' '[:lower:]')
        
        if [[ "$framework" == "laravel" || "$framework" == "yii" ]]; then
            break
        else
            error "Por favor, ingresa 'laravel' o 'yii'"
        fi
    done
    
    # Select the framework type if Yii
    framework_type=""
    if [ "$framework" == "yii" ]; then
        framework_type=$(select_framework_type "$framework")
    fi
    
    # Ask for the project name
    while true; do
        read -p "Nombre del proyecto (solo letras, números y guiones): " project_name
        if [[ "$project_name" =~ ^[a-zA-Z0-9_-]+$ ]]; then
            # Verificar si el proyecto ya existe
            if [ -d "src/$project_name" ]; then
                error "El proyecto $project_name ya existe en docker/src/"
            else
                break
            fi
        else
            error "Nombre inválido. Solo se permiten letras, números y guiones."
        fi
    done
    
    # Create the project
    create_new_project "$project_name" "$framework" "$framework_type"
    
    if [ $? -eq 0 ]; then
        # Show instructions
        show_instructions "$project_name" "$framework" "$framework_type"
    else
        error "No se pudo crear el proyecto."
    fi
}

# Run main function
main "$@"
-- RISK326	integrity_check
-- contains(user_agent, "integrity=false") or contains(user_agent, "integrity=False")
DROP TABLE team_kingkong.tpap_risk326_breaches;

-- CREATE TABLE team_kingkong.tpap_risk326_breaches AS
INSERT INTO team_kingkong.tpap_risk326_breaches
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.integrity
, 'integrity_check'  AS rule_name
, 'user agent integrity = false' AS breach_reason FROM
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31' 
    GROUP BY 1)B
inner join
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31')C
on B.txn_id = C.txn_id
INNER JOIN
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , json_extract_scalar(request, '$.requestPayload.txnType') AS txnType
    , json_extract_scalar(request, '$.requestPayload.integrity') AS integrity -- true UNKNOWN false
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION'
    AND json_extract_scalar(request, '$.requestPayload.integrity') <> 'true')D
ON B.txn_id = D.txnid
;

SELECT MONTH(txn_date), COUNT(*) FROM team_kingkong.tpap_risk326_breaches GROUP BY 1 LIMIT 10;
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":x-connect: Boost Days - What's on this week! :x-connect:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Let's get ready to dive into another great week here in the Auckland office! \n \n See below for what's in store:"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-23: Tuesday, 23rd September",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":coffee: *Xero Café:* Café-style beverages and sweet treats.\n:pineapple: *Barista Special:* Peach and Pineapple Tea \n:tomato: *Breakfast:* Provided from *8.30am* in the All Hands kitchen \n :nail_care: *tipsity nails:* Book in with the lovely Emma to get your fresh claws, link is <https://docs.google.com/spreadsheets/d/1pTGAD8oFXmPF890Uzj4d-Crfch8muuzEzDnNyBA9ReY/edit?gid=1100887842#gid=1100887842|*here*> "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":calendar-date-25: Thursday, 25th September",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":coffee: *Xero Café:* Café-style beverages and sweet treats.\n:pineapple: *Barista Special:* Peach and Pineapple Tea \n:pie: *Light Lunch*: Provided from *12:30pm* in the All Hands kitchen \n :ai: *AI Explore Event:* Join your fellow Xero's in All Hands for the AI Xplore online learning event about investing in you by equipping you with essential AI skills and knowledge, so every one of us can confidently unlock the potential of AI.\n *Snacks and refreshments* will be provided in All Hands. <https://meet.google.com/stream/f94ad206-dc30-474a-a8d3-b821b77d692b|*Here*> is the link if you cant make it to *All Hands*. \n Plus, keep an eye out for competitions and prizes!"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "*What else?*"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Feedback on our Boost offerings? We want to hear to it. Let us know what you love by filling out our form <https://docs.google.com/forms/d/e/1FAIpQLScGOSeS5zUI8WXEl0K4WGoQUkmpIHzAjLlEKWBob4sMPhDXmA/viewform|*here*>! :feedback_:"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Stay tuned to this channel for more details, check out the <https://calendar.google.com/calendar/u/0?cid=eGVyby5jb21fMXM4M3NiZzc1dnY0aThpY2FiZDZvZ2xncW9AZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ|*Auckland Social Calendar.*>\n\nLove,\nWX :wx:"
			}
		}
	]
}
Option Explicit 
 
Public Function ReverseString(Text As String) As String 
     
    ReverseString = StrReverse(Text) 
     
End Function 
-- RISK314	oc69_mcc_4812_4814_collect_limit	
-- MCC 4812 / 4814 (prepaid mobile recharge) in collect mode shall be restricted to maximum of Rs. 5,000 respectively (including from verified merchants).

DROP TABLE team_kingkong.tpap_risk314_breaches;

-- CREATE TABLE team_kingkong.tpap_risk314_breaches AS
INSERT INTO team_kingkong.tpap_risk314_breaches
SELECT DISTINCT B.*, C.category
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.txnType
, 'oc69_mcc_4812_4814_collect_limit'  AS rule_name
, 'payee_mcc in (4812, 4814) & txnType = COLLECT & amt >5k' AS breach_reason FROM
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) AS payee_mcc,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31' 
    GROUP BY 1
    HAVING MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) IN ('4812', '4814')
    AND MAX(amount) > 5000)B
inner join
    (select txn_id, category
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31') C
on B.txn_id = C.txn_id
INNER JOIN
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , json_extract_scalar(request, '$.requestPayload.txnType') AS txnType
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND json_extract_scalar(request, '$.requestPayload.txnType') = 'COLLECT'
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
;
-- RISK156	upi_mcc_7407_decline_intent	Block if Initiation mode is 04 or 05 Payeemcc is 7407
DROP TABLE team_kingkong.tpap_risk156_breaches;

-- CREATE TABLE team_kingkong.tpap_risk156_breaches AS
INSERT INTO team_kingkong.tpap_risk156_breaches
SELECT DISTINCT B.*, C.category
, C.initiationMode as initiationMode_switch -- ,  D.initiationMode as initiationMode_tpaphss
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, 'upi_mcc_7407_decline_intent'  AS rule_name
, 'payee_mcc = 7407 & init mode in 04, 05' AS breach_reason FROM
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) AS payee_mcc,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31' 
    GROUP BY 1
    HAVING MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) = '7407')B
inner join
    (select txn_id, category
    , json_extract_scalar(extended_info, '$.initiationMode') as initiationMode
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    and DATE(created_on) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND json_extract_scalar(extended_info, '$.initiationMode') IN ('04', '05')) C
on B.txn_id = C.txn_id
INNER JOIN
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-01-01' AND DATE'2025-01-31'
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    AND JSON_EXTRACT_SCALAR(request, '$.requestPayload.initiationMode') IN ('04', '05')
    AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION')D
ON B.txn_id = D.txnid
;
Polymarket clone solutions are designed for entrepreneurs who want to launch their own decentralized prediction market platform.

💡 Key Features:

Market creation & participation modules

Smart contract-powered transactions

User-friendly dashboards

High-level security protocols

This solution is perfect for startups aiming to enter the prediction market space quickly with minimal risk.
{% render 'specification-table' %}
{{ product.metafields.custom.more_info | metafield_tag }}
-- RISK108	upi_intent_paytmqr_decline_oc76
-- Intent transactions (initiation mode 04/05) are declined for payee VPAs containing "paytmqr"

DROP TABLE team_kingkong.tpap_risk108_breaches;

CREATE TABLE team_kingkong.tpap_risk108_breaches AS
-- INSERT INTO team_kingkong.tpap_risk108_breaches
SELECT DISTINCT B.*, C.category
, C.initiationMode as initiationMode_switch -- ,  D.initiationMode as initiationMode_tpaphss
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, 'upi_intent_paytmqr_decline_oc76'  AS rule_name
, 'payee_vpa contains "paytmqr" and init mode in 04, 05' AS breach_reason FROM
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) AS payee_mcc,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    AND DATE(created_on) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15' 
    GROUP BY 1)B
inner join
    (select txn_id, category
    , json_extract_scalar(extended_info, '$.initiationMode') as initiationMode
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    and DATE(created_on) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    -- and upper(status) = 'SUCCESS' AND category IN ('VPA2MERCHANT', 'VPA2VPA')
    ) C
on B.txn_id = C.txn_id
INNER JOIN
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '') IS NOT NULL
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '') <> ''
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    )D
ON B.txn_id = D.txnid
WHERE lower(payee_vpa) LIKE '%paytmqr%'
AND initiationMode IN ('04', '05')
;
-- RISK161	upi_oc141A_mcc6540_gift
DROP TABLE team_kingkong.tpap_risk161_breaches;

CREATE TABLE team_kingkong.tpap_risk161_breaches AS
-- INSERT INTO team_kingkong.tpap_risk161_breaches
SELECT DISTINCT B.*, C.category
, C.initiationMode as initiationMode_switch -- ,  D.initiationMode as initiationMode_tpaphss
, IF(D.upi_subtype IS NOT NULL, D.upi_subtype, IF(C.category = 'LITE_MANDATE', 'UPI_LITE_MANDATE', '')) AS upi_subtype
, D.txnType
, 'upi_oc141A_mcc6540_gift'  AS rule_name
, CASE
    WHEN payee_mcc = '6540' AND initiationMode = '00' AND txnType NOT IN ('PAY', 'DEBIT') 
        THEN 'Invalid txnType for initiationMode 00 and MCC 6540'
    WHEN payee_mcc = '6540' AND initiationMode NOT IN ('00', '10', '04', '05', '01', '02', '11') 
        THEN 'Invalid initiationMode for MCC 6540'
    ELSE NULL
END AS breach_reason FROM
    (SELECT txn_id,
    MAX(CASE WHEN participant_type = 'PAYER' THEN vpa END) AS payer_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN vpa END) AS payee_vpa,
    MAX(CASE WHEN participant_type = 'PAYEE' THEN mcc END) AS payee_mcc,
    MAX(DATE(created_on)) as txn_date,
    MAX(amount) AS txn_amount,
    MAX(created_on) AS txn_time
    FROM switch.txn_participants_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    AND DATE(created_on) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15' 
    GROUP BY 1)B
inner join
    (select txn_id, category
    , json_extract_scalar(extended_info, '$.initiationMode') as initiationMode
    from switch.txn_info_snapshot_v3
    where DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    and DATE(created_on) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    -- and upper(status) = 'SUCCESS' AND category IN ('VPA2MERCHANT', 'VPA2VPA')
    ) C
on B.txn_id = C.txn_id
INNER JOIN
    (SELECT txnid
    , regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') AS upi_subtype
    , json_extract_scalar(request, '$.requestPayload.txnType') AS txnType
    FROM tpap_hss.upi_switchv2_dwh_risk_data_snapshot_v3
    WHERE DATE(dl_last_updated) BETWEEN DATE'2025-09-01' AND DATE'2025-09-15'
    AND (lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) LIKE '%@paytm%'
    or lower(regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '')) like '%@pt%')
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '') IS NOT NULL
    AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerVpa') as varchar), '"', '') <> ''
    AND json_extract_scalar(response, '$.action_recommended') <> 'BLOCK'
    -- AND regexp_replace(cast(json_extract(request, '$.requestPayload.payerType') AS varchar),'"','') = 'PERSON'
    -- AND regexp_replace(cast(json_extract(request, '$.requestPayload.payeeType') AS varchar),'"','') = 'PERSON'
    -- AND JSON_EXTRACT_SCALAR(request, '$.requestPayload.initiationMode') = '04'
    -- AND json_extract_scalar(request, '$.requestPayload.merchantGenre') = 'OFFLINE'
    -- AND regexp_replace(cast(json_extract(request, '$.evaluationType') as varchar), '"', '') = 'UPI_TRANSACTION'
    )D
ON B.txn_id = D.txnid
WHERE (payee_mcc = '6540') AND
((initiationMode ='00' AND txnType NOT IN ('PAY','DEBIT')) OR 
(payee_mcc = '6540' AND initiationMode NOT IN ('00', '10', '04', '05', '01', '02', '11')))
;
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":sunshine: Boost Days - What's On This Week :sunshine:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n\n Good morning Melbourne, hope you all had a fabulous weekend and enjoyed the sunny weather. Please see below for what's on store this week. "
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Xero Café :coffee:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n :new-thing: *This week we are offering some yummy Chocolate and Blueberry muffins & Coconut Drops :coconut:* \n\n :coffee: *Weekly Café Special:* _Iced Vanilla Latte_"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": " Wednesday, 17th September :calendar-date-17:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": " \n\n :gah-update: *September GAH*: Join us live at 9am in the Wominjeka Breakout Space on Level 3 for the Global All Hands. \n:lunch::flag-it: *Light Lunch*: An Italian Lunch from 12pm in the Wominjeka Breakout Space. Menu is in the :thread:"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Thursday, 18th Septemer :calendar-date-18:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":pancakes: *Breakfast*: from *8:30am-10:30am* in the Wominjeka Breakout Space.     \n\n  "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Friday, 19th September :calendar-date-19:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "*Social Happy Hour:* Join us for drinks and nibbles from 4.00pm -5.30pm in the Level -3 Wominjeka Breakout Space :cheese: \n\n\n *What Else?* \n ":tino_rangatiratanga: *Te Wiki o Te Reo Māori*: Celebrate Māori language week with the #maoripacific-erg, head to their channel to see how you can engage and use your reo this week! \n\n*Feedback on our Boost Offerings?* We want to hear more. Let us know what you love by filling out our form <https://docs.google.com/forms/d/e/1FAIpQLScGOSeS5zUI8WXEl0K4WGoQUkmpIHzAjLlEKWBob4sMPhDXmA/viewform|here.>  Stay tuned to this channel, and make sure you're subscribed to the <https://calendar.google.com/calendar/u/0?cid=Y19xczkyMjk5ZGlsODJzMjA4aGt1b3RnM2t1MEBncm91cC5jYWxlbmRhci5nb29nbGUuY29t|*Melbourne Social Calendar*> :party-wx: "
			}
		},
		{
			"type": "divider"
		}
	]
}
For startups and enterprises, the biggest benefit is time-to-market. The faster you launch, the quicker you can attract users and liquidity. If you’re exploring this space, it’s worth checking how these companies design exchange platforms, handle security, and support regulatory compliance.
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":sunshine: Boost Days - What's On This Week :sunshine:"
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n\n Good morning Melbourne, hope you all had a fabulous weekend and enjoyed the sunny weather. Please see below for what's on store this week. "
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Xero Café :coffee:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n :new-thing: *This week we are offering some yummy Chocolate and Blueberry muffins & Coconut Drops :coconut:* \n\n :coffee: *Weekly Café Special:* _Iced Vanilla Latte_"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": " Wednesday, 17th Septmber :calendar-date-17:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": " \n\n :gah-update: *September GAH*: Join us live at 9am in the Wominjeka Breakout Space on Level 3 for the Global All Hands. \n:lunch::flag-th: *Light Lunch*: A Thai Lunch from 12pm in the Wominjeka Breakout Space. Menu is in the :thread:"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Thursday, 18th Septemer :calendar-date-18:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":pancakes: *Breakfast*: from *8:30am-10:30am* in the Wominjeka Breakout Space.     \n\n  "
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Friday, 19th September :calendar-date-19:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":tino_rangatiratanga: *Te Wiki o Te Reo Māori*: Celebrate Māori language week with the #maoripacific-erg, head to their channel to see how you can engage and use your reo this week!"
			}
		}
	]
}
When the quantity increase/decrease button is clicked, trigger the cart update after a short delay:
var timeout = setTimeout(function () {
  jQuery("[name='update_cart']").trigger("click");
}, 1000);

$(document.body).on('added_to_cart', function( event, fragments ) {
		var cartCount = $(fragments['span.mega-menu-woo-cart-count']).text();

		$('.cart_count').each(function(){
			$(this).text(cartCount);
		});
	}).on( 'updated_cart_totals', function(){
		$.ajax({
			url: '/wp-admin/admin-ajax.php',
			type: 'POST',
			dataType: 'json',
			data: { 'action': 'get-cart-total' },
			success: function( data ) {
				$('.cart-subtotal td').html( data.subtotal );
				//$('.woocommerce-shipping-totals td').html( data.shipping );
				$('.order-total td').html( data.total );
                console.log(2);
                $('.custom-order-total-price').html( data.total );
			}
		});
	});

add_action( 'wp_ajax_get-cart-total', 'get_cart_total' ); // wp_ajax_{value of the ACTION parameter}
add_action( 'wp_ajax_nopriv_get-cart-total', 'get_cart_total' );  // wp_ajax_nopriv_{value of the ACTION parameter}
// the first hook fires for logged-in users, the second for guests

function get_cart_total(){
	wp_die( json_encode([
		'shipping' => WC()->cart->get_cart_shipping_total(),
		'subtotal' => WC()->cart->get_cart_subtotal(),
		'total' => WC()->cart->get_total(),
	]) );
}

<?php
/**
 * Checkout Form
 *
 * This template can be overridden by copying it to yourtheme/woocommerce/checkout/form-checkout.php.
 *
 * HOWEVER, on occasion WooCommerce will need to update template files and you
 * (the theme developer) will need to copy the new files to your theme to
 * maintain compatibility. We try to do this as little as possible, but it does
 * happen. When this occurs the version of the template file will be bumped and
 * the readme will list any important changes.
 *
 * @see https://docs.woocommerce.com/document/template-structure/
 * @package WooCommerce/Templates
 * @version 3.5.0
 */

if (!defined('ABSPATH')) {
    exit;
}

$product_types = [];

function print_shopping_bag( $cart_item_key, $cart_item ) {
    $_product = apply_filters('woocommerce_cart_item_product', $cart_item['data'], $cart_item, $cart_item_key);
    $product_id = apply_filters('woocommerce_cart_item_product_id', $cart_item['product_id'], $cart_item, $cart_item_key);

    if ($_product && $_product->exists() && $cart_item['quantity'] > 0 && apply_filters('woocommerce_cart_item_visible', true, $cart_item, $cart_item_key)) {
        $product_permalink = apply_filters('woocommerce_cart_item_permalink', $_product->is_visible() ? $_product->get_permalink($cart_item) : '', $cart_item, $cart_item_key);
        ?>
		<div class="woocommerce-cart-form__cart-item <?php echo esc_attr(apply_filters('woocommerce_cart_item_class', 'cart_item', $cart_item, $cart_item_key)); ?>">
			<div class="product-item-thumbnail">
				<img src="<?php
				$image = wp_get_attachment_image_src( get_post_thumbnail_id( $product_id ), 'single-post-thumbnail' );
				echo $image[0];
				?>" alt="<?= esc_html($_product->get_name()) ?>">
			</div>

			<div class="product-item-info">
				<div class="product-item-info-left">
					<div class="name">
						<a href="<?= $product_permalink ?>">
							<?= $_product->get_name() ?>
						</a>
					</div>

					<?php
					do_action('woocommerce_after_cart_item_name', $cart_item, $cart_item_key);

					// Meta data.
					echo wc_get_formatted_cart_item_data($cart_item); // PHPCS: XSS ok.

					// Backorder notification.
					if ($_product->backorders_require_notification() && $_product->is_on_backorder($cart_item['quantity'])) {
						echo wp_kses_post(apply_filters('woocommerce_cart_item_backorder_notification', '<p class="backorder_notification">' . esc_html__('Available on backorder', 'woocommerce') . '</p>', $product_id));
					}
					?>
					<div class="product-item-info-quantity"><?php echo   $cart_item['quantity'] ?> Items</div>
				</div>
				<div class="product-item-info-price">
					<?php echo apply_filters('woocommerce_cart_item_price', WC()->cart->get_product_price($_product), $cart_item, $cart_item_key); // PHPCS: XSS ok. ?>
				</div>
			</div>
		</div>
        <?php
    }
}

foreach (WC()->cart->get_cart() as $cart_item_key => $cart_item) {
    $classes = esc_attr(apply_filters('woocommerce_cart_item_class', 'cart_item', $cart_item, $cart_item_key));
    $classes = trim( str_replace( 'cart_item', '', $classes ) );

    if( !empty( $classes ) ) {
        $product_types['individual'][$cart_item_key] = $cart_item;
    } else {
        $product_types['public'][$cart_item_key] = $cart_item;
    }
}
?>

<div class="woocommerce_pages checkout_page">
    <div class="woo_container ">
    	<div class="rst-progress-wrap">
        	<div class="rst-progress">
            	<div class="rst-progress-item active">
            		<span class="dot"></span>
                    <p>Information</p>
           		 </div>
                 <div class="rst-progress-item">
            		<span class="dot"></span>
                    <p>Payment Method</p>
           		 </div>
                 <div class="rst-progress-item">
            		<span class="dot"></span>
                    <p>Review & Pay</p>
           		 </div>
            </div>
        </div>
		<div class="woo_container_left">
       	 
        	<?php do_action('woocommerce_before_checkout_form', $checkout); ?>

			<form name="checkout"
				  method="post"
				  class="checkout woocommerce-checkout "
				  action="<?php echo esc_url(wc_get_checkout_url()); ?>"
				  enctype="multipart/form-data"
			>

				<?php // If checkout registration is disabled and not logged in, the user cannot checkout.
				if (!$checkout->is_registration_enabled() && $checkout->is_registration_required() && !is_user_logged_in()) {
					echo esc_html(apply_filters('woocommerce_checkout_must_be_logged_in_message', __('You must be logged in to checkout.', 'woocommerce')));
					return;
				} ?>
                
				<div class="billing_shipping_block next-block" id="billing-shipping-section">
					<?php if ($checkout->get_checkout_fields()) : ?>

						<?php do_action('woocommerce_checkout_before_customer_details'); ?>


						<?php do_action('woocommerce_checkout_billing'); ?>


						<?php do_action('woocommerce_checkout_after_customer_details'); ?>

					<?php endif; ?>
					<?php do_action( 'woocommerce_checkout_shipping' ); ?>
                    <div class="billing_shipping_action">
                    	<a href="<?php echo wc_get_cart_url(); ?>" class="return_to_cart"><svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.5 3.5L5.5 8.5L10.5 13.5" stroke="#415479" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Return to cart</a>
                        <a href="#" class="click_save">Save & continue</a>
                    </div>
				</div>
				
				<div class="payment_block next-block" id="payment-section">
					<h3 class="rst-title-block payment_title "  data-toggle="collapse" href="#collapsePayment" role="button" aria-expanded="false" aria-controls="collapsePayment">
						Payment method
						<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M26.5595 20.0669L17.8661 11.3736C16.8395 10.3469 15.1595 10.3469 14.1328 11.3736L5.43945 20.0669" stroke="#415479" stroke-width="3" stroke-miterlimit="10" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
					</h3>
					<p class="rst-description-block">All transactions are secure and encrypted.</p>
					<div id="collapsePayment" class="collapse show">
						<div class="payment-inner-wrap">
							<?php do_action('woocommerce_after_checkout_form', $checkout); ?>
						</div>
					</div>
					<div class="billing_shipping_action">
                    	<span></span>
                        <a href="#" class="click_save">Continue to secure payment</a>
                    </div>
				</div>
				<div class="review_pay_block next-block" id="review-pay-section">
					<h3 class="rst-title-block payment_title"  data-toggle="collapse" href="#collapseReviewpay" role="button" aria-expanded="false" aria-controls="collapseReviewpay">
						Review & Pay
						<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M26.5595 20.0669L17.8661 11.3736C16.8395 10.3469 15.1595 10.3469 14.1328 11.3736L5.43945 20.0669" stroke="#415479" stroke-width="3" stroke-miterlimit="10" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
					</h3>
					<p class="rst-description-block">Double-check your items, shipping, and payment method before placing your secure payment.</p>
					<div id="collapseReviewpay" class="collapse show">
						<div class="payment-inner-wrap">
							<div class="rst-Reviewpay-lists">
								<div class="rst-Reviewpay-item">
									<div class="rst-Reviewpay-item-label">
										Shipping Address
									</div>
									<div class="rst-Reviewpay-item-description">
										<div>Michel Kerbela</div>
										<div>info@healthviber@gmail.com</div>
										<div>AL 35013, United States</div>
										<a href="#billing-shipping-section" class="edit"><svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 20H21" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M16.5 3.49998C16.8978 3.10216 17.4374 2.87866 18 2.87866C18.2786 2.87866 18.5544 2.93353 18.8118 3.04014C19.0692 3.14674 19.303 3.303 19.5 3.49998C19.697 3.69697 19.8532 3.93082 19.9598 4.18819C20.0665 4.44556 20.1213 4.72141 20.1213 4.99998C20.1213 5.27856 20.0665 5.55441 19.9598 5.81178C19.8532 6.06915 19.697 6.303 19.5 6.49998L7 19L3 20L4 16L16.5 3.49998Z" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
</a>
									</div>
								</div>
								<div class="rst-Reviewpay-item">
									<div class="rst-Reviewpay-item-label">
										Payment Method
									</div>
									<div class="rst-Reviewpay-item-description">
										<div>Credit card</div>
										<div>4000 1000 0000 0008</div>
										<div>CVV: 000</div>
										<a href="#billing-shipping-section" class="edit"><svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 20H21" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M16.5 3.49998C16.8978 3.10216 17.4374 2.87866 18 2.87866C18.2786 2.87866 18.5544 2.93353 18.8118 3.04014C19.0692 3.14674 19.303 3.303 19.5 3.49998C19.697 3.69697 19.8532 3.93082 19.9598 4.18819C20.0665 4.44556 20.1213 4.72141 20.1213 4.99998C20.1213 5.27856 20.0665 5.55441 19.9598 5.81178C19.8532 6.06915 19.697 6.303 19.5 6.49998L7 19L3 20L4 16L16.5 3.49998Z" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
</a>
									</div>
								</div>
								<div class="rst-Reviewpay-item">
									<div class="rst-Reviewpay-item-label">
										Shipping Method
									</div>
									<div class="rst-Reviewpay-item-description">
										<div>Standard Shipping</div>
										<a href="#billing-shipping-section" class="edit"><svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 20H21" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M16.5 3.49998C16.8978 3.10216 17.4374 2.87866 18 2.87866C18.2786 2.87866 18.5544 2.93353 18.8118 3.04014C19.0692 3.14674 19.303 3.303 19.5 3.49998C19.697 3.69697 19.8532 3.93082 19.9598 4.18819C20.0665 4.44556 20.1213 4.72141 20.1213 4.99998C20.1213 5.27856 20.0665 5.55441 19.9598 5.81178C19.8532 6.06915 19.697 6.303 19.5 6.49998L7 19L3 20L4 16L16.5 3.49998Z" stroke="#415479" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
</a>
									</div>
								</div>
							</div>
						</div>
					</div>
				</div>
				<div class="link_buttons">
					<span class="go_paid">
						<?= __( 'Place my order', 'woocommerce' ) ?>
					</span>
				</div>

			</form>
		</div>

		<div class="woo_container_right">
        	<h2 class="rst-title-cart">Order Summary</h2>
			<div class="cart-info">

				<div class="table_block item_block">
					<div class="woocommerce-cart-form__list-cart">
					<?php
                        if( !empty( $product_types['public'] ) ) {
                            foreach ( $product_types['public'] as $cart_item_key => $cart_item ) {
                                print_shopping_bag( $cart_item_key, $cart_item );
                            }
                        } if( !empty( $product_types['individual'] ) ) {
                            foreach ( $product_types['individual'] as $cart_item_key => $cart_item ) {
                                print_shopping_bag( $cart_item_key, $cart_item );
                            }
                        } ?>
					</div>	
					<?php do_action('woocommerce_checkout_before_order_review_heading'); ?>

					<?php do_action('woocommerce_checkout_before_order_review'); ?>

					<div id="order_review" class="woocommerce-checkout-review-order collapse show inner">
                    	 <?php if ( wc_coupons_enabled() ) { ?>
						<form class="woocommerce-coupon-form" action="<?php echo esc_url(wc_get_checkout_url()); ?>" method="post">
							<div class="coupon">
								<label for="coupon_code">
									<?php esc_html_e( 'Have a coupon?', 'woocommerce' ); ?>
								</label>
								<div class="form_coupone">
									<input type="text" name="coupon_code" class="input-text" id="coupon_code" value="" placeholder="<?php esc_attr_e( 'Apply coupon code', 'woocommerce' ); ?>" /> <button type="submit" class="button<?php echo esc_attr( wc_wp_theme_get_element_class_name( 'button' ) ? ' ' . wc_wp_theme_get_element_class_name( 'button' ) : '' ); ?>" name="apply_coupon" value="<?php esc_attr_e( 'Apply coupon', 'woocommerce' ); ?>"><?php esc_html_e( 'Apply', 'woocommerce' ); ?></button>
								</div>
								<?php
									 $discount = WC()->cart->get_discount_total();
									  if ( $discount > 0 ) {
								?>
								<div class="has-Coupon">
									<span></span><div>You <strong>saved <?php echo wc_price( $discount ); ?></strong> with this coupon!</div>
								</div>
								<?php
									  }
								?>
                                <?php do_action( 'woocommerce_cart_coupon' ); ?>
							</div>
						</form>
                    <?php } ?>
						<?php do_action('woocommerce_checkout_order_review'); ?>
                        
					</div>
					<div class="rst-checkout-action">
                        <span class="to_checkout go_paid">
                            Place my order
                        </span>
                        <a class="to_shop" href="<?= get_permalink( wc_get_page_id( 'shop' ) ) ?>">
                                Continue Shopping
                        </a>
                    </div>
                    <!--
					<div class="checkout_block">
						<div class="item">
							<span class="go_paid">
								PLACE MY ORDER
							</span>
						</div>

						<div class="item">
							<a class="to_shop" href="<?= get_permalink( wc_get_page_id( 'shop' ) ) ?>">
								Continue Shopping
							</a>
						</div>
					</div>
                    -->
				</div>
				<div class="rst-single-meta-link">
                    <ul>
                        <li><a href="/contact-us"><svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
                        <g clip-path="url(#clip0_2339_1840)">
                        <path d="M8 0C3.57841 0 0 3.578 0 8C0 12.4215 3.578 16 8 16C12.4216 16 16 12.422 16 8C16 3.57841 12.422 0 8 0ZM8 14.8837C4.20431 14.8837 1.11628 11.7957 1.11628 8C1.11628 4.20428 4.20431 1.11628 8 1.11628C11.7957 1.11628 14.8837 4.20428 14.8837 8C14.8837 11.7957 11.7957 14.8837 8 14.8837Z" fill="#415479"></path>
                        <path d="M7.76516 10.1226C7.32288 10.1226 6.96484 10.4911 6.96484 10.9334C6.96484 11.3652 7.31234 11.7442 7.76516 11.7442C8.21797 11.7442 8.57597 11.3652 8.57597 10.9334C8.57597 10.4911 8.20741 10.1226 7.76516 10.1226Z" fill="#415479"></path>
                        <path d="M7.90262 3.9834C6.481 3.9834 5.82812 4.82587 5.82812 5.39449C5.82812 5.80518 6.17563 5.99474 6.45994 5.99474C7.02859 5.99474 6.79694 5.18387 7.87103 5.18387C8.39753 5.18387 8.81878 5.41555 8.81878 5.89996C8.81878 6.46859 8.22906 6.79502 7.88156 7.08987C7.57616 7.35309 7.17603 7.78487 7.17603 8.69049C7.17603 9.23806 7.32347 9.39602 7.75519 9.39602C8.27116 9.39602 8.37647 9.16437 8.37647 8.96424C8.37647 8.41668 8.387 8.10077 8.96619 7.64796C9.2505 7.42684 10.1456 6.71074 10.1456 5.7209C10.1456 4.73105 9.2505 3.9834 7.90262 3.9834Z" fill="#415479"></path>
                        </g>
                        <defs>
                        <clipPath id="clip0_2339_1840">
                        <rect width="16" height="16" fill="white"></rect>
                        </clipPath>
                        </defs>
                        </svg>Ask a question</a></li>
                        <li><svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
                        <path d="M15.5 10C15.224 10 15 10.224 15 10.5V14H1V8H6.5C6.776 8 7 7.776 7 7.5C7 7.224 6.776 7 6.5 7H1V5H6.5C6.776 5 7 4.776 7 4.5C7 4.224 6.776 4 6.5 4H1C0.448 4 0 4.448 0 5V14C0 14.552 0.448 15 1 15H15C15.552 15 16 14.552 16 14V10.5C16 10.224 15.776 10 15.5 10Z" fill="#415479"></path>
                        <path d="M4.5 10H2.5C2.224 10 2 10.224 2 10.5C2 10.776 2.224 11 2.5 11H4.5C4.776 11 5 10.776 5 10.5C5 10.224 4.776 10 4.5 10Z" fill="#415479"></path>
                        <path d="M15.697 2.53975L12.197 1.03975C12.07 0.98675 11.929 0.98675 11.802 1.03975L8.302 2.53975C8.119 2.61875 8 2.79975 8 2.99975V4.99975C8 7.75075 9.017 9.35875 11.751 10.9338C11.828 10.9777 11.914 10.9998 12 10.9998C12.086 10.9998 12.172 10.9777 12.249 10.9338C14.983 9.36275 16 7.75475 16 4.99975V2.99975C16 2.79975 15.881 2.61875 15.697 2.53975ZM15 4.99975C15 7.30875 14.236 8.57975 12 9.91975C9.764 8.57675 9 7.30575 9 4.99975V3.32975L12 2.04375L15 3.32975V4.99975Z" fill="#415479"></path>
                        <path d="M13.8127 4.10886C13.5977 3.93886 13.2837 3.97186 13.1097 4.18686L11.5377 6.15286L10.9157 5.22286C10.7607 4.99286 10.4497 4.93186 10.2227 5.08386C9.99365 5.23686 9.93065 5.54786 10.0837 5.77686L11.0837 7.27686C11.1727 7.40986 11.3187 7.49186 11.4787 7.49986C11.4857 7.49986 11.4937 7.49986 11.4997 7.49986C11.6507 7.49986 11.7947 7.43186 11.8907 7.31186L13.8907 4.81186C14.0627 4.59586 14.0287 4.28186 13.8127 4.10886Z" fill="#415479"></path></svg>Secure Payment</li>
                        <li><svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
                        <g clip-path="url(#clip0_2339_1490)">
                        <path d="M15.8614 7.70499L15.0781 5.34399C14.8748 4.73033 14.3114 4.33366 13.6434 4.33366H11.5461L11.6228 3.70866C11.6568 3.43866 11.5768 3.17133 11.4038 2.97499C11.2311 2.77933 10.9774 2.66699 10.7071 2.66699H2.62811C2.09477 2.66699 1.61077 3.09799 1.54911 3.62466L1.50644 3.95799C1.48344 4.14066 1.61211 4.30766 1.79477 4.33099C1.97977 4.35766 2.14477 4.22533 2.16777 4.04266L2.21077 3.70566C2.23377 3.50733 2.42877 3.33366 2.62811 3.33366H10.7071C10.7871 3.33366 10.8568 3.36299 10.9038 3.41599C10.9514 3.46999 10.9718 3.54399 10.9614 3.62666L10.1808 10.0007H1.43011C1.43011 9.83533 1.31177 9.69066 1.14344 9.66999C0.960441 9.64666 0.794108 9.77699 0.771775 9.95966L0.647441 10.962C0.615441 11.233 0.697108 11.5007 0.871441 11.6963C1.04577 11.8927 1.29177 12.0007 1.56344 12.0007H2.20877C2.21377 12.3317 2.32311 12.6403 2.53844 12.883C2.79544 13.1737 3.16144 13.334 3.56811 13.334C4.33077 13.334 5.02411 12.7457 5.18011 12.0007H10.9558C10.9608 12.3317 11.0701 12.64 11.2848 12.8827C11.5421 13.1737 11.9081 13.334 12.3151 13.334C13.0774 13.334 13.7708 12.7457 13.9271 12.0007H14.6448C15.1841 12.0007 15.6581 11.579 15.7238 11.0413L15.9798 8.95366C16.0328 8.52133 15.9931 8.10133 15.8618 7.70533L15.8614 7.70499ZM15.1464 7.66699H13.1378L13.2961 6.37433C13.2984 6.35733 13.3258 6.33366 13.3394 6.33366H14.7041L15.1464 7.66699ZM11.4648 5.00033H13.6438C14.0254 5.00033 14.3321 5.21233 14.4454 5.55399L14.4831 5.66699H13.3394C12.9871 5.66699 12.6778 5.94199 12.6348 6.29299L12.4714 7.62633C12.4491 7.80899 12.5041 7.99033 12.6224 8.12399C12.7404 8.25699 12.9121 8.33333 13.0944 8.33333H15.3191C15.3394 8.50866 15.3408 8.68833 15.3181 8.87199L15.1798 9.99999H10.8521L11.4644 4.99999L11.4648 5.00033ZM11.0981 11.3337H10.6888L10.7704 10.667H11.6418C11.4084 10.84 11.2194 11.069 11.0981 11.3337ZM10.0174 11.3337H5.17111C5.11844 11.1313 5.02444 10.9437 4.88411 10.785C4.84478 10.7407 4.79877 10.7053 4.75478 10.667H10.0994L10.0174 11.3337ZM2.34811 11.3337H1.56311C1.48311 11.3337 1.41611 11.3057 1.36911 11.253C1.32044 11.1987 1.29944 11.1227 1.30877 11.0413L1.35511 10.667H2.88344C2.65211 10.839 2.46644 11.0673 2.34777 11.3337H2.34811ZM4.54677 11.793C4.48878 12.267 4.04044 12.667 3.56777 12.667C3.35444 12.667 3.16644 12.5867 3.03711 12.4407C2.90577 12.2927 2.84844 12.0913 2.87477 11.8743C2.93311 11.4003 3.38144 11.0003 3.85411 11.0003C4.06744 11.0003 4.25577 11.0807 4.38477 11.2267C4.51611 11.3747 4.57344 11.576 4.54677 11.793ZM13.2938 11.793C13.2354 12.267 12.7871 12.667 12.3148 12.667C12.1014 12.667 11.9131 12.5867 11.7838 12.4407C11.6524 12.2927 11.5951 12.0913 11.6218 11.8743C11.6801 11.4003 12.1284 11.0003 12.6008 11.0003C12.8141 11.0003 13.0024 11.0807 13.1318 11.2267C13.2631 11.3747 13.3204 11.576 13.2938 11.793ZM14.6448 11.3337H13.9181C13.8654 11.1313 13.7714 10.9437 13.6311 10.785C13.5918 10.7407 13.5458 10.7053 13.5018 10.667H15.0984L15.0624 10.9597C15.0381 11.159 14.8428 11.3337 14.6448 11.3337Z" fill="#415479"></path>
                        <path d="M3.01 8.66683C3.01 8.4825 2.861 8.3335 2.67667 8.3335H0.333333C0.149 8.3335 0 8.4825 0 8.66683C0 8.85116 0.149 9.00016 0.333333 9.00016H2.67667C2.861 9.00016 3.01 8.85116 3.01 8.66683Z" fill="#415479"></path>
                        <path d="M1.36458 6.66699C1.18025 6.66699 1.03125 6.81633 1.03125 7.00033C1.03125 7.18433 1.18025 7.33366 1.36458 7.33366H2.88492C3.06925 7.33366 3.21825 7.18433 3.21825 7.00033C3.21825 6.81633 3.06925 6.66699 2.88492 6.66699H1.36458Z" fill="#415479"></path>
                        <path d="M0.723958 5.66667H3.41729C3.60162 5.66667 3.75062 5.51733 3.75062 5.33333C3.75062 5.14933 3.60162 5 3.41729 5H0.723958C0.539625 5 0.390625 5.14933 0.390625 5.33333C0.390625 5.51733 0.539625 5.66667 0.723958 5.66667Z" fill="#415479"></path>
                        </g>
                        <defs>
                        <clipPath id="clip0_2339_1490">
                        <rect width="16" height="16" fill="white"></rect>
                        </clipPath>
                        </defs>
                            </svg>Fast Shipping</li>
                    </ul>
                </div>
				<!--
				<div class="shopping_bag item_block">
					<h3 class="collapsed"
						data-toggle="collapse"
						href="#collapseShippingBag"
						role="button"
						aria-expanded="false"
						aria-controls="collapseShippingBag"
					>
						<?= __( 'Shopping Bag', 'woocommerce' ) ?>
						<svg fill="#197ED0" height="22px" width="22px" version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 407.437 407.437" xml:space="preserve"><g id="SVGRepo_bgCarrier" stroke-width="0"></g><g id="SVGRepo_tracerCarrier" stroke-linecap="round" stroke-linejoin="round"></g><g id="SVGRepo_iconCarrier"> <polygon points="386.258,91.567 203.718,273.512 21.179,91.567 0,112.815 203.718,315.87 407.437,112.815 "></polygon> </g></svg>
					</h3>

					<div id="collapseShippingBag" class="collapse inner">
                        <?php
                        if( !empty( $product_types['public'] ) ) {
                            foreach ( $product_types['public'] as $cart_item_key => $cart_item ) {
                                print_shopping_bag( $cart_item_key, $cart_item );
                            }
                        } if( !empty( $product_types['individual'] ) ) {
                            foreach ( $product_types['individual'] as $cart_item_key => $cart_item ) {
                                print_shopping_bag( $cart_item_key, $cart_item );
                            }
                        } ?>
					</div>
				</div>
				-->

			</div>
			<div class="checkout-right-info">
			    <?php do_action('after_woocommerce_cart_item')?>
			</div>
		</div>

    </div>




    <?php do_action('woocommerce_checkout_after_order_review'); ?>

</div>
# urls.py
from django.urls import path
from . import views

urlpatterns = [
    path("patients/", views.patient_list, name="patient_list"),
]
<!-- patients.html -->
<h1>Hospital Patient Records</h1>

<ul>
    {% for patient in patients %}
        <li>
            <strong>{{ patient.first_name }} {{ patient.last_name }}</strong><br>
            Diagnosis: {{ patient.diagnosis }}
        </li>
    {% endfor %}
</ul>
# views.py
from django.shortcuts import render
from .models import Patient

def patient_list(request):
    patients = Patient.objects.all()
    return render(request, "patients.html", {"patients": patients})
# models.py
from django.db import models

class Patient(models.Model):
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=50)
    age = models.IntegerField()
    diagnosis = models.TextField()
    admitted_on = models.DateField(auto_now_add=True)

    def __str__(self):
        return f"{self.first_name} {self.last_name}"
setwd("//files.wustl.edu/Shares/DOM/ONC/Hirbe_Lab/Diana/UBR5 KO RNASeq/analysis/HOM VS WT_2")
Counts <- read.csv("Counts.csv")
# Remove duplicate rows from Counts
Counts <- Counts[!duplicated(Counts[, 1]), ]
rownames(Counts) <- Counts[, 1]
Counts<- Counts [, -1]

# Calculate row means
row_means <- rowMeans(Counts)

# Order genes by row means in descending order
ordered_counts <- Counts[rev(order(row_means)), ]
#alternative code to the above is (ordered_counts <- Counts[order(row_means, decreasing = TRUE), ])
# Filter out rows with row means less than 10
filtered_counts <- ordered_counts[rowMeans(ordered_counts) >= 10, ]
#save filtered data frame
write.csv(filtered_counts, "filtered_counts.csv")
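# Optional sanity check (sketch): compare how many genes survive the mean-count
# filter against the raw matrix before moving on.
dim(Counts)
dim(filtered_counts)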

#prepare metadata telling R the conditions (columns) 
metadata <- data.frame( 
  
  sample_id = colnames(filtered_counts),  # Assuming you have loaded the filtered expression data 
  
  condition = c(rep("UBR5 WT", 3), rep("UBR5 HOM", 3)),  # Treatment conditions 
  
  replicate = c(1, 2, 3, 1, 2, 3)  # Sample replicates 
  
) 

metadata$condition <- factor(metadata$condition, levels = c("UBR5 WT", "UBR5 HOM"))
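# Optional sanity check (sketch): DESeq2 matches count-matrix columns to colData
# rows by position, so confirm the sample order lines up before building the object.
stopifnot(all(colnames(filtered_counts) == metadata$sample_id))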

#Load DESEQ2 for normalization
library(DESeq2) 

#Use the DESeqDataSetFromMatrix function from DESeq2 to create a DESeqDataSet object
dds <- DESeqDataSetFromMatrix(countData = filtered_counts,
                              colData = metadata,
                              design = ~ condition)



#Perform normalization and dispersion estimation with the DESeq() function.

dds <- DESeq(dds)
results <- results(dds, alpha = 0.05)
DEGs <- subset(results, abs(log2FoldChange) > 1 & padj < 0.05)
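# Quick look (sketch): how the significant genes split by direction. With the factor
# levels set above, positive log2FC means up in UBR5 HOM relative to WT.
summary(results)
table(sign(DEGs$log2FoldChange))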


#save the de_genes data frame
write.csv(DEGs, file = "DEG_HOM_VS_WT.csv")
write.csv(results, file = "DeseqResults_HOM_VS_WT.csv")



#create volcano plot
library(ggplot2)

# Add column to classify genes as DEG or not
results_df <- as.data.frame(results)
results_df$gene <- rownames(results_df)
results_df$threshold <- "Unchanged"
results_df$threshold[results_df$padj < 0.05 & abs(results_df$log2FoldChange) > 1] <- "DEG"


library(ggrepel)


# Volcano plot
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black")


# Select top 100 significant genes by padj
top100 <- results_df[order(results_df$padj), ][1:100, ]

# Volcano plot
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(data = top100,
                  aes(label = gene),
                  size = 3,
                  max.overlaps = Inf,
                  box.padding = 0.3,
                  point.padding = 0.2,
                  segment.color = "grey50")
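# To keep the labelled volcano as an image, ggsave() writes the last plot drawn
# (file name and size below are just examples).
ggsave("Volcano_HOM_vs_WT_top100.png", width = 8, height = 6, dpi = 300)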

write.csv(top100, file = "Top100_HOM_vs_WT.csv", row.names = FALSE)


# Define genes of interest
genes_of_interest <- c("Egfr", "Hsp90ab1", "Map2k2", "Cerk", "Pdgfra", "Tyk2", "Jak1",
                       "Yap1", "Taz", "Kdr", "Aurka", "Pten", "Csf1r","Ptch1", "Smo", "Gli2", "Gli3", "Wnt10a", "Rac2", "Rspo2", "Apc",
                       "Cd274", "Pdcd1", "Id1", "Id3", "Cdh1", "Cdc73", "Hrpt2","Csf1","Golph3", "Cdk1", "Acsl4", "Ptk2b", "Akt1", "Akt2", "Akt3", "Pik3ca", "Pik3c2a", "Pik3cb" , "Pik3c3", "Pik3c2b", "Pik3cd", "Atmin", "Cdkn1a", "Cdk9", "Rela", "Nfkb1", "Nfkb2", "Capza1", "Stat1", "Stat3", "Irf1", "Irf3")

# Subset DEGs for these genes (case-sensitive match!)
genes_subset <- results_df[rownames(results_df) %in% genes_of_interest, ]

# Save genes of interest with stats
write.csv(genes_subset, file = "GenesOfInterest_HOM_vs_WT.csv", row.names = TRUE)

ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(
    data = genes_subset,
    aes(label = gene),
    color = "black",       # <-- force label text to black
    size = 3,
    max.overlaps = Inf,
    box.padding = 0.3,
    point.padding = 0.2,
    segment.color = "grey50"
  )


# Subset DEGs only
DEGs_df <- as.data.frame(DEGs)
DEGs_df$gene <- rownames(DEGs_df)

# Find overlap between DEGs and genes of interest
genes_subset <- DEGs_df[rownames(DEGs_df) %in% genes_of_interest, ]

# Save overlapping genes with stats
write.csv(genes_subset, file = "GenesOfInterest_DEGs_HOM_vs_WT.csv", row.names = TRUE)

# Volcano plot with labels ONLY for genes of interest that are DEGs
ggplot(results_df, aes(x = log2FoldChange, y = -log10(padj), color = threshold)) +
  geom_point(alpha = 0.6, size = 1.5) +
  scale_color_manual(values = c("Unchanged" = "grey", "DEG" = "red")) +
  theme_minimal(base_size = 14) +
  labs(title = "Volcano Plot: HOM vs WT",
       x = "log2 Fold Change (HOM vs WT)",
       y = "-log10 Adjusted p-value",
       color = "Gene status") +
  geom_vline(xintercept = c(-1, 1), linetype = "dashed", color = "black") +
  geom_hline(yintercept = -log10(0.05), linetype = "dashed", color = "black") +
  geom_text_repel(
    data = genes_subset,
    aes(label = gene),
    color = "black",       # labels in black
    size = 5,
    max.overlaps = Inf,
    box.padding = 0.3,
    point.padding = 0.2,
    segment.color = "grey50"
  )





if (!requireNamespace("clusterProfiler", quietly = TRUE)) {
  BiocManager::install("clusterProfiler")
}
if (!requireNamespace("msigdbr", quietly = TRUE)) {
  install.packages("msigdbr")
}
library(clusterProfiler)
library(msigdbr)


# Convert results to dataframe
res_df <- as.data.frame(results)

# Remove NA log2FC
res_df <- res_df[!is.na(res_df$log2FoldChange), ]

# Create named vector: names = gene symbols, values = log2FC
gene_list <- res_df$log2FoldChange
names(gene_list) <- rownames(res_df)

# Sort decreasing for clusterProfiler
gene_list <- sort(gene_list, decreasing = TRUE)


# Mouse Hallmark gene sets
hallmark_sets <- msigdbr(species = "Mus musculus", category = "H")  # H = Hallmark

# Use as two-column dataframe: gs_name (pathway), gene_symbol
term2gene <- hallmark_sets[, c("gs_name", "gene_symbol")]

# Make sure your DESeq2 results have no NA log2FC
res_df <- as.data.frame(results)
res_df <- res_df[!is.na(res_df$log2FoldChange), ]

# Named vector: names = gene symbols, values = log2FC
gene_list <- res_df$log2FoldChange
names(gene_list) <- rownames(res_df)
gene_list <- sort(gene_list, decreasing = TRUE)


gsea_res <- GSEA(
  geneList = gene_list,
  TERM2GENE = term2gene,  # <- must be dataframe, not list
  pvalueCutoff = 0.1,
  verbose = FALSE
)

# View top pathways
head(as.data.frame(gsea_res))

# Save results
write.csv(as.data.frame(gsea_res), "GSEA_Hallmark_Mouse_HOM_vs_WT.csv", row.names = FALSE)

library(enrichplot)

# Convert GSEA results to dataframe
gsea_df <- as.data.frame(gsea_res)

# Pick an enriched pathway to visualize (here the 5th row of the results table; adjust the index as needed)
top_pathway <- gsea_df$ID[5]  # or use $Description if you prefer

# Classic GSEA plot for the top pathway
gseaplot2(
  gsea_res,
  geneSetID = top_pathway,   # pathway ID
  title = gsea_df$Description[5],  # nice descriptive title
  color = "red"
)
#PLOT HALLMARK PATHWAYS
library(ggplot2)

# Convert GSEA results to dataframe
gsea_df <- as.data.frame(gsea_res)

# Order pathways by NES (normalized enrichment score)
gsea_df <- gsea_df[order(gsea_df$NES, decreasing = TRUE), ]

# Plot ALL enriched pathways
ggplot(gsea_df, aes(x = reorder(Description, NES), y = NES, fill = -log10(p.adjust))) +
  geom_col() +
  coord_flip() +
  labs(
    title = "GSEA: All Enriched Hallmark Pathways",
    x = "Pathway",
    y = "Normalized Enrichment Score (NES)",
    fill = "-log10 adj p-value"
  ) +
  theme_minimal(base_size = 14)


# --------------------------
# GSEA: KEGG, GO, Reactome
# Prepare ranked gene list with Entrez IDs for GSEA
# --------------------------
library(clusterProfiler)
library(org.Mm.eg.db)
library(ReactomePA)

# Convert gene symbols to Entrez IDs
entrez_map <- bitr(names(gene_list), fromType="SYMBOL", toType="ENTREZID", OrgDb=org.Mm.eg.db)
gene_list_df <- merge(entrez_map, data.frame(log2FC = gene_list), by.x="SYMBOL", by.y="row.names")
gene_list_df <- gene_list_df[!duplicated(gene_list_df$ENTREZID), ]
gene_list_named <- gene_list_df$log2FC
names(gene_list_named) <- gene_list_df$ENTREZID

# Sort decreasing for GSEA
gene_list_named <- sort(gene_list_named, decreasing = TRUE)
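# The fgsea-based gse* functions estimate p-values by permutation, so setting a seed
# (and, if your clusterProfiler version supports it, passing seed = TRUE to the gse* calls)
# makes the KEGG/GO/Reactome results below reproducible across runs. Optional sketch:
set.seed(1234)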

# --------------------------
# 1) GSEA: KEGG Pathways
# --------------------------
gsea_kegg <- gseKEGG(
  geneList = gene_list_named,
  organism = "mmu",
  minGSSize = 10,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save KEGG GSEA results
write.csv(as.data.frame(gsea_kegg), "GSEA_KEGG_HOM_vs_WT.csv", row.names = FALSE)

# Top 30 KEGG pathways barplot
library(enrichplot)
library(ggplot2)

# Convert gseaResult to dataframe to see top pathways
gsea_df <- as.data.frame(gsea_kegg)

# Select top 30 pathways by NES or pvalue
top30 <- gsea_df[order(gsea_df$NES, decreasing = TRUE)[1:30], ]

# Ridgeplot (shows enrichment distribution for multiple pathways)
ridgeplot(gsea_kegg, showCategory = 30) +
  ggtitle("GSEA: KEGG Top 30 Pathways") +
  theme_minimal(base_size = 14)

# Optional: classic GSEA plot for the top pathway
top_pathway <- top30$ID[1]
gseaplot2(gsea_kegg, geneSetID = top_pathway,
          title = top30$Description[1], color = "red")


# --------------------------
# 2) GSEA: GO Biological Process (BP)
# --------------------------
gsea_go_bp <- gseGO(
  geneList = gene_list_named,
  OrgDb = org.Mm.eg.db,
  ont = "ALL",
  keyType = "ENTREZID",
  minGSSize = 10,
  maxGSSize = 500,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save GO BP GSEA results
write.csv(as.data.frame(gsea_go_bp), "GSEA_GO_BP_HOM_vs_WT.csv", row.names = FALSE)

# Top 30 GO BP pathways (dotplot; barplot() only has a method for ORA results, not gseaResult objects)
dotplot(gsea_go_bp, showCategory = 30, title = "GSEA: GO BP Top 30 Pathways")

# --------------------------
# 3) GSEA: Reactome Pathways
# --------------------------
gsea_reactome <- gsePathway(
  geneList = gene_list_named,
  organism = "mouse",
  minGSSize = 10,
  pvalueCutoff = 0.1,
  verbose = TRUE
)

# Save Reactome GSEA results
write.csv(as.data.frame(gsea_reactome), "GSEA_Reactome_HOM_vs_WT.csv", row.names = FALSE)

# Top 30 Reactome pathways (dotplot; barplot() does not work on gseaResult objects)
dotplot(gsea_reactome, showCategory = 30, title = "GSEA: Reactome Top 30 Pathways")

# --------------------------


library(ggplot2)
library(dplyr)

# Convert GSEA Reactome results to dataframe
gsea_reactome_df <- as.data.frame(gsea_reactome)

# Select top 30 pathways by NES magnitude
top30_reactome <- gsea_reactome_df %>%
  arrange(desc(abs(NES))) %>%
  slice(1:30)

# Reorder for plotting (highest NES on top)
top30_reactome$Description <- factor(top30_reactome$Description, levels = rev(top30_reactome$Description))

# Plot barplot: NES on x-axis, pathways on y-axis, fill by -log10(padj)
ggplot(top30_reactome, aes(x = NES, y = Description, fill = -log10(p.adjust))) +
  geom_bar(stat = "identity") +
  scale_fill_gradient(low = "red", high = "darkred") +
  theme_minimal(base_size = 14) +
  labs(title = "GSEA: Top 30 Reactome Pathways",
       x = "Normalized Enrichment Score (NES)",
       y = "",
       fill = "-log10(adj.p)")



#ORA Enrichment analysis
### --- DEG-based Over-Representation Analysis (ORA) ---

library(clusterProfiler)
library(org.Mm.eg.db)
library(msigdbr)
library(ReactomePA)
library(enrichplot)
library(ggplot2)
library(dplyr)

# Subset DEGs you already defined
DEGs_df <- as.data.frame(DEGs)
DEGs_df$gene <- rownames(DEGs_df)

# Convert SYMBOL -> ENTREZ
deg_entrez <- bitr(
  DEGs_df$gene,
  fromType = "SYMBOL",
  toType   = "ENTREZID",
  OrgDb    = org.Mm.eg.db
)

# Remove duplicates
deg_entrez <- deg_entrez[!duplicated(deg_entrez$ENTREZID), ]
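# bitr() silently drops symbols it cannot map to Entrez IDs; it can be worth noting
# how many DEGs were lost in the conversion (sketch).
length(setdiff(DEGs_df$gene, deg_entrez$SYMBOL))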


### --- ORA: GO Biological Process ---
ego_bp <- enrichGO(
  gene          = deg_entrez$ENTREZID,
  OrgDb         = org.Mm.eg.db,
  keyType       = "ENTREZID",
  ont           = "BP",   # Biological Process
  pAdjustMethod = "BH",
  pvalueCutoff  = 0.05,
  qvalueCutoff  = 0.2,
  readable      = TRUE    # back to SYMBOL
)

# Save results
write.csv(as.data.frame(ego_bp), "ORA_GO_BP_HOM_vs_WT.csv", row.names = FALSE)

# Plot top 20
dotplot(ego_bp, showCategory = 20) + ggtitle("ORA: GO Biological Process (DEGs)")


### --- ORA: KEGG Pathways ---
ekegg <- enrichKEGG(
  gene          = deg_entrez$ENTREZID,
  organism      = "mmu",
  pvalueCutoff  = 0.05
)

# Save results
write.csv(as.data.frame(ekegg), "ORA_KEGG_HOM_vs_WT.csv", row.names = FALSE)

# Plot top 20
dotplot(ekegg, showCategory = 20) + ggtitle("ORA: KEGG Pathways (DEGs)")


### --- ORA: Reactome Pathways ---
ereact <- enrichPathway(
  gene          = deg_entrez$ENTREZID,
  organism      = "mouse",
  pvalueCutoff  = 0.05,
  readable      = TRUE
)

# Save results
write.csv(as.data.frame(ereact), "ORA_Reactome_HOM_vs_WT.csv", row.names = FALSE)

# Plot top 20
dotplot(ereact, showCategory = 20) + ggtitle("ORA: Reactome Pathways (DEGs)")


### --- ORA: Hallmark Gene Sets ---
hallmark_sets <- msigdbr(species = "Mus musculus", category = "H")
term2gene <- hallmark_sets[, c("gs_name", "entrez_gene")]
term2gene$entrez_gene <- as.character(term2gene$entrez_gene)

ora_hallmark <- enricher(
  gene          = deg_entrez$ENTREZID,
  TERM2GENE     = term2gene,
  pvalueCutoff  = 0.05
)

# Save results
write.csv(as.data.frame(ora_hallmark), "ORA_Hallmark_HOM_vs_WT.csv", row.names = FALSE)

# Plot top 20
dotplot(ora_hallmark, showCategory = 20) + ggtitle("ORA: Hallmark Pathways (DEGs)")


### --- DEG-based ORA: Up vs Downregulated DEGs ---


# Split DEGs
up_DEGs   <- rownames(DEGs[DEGs$log2FoldChange > 1, ])
down_DEGs <- rownames(DEGs[DEGs$log2FoldChange < -1, ])

# Convert SYMBOL -> ENTREZ for both
up_entrez <- bitr(up_DEGs, fromType="SYMBOL", toType="ENTREZID", OrgDb=org.Mm.eg.db)
up_entrez <- up_entrez[!duplicated(up_entrez$ENTREZID), ]

down_entrez <- bitr(down_DEGs, fromType="SYMBOL", toType="ENTREZID", OrgDb=org.Mm.eg.db)
down_entrez <- down_entrez[!duplicated(down_entrez$ENTREZID), ]
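# Sketch: quick counts of how many DEGs fall on each side of the fold-change cutoff
# and how many of them survived the Entrez conversion.
length(up_DEGs); nrow(up_entrez)
length(down_DEGs); nrow(down_entrez)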


### --- ORA: GO Biological Process ---
ego_up <- enrichGO(gene=up_entrez$ENTREZID, OrgDb=org.Mm.eg.db, keyType="ENTREZID",
                   ont="BP", pAdjustMethod="BH", pvalueCutoff=0.05, qvalueCutoff=0.2, readable=TRUE)
ego_down <- enrichGO(gene=down_entrez$ENTREZID, OrgDb=org.Mm.eg.db, keyType="ENTREZID",
                     ont="BP", pAdjustMethod="BH", pvalueCutoff=0.05, qvalueCutoff=0.2, readable=TRUE)

write.csv(as.data.frame(ego_up), "ORA_GO_BP_Upregulated.csv", row.names=FALSE)
write.csv(as.data.frame(ego_down), "ORA_GO_BP_Downregulated.csv", row.names=FALSE)

dotplot(ego_up, showCategory=20) + ggtitle("ORA: GO BP (Upregulated)")
dotplot(ego_down, showCategory=20) + ggtitle("ORA: GO BP (Downregulated)")
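
# Optional combined view (a sketch, assuming the up/down lists above): compareCluster()
# runs the same ORA on both gene lists so up- vs downregulated terms plot side by side
cc_go <- compareCluster(
  list(Up = up_entrez$ENTREZID, Down = down_entrez$ENTREZID),
  fun          = "enrichGO",
  OrgDb        = org.Mm.eg.db,
  keyType      = "ENTREZID",
  ont          = "BP",
  pvalueCutoff = 0.05
)
dotplot(cc_go, showCategory = 10) + ggtitle("GO BP: Up vs Down DEGs")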
                     

### --- ORA: Hallmark Gene Sets ---
hallmark_sets <- msigdbr(species="Mus musculus", category="H")
term2gene <- hallmark_sets[, c("gs_name", "entrez_gene")]
term2gene$entrez_gene <- as.character(term2gene$entrez_gene)

ora_hallmark_up <- enricher(gene=up_entrez$ENTREZID, TERM2GENE=term2gene, pvalueCutoff=0.05)
ora_hallmark_down <- enricher(gene=down_entrez$ENTREZID, TERM2GENE=term2gene, pvalueCutoff=0.05)

write.csv(as.data.frame(ora_hallmark_up), "ORA_Hallmark_Upregulated.csv", row.names=FALSE)
write.csv(as.data.frame(ora_hallmark_down), "ORA_Hallmark_Downregulated.csv", row.names=FALSE)

dotplot(ora_hallmark_up, showCategory=20) + ggtitle("ORA: Hallmark (Upregulated)")
dotplot(ora_hallmark_down, showCategory=20) + ggtitle("ORA: Hallmark (Downregulated)")


### --- ORA: Reactome ---
ereact_up <- enrichPathway(gene=up_entrez$ENTREZID, organism="mouse", pvalueCutoff=0.05, readable=TRUE)
ereact_down <- enrichPathway(gene=down_entrez$ENTREZID, organism="mouse", pvalueCutoff=0.05, readable=TRUE)

write.csv(as.data.frame(ereact_up), "ORA_Reactome_Upregulated.csv", row.names=FALSE)
write.csv(as.data.frame(ereact_down), "ORA_Reactome_Downregulated.csv", row.names=FALSE)

dotplot(ereact_up, showCategory=20) + ggtitle("ORA: Reactome (Upregulated)")
dotplot(ereact_down, showCategory=20) + ggtitle("ORA: Reactome (Downregulated)")



### --- ORA: KEGG ---
ekegg_up <- enrichKEGG(gene=up_entrez$ENTREZID, organism="mmu", pvalueCutoff=0.05)
ekegg_down <- enrichKEGG(gene=down_entrez$ENTREZID, organism="mmu", pvalueCutoff=0.05)

write.csv(as.data.frame(ekegg_up), "ORA_KEGG_Upregulated.csv", row.names=FALSE)
write.csv(as.data.frame(ekegg_down), "ORA_KEGG_Downregulated.csv", row.names=FALSE)

dotplot(ekegg_up, showCategory=20) + ggtitle("ORA: KEGG (Upregulated)")
dotplot(ekegg_down, showCategory=20) + ggtitle("ORA: KEGG (Downregulated)")
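
# Note (optional): enrichKEGG()/enrichPathway() can return NULL or an empty result when
# one direction has few genes, which makes dotplot() error. A small guard avoids that;
# this helper is illustrative and not part of the original script.
plot_if_enriched <- function(res, title) {
  if (!is.null(res) && nrow(as.data.frame(res)) > 0) {
    print(dotplot(res, showCategory = 20) + ggtitle(title))
  } else {
    message("No enriched terms for: ", title)
  }
}
plot_if_enriched(ekegg_up,   "ORA: KEGG (Upregulated)")
plot_if_enriched(ekegg_down, "ORA: KEGG (Downregulated)")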



#Heatmap
ifna_genes <- c(
  "Serpine2","Sntb1","Mmp14","Tnc","Fstl1","Vcan","Il15","Scg2","Ecm1","Vegfa",
  "Cxcl12","Dst","Tgfbr3","Ptx3","Pcolce","Spock1","Adam12","Tagln","Loxl1","Cdh6",
  "Pvr","Gadd45b","Gadd45a","Rhob","Rgs4","Fzd8","Tfpi2","Vegfc","Oxtr","Dab2",
  "Lum","Col16a1","Tnfrsf11b","Fap","Matn2","Snai2","Cxcl5","Calu","Capg","Emp3",
  "Nnmt","Gpc1","Tnfaip3","Nid2","Mcm7","Slit3","Slit2","Matn3","Fmod","Edil3",
  "Dkk1","Qsox1","Copa","Cxcl15","Crlf1","Grem1","Fbln5","Sgcb","Sgcg","Sgcd",
  "Plod2","Plod3","Foxc2","Sdc1","Sdc4","Dpysl3","Tnfrsf12a","Pdlim4","Mfap5","Col5a3",
  "P3h1","Cadm1","Fstl3","Efemp2","Cap2","Gpx7","Cthrc1","Basp1","Glipr1","Lrrc15",
  "Pcolce2","Colgalt1","Postn","Htra1","Pmepa1","Myl9","Slc6a8","Magee1","Wipf1","Fermt2",
  "Abi3bp","Ntm","Tpm4","Ecm2","Anpep","Gm21451","Acta2","Aplp1","Areg","Bdnf",
  "Bgn","Bmp1","Cald1","Serpinh1","Cd44","Cdh2","Col11a1","Col12a1","Col3a1","Col4a1",
  "Col4a2","Col5a1","Col5a2","Col6a2","Col6a3","Col7a1","Col8a2","Col1a1","Col1a2","Comp",
  "Ccn1","Sfrp4","Sfrp1","Mylk","Dcn","Eln","Eno2","Fas","Fbln1","Fbln2",
  "Fbn1","Fbn2","Fgf2","Ccn2","Flna","Fn1","Fuca1","Gas1","Gja1","Id2",
  "Igfbp2","Igfbp3","Igfbp4","Il6","Inhba","Itga2","Itga5","Itgav","Itgb1","Itgb3",
  "Itgb5","Jun","Lgals1","Lox","Lrp1","Mest","Mgp","Mmp2","Mmp3","Msx1","Notch2",
  "Pdgfrb","Pfn2","Serpine1","Plaur","Pmp22","Prrx1","Ppib","Pthlh","Sat1","Sparc",
  "Spp1","Tgfb1","Tgm2","Thbs1","Thbs2","Thy1","Timp1","Timp3","Tpm1","Tpm2",
  "Vcam1","Vim","Wnt5a","Cdh11","Nt5e","Gem","Lama1","Plod1","Lama3","Lama2",
  "Lamc2","Lamc1","Tgfbi"
)



# Get normalized counts from DESeq2
norm_counts <- counts(dds, normalized=TRUE)

# Subset for IFN-α response genes (keep only genes present in your dataset)
ifna_counts <- norm_counts[rownames(norm_counts) %in% ifna_genes, ]
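
# Optional check: how many of the listed genes are present in the count matrix
# (missing symbols are usually naming differences between the gene set and the annotation)
sum(rownames(norm_counts) %in% ifna_genes)
setdiff(ifna_genes, rownames(norm_counts))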

# Optionally, z-score normalize each gene for heatmap visualization
ifna_counts_z <- t(scale(t(ifna_counts)))
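
# Genes with zero variance across samples produce NaN rows after z-scoring; dropping
# them (a precaution, not in the original script) keeps pheatmap clustering from failing
ifna_counts_z <- ifna_counts_z[complete.cases(ifna_counts_z), ]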


# Create annotation for columns
ann_col <- data.frame(
  Condition = metadata$condition
)
rownames(ann_col) <- metadata$sample_id


library(pheatmap)

# Make sure columns are in your desired order
desired_order <- c("sample.WT_1", "sample.WT_2", "sample.WT_3",
                   "sample.neg.neg_1", "sample.neg.neg_2", "sample.neg.neg_3")
ifna_counts_z <- ifna_counts_z[, desired_order]

# Make sure annotation matches
ann_col <- ann_col[desired_order, , drop = FALSE]

# Heatmap
pheatmap(ifna_counts_z,
         annotation_col = ann_col,
         show_rownames = TRUE,
         show_colnames = TRUE,
         cluster_rows = TRUE,
         cluster_cols = FALSE,   # keep the column order fixed
         scale = "row",
         fontsize_row = 8,
         main = "Interferon Alpha Response Genes")


class NW_PayslipGenerateReport
{
    public static System.IO.Stream getPayslipReport(date _from, date _to, hcmWorkerRecid worker)
    {
        NWPY_PayslipReportContract  contract = new NWPY_PayslipReportContract();
        SRSPrintDestinationSettings settings;
        System.IO.MemoryStream      _stream;
        Filename                    filename;
        System.Byte[]               reportBytes = new System.Byte[0]();
        SRSProxy                    srsProxy;
        SrsReportRunService         srsreportRunService = new SrsReportRunService();
        Microsoft.Dynamics.AX.Framework.Reporting.Shared.ReportingService.ParameterValue[] parameterValueArray;
        Map                         reportParametersMap;
        SRSReportExecutionInfo      executionInfo = new SRSReportExecutionInfo();
        SrsReportRunController      controller = new SrsReportRunController();

        filename = 'Payslip.PDF';

        // Set the report data contract parameters
        contract.parmFromDate(_from);
        contract.parmToDate(_to);
        contract.parmEmployee(worker);

        // Configure the controller to render the report to a PDF file without showing a dialog
        controller.parmReportName(ssrsReportStr(NWPY_PayslipReport, PrecisionDesign2));
        controller.parmShowDialog(false);
        controller.parmReportContract().parmRdpContract(contract);
        settings = controller.parmReportContract().parmPrintSettings();
        settings.printMediumType(SRSPrintMediumType::File);
        settings.fileFormat(SRSReportFileFormat::PDF);
        settings.overwriteFile(true);
        settings.fileName(filename);
        controller.parmReportContract().parmReportServerConfig(SRSConfiguration::getDefaultServerConfiguration());
        controller.parmReportContract().parmReportExecutionInfo(executionInfo);
        // Build the parameter array and render the report to a byte array via the SRS proxy
        srsreportRunService.getReportDataContract(controller.parmReportContract().parmReportName());
        srsreportRunService.preRunReport(controller.parmReportContract());
        reportParametersMap = srsreportRunService.createParamMapFromContract(controller.parmReportContract());
        parameterValueArray = SrsReportRunUtil::getParameterValueArray(reportParametersMap);
        srsProxy = SRSProxy::constructWithConfiguration(controller.parmReportContract().parmReportServerConfig());
        reportBytes = srsProxy.renderReportToByteArray(controller.parmReportContract().parmReportPath(),
                        parameterValueArray,
                        settings.fileFormat(),
                        settings.deviceInfo());
        // Wrap the rendered bytes in a memory stream for the caller
        if (reportBytes)
        {
            _stream = new System.IO.MemoryStream(reportBytes);
        }
        return _stream;
    }

}
textToTranslate = "Syed Ali Nayyar Nasir";

targetLang = "ar";

// Arabic

apiKey = "AIzaSyCtqtqWGfnZ1EAYXD9pGhQoZ6QtkflC270";

// replace with your actual key

url = "https://translation.googleapis.com/language/translate/v2?key=" + apiKey;

params = Map();

params.put("q",textToTranslate);

params.put("target",targetLang);

params.put("source","en");

// optional

headers = Map();

headers.put("Content-Type","application/x-www-form-urlencoded");

response = invokeurl

[

	url :url

	type :POST

	parameters:params

	headers:headers

];

info response;

translation = response.get("data").get("translations").get(0).get("translatedText");

info translation;
 
Metaverse development helps startups cut costs by building virtual spaces instead of physical ones. It boosts customer engagement with immersive 3D experiences and seamless global networking. Startups can explore new revenue through NFTs and digital assets. By adopting future-ready tech, they attract investors and drive rapid growth.