Snippets Collections
// Validates a RATE_CARD record against the Partner master data and assigns a
// sequential Rate Card ID (RA-001, RA-002, ...). Validation runs only when the
// record was never validated or previously failed; getRateCardMaster is always
// triggered at the end.
void RateCard.getValidations(int ids)
{
	RateCardID = RATE_CARD[ID == input.ids];
	// Only (re)validate when validation never ran or previously failed.
	if(RateCardID.CP_Validation == "" || RateCardID.CP_Validation == "Failure")
	{
		//	info "inside if ";
		// NOTE(review): "ID in RateCardID" matches against the record fetched
		// above, effectively re-fetching the same record set — confirm intended.
		verification_inv = RATE_CARD[ID in RateCardID];
		idsString = "";
		Internal_User = zoho.loginuserid;
		// Latest assigned Rate_Card_ID. NOTE(review): this is a string sort, so
		// "RA-999" sorts above "RA-1000" — verify ordering once IDs pass 3 digits.
		getID = RATE_CARD[ID != null] sort by Rate_Card_ID desc;
		var2 = 0;
		if(getID == null)
		{
			// No rate cards exist yet; start numbering at 1.
			var2 = 1;
		}
		else
		{
			lastRateCardID = getID.Rate_Card_ID;
			suffix = lastRateCardID.getsuffix("RA-");
			if(!suffix.isEmpty())
			{
				var2 = suffix.toLong() + 1;
			}
			else
			{
				var2 = 1;
			}
		}
		for each  test in verification_inv
		{
			// Append IDs to idsString
			if(idsString != "")
			{
				idsString = idsString + ",";
			}
			idsString = idsString + test.ID.toString();
			// Zero-pad the counter to three digits (RA-001 ... RA-999).
			autoList = var2.toString().length();
			RateList = {1:"RA-00",2:"RA-0",3:"RA-"};
			// Fix: fall back to a plain "RA-" prefix once the counter exceeds 999,
			// where the map has no entry for a 4+ digit counter and would return null.
			Rate_Card_ID = ifnull(RateList.get(autoList),"RA-") + var2;
			test.Rate_Card_ID=Rate_Card_ID;
			var2 = var2 + 1;
			// 			cp_mas = Rate_Card_Master[Partner_Entity_Name == test.Partner_Entity_Name.Partner_Entity_Name && Contracting_organisation == test.Contracting_organisation];
			// 			if(cp_mas.count() > 0)
			// 			{
			// 				test.CP_Validation="Success";
			// 			}
			// 			else
			// 			{
			// 				test.CP_Validation="Failure";
			// 			}
			// Validate partner + contracting organisation against the KYC master.
			cp_mas = Partner_Onboarding_and_KYC[Partner_Entity_Name == test.Partner_Entity_Name.Partner_Entity_Name && Contracting_organisation1 == test.Contracting_organisation];
			// 			cp_mas = Partner_Details[Partner_Entity_Name == test.Partner_Entity_Name.Partner_Entity_Name && Contracting_organisation == test.Contracting_organisation];
			// 			masterdata = Partner_Onboarding_and_KYC[Partner_Entity_Name == test.pa]
			//	info "partner name " + test.Partner_Entity_Name.Partner_Entity_Name;
			//	info "org " + test.Contracting_organisation;
			if(cp_mas.count() > 0)
			{
				test.CP_Validation="Success";
			}
			else
			{
				test.CP_Validation="Failure";
				test.Validation_Comments="Contracting organisation does not match as per Partner Master Data. Rectify the data and re-initiate rate card import.";
			}
			// 			if(test.CP_Validation = "Success")
			// 			{
			// Copy the partner's unique ID from the KYC master when the partner exists.
			getPartnerInfo = Partner_Onboarding_and_KYC[Partner_Entity_Name == test.Partner_Entity_Name.Partner_Entity_Name];
			if(getPartnerInfo.count() > 0)
			{
				//		info "PARTNERID"+ getPartnerInfo.Partner_ID;
				test.Partner_Unique_ID=getPartnerInfo.Partner_ID;
			}
			//	}
		}
		// 		openUrl("https://creatorapp.zoho.in/centralisedprocurement_usdcglobal/usdc1/#Report:Approved_Rate_Card_By_Internal_User","same window");
	}
	else
	{
		// Fix: corrected user-facing message typo ("validate" -> "validated").
		info "This RateCard was already validated";
	}
	thisapp.RateCard.getRateCardMaster(input.ids);
}
// Assigns the next sequential Rate Card ID (RA-001 style) to the RATE_CARD
// record identified by ids and copies the partner's unique ID onto it.
void RateCard.Generate_ratecard_id(int ids)
{
	verification_inv = RATE_CARD[ID == input.ids];
	// Latest assigned Rate_Card_ID. NOTE(review): this is a string sort, so
	// "RA-999" sorts above "RA-1000" — verify ordering once IDs pass 3 digits.
	getID = RATE_CARD[ID != null] sort by Rate_Card_ID desc;
	if(getID == null)
	{
		// No rate cards exist yet; start numbering at 1.
		var2 = 1;
	}
	else
	{
		lastRateCardID = getID.Rate_Card_ID;
		suffix = lastRateCardID.getsuffix("RA-");
		if(!suffix.isEmpty())
		{
			var2 = suffix.toLong() + 1;
		}
		else
		{
			var2 = 1;
		}
	}
	// Fetch criterion is ID == input.ids, so this loop normally sees one record;
	// var2 is intentionally not incremented inside it.
	for each  test in verification_inv
	{
		partnerdet = Partner_Onboarding_and_KYC[Partner_Entity_Name == test.Partner_Entity_Name.Partner_Entity_Name];
		// Zero-pad the counter to three digits (RA-001 ... RA-999).
		autoList = var2.toString().length();
		RateList = {1:"RA-00",2:"RA-0",3:"RA-"};
		// Fix: fall back to a plain "RA-" prefix once the counter exceeds 999,
		// where the map has no entry for a 4+ digit counter and would return null.
		Rate_Card_ID = ifnull(RateList.get(autoList),"RA-") + var2;
		// 		info Rate_Card_ID;
		test.Rate_Card_ID=Rate_Card_ID;
		test.Partner_Unique_ID=partnerdet.Partner_ID;
	}
}
// Recomputes the total paid amount for the internal invoice linked to the
// given payment record and stores the sum on the invoice's Paid_Amount field.
void Test1.testpayment(int id)
{
	paymentRec = Payment_form[ID == id];
	invoiceRec = Internal_Invoice[ID == paymentRec.CP_Internal_Invoice_ID];
	invoiceId = paymentRec.CP_Internal_Invoice_ID;
	relatedPayments = Payment_form[CP_Internal_Invoice_ID == invoiceId];
	totalPaid = 0;
	// Sum every payment recorded against the same internal invoice.
	for each  payRec in relatedPayments
	{
		totalPaid = totalPaid + payRec.Payment_Amount.toDecimal();
	}
	invoiceRec.Paid_Amount=totalPaid;
}
// Creates a Zoho Books bill (and a balancing journal) for the Bills record
// identified by ids: generates a sequential Books_Bill_ID if missing, posts the
// bill via the Books API, records the result in New_Bills, attaches the external
// invoice file, and updates invoice/bill status fields on success or failure.
void Books.create_bills(int ids)
{
	billdata = Bills[ID == input.ids];
	// Generate the next "Bill-NNN" identifier only when none exists yet.
	if(billdata.Books_Bill_ID.isEmpty() == true)
	{
		// Latest assigned Books_Bill_ID (string sort on the ID text).
		getID = Bills[ID != null] sort by Books_Bill_ID desc;
		if(getID.count() == 0)
		{
			// First ever bill.
			// NOTE(review): billnum is NOT set on this path (nor when
			// Books_Bill_ID already existed), yet it is used below at
			// item_map.put("bill_number",billnum) — confirm intended.
			billdata.Books_Bill_ID="Bill-001";
		}
		else
		{
			var1 = getID.Books_Bill_ID.getsuffix("Bill-");
			// Restart at 1 when the previous ID has no usable numeric suffix.
			if(var1.isEmpty() || !var1.isNumber())
			{
				var2 = 1;
			}
			else
			{
				var2 = var1.tolong() + 1;
			}
			// Zero-pad the counter to three digits (Bill-001 ... Bill-999).
			autoList = var2.toString().length();
			TarnsList = {1:"Bill-00",2:"Bill-0",3:"Bill-"};
			billdata.Books_Bill_ID=TarnsList.get(autoList) + var2;
			billnum = TarnsList.get(autoList) + var2;
		}
	}
	// Create Bill Process to Books
	iternal_inv = Internal_Invoice[ID == billdata.Bill_Id1];
	test = billdata.Partner_Details.Zoho_books_ID;
	var_par = Partner_Details[Partner_Entity_Name == billdata.Vendor_Name];
	vendordet = Partner_Onboarding_and_KYC[Partner_Entity_Name == billdata.Vendor_Name];
	// Zoho Books vendor (contact) ID for this partner.
	book = vendordet.Zoho_Book_vendor_ID;
	info book;
	// Single line item carrying the full bill amount.
	item_list = List();
	item_map = Map();
	item_map.put("rate",billdata.Total_Amount);
	item_map.put("account_id",2293182000000041035);
	item_map.put("bill_number",billnum);
	// // 	check the GST details from zoho books 
	vendorDetailsResponse = invokeurl
	[
		url :"https://www.zohoapis.in/books/v3/contacts/" + book + "?organization_id=60036667486"
		type :GET
		connection:"zoho_books_connection"
	];
	vendorDetails = vendorDetailsResponse.get("contact");
	gstTreatment = vendorDetails.get("gst_treatment");
	info "GST Treatment: " + gstTreatment;
	// 	   taxResponse = invokeurl
	// 	[
	// 	    url :"https://www.zohoapis.in/books/v3/settings/taxes?organization_id=60036667486"
	// 	    type :GET
	// 	    connection:"zoho_books_connection"
	// 	];
	// 	info taxResponse;
	// Any vendor with a GST treatment is billed as out-of-scope here.
	if(gstTreatment != null)
	{
		item_map.put("gst_treatment_code","out_of_scope");
	}
	item_list.add(item_map);
	// Bill header payload; branch chosen by contracting organisation.
	Head1 = Map();
	if(billdata.Contracting_organisation == "USDC")
	{
		Head1.put("branch_id",2293182000000188007);
	}
	if(billdata.Contracting_organisation == "Jain University")
	{
		Head1.put("branch_id",2293182000000188048);
	}
	Head1.put("reference_number",billdata.Bill_Id1.Internal_Invoice_ID);
	Head1.put("bill_number",billdata.Books_Bill_ID);
	Head1.put("notes",billdata.Order_Number);
	Head1.put("date_formatted",zoho.currentdate);
	Head1.put("is_draft",true);
	Head1.put("vendor_id",book);
	Head1.put("line_items",item_list);
	//Head1.put("tax_total",billdata.GST_Amount);
	Head1.put("total",billdata.Total_Amount);
	// Custom field linking the Books bill back to the Creator internal invoice.
	custom_field_list = List();
	customfields = Map();
	customfields.put("api_name","cf_internal_invoice_id");
	customfields.put("value",iternal_inv.ID);
	custom_field_list.add(customfields);
	Head1.put("custom_fields",custom_field_list);
	info customfields;
	// Create the bill in Zoho Books.
	var = invokeurl
	[
		url :"https://www.zohoapis.in/books/v3/bills?organization_id=60036667486"
		type :POST
		parameters:Head1.toString()
		connection:"zoho_books_connection"
	];
	info "Bill Creation API Status " + var;
	// NOTE(review): this condition is duplicated immediately inside — the inner
	// if is redundant; confirm before removing.
	if(var.get("code") == 0 && var.get("bill") != null)
	{
		// 				/*create record in New Bill*/
		if(var.get("code") == 0 && var.get("bill") != null)
		{
			getBill = var.get("bill");
			// Mirror the created Books bill into the New_Bills form.
			addNewBills = insert into New_Bills
			[
				Bill_ID=getBill.get("bill_number")
				Bill_Date=getBill.get("date").toString("dd-mm-YYYY")
				Bill_Status=getBill.get("status")
				Total_Amount=getBill.get("total")
				Vendor_Name=getBill.get("vendor_name")
				Zoho_books_ID=getBill.get("bill_id")
				Internal_Invoice=billdata.Bill_Id1
				Added_User=zoho.loginuser
			];
		}
	}
	billcreateform = Create_Bill[Bills == input.ids];
	// 	invoicebackend = Create_Bill[CP_Internal_Invoice_Backend.inp]
	// Success path: flag records, sync amounts, attach the invoice file,
	// and post a balancing journal entry.
	if(var.getJson("code") == 0)
	{
		for each  recs12 in billcreateform.CP_Internal_Invoice_Backend
		{
			recs12.Bill_Creation_Status="Yes";
		}
		iternal_inv.Invoice_Amount=ifnull(iternal_inv.Invoice_Amount,0) + ifnull(billdata.Total_Amount,0);
		billcreateform.Bill_Creation_Status="Yes";
		billdata.Bill_Creation_Status="Yes";
		bills = var.get("bill");
		bills_id = bills.getJSON("bill_id");
		total1 = bills.getJSON("total");
		iternal_inv.Books_Bill_ID=bills_id;
		// 		info bills_id;
		// Download the external invoice file from Creator ...
		file = invokeurl
		[
			url :"https://www.zohoapis.in/creator/v2.1/data/centralisedprocurement_usdcglobal/usdc1/report/All_Bills/" + billdata.ID + "/External_Invoice/download"
			type :GET
			connection:"zoho_oauth_connection"
		];
		file.setparamname("attachment");
		info "download files " + file;
		// ... and attach it to the newly created Books bill.
		response = invokeurl
		[
			url :"https://www.zohoapis.in/books/v3/bills/" + bills_id + "/attachment?organization_id=60036667486"
			type :POST
			files:file
			connection:"zoho_books_connection1"
		];
		// 		info file;
		billdata.Zoho_Books_Id=bills_id;
		billdata.Total_Invoice_Amount_Incl_GST=total1;
		var_bill = var.get("bill").getJSON("reference_number");
		info "var_bill" + var_bill;
		// 		openUrl("#Report:Associated_Bill?Internal_Invoice_ID=" + var_bill,"same window");
		// Re-fetch and update invoice totals/balance after the bill is created.
		iternal_inv = Internal_Invoice[ID == billdata.Bill_Id1];
		iternal_inv.Balance_Amount=billdata.Balance_Amount;
		// iternal_inv.Total_Amount=input.Total_Amount;
		iternal_inv.Total_Amount=ifnull(iternal_inv.Total_Amount,0) + billdata.Total_Amount;
		iternal_inv.Balance_Amount=billdata.Accumulated_Commission_Amount - ifnull(iternal_inv.Total_Amount,0);
		iternal_inv.External_Invoice="";
		iternal_inv.Status="New";
		/*Sending mail to CP*/
		// 		sendmail
		// 		[
		// 			from :zoho.adminuserid
		// 			to :billdata.CP_Details1.Partner_Entity_Name,"vimal@techvaria.com"
		// 			subject :"CP Invoice Verification Successfull"
		// 			message :"CP invoice Verification Done and Submitted to Finance team"
		// 		]
		totalAmount = 0;
		// Build a two-line journal: debit and credit of the same amount
		// against two fixed accounts (hard_lst drives the two legs).
		item_list = List();
		hard_lst = {1,2};
		for each  split in hard_lst
		{
			if(split == 1)
			{
				// Debit leg.
				get_creator_amount = billdata.Total_Amount;
				get_credit_debit = "debit";
				get_creator_Description = "Comments";
				item_map = Map();
				item_map.put("amount",get_creator_amount);
				item_map.put("debit_or_credit",get_credit_debit);
				item_map.put("account_id",2293182000000114065);
				// 				2293182000000114073
				item_map.put("customer_id",book);
			}
			if(split == 2)
			{
				// Credit leg.
				get_creator_amount = billdata.Total_Amount;
				get_credit_debit = "credit";
				get_creator_Description = "Test";
				item_map = Map();
				item_map.put("amount",get_creator_amount);
				item_map.put("debit_or_credit",get_credit_debit);
				item_map.put("account_id",2293182000000114073);
				item_map.put("customer_id",book);
			}
			item_list.add(item_map);
		}
		// Journal header; branch chosen by contracting organisation.
		mymap = Map();
		if(billdata.Contracting_organisation == "USDC")
		{
			mymap.put("branch_id",2293182000000188007);
		}
		if(billdata.Contracting_organisation == "Jain University")
		{
			mymap.put("branch_id",2293182000000188048);
		}
		mymap.put("journal_date",zoho.currentdate.toString("yyyy-MM-dd"));
		mymap.put("reference_number",billdata.Order_Number);
		mymap.put("notes","test");
		mymap.put("line_items",item_list);
		mymap.put("total",billdata.Total_Invoice_Amount_Incl_GST);
		//mymap.put("tax_total",billdata.GST_Amount);
		responseBooks = invokeurl
		[
			url :"https://www.zohoapis.in/books/v3/journals?organization_id=60036667486"
			type :POST
			parameters:mymap.toString()
			connection:"zoho_books_connection1"
		];
		getJournal = responseBooks.get("journal");
		Zoho_Books_ID = getJournal.get("journal_id");
		// Attach the same external invoice file to the journal as well.
		file = invokeurl
		[
			url :"https://www.zohoapis.in/creator/v2.1/data/centralisedprocurement_usdcglobal/usdc1/report/All_Bills/" + billdata.ID + "/External_Invoice/download"
			type :GET
			connection:"zoho_oauth_connection"
		];
		file.setparamname("attachment");
		response = invokeurl
		[
			url :"https://www.zohoapis.in/books/v3/journals/" + Zoho_Books_ID + "/attachment?organization_id=60036667486"
			type :POST
			files:file
			connection:"zoho_books_connection1"
		];
	}
	else
	{
		// Failure path: record the raw API response as the error message.
		for each  recs123 in billcreateform.CP_Internal_Invoice_Backend
		{
			recs123.Bill_Creation_Status="No";
			recs123.Bill_Creation_Error_Message=var;
		}
		billcreateform.Bill_Creation_Status="No";
		billcreateform.Bill_Creation_Error_Message=var;
		billdata.Bill_Creation_Status="No";
		billdata.Bill_Creation_Error_Message=var;
	}
}
// Vendor-payment webhook handler: for each bill covered by the incoming Zoho
// Books vendor payment, creates or updates a matching Payment_form record in
// Creator and bumps the related invoice's paid amount, then emails a summary.
// Assumes `organization` and `vendor_payment` maps are supplied by the webhook.
var_org = organization.get("organization_id");
aaa = vendor_payment.get("payment_id");
amount = vendor_payment.get("amount");
paymentnumber = vendor_payment.get("payment_number");
dateformatted = vendor_payment.getJSON("date_formatted");
branch = vendor_payment.getJSON("branch_name");
refno = vendor_payment.getJSON("reference_number");
//billID = vendor_payment.get("bills").get(0).get("bill_id");
// Bills this payment was applied against.
tbill = vendor_payment.getJSON("bills");
// Full payment details (fetched but currently unused beyond logging).
resp = invokeurl
[
	url :"https://www.zohoapis.in/books/v3/vendorpayments/" + aaa + "?organization_id=" + var_org
	type :GET
	connection:"books"
];
// info resp;
item_list = List();
for each  rec in tbill
{
	billID = rec.get("bill_id");
	billnum = rec.get("bill_number");
	amtapp = rec.get("amount_applied");
	// Get amount from the bill
	paymentid = rec.get("bill_payment_id");
	info "PAYMENTID: " + paymentid;
	info "AmtApp: " + amtapp;
	// Store amount in item_map for tracking
	item_map = Map();
	item_map.put("Bill_ID",billID);
	item_map.put("Payment_Amount",amtapp);
	item_map.put("Payment_Utr",zoho.currentdate);
	item_map.put("Payment_Number",paymentnumber);
	item_map.put("Branch",branch);
	item_map.put("Reference_Number",refno);
	item_list.add(item_map);
	// Look up the Creator bill record matching this Books bill ID.
	response = invokeurl
	[
		url :"https://www.zohoapis.in/creator/v2.1/data/centralisedprocurement_usdcglobal/usdc1/report/All_Bills?Zoho_Books_Id=" + billID
		type :GET
		connection:"creator"
	];
	// Look up any existing Creator payment record for this bill payment.
	payres = invokeurl
	[
		url :"https://www.zohoapis.in/creator/v2.1/data/centralisedprocurement_usdcglobal/usdc1/report/All_Payments?Bill_Payment_ID=" + paymentid
		type :GET
		connection:"creator"
	];
	payvar = payres.get("data");
	varre = response.get("data");
	// Read the bill back from Books to reach its custom fields.
	booksbilldata = zoho.books.getRecordsByID("bills","60036667486",billID,"books");
	crtrid = booksbilldata.getJSON("bill").getJSON("custom_fields");
	//info "BooksData"+booksbilldata;
	amount = booksbilldata.getJSON("payments");
	//info "Amount"+billID;
	// Extract the Creator internal-invoice ID stored in custom field
	// 2293182000002730100. NOTE(review): if the field is absent, crtrinvid
	// keeps its value from a previous loop iteration — confirm acceptable.
	for each  cfid in crtrid
	{
		if(cfid.getJSON("field_id") == "2293182000002730100")
		{
			crtrinvid = cfid.getJSON("value_formatted");
			info "INVID" + crtrinvid;
		}
	}
	// If payment record does not exist, create a new one	
	if(payvar.size() == 0)
	{
		// NOTE(review): varre is the "data" list from the report API; getJSON("ID")
		// on the list presumably yields the first record's ID — verify.
		creator_id1 = varre.getJSON("ID");
		paymap = Map();
		paymap.put("Payment_Amount",amtapp);
		paymap.put("Bill_ID",billID);
		paymap.put("Bill_No",billnum);
		paymap.put("Payment_Utr",zoho.currentdate);
		paymap.put("Payment_Number",paymentnumber);
		paymap.put("Branch",branch);
		paymap.put("Bill_Payment_ID",paymentid);
		paymap.put("Reference_Number",refno);
		paymap.put("Zoho_book_id",aaa);
		paymap.put("Bills",creator_id1);
		paymap.put("Zoho_book_id",aaa);
		paymap.put("CP_Internal_Invoice_ID",crtrinvid);
		info "Creating Payment Record: " + paymap;
		otherParams = Map();
		createPaymentres = zoho.creator.createRecord("centralisedprocurement_usdcglobal","usdc1","Payment_form",paymap,otherParams,"creator");
	}
	else
	{
		// Existing payment record: refresh amount, bill and invoice linkage.
		upmap = Map();
		upothermap = Map();
		getpaymentResponse1 = zoho.creator.getRecords("centralisedprocurement_usdcglobal","usdc1","All_Payments","Bill_Payment_ID ==\"" + paymentid + "\"",1,200,"creator");
		newresponse1 = getpaymentResponse1.getJson("data");
		upmap.put("Payment_Amount",amtapp);
		upmap.put("Bill_ID",billID);
		upmap.put("CP_Internal_Invoice_ID",crtrinvid);
		updatePayment1 = zoho.creator.updateRecord("centralisedprocurement_usdcglobal","usdc1","All_Payments",newresponse1.getJson("ID"),upmap,upothermap,"creator");
		info "Updated Payment Record: " + updatePayment1;
	}
	// Fetch the associated invoice record to roll up the paid amount.
	resp1 = invokeurl
	[
		url :"https://www.zohoapis.in/creator/v2.1/data/centralisedprocurement_usdcglobal/usdc1/report/Associated_Bill?ID=" + crtrinvid
		type :GET
		connection:"creator"
	];
	if(resp1.get("code") == 3000)
	{
		info "RESP1" + resp1;
		pdamt = resp1.getJSON("data").getJSON("Paid_Amount").toNumber();
		pdamt = pdamt + amtapp.toDecimal();
		info "PaidAMOUNT" + pdamt;
		mps = Map();
		Other = Map();
		//mps.put("Paid_Amount",amtapp.toDecimal().round(2));
		// 	info mps;
		// NOTE(review): mps is empty (the Paid_Amount put is commented out), so
		// this update currently writes no fields — confirm intended.
		upcreatorrec = zoho.creator.updateRecord("centralisedprocurement_usdcglobal","usdc1","Admin_Associated_Bill",crtrinvid,mps,Other,"creator");
		//info "CRT" + upcreatorrec;
	}
}
// Summary email. NOTE(review): upcreatorrec/mps come from the last loop
// iteration and are undefined if the loop body never reached the update.
sendmail
[
	from :zoho.adminuserid
	to :"pooja.s@techvaria.com"
	subject :"Payment Test"
	message :upcreatorrec + "-" + mps
]
A white-label BET365 clone script is a customizable, ready-to-launch betting platform that replicates BET365’s key features—real-time odds, live streaming, and multi-language support.
At Coinsqueens, we build secure, scalable, and user-friendly BET365 clone platforms tailored to your business goals using the latest technology.
Know More:
For more info:
Call/Whatsapp - +91 87540 53377
Email:sales@coinsqueen.com
Visit https://www.coinsqueens.com/blog/bet365-clone-script 
// Converts each KYC document/image field's raw embed markup into a fixed
// 100x100 downloadable <img> tag and stores it in the matching *_img display
// field. Each field goes through the same substitutions: fill in the sharing
// user, app link name, report ("KYC"), record/field path, switch the URL to
// image-download mode, and force a 100x100 size on the <img> tag.
photo_str = input.Image_Pan;
photo_str = photo_str.replaceFirst("sharedBy",zoho.adminuser);
photo_str = photo_str.replaceFirst("appLinkName",zoho.appname);
photo_str = photo_str.replaceFirst("viewLinkName","KYC");
photo_str = photo_str.replaceFirst("fieldName",input.ID + "/Image_Pan");
photo_str = photo_str.replaceFirst("image","image-download");
photo_str = photo_str.replaceFirst("<img","<img width='100' height='100' ");
input.PAN_Card_img = photo_str;
photo_str1 = input.Certificate_of_Incorporation2;
photo_str1 = photo_str1.replaceFirst("sharedBy",zoho.adminuser);
photo_str1 = photo_str1.replaceFirst("appLinkName",zoho.appname);
photo_str1 = photo_str1.replaceFirst("viewLinkName","KYC");
photo_str1 = photo_str1.replaceFirst("fieldName",input.ID + "/Certificate_of_Incorporation2");
photo_str1 = photo_str1.replaceFirst("image","image-download");
photo_str1 = photo_str1.replaceFirst("<img","<img width='100' height='100' ");
input.Certificate_of_Incorporation = photo_str1;
// NOTE(review): this Partnership_Deed result is written to Partnership_Deed1_Img
// and then overwritten by the Partnership_Deed1 block below — confirm the
// intended target field for photo_str2.
photo_str2 = input.Partnership_Deed;
photo_str2 = photo_str2.replaceFirst("sharedBy",zoho.adminuser);
photo_str2 = photo_str2.replaceFirst("appLinkName",zoho.appname);
photo_str2 = photo_str2.replaceFirst("viewLinkName","KYC");
photo_str2 = photo_str2.replaceFirst("fieldName",input.ID + "/Partnership_Deed");
photo_str2 = photo_str2.replaceFirst("image","image-download");
photo_str2 = photo_str2.replaceFirst("<img","<img width='100' height='100' ");
input.Partnership_Deed1_Img = photo_str2;
photo_str3 = input.Partnership_Deed1;
photo_str3 = photo_str3.replaceFirst("sharedBy",zoho.adminuser);
photo_str3 = photo_str3.replaceFirst("appLinkName",zoho.appname);
photo_str3 = photo_str3.replaceFirst("viewLinkName","KYC");
photo_str3 = photo_str3.replaceFirst("fieldName",input.ID + "/Partnership_Deed1");
photo_str3 = photo_str3.replaceFirst("image","image-download");
photo_str3 = photo_str3.replaceFirst("<img","<img width='100' height='100' ");
input.Partnership_Deed1_Img = photo_str3;
photo_str4 = input.Proprietorship_Declaration1;
photo_str4 = photo_str4.replaceFirst("sharedBy",zoho.adminuser);
photo_str4 = photo_str4.replaceFirst("appLinkName",zoho.appname);
photo_str4 = photo_str4.replaceFirst("viewLinkName","KYC");
photo_str4 = photo_str4.replaceFirst("fieldName",input.ID + "/Proprietorship_Declaration1");
photo_str4 = photo_str4.replaceFirst("image","image-download");
photo_str4 = photo_str4.replaceFirst("<img","<img width='100' height='100' ");
input.Proprietorship_Declaration_Img = photo_str4;
photo_str5 = input.Permanent_Establishment1;
photo_str5 = photo_str5.replaceFirst("sharedBy",zoho.adminuser);
photo_str5 = photo_str5.replaceFirst("appLinkName",zoho.appname);
photo_str5 = photo_str5.replaceFirst("viewLinkName","KYC");
photo_str5 = photo_str5.replaceFirst("fieldName",input.ID + "/Permanent_Establishment1");
photo_str5 = photo_str5.replaceFirst("image","image-download");
photo_str5 = photo_str5.replaceFirst("<img","<img width='100' height='100' ");
input.Permanent_Establishment_Img = photo_str5;
photo_str6 = input.Cancelled_Cheque;
photo_str6 = photo_str6.replaceFirst("sharedBy",zoho.adminuser);
photo_str6 = photo_str6.replaceFirst("appLinkName",zoho.appname);
photo_str6 = photo_str6.replaceFirst("viewLinkName","KYC");
photo_str6 = photo_str6.replaceFirst("fieldName",input.ID + "/Cancelled_Cheque");
photo_str6 = photo_str6.replaceFirst("image","image-download");
photo_str6 = photo_str6.replaceFirst("<img","<img width='100' height='100' ");
input.Cancelled_Cheque_img = photo_str6;
photo_str7 = input.GST_Registration_Certificate;
photo_str7 = photo_str7.replaceFirst("sharedBy",zoho.adminuser);
photo_str7 = photo_str7.replaceFirst("appLinkName",zoho.appname);
photo_str7 = photo_str7.replaceFirst("viewLinkName","KYC");
photo_str7 = photo_str7.replaceFirst("fieldName",input.ID + "/GST_Registration_Certificate");
photo_str7 = photo_str7.replaceFirst("image","image-download");
photo_str7 = photo_str7.replaceFirst("<img","<img width='100' height='100' ");
input.Gst_regestration_img = photo_str7;
photo_str8 = input.Additional_documents;
photo_str8 = photo_str8.replaceFirst("sharedBy",zoho.adminuser);
photo_str8 = photo_str8.replaceFirst("appLinkName",zoho.appname);
photo_str8 = photo_str8.replaceFirst("viewLinkName","KYC");
photo_str8 = photo_str8.replaceFirst("fieldName",input.ID + "/Additional_documents");
photo_str8 = photo_str8.replaceFirst("image","image-download");
photo_str8 = photo_str8.replaceFirst("<img","<img width='100' height='100' ");
input.Additional_documents_img = photo_str8;
photo_str9 = input.Policy_Document_Data_Privacy;
photo_str9 = photo_str9.replaceFirst("sharedBy",zoho.adminuser);
photo_str9 = photo_str9.replaceFirst("appLinkName",zoho.appname);
photo_str9 = photo_str9.replaceFirst("viewLinkName","KYC");
photo_str9 = photo_str9.replaceFirst("fieldName",input.ID + "/Policy_Document_Data_Privacy");
photo_str9 = photo_str9.replaceFirst("image","image-download");
photo_str9 = photo_str9.replaceFirst("<img","<img width='100' height='100' ");
input.Policy_Document_Img = photo_str9;
photo_str10 = input.Policy_Document_Confidentality_Of_Information;
photo_str10 = photo_str10.replaceFirst("sharedBy",zoho.adminuser);
photo_str10 = photo_str10.replaceFirst("appLinkName",zoho.appname);
photo_str10 = photo_str10.replaceFirst("viewLinkName","KYC");
photo_str10 = photo_str10.replaceFirst("fieldName",input.ID + "/Policy_Document_Confidentality_Of_Information");
photo_str10 = photo_str10.replaceFirst("image","image-download");
photo_str10 = photo_str10.replaceFirst("<img","<img width='100' height='100' ");
input.Policy_Document1_Img = photo_str10;
photo_str11 = input.Partnership_Deed;
photo_str11 = photo_str11.replaceFirst("sharedBy",zoho.adminuser);
photo_str11 = photo_str11.replaceFirst("appLinkName",zoho.appname);
photo_str11 = photo_str11.replaceFirst("viewLinkName","KYC");
photo_str11 = photo_str11.replaceFirst("fieldName",input.ID + "/Partnership_Deed");
photo_str11 = photo_str11.replaceFirst("image","image-download");
photo_str11 = photo_str11.replaceFirst("<img","<img width='100' height='100' ");
// Fix: assign the value computed in this block (photo_str11), not photo_str2.
// (Both derive identically from Partnership_Deed, so the rendered output is the
// same, but the previous reference was a copy-paste error.)
input.Partnership_deed_img = photo_str11;
This Power BI Tutorial guides you through data visualization, reporting, and analytics using Microsoft Power BI. Learn how to connect data sources, create interactive dashboards, and generate insightful reports. Whether you're a beginner or an advanced user, this tutorial covers key features like DAX, Power Query, and AI-powered analytics for effective data-driven decision-making.
// Sticky sidebar: once the page is scrolled within 120px of the sidebar's
// original position, pin it with the "sticky" class; unpin when the user
// scrolls back above the original position.
var sidebar = $(".sidebar");
var originalTop = sidebar.offset().top; // Position before any scrolling
var activateAt = originalTop - 120;     // Threshold where stickiness kicks in

$(window).scroll(function () {
  var currentScroll = $(window).scrollTop();

  if (currentScroll >= activateAt) {
    sidebar.addClass("sticky").css("transition", "300ms");
    // Let the pinned sidebar escape any clipping ancestors.
    sidebar.parents("div").css("overflow", "visible");
  } else if (currentScroll < originalTop) {
    // Back above the original position: release the sidebar.
    sidebar.removeClass("sticky").css("transition", "300ms");
  }
});
Discover the best Online R Compilers for instant coding, testing, and debugging. Whether you're a beginner or an experienced data scientist, these web-based tools offer seamless execution, syntax highlighting, and real-time collaboration. Explore top R compilers with features like cloud storage, package support, and fast processing. Optimize your workflow with the best online R coding environments available today. Start coding in R instantly—no installation required!
End-to-end MEV Bot Development solutions are designed for smart investors and businesses. Trade effortlessly for big profits. Our skilled staff is most likely available to help you in making the greatest trade! Join us and experience consistent success. Discover the possibilities of MEV Bot Development and explore profitable opportunities with experienced guidance every step of the way.
Let's be real - the crypto space is booming, and it is not slowing down any time soon. If you are someone like me - curious, entrepreneurial, and always looking ahead - then you have probably noticed how p2p crypto exchanges are quickly becoming the heart of decentralized trading.

Gone are the days when users were happy to rely on traditional centralized platforms with their long KYC checks, frozen accounts, and high fees. People want freedom. They want control. They want options. And that's exactly what peer-to-peer (P2P) crypto exchanges offer.

But here's the thing - not all P2P exchanges are created equal. With new platforms popping up every month, it is easy to get overwhelmed. So, if you are creating a platform, investing in one, or just trading, knowing how to choose the best p2p crypto exchange in 2025 is more important than ever.

Let me walk you through how I would do it, based on experience, observation, and a little bit of that entrepreneurial gut instinct.

1. Check the Security Like It's Your Vault

The first thing I always look for? Security.

Think about it - P2P means you are directly dealing with another person, so you need a platform that has rock-solid protection in place. Look for exchanges that offer multi-signature wallets, escrow services, end-to-end encryption, and a transparent dispute resolution system.

If an exchange can't make you feel like your assets are safe there, it doesn't belong on your top 10 list.

2. User Interface: Simple, Smooth, Stress-Free

No matter how advanced the backend is, if the platform looks like it’s stuck in 2017, people won’t use it. Trust me.

In 2025, user experience is king. The best P2P platforms should be clean, intuitive, and mobile-friendly. Whether you're a newbie or a seasoned trader, you should be able to navigate the platform without Googling every other term.

Pro tip: A strong UI often shows that the team behind it actually listens to its users.

3. Liquidity and Trade Volume

Let’s not forget—a P2P exchange without liquidity is just a pretty website. If you can’t find buyers or sellers quickly, it defeats the whole purpose of “peer-to-peer.”

When ranking top P2P exchanges, always consider:

How many active traders are on the platform daily?
Are there enough listings for different cryptos?
Is the spread (price difference) reasonable?

High liquidity = fast trades, better prices, and happy users.

4. Payment Options: The More, the Merrier

One of the biggest wins for P2P exchanges is payment flexibility. Some users want to pay with PayPal. Others prefer bank transfers or even gift cards.

The best platforms support multiple payment methods across different countries, making it easier for global users to get involved. When reviewing platforms, make sure they’re offering more than just the basics.

5. Reputation & Community Trust

Let’s talk reputation—because in the crypto space, trust is currency.

Top P2P exchanges usually have:

Positive user reviews
Active communities on Reddit, Telegram, or X (formerly Twitter)
Transparent teams or leadership behind them

If the platform is a ghost online or has shady reviews all over, you know what to do—exit stage left.

6. KYC Flexibility (But Legally Sound)

Here’s the tricky part. Some users love anonymity, while others are okay with KYC for security. The best exchanges in 2025 will strike a balance—offering tiered KYC levels or region-specific compliance, while still protecting privacy where possible.

Don’t go for extremes. Go for balance.

7. Dispute Resolution That Actually Works

Sometimes, things go sideways. Maybe the seller didn’t send the crypto. Or maybe there was a payment glitch. What matters then is how the platform handles disputes.

Top-tier P2P platforms will have:

Escrow protection
Fast and fair dispute handling
A responsive support team

Because in a peer-to-peer setup, how well the platform mediates trust is everything.

8. Fees That Make Sense

Now, we all want low fees—but be careful. If a platform is too cheap, they might be cutting corners somewhere else.

Look for exchanges with:

Transparent fee structures
Competitive rates
No hidden charges

And remember, value > price. It’s better to pay a small fee for a smooth, safe transaction than to risk your crypto with a shady “free” platform.

9. Global Reach, Local Touch

2025 is the year of global adoption, but that doesn’t mean ignoring local markets. The top exchanges will support:

Multiple languages
Localized currencies
Region-specific features

If a platform wants to lead, it can’t be one-size-fits-all. It has to speak the language of its users—literally and culturally.

10. Innovation & Roadmap Vision

Last but not least—look at the vision.

The best platforms aren’t just coasting—they’re innovating. Maybe it’s integrating AI for fraud detection, or building cross-chain swaps, or launching governance tokens for community voting.

Ask this: Where is this platform heading in the next 2 years?
Because if you're picking a top exchange, you want to ride with those who are driving forward—not stuck in neutral.

Final Thoughts


So there you have it—10 things I look at when choosing or recommending a top P2P crypto exchange in 2025. Whether you’re a trader, an investor, or a builder, remember this: great platforms don’t just offer transactions—they offer trust, speed, and future-readiness.

We’re stepping into a world where peer-to-peer trading will be as common as sending a text. The top platforms of this era won’t just keep up—they’ll lead the way.

And if you’re someone who dreams of launching your own P2P exchange? I say go for it. The tools are out there. The market is ready. All it takes is the right strategy—and maybe a little inspiration from platforms like these.

Let’s build the future, one peer-to-peer transaction at a time.
{
	"blocks": [
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": ":cute-sun: Boost Days - What's On This Week :cute-sun:"
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Xero Café :coffee:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "\n :new-thing: *This week we are offering:* \n\n :hotcrossbun:Chocolate Hotcross Bun Cookies  \n\n :passionfruit: Lemon Passionfruit Coconut Slice \n\n *Weekly Café Special:* _Salted Caramel Latte_"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": " Wednesday, 16th April :calendar-date-16:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":hands: *Global All Hands* From 11am in the Level 3 Breakout Space \n\n \n\n :lunch: *Light Lunch*: Provided by Kartel Catering from *12pm* \n\n"
			}
		},
		{
			"type": "header",
			"text": {
				"type": "plain_text",
				"text": "Thursday, 17th April :calendar-date-17:",
				"emoji": true
			}
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": ":breakfast: *Breakfast*: Provided by *Kartel Catering* from *8:30am - 10:30am* in the Wominjeka Breakout Space."
			}
		},
		{
			"type": "divider"
		},
		{
			"type": "section",
			"text": {
				"type": "mrkdwn",
				"text": "Stay tuned for more fun throughout the year. :party-wx:"
			}
		}
	]
}
/* Collapsible <details> disclosure widget with animated open/close. */
details {
  overflow: hidden;
}

/* Summary row: label on the left, toggle icon pushed to the right. */
details > summary {
  display: flex;
  justify-content: space-between;
  align-items: center;
  cursor: pointer;
}

/* Animate the content area's height between 0 and auto.
   NOTE(review): ::details-content and transition-behavior:
   allow-discrete are recent CSS features — confirm target-browser
   support; in unsupported browsers the toggle simply isn't animated. */
details::details-content {
  block-size: 0;
  transition:
    block-size 0.3s ease,
    content-visibility 0.3s ease;
  transition-behavior: allow-discrete;
}

details[open]::details-content {
  block-size: auto;
}

/* Rotate the summary icon to indicate the open state. */
details > summary svg {
  transition: transform 0.1s ease;
}

details[open] > summary svg {
  transform: rotate(90deg);
}
As blockchain adoption surges across various industries, businesses are increasingly exploring decentralized technologies to elevate their online gaming platforms. One standout approach is integrating blockchain with a BET365 clone script—a customizable software solution that replicates the features and functionalities of globally renowned platforms. But to make this integration seamless and future-ready, choosing the right blockchain network becomes critical.
In this forum discussion, we’ll explore the technical suitability of leading blockchain networks such as Ethereum, Solana, and Polygon for scalable applications based on a BET365 clone script. We’ll examine each network’s scalability, transaction speed, fees, ecosystem maturity, and compatibility with smart contract integration.

Why Blockchain Integration Matters in a BET365 Clone Script
Before diving into network comparisons, it’s essential to understand why blockchain is being integrated with a BET365 clone script in the first place. The key benefits include:

Transparency: Every transaction is immutably recorded, enabling trust among users.
Smart Contracts: Automate payouts, manage game rules, and reduce human intervention.
Security: Decentralized infrastructure reduces single points of failure.
Crypto Payments: Offer support for various cryptocurrencies, facilitating global transactions.

With these enhancements, a BET365 clone script becomes more than just a clone—it becomes a next-gen platform powered by decentralized finance (DeFi).

Ethereum: The Pioneer With Trade-offs
Pros:
Vast developer community and extensive documentation
Secure and time-tested smart contract capabilities (Solidity-based)
Large ecosystem with DeFi and NFT integration potential
Cons:
Scalability bottlenecks: Ethereum handles ~15 TPS (transactions per second), which could lead to latency during peak usage
High gas fees: Unpredictable and expensive transaction costs can frustrate users and impact platform profitability

Verdict: While Ethereum remains the industry standard for smart contracts, it’s better suited for projects prioritizing security and interoperability over transaction speed. A BET365 clone script running on Ethereum may require Layer-2 solutions (e.g., Arbitrum or Optimism) to address scalability and cost concerns.

Solana: High-Performance for Real-Time Interactions
Pros:
Blazing-fast transaction speeds (65,000+ TPS)
Exceptionally low transaction fees (~$0.00025 per transaction)
Ideal for real-time applications due to its Proof-of-History consensus
Cons:
Network downtime has occurred in the past
Smaller developer ecosystem compared to Ethereum

Verdict: Solana is a top choice for platforms that need high-frequency interactions with minimal latency. A BET365 clone script deployed on Solana can efficiently handle a large user base, ensuring seamless transactions even under heavy load. Its scalability makes it ideal for launching platforms where rapid user interactions are crucial.

Polygon: The Best of Ethereum, Without the Bottlenecks
Pros:
Built as a Layer-2 solution for Ethereum, maintaining compatibility
Significantly reduced transaction fees
Scalable infrastructure (up to 7,000 TPS)
Supported by major DeFi projects and exchanges
Cons:
Layer-2 architecture may introduce centralization concerns for some use cases
Occasionally relies on Ethereum mainnet, which could inherit its latency

Verdict: Polygon strikes a balance between Ethereum's security and Solana's scalability. It’s ideal for businesses seeking faster deployment and cost-efficiency while maintaining compatibility with Ethereum tooling. For developers using a BET365 clone script, Polygon offers a seamless path to build user-centric platforms without compromising performance.

Technical Considerations When Choosing a Blockchain for Your BET365 Clone Script
When choosing the right blockchain, consider these core technical criteria:
Smart Contract Support: Solidity is the dominant language. Ethereum and Polygon support it natively, while Solana uses Rust.

EVM Compatibility: If your BET365 clone script is EVM-compatible, networks like Polygon and BNB Chain offer smooth integration.
Transaction Finality: Look for networks that ensure rapid confirmation times to enhance user experience.
Development Ecosystem: A vibrant developer community means more tools, libraries, and support.
Infrastructure Maturity: Uptime, node availability, and integration with oracles (like Chainlink) are vital for real-world data feeds.


Conclusion: The Right Blockchain Can Supercharge Your BET365 Clone Script
To sum it up:
Use Ethereum for security, rich features, and when you're targeting users familiar with DeFi ecosystems.

Go with Solana if you need lightning-fast processing, high throughput, and near-zero fees.

Choose Polygon for scalability, low cost, and Ethereum compatibility—ideal for mainstream adoption.

Ultimately, the choice depends on your platform’s priorities: speed, cost, developer familiarity, or ecosystem integration.
Looking to Build a Blockchain-Integrated BET365 Clone Script?
  
If you're considering launching a scalable, future-ready platform using blockchain, Coinsqueens offers industry-leading development solutions. Our BET365 clone script is customizable, secure, and built to integrate seamlessly with blockchain networks like Ethereum, Solana, and Polygon. With full-stack blockchain development expertise, we help businesses launch fast, scale faster, and stay ahead in a rapidly evolving space.

For more info:
Call/Whatsapp - +91 87540 53377
Email: sales@coinsqueens.com
Visit https://www.coinsqueens.com/blog/bet365-clone-script 

Open Settings in VS Code (Ctrl + ,), then go to the search bar and search for: .codeiumconfig
Then open the first option and click "Edit in settings.json". You will be taken to
the codeium.enableConfig section — add these 2 lines there:





    // after this line, the code is not visible in the provided snippet (lines 53-54)
    "autoSuggestions": false,
    "autoComplete": false,  
fzZ1rAEzVaIZRiajU9yVeC:APA91bFoE3_Ftjw1YSm-k9tQE4JGT_fi1nvQO4z9EV1LNXwSnVMG8DuhO_28atJj0cN5Cnu1MV--sawwCFhqf_gVrG6tjrY4ADPvQmVsmCzVmo9RgiRmfGWa9pQfpJfLKI1sEkxaE8tr
import $ from 'jquery';

class Masthead {
	/**
	 * Masthead slider: wraps an element's .masthead--slider in an
	 * Owl Carousel when it holds more than one slide.
	 * @param {Element} element - root element carrying [data-masthead].
	 */
	constructor(element) {
		this.$element = $(element);
		this.$slider = this.$element.find('.masthead--slider');

		this.init();
	}

	/**
	 * Enable Owl Carousel only when there is more than one slide item;
	 * a single slide is left as static markup.
	 */
	init() {
		const itemCount = this.$slider.find('.masthead--slider-item').length;

		if (itemCount > 1) {
			this.$slider.addClass('owl-carousel');

			this.$slider.owlCarousel({
				loop: true,
				dots: true,
				animateIn: true,
				items: 1,
				// Re-tag the first slide on startup and after every transition
				// (looped mode inserts cloned items that also need tagging).
				onInitialized: (event) => this.firstSlideClass(event),
				onTranslated: (event) => this.firstSlideClass(event),
			});
		} else {
			this.$slider.removeClass('owl-carousel');
		}
	}

	/**
	 * Add 'first-slide' to the currently shown item when its markup equals
	 * that of the original (non-cloned) first slide, so clones of slide 1
	 * are styled the same as the real one.
	 *
	 * NOTE(review): the match is an innerHTML comparison, which is fragile
	 * if slides share identical markup or mutate after init — confirm an
	 * index/data-attribute check isn't preferable.
	 */
	firstSlideClass(event) {
		// Get all real (non-cloned) items
		const $realItems = this.$slider.find('.owl-item:not(.cloned)');
		const $allItems = this.$slider.find('.owl-item');

		// Get the first real item DOM reference
		const $firstRealSlide = $realItems.first();

		// Get the index of current slide
		const currentIndex = event.item.index;

		// Get the current DOM element
		const $currentItem = $allItems.eq(currentIndex);

		// NOTE(review): the class is never removed from previously tagged
		// items; the removal below is deliberately(?) disabled — confirm.
		// $allItems.removeClass('first-slide');

		// Compare: if the current item is the original first slide (or a clone of it)
		const firstSlideContent = $firstRealSlide.html();
		const currentContent = $currentItem.html();

		// Use HTML comparison or some unique attribute to detect match
		if (firstSlideContent === currentContent) {
			$currentItem.addClass('first-slide');
		}
	}
}

$('[data-masthead]').each((index, element) => new Masthead(element));
clientIPAddress 2600:8800:1114:2100:20cc:3f94:9269:851b
X-ClientId: E43FA849401B4D9E9932483BDCAD74B1
X-FEServer PH0PR07CA0032
Date:4/9/2025 4:18:18 AM
T1. Write a python program to import and export data using Pandas Library Functions.
import pandas as pd
# T1: import data from CSV and Excel files with pandas.
# NOTE(review): r"\sample_data.csv" resolves against the current drive's
# root on Windows — confirm the intended file location.
csv_file = r"\sample_data.csv"
csv_data = pd.read_csv(csv_file,sep=",")
print("Data imported successfully:")
print(csv_data)
# Excel side: read_excel needs an Excel engine (e.g. openpyxl) installed
# — confirm the environment provides one.
excel_file = r"\sample.xlsx"
excel_data = pd.read_excel(excel_file)
print(excel_data)

T2. Demonstrate the following data pre-processing techniques on the given dataset 2
a. Standardization
b. normalization
c. summarization
d. de-duplication
e. Imputation

Program:
# T2: data pre-processing demo — standardization, normalization,
# summarization, de-duplication and imputation on a toy DataFrame.
import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.impute import SimpleImputer
# Sample Data with Missing Values and Duplicates
data = {
 'Name': ['Alice', 'Bob', 'Charlie', 'Alice'],
 'Age': [25, 30, 35, 25],
 'Salary': [50000, 60000, None, 50000],
 'City': ['New York', 'Los Angeles', 'Chicago', 'New York']
}
# Create DataFrame
df = pd.DataFrame(data)
# e. Imputation — performed first: StandardScaler/MinMaxScaler raise
# "Input contains NaN" on missing values, so the original a->e order
# crashed at the standardization step before imputation ever ran.
imputer = SimpleImputer(strategy='mean')
df[['Salary']] = imputer.fit_transform(df[['Salary']])
print("\nData with Imputed Values:\n", df)
# a. Standardization — rescale Age/Salary to zero mean, unit variance
scaler = StandardScaler()
df[['Age', 'Salary']] = scaler.fit_transform(df[['Age', 'Salary']])
print("\nStandardized Data:\n", df)
# b. Normalization — rescale to [0, 1]; note this overwrites the
# standardized values from step (a)
normalizer = MinMaxScaler()
df[['Age', 'Salary']] = normalizer.fit_transform(df[['Age', 'Salary']])
print("\nNormalized Data:\n", df)
# c. Summarization — descriptive statistics of the numeric columns
summary = df.describe()
print("\nData Summary:\n", summary)
# d. De-duplication — drop exact duplicate rows (the repeated 'Alice')
df_deduplicated = df.drop_duplicates()
print("\nDe-duplicated Data:\n", df_deduplicated)

T3.Implement findS algorithm and Candidate elimination algorithm
def find_s_algorithm(examples):
    """Return the maximally specific hypothesis covering all positive examples.

    examples: list of (attribute_list, label) tuples with label 'Yes'/'No'.
    In a hypothesis, '0' means "no value seen yet" and '?' means
    "any value allowed". Negative examples are ignored, as in Find-S.
    """
    width = len(examples[0][0])
    hypothesis = ['0' for _ in range(width)]

    # Generalize only over the positive examples.
    for attrs in (a for a, label in examples if label == 'Yes'):
        hypothesis = [
            attrs[i] if h == '0' else (h if h == attrs[i] else '?')
            for i, h in enumerate(hypothesis)
        ]
    return hypothesis

def candidate_elimination_algorithm(examples):
    """Run Candidate Elimination over (attributes, label) examples.

    Returns (S, G): the specific boundary S (a single hypothesis) and
    the general boundary G (a list of hypotheses). Positive examples
    prune G and minimally generalize S; negative examples specialize
    each member of G on wildcard slots where S disagrees.
    """
    width = len(examples[0][0])
    S = ['0'] * width
    G = [['?'] * width]

    def matches(hyp, inst):
        # A hypothesis covers an instance when every non-'?' slot agrees.
        return all(h == '?' or h == x for h, x in zip(hyp, inst))

    for instance, label in examples:
        if label == 'Yes':
            # Keep only general hypotheses that still cover this positive.
            G = [g for g in G if matches(g, instance)]
            # Minimally generalize S so it covers the instance.
            for i in range(width):
                if S[i] == '0':
                    S[i] = instance[i]
                elif S[i] != instance[i]:
                    S[i] = '?'
        else:
            # Specialize every g on each '?' slot where S differs from
            # the negative instance, collecting unique specializations.
            specialized = []
            for g in G:
                for i in range(width):
                    if g[i] == '?' and S[i] != instance[i]:
                        candidate = g.copy()
                        candidate[i] = S[i]
                        if candidate not in specialized:
                            specialized.append(candidate)
            G = specialized
    return S, G

def consistent(hypothesis, instance):
    """Return True when the hypothesis covers the instance.

    Each hypothesis slot must be the wildcard '?' or equal the
    corresponding attribute value of the instance.
    """
    return all(h == '?' or h == x for h, x in zip(hypothesis, instance))

# Demo dataset (Mitchell's EnjoySport-style examples).
# Each row is a tuple (attributes, label)
dataset = [
    (['Sunny', 'Warm', 'Normal', 'Strong', 'Warm', 'Same'], 'Yes'),
    (['Sunny', 'Warm', 'High', 'Strong', 'Warm', 'Same'], 'Yes'),
    (['Rainy', 'Cold', 'High', 'Strong', 'Warm', 'Change'], 'No'),
    (['Sunny', 'Warm', 'High', 'Strong', 'Cool', 'Change'], 'Yes'),
]

# Find-S Output: most specific hypothesis covering the positives
hypothesis = find_s_algorithm(dataset)
print("Final hypothesis from Find-S:", hypothesis)

# Candidate Elimination Output: specific (S) and general (G) boundaries
S, G = candidate_elimination_algorithm(dataset)
print("Final specific hypothesis (S):", S)
print("Final general hypotheses (G):", G)


T4. Demonstrate regression techniques to predict the responses at unknown locations by fitting linear and polynomial regression surfaces. Extract error measures and plot the residuals. Further, add a regularizer and demonstrate the reduction in variance (Ridge and LASSO).

# T4: fit linear/polynomial regression to noisy cubic data, inspect
# residuals, then add Ridge (L2) and Lasso (L1) regularizers and
# compare RMSE/R² across all four models.
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.linear_model import LinearRegression, Ridge, Lasso
from sklearn.preprocessing import PolynomialFeatures
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.model_selection import train_test_split
# In cmd: pip install statsmodels (optional; not used below)
# 1. Generate synthetic data (cubic signal + Gaussian noise)
np.random.seed(42)
X = 2 - 3 * np.random.normal(0, 1, 100)
y = X**3 + X**2 + np.random.normal(0, 5, 100)
X = X.reshape(-1, 1)

# 2. Split the dataset
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# 3. Linear Regression (expected to underfit the cubic data)
lin_reg = LinearRegression()
lin_reg.fit(X_train, y_train)
y_pred_lin = lin_reg.predict(X_test)
rmse_lin = np.sqrt(mean_squared_error(y_test, y_pred_lin))
r2_lin = r2_score(y_test, y_pred_lin)

# 4. Polynomial Regression (degree 3 matches the generating function)
poly = PolynomialFeatures(degree=3)
X_train_poly = poly.fit_transform(X_train)
X_test_poly = poly.transform(X_test)

poly_reg = LinearRegression()
poly_reg.fit(X_train_poly, y_train)
y_pred_poly = poly_reg.predict(X_test_poly)
rmse_poly = np.sqrt(mean_squared_error(y_test, y_pred_poly))
r2_poly = r2_score(y_test, y_pred_poly)

# 5. Residual Plot for Polynomial Regression
residuals = y_test - y_pred_poly
plt.figure(figsize=(8, 5))
sns.residplot(x=y_pred_poly, y=residuals, lowess=False, color='g')
plt.title("Residual Plot - Polynomial Regression")
plt.xlabel("Predicted")
plt.ylabel("Residuals")
plt.axhline(0, color='red', linestyle='--')
plt.show()

# 6. Ridge Regression (L2 penalty shrinks coefficients -> lower variance)
ridge = Ridge(alpha=1)
ridge.fit(X_train_poly, y_train)
y_pred_ridge = ridge.predict(X_test_poly)
rmse_ridge = np.sqrt(mean_squared_error(y_test, y_pred_ridge))
r2_ridge = r2_score(y_test, y_pred_ridge)

# 7. Lasso Regression (L1 penalty can zero out coefficients)
lasso = Lasso(alpha=0.1)
lasso.fit(X_train_poly, y_train)
y_pred_lasso = lasso.predict(X_test_poly)
rmse_lasso = np.sqrt(mean_squared_error(y_test, y_pred_lasso))
r2_lasso = r2_score(y_test, y_pred_lasso)

# 8. Plotting all models on a common X grid
X_range = np.linspace(X.min(), X.max(), 100).reshape(-1, 1)
X_range_poly = poly.transform(X_range)

plt.figure(figsize=(10, 6))
plt.scatter(X, y, label="Original Data", alpha=0.6)
plt.plot(X_range, lin_reg.predict(X_range), label="Linear", color="blue")
plt.plot(X_range, poly_reg.predict(X_range_poly), label="Polynomial (deg 3)", color="green")
plt.plot(X_range, ridge.predict(X_range_poly), label="Ridge", color="purple")
plt.plot(X_range, lasso.predict(X_range_poly), label="Lasso", color="orange")
plt.title("Regression Models Comparison")
plt.xlabel("X")
plt.ylabel("y")
plt.legend()
plt.grid(True)
plt.show()

# 9. Print performance
print("Model Performance Summary:\n")
print(f"Linear Regression     -> RMSE: {rmse_lin:.2f}, R²: {r2_lin:.2f}")
print(f"Polynomial Regression -> RMSE: {rmse_poly:.2f}, R²: {r2_poly:.2f}")
print(f"Ridge Regression      -> RMSE: {rmse_ridge:.2f}, R²: {r2_ridge:.2f}")
print(f"Lasso Regression      -> RMSE: {rmse_lasso:.2f}, R²: {r2_lasso:.2f}")

T5. Demonstrate the capability of PCA and LDA in dimensionality reduction.
# T5: reduce the 4-D Iris features to 2-D with PCA (unsupervised) and
# LDA (supervised, uses class labels) and plot the projections side by side.
# Import necessary libraries
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

# Load the Iris dataset
iris = load_iris()
X, y = iris.data, iris.target
target_names = iris.target_names

# Apply PCA to reduce dimensions to 2
pca = PCA(n_components=2)
X_pca = pca.fit_transform(X)

# Apply LDA to reduce dimensions to 2 (fit uses labels y, unlike PCA)
lda = LinearDiscriminantAnalysis(n_components=2)
X_lda = lda.fit_transform(X, y)

# Plot PCA results (one color per class)
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
colors = ['navy', 'turquoise', 'darkorange']
for color, i, target_name in zip(colors, [0, 1, 2], target_names):
    plt.scatter(X_pca[y == i, 0], X_pca[y == i, 1], alpha=0.8, color=color, label=target_name)
plt.title('PCA on Iris Dataset')
plt.xlabel('Principal Component 1')
plt.ylabel('Principal Component 2')
plt.legend()

# Plot LDA results
plt.subplot(1, 2, 2)
for color, i, target_name in zip(colors, [0, 1, 2], target_names):
    plt.scatter(X_lda[y == i, 0], X_lda[y == i, 1], alpha=0.8, color=color, label=target_name)
plt.title('LDA on Iris Dataset')
plt.xlabel('Linear Discriminant 1')
plt.ylabel('Linear Discriminant 2')
plt.legend()

plt.tight_layout()
plt.show()

# Explained variance for PCA (fraction of variance kept per component)
explained_variance_ratio = pca.explained_variance_ratio_
print("Explained variance by PCA components:", explained_variance_ratio)


T6. KNN
# T6: K-Nearest Neighbours classification of Iris with standardized
# features; reports accuracy, a classification report, and a
# confusion-matrix heatmap.
# Import necessary libraries
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import classification_report, accuracy_score, confusion_matrix
import seaborn as sns
import matplotlib.pyplot as plt

# Load the Iris dataset
iris = load_iris()
X, y = iris.data, iris.target

# Split into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Standardize the features (scaler fit on train only, to avoid leakage)
scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)

# Initialize the K-NN classifier with K=5
knn = KNeighborsClassifier(n_neighbors=5)

# Fit the model
knn.fit(X_train_scaled, y_train)

# Predict on the test set
y_pred = knn.predict(X_test_scaled)

# Evaluation
print("Accuracy:", accuracy_score(y_test, y_pred))
print("\nClassification Report:\n", classification_report(y_test, y_pred))

# Confusion Matrix
conf_matrix = confusion_matrix(y_test, y_pred)
sns.heatmap(conf_matrix, annot=True, cmap="Blues", xticklabels=iris.target_names, yticklabels=iris.target_names)
plt.xlabel("Predicted")
plt.ylabel("Actual")
plt.title("Confusion Matrix - KNN")
plt.show()


T7.Apply suitable classifier model to classify the credit status to be good or bad on German credit dataset.csv, create confusion matrix to measure the accuracy of the model (using Logistic Regression/SVM/Naïve Bayes).
Dataset -> https://online.stat.psu.edu/stat857/node/215/
# T7: classify German-credit records as good/bad with logistic
# regression; label-encodes all object columns and reports a confusion
# matrix plus accuracy.
# Step 1: Import libraries
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix, accuracy_score, ConfusionMatrixDisplay

# Step 2: Load the German Credit dataset
df = pd.read_csv("German credit dataset.csv")

# Step 3: Preprocess the data
# Encode categorical columns (one LabelEncoder per column, kept for
# potential inverse transforms later)
df_encoded = df.copy()
label_encoders = {}

for column in df_encoded.select_dtypes(include=['object']).columns:
    le = LabelEncoder()
    df_encoded[column] = le.fit_transform(df_encoded[column])
    label_encoders[column] = le

# Step 4: Split data into features (X) and target (y)
# Assuming 'Creditability' or similar is the target column; adjust if needed
target_column = 'Creditability'  # Update this if your dataset has a different column
X = df_encoded.drop(target_column, axis=1)
y = df_encoded[target_column]

# Step 5: Split into train and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Step 6: Train a classifier (e.g., Logistic Regression)
model = LogisticRegression(max_iter=1000)
model.fit(X_train, y_train)

# Step 7: Make predictions
y_pred = model.predict(X_test)

# Step 8: Evaluate the model
cm = confusion_matrix(y_test, y_pred)
accuracy = accuracy_score(y_test, y_pred)

print("✅ Confusion Matrix:\n", cm)
print("\n🎯 Accuracy Score:", accuracy)

# Optional: Display confusion matrix visually
# NOTE(review): in a non-interactive run, a matplotlib show() call is
# needed for the figure to actually appear — confirm the environment.
ConfusionMatrixDisplay(confusion_matrix=cm).plot()


T8.Apply train set split and develop a regression model to predict the sold price of players using imb381ipl2013.csv build a correlation matrix between all the numeric features in dataset and visualize the heatmap. RMSE of train and test data.

# T8: linear regression predicting IPL player Sold Price; prints
# train/test RMSE and a correlation heatmap of numeric features.
# Import necessary libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error

# Load the dataset
file_path = "imb381ipl2013.csv"  # Replace with your file path if needed
data = pd.read_csv(file_path)

# Display basic information and head of the dataset
print("Dataset Info:")
print(data.info())
print("\nFirst 5 Rows:")
print(data.head())

# Check for missing values and drop rows with NaN
data.dropna(inplace=True)

# Restrict to numeric columns: LinearRegression cannot fit on raw
# string columns (player name, country, ...), so passing every column
# made model.fit() raise a ValueError in the original version.
numeric_features = data.select_dtypes(include=[np.number])

# Define target variable (Sold Price) and numeric features
# NOTE(review): assumes 'Sold Price' is a numeric column in the CSV —
# confirm the exact column name and dtype.
X = numeric_features.drop(columns=['Sold Price'])
y = numeric_features['Sold Price']

# Split the data into training and testing sets (80-20 split)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Build the Linear Regression model
model = LinearRegression()
model.fit(X_train, y_train)

# Make predictions
y_train_pred = model.predict(X_train)
y_test_pred = model.predict(X_test)

# Calculate RMSE for train and test data
rmse_train = np.sqrt(mean_squared_error(y_train, y_train_pred))
rmse_test = np.sqrt(mean_squared_error(y_test, y_test_pred))

print(f"\nRMSE on Training Data: {rmse_train:.2f}")
print(f"RMSE on Test Data: {rmse_test:.2f}")

# Build correlation matrix between all numeric features
correlation_matrix = numeric_features.corr()

# Plot heatmap of the correlation matrix
plt.figure(figsize=(10, 8))
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', fmt='.2f', linewidths=0.5)
plt.title('Correlation Matrix of Numeric Features')
plt.show()

T11. For the glass identification dataset, fit random forest classifier to classify glass type

# T11: random-forest classification of glass type on the UCI glass
# identification dataset, with standardized features and a
# confusion-matrix heatmap.
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
from sklearn.preprocessing import StandardScaler
import seaborn as sns
import matplotlib.pyplot as plt

# Load dataset
# Download from: https://archive.ics.uci.edu/ml/datasets/glass+identification
# Assuming the file is named 'glass.csv' with proper column names
# NOTE(review): read_csv with names= treats every row as data — confirm
# glass.csv has no header row, otherwise the header is parsed as a record.
column_names = ['RI', 'Na', 'Mg', 'Al', 'Si', 'K', 'Ca', 'Ba', 'Fe', 'Type']
data = pd.read_csv('glass.csv', names=column_names)

# Features and target
X = data.drop('Type', axis=1)
y = data['Type']

# Normalize features (scaling applied before the split here; trees are
# largely insensitive to it, but it keeps the pipeline uniform)
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

# Train-test split
X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, test_size=0.2, random_state=42)

# Random Forest Classifier
clf = RandomForestClassifier(n_estimators=100, random_state=42)
clf.fit(X_train, y_train)

# Predictions
y_pred = clf.predict(X_test)

# Evaluation
print("Accuracy:", accuracy_score(y_test, y_pred))
print("\nClassification Report:\n", classification_report(y_test, y_pred))
print("\nConfusion Matrix:\n", confusion_matrix(y_test, y_pred))

# Optional: Plot confusion matrix heatmap
plt.figure(figsize=(8,6))
sns.heatmap(confusion_matrix(y_test, y_pred), annot=True, fmt="d", cmap='Blues')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.title('Confusion Matrix')
plt.show()


T12.. Implement the K-Means clustering algorithm using Python. You may use a library such as scikit-learn for this purpose

# T12: K-Means clustering of synthetic blob data, with cluster/centroid
# visualisation and an elbow-method plot to choose k.
# Import necessary libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.preprocessing import StandardScaler

# Generate sample data using make_blobs
# Create 300 samples with 3 cluster centers
X, y_true = make_blobs(n_samples=300, centers=3, cluster_std=0.60, random_state=42)

# Standardize the data
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

# Visualize the raw data
plt.scatter(X_scaled[:, 0], X_scaled[:, 1], s=50, c='gray', marker='o')
plt.title('Generated Raw Data')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.show()

# Apply K-Means Clustering (n_init=10 restarts to avoid poor local minima)
kmeans = KMeans(n_clusters=3, random_state=42, n_init=10)
kmeans.fit(X_scaled)

# Get the cluster labels and cluster centers
y_kmeans = kmeans.labels_
centers = kmeans.cluster_centers_

# Visualize the clusters
plt.scatter(X_scaled[:, 0], X_scaled[:, 1], c=y_kmeans, s=50, cmap='viridis')
plt.scatter(centers[:, 0], centers[:, 1], c='red', marker='X', s=200, label='Centroids')
plt.title('K-Means Clustering Results')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.legend()
plt.show()

# Print cluster centers and inertia (within-cluster sum of squares)
print("Cluster Centers (after scaling):\n", centers)
print(f"Inertia (Sum of Squared Distances): {kmeans.inertia_:.2f}")

# Calculate the optimal number of clusters using the Elbow Method
inertia_values = []
k_range = range(1, 11)

for k in k_range:
    kmeans = KMeans(n_clusters=k, random_state=42, n_init=10)
    kmeans.fit(X_scaled)
    inertia_values.append(kmeans.inertia_)

# Plot the Elbow Method (look for the "knee" in the curve)
plt.plot(k_range, inertia_values, marker='o')
plt.title('Elbow Method to Determine Optimal k')
plt.xlabel('Number of Clusters (k)')
plt.ylabel('Inertia (Sum of Squared Distances)')
plt.show()


T13. Implement the Agglomerative Hierarchical clustering algorithm using Python. Utilize linkage methods such as 'ward,' 'complete,' or 'average.

# T13: agglomerative hierarchical clustering on synthetic blobs —
# dendrograms for ward/complete/average linkage, then clustering with
# ward linkage and a scatter plot of the result.
# Import necessary libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import dendrogram, linkage
from sklearn.datasets import make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import AgglomerativeClustering

# Generate sample data using make_blobs
# Create 300 samples with 3 cluster centers
X, y_true = make_blobs(n_samples=300, centers=3, cluster_std=0.70, random_state=42)

# Standardize the data
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

# Plot the raw data
plt.scatter(X_scaled[:, 0], X_scaled[:, 1], s=50, c='gray', marker='o')
plt.title('Generated Raw Data')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.show()

# Define linkage methods to be used
linkage_methods = ['ward', 'complete', 'average']

# Plot dendrograms for different linkage methods
# (truncate_mode='level', p=5 keeps only the top 5 merge levels visible)
plt.figure(figsize=(15, 5))
for i, method in enumerate(linkage_methods):
    plt.subplot(1, 3, i + 1)
    Z = linkage(X_scaled, method=method)
    dendrogram(Z, truncate_mode='level', p=5)
    plt.title(f'Dendrogram using {method.capitalize()} Linkage')
    plt.xlabel('Data Points')
    plt.ylabel('Distance')

plt.tight_layout()
plt.show()

# Apply Agglomerative Clustering using 'ward' linkage
n_clusters = 3  # Number of clusters
model = AgglomerativeClustering(n_clusters=n_clusters, linkage='ward')
y_pred = model.fit_predict(X_scaled)

# Plot the clusters
plt.scatter(X_scaled[:, 0], X_scaled[:, 1], c=y_pred, cmap='viridis', s=50)
plt.title('Agglomerative Hierarchical Clustering (Ward Linkage)')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.show()

T
[
    {
        "id": "1a995dd199999201",
        "type": "tab",
        "label": "Flow 6",
        "disabled": false,
        "info": "",
        "env": []
    },
    {
        "id": "805da3545a35d191",
        "type": "inject",
        "z": "1a995dd199999201",
        "name": "",
        "props": [
            {
                "p": "payload"
            },
            {
                "p": "topic",
                "vt": "str"
            }
        ],
        "repeat": "1",
        "crontab": "",
        "once": true,
        "onceDelay": "1",
        "topic": "",
        "payload": "",
        "payloadType": "date",
        "x": 190,
        "y": 100,
        "wires": [
            [
                "89542d9b10fef801",
                "6f99897aef86caef"
            ]
        ]
    },
    {
        "id": "89542d9b10fef801",
        "type": "http request",
        "z": "1a995dd199999201",
        "name": "",
        "method": "GET",
        "ret": "obj",
        "paytoqs": "ignore",
        "url": "https://api.openweathermap.org/data/2.5/weather?q=London&appid=8ff56d883da02c95c015b78fbb6dd8ed",
        "tls": "",
        "persist": false,
        "proxy": "",
        "insecureHTTPParser": false,
        "authType": "",
        "senderr": false,
        "headers": [],
        "x": 410,
        "y": 100,
        "wires": [
            [
                "f62fc4e2650c38ac",
                "2f73374e96d77a29",
                "fe5a67f4e6f8058c",
                "cdaa8f80ebb3ea0d"
            ]
        ]
    },
    {
        "id": "f62fc4e2650c38ac",
        "type": "debug",
        "z": "1a995dd199999201",
        "name": "debug 4",
        "active": true,
        "tosidebar": true,
        "console": false,
        "tostatus": false,
        "complete": "false",
        "statusVal": "",
        "statusType": "auto",
        "x": 660,
        "y": 100,
        "wires": []
    },
    {
        "id": "2f73374e96d77a29",
        "type": "change",
        "z": "1a995dd199999201",
        "name": "",
        "rules": [
            {
                "t": "set",
                "p": "payload",
                "pt": "msg",
                "to": "payload.name",
                "tot": "str"
            }
        ],
        "action": "",
        "property": "",
        "from": "",
        "to": "",
        "reg": false,
        "x": 460,
        "y": 220,
        "wires": [
            [
                "5b84d6a82970ab89"
            ]
        ]
    },
    {
        "id": "fe5a67f4e6f8058c",
        "type": "change",
        "z": "1a995dd199999201",
        "name": "",
        "rules": [
            {
                "t": "set",
                "p": "payload",
                "pt": "msg",
                "to": "payload.main.temp",
                "tot": "str"
            }
        ],
        "action": "",
        "property": "",
        "from": "",
        "to": "",
        "reg": false,
        "x": 460,
        "y": 320,
        "wires": [
            [
                "64fac722a91457fc"
            ]
        ]
    },
    {
        "id": "cdaa8f80ebb3ea0d",
        "type": "change",
        "z": "1a995dd199999201",
        "name": "",
        "rules": [
            {
                "t": "set",
                "p": "payload",
                "pt": "msg",
                "to": "payload.main.humidity",
                "tot": "str"
            }
        ],
        "action": "",
        "property": "",
        "from": "",
        "to": "",
        "reg": false,
        "x": 460,
        "y": 420,
        "wires": [
            [
                "dc4b2487c962c02e"
            ]
        ]
    },
    {
        "id": "5b84d6a82970ab89",
        "type": "ui_text",
        "z": "1a995dd199999201",
        "group": "0fdf39278f746926",
        "order": 0,
        "width": 0,
        "height": 0,
        "name": "",
        "label": "city",
        "format": "{{msg.payload}}",
        "layout": "row-spread",
        "className": "",
        "style": false,
        "font": "",
        "fontSize": 16,
        "color": "#000000",
        "x": 670,
        "y": 220,
        "wires": []
    },
    {
        "id": "64fac722a91457fc",
        "type": "ui_text",
        "z": "1a995dd199999201",
        "group": "0fdf39278f746926",
        "order": 1,
        "width": 0,
        "height": 0,
        "name": "",
        "label": "temperature",
        "format": "{{msg.payload}}",
        "layout": "row-spread",
        "className": "",
        "style": false,
        "font": "",
        "fontSize": 16,
        "color": "#000000",
        "x": 690,
        "y": 320,
        "wires": []
    },
    {
        "id": "dc4b2487c962c02e",
        "type": "ui_text",
        "z": "1a995dd199999201",
        "group": "0fdf39278f746926",
        "order": 2,
        "width": 0,
        "height": 0,
        "name": "",
        "label": "humidity",
        "format": "{{msg.payload}}",
        "layout": "row-spread",
        "className": "",
        "style": false,
        "font": "",
        "fontSize": 16,
        "color": "#000000",
        "x": 680,
        "y": 420,
        "wires": []
    },
    {
        "id": "fbf47a74ddea9456",
        "type": "ui_slider",
        "z": "1a995dd199999201",
        "name": "",
        "label": "slider",
        "tooltip": "",
        "group": "0fdf39278f746926",
        "order": 3,
        "width": 0,
        "height": 0,
        "passthru": true,
        "outs": "all",
        "topic": "topic",
        "topicType": "msg",
        "min": "1",
        "max": "100",
        "step": 1,
        "className": "",
        "x": 670,
        "y": 280,
        "wires": [
            [
                "64fac722a91457fc"
            ]
        ]
    },
    {
        "id": "95f7fb543be84820",
        "type": "ui_button",
        "z": "1a995dd199999201",
        "name": "",
        "group": "0fdf39278f746926",
        "order": 4,
        "width": 0,
        "height": 0,
        "passthru": false,
        "label": "reset",
        "tooltip": "",
        "color": "",
        "bgcolor": "",
        "className": "",
        "icon": "",
        "payload": "0",
        "payloadType": "num",
        "topic": "topic",
        "topicType": "msg",
        "x": 690,
        "y": 380,
        "wires": [
            [
                "dc4b2487c962c02e"
            ]
        ]
    },
    {
        "id": "6f99897aef86caef",
        "type": "function",
        "z": "1a995dd199999201",
        "name": "function 3",
        "func": "msg.payload = Math.random()*30;\nreturn msg;",
        "outputs": 1,
        "timeout": 0,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 140,
        "y": 280,
        "wires": [
            [
                "ade13a3d209e1dd6",
                "da8cd534482d8752"
            ]
        ]
    },
    {
        "id": "da8cd534482d8752",
        "type": "ui_gauge",
        "z": "1a995dd199999201",
        "name": "",
        "group": "0fdf39278f746926",
        "order": 6,
        "width": 0,
        "height": 0,
        "gtype": "gage",
        "title": "gauge",
        "label": "units",
        "format": "{{value}}",
        "min": 0,
        "max": "100",
        "colors": [
            "#00b500",
            "#e6e600",
            "#ca3838"
        ],
        "seg1": "",
        "seg2": "",
        "diff": false,
        "className": "",
        "x": 250,
        "y": 420,
        "wires": []
    },
    {
        "id": "ade13a3d209e1dd6",
        "type": "ui_chart",
        "z": "1a995dd199999201",
        "name": "",
        "group": "0fdf39278f746926",
        "order": 5,
        "width": 0,
        "height": 0,
        "label": "chart",
        "chartType": "line",
        "legend": "false",
        "xformat": "HH:mm:ss",
        "interpolate": "linear",
        "nodata": "",
        "dot": false,
        "ymin": "1",
        "ymax": "100",
        "removeOlder": 1,
        "removeOlderPoints": "",
        "removeOlderUnit": "3600",
        "cutout": 0,
        "useOneColor": false,
        "useUTC": false,
        "colors": [
            "#1f77b4",
            "#aec7e8",
            "#ff7f0e",
            "#2ca02c",
            "#98df8a",
            "#d62728",
            "#ff9896",
            "#9467bd",
            "#c5b0d5"
        ],
        "outputs": 1,
        "useDifferentColor": false,
        "className": "",
        "x": 230,
        "y": 340,
        "wires": [
            []
        ]
    },
    {
        "id": "0fdf39278f746926",
        "type": "ui_group",
        "name": "reddy",
        "tab": "62b00e22779b2a07",
        "order": 1,
        "disp": true,
        "width": 6,
        "collapse": false,
        "className": ""
    },
    {
        "id": "62b00e22779b2a07",
        "type": "ui_tab",
        "name": "suhas",
        "icon": "dashboard",
        "disabled": false,
        "hidden": false
    }
]
[
    {
        "id": "7b56d60993240319",
        "type": "tab",
        "label": "Flow 5",
        "disabled": false,
        "info": "",
        "env": []
    },
    {
        "id": "591d73438d3b1756",
        "type": "mqtt in",
        "z": "7b56d60993240319",
        "name": "subscriber",
        "topic": "test 1",
        "qos": "2",
        "datatype": "auto-detect",
        "broker": "7c853a4f0832bb4a",
        "nl": false,
        "rap": true,
        "rh": 0,
        "inputs": 0,
        "x": 240,
        "y": 180,
        "wires": [
            [
                "1848ca99d0e87843"
            ]
        ]
    },
    {
        "id": "1848ca99d0e87843",
        "type": "debug",
        "z": "7b56d60993240319",
        "name": "debug 3",
        "active": true,
        "tosidebar": true,
        "console": false,
        "tostatus": false,
        "complete": "false",
        "statusVal": "",
        "statusType": "auto",
        "x": 520,
        "y": 180,
        "wires": []
    },
    {
        "id": "7c853a4f0832bb4a",
        "type": "mqtt-broker",
        "name": "suhas",
        "broker": "127.0.0.1",
        "port": 1883,
        "clientid": "",
        "autoConnect": true,
        "usetls": false,
        "protocolVersion": 4,
        "keepalive": 60,
        "cleansession": true,
        "autoUnsubscribe": true,
        "birthTopic": "",
        "birthQos": "0",
        "birthRetain": "false",
        "birthPayload": "",
        "birthMsg": {},
        "closeTopic": "",
        "closeQos": "0",
        "closeRetain": "false",
        "closePayload": "",
        "closeMsg": {},
        "willTopic": "",
        "willQos": "0",
        "willRetain": "false",
        "willPayload": "",
        "willMsg": {},
        "userProps": "",
        "sessionExpiry": ""
    }
]
[
    {
        "id": "31e5d1095b1eb838",
        "type": "tab",
        "label": "Flow 4",
        "disabled": false,
        "info": "",
        "env": []
    },
    {
        "id": "add8abb43c706953",
        "type": "mqtt out",
        "z": "31e5d1095b1eb838",
        "name": "",
        "topic": "",
        "qos": "2",
        "retain": "",
        "respTopic": "",
        "contentType": "",
        "userProps": "",
        "correl": "",
        "expiry": "",
        "broker": "7c853a4f0832bb4a",
        "x": 450,
        "y": 160,
        "wires": []
    },
    {
        "id": "1f712c7d35bd3b86",
        "type": "inject",
        "z": "31e5d1095b1eb838",
        "name": "",
        "props": [
            {
                "p": "payload"
            },
            {
                "p": "topic",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "",
        "once": false,
        "onceDelay": 0.1,
        "topic": "test",
        "payload": "hello",
        "payloadType": "str",
        "x": 180,
        "y": 160,
        "wires": [
            [
                "add8abb43c706953"
            ]
        ]
    },
    {
        "id": "7c853a4f0832bb4a",
        "type": "mqtt-broker",
        "name": "suhas",
        "broker": "127.0.0.1",
        "port": 1883,
        "clientid": "",
        "autoConnect": true,
        "usetls": false,
        "protocolVersion": 4,
        "keepalive": 60,
        "cleansession": true,
        "autoUnsubscribe": true,
        "birthTopic": "",
        "birthQos": "0",
        "birthRetain": "false",
        "birthPayload": "",
        "birthMsg": {},
        "closeTopic": "",
        "closeQos": "0",
        "closeRetain": "false",
        "closePayload": "",
        "closeMsg": {},
        "willTopic": "",
        "willQos": "0",
        "willRetain": "false",
        "willPayload": "",
        "willMsg": {},
        "userProps": "",
        "sessionExpiry": ""
    }
]
[
    {
        "id": "008f2793d8e0b213",
        "type": "tab",
        "label": "Flow 3",
        "disabled": false,
        "info": "",
        "env": []
    },
    {
        "id": "a31c98c0e6a8f44c",
        "type": "inject",
        "z": "008f2793d8e0b213",
        "name": "",
        "props": [
            {
                "p": "payload"
            },
            {
                "p": "topic",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "",
        "once": false,
        "onceDelay": 0.1,
        "topic": "num",
        "payload": "[10,20,30]",
        "payloadType": "json",
        "x": 160,
        "y": 180,
        "wires": [
            [
                "f38eccc2aa9fc71a"
            ]
        ]
    },
    {
        "id": "f38eccc2aa9fc71a",
        "type": "function",
        "z": "008f2793d8e0b213",
        "name": "function 2",
        "func": "var num = msg.payload;\nvar sum = 0;\nfor (var i in num){\n    sum+=num[i];\n}\nmsg.payload = `sum is ${sum}`\nreturn msg;",
        "outputs": 1,
        "timeout": 0,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 420,
        "y": 180,
        "wires": [
            [
                "42a45807b1f2b163"
            ]
        ]
    },
    {
        "id": "42a45807b1f2b163",
        "type": "debug",
        "z": "008f2793d8e0b213",
        "name": "debug 2",
        "active": true,
        "tosidebar": true,
        "console": false,
        "tostatus": false,
        "complete": "false",
        "statusVal": "",
        "statusType": "auto",
        "x": 620,
        "y": 180,
        "wires": []
    }
]
[
    {
        "id": "72c82f5f84d17524",
        "type": "tab",
        "label": "Flow 2",
        "disabled": false,
        "info": "",
        "env": []
    },
    {
        "id": "inject_num1",
        "type": "inject",
        "z": "72c82f5f84d17524",
        "name": "Inject Number 1",
        "props": [
            {
                "p": "payload"
            }
        ],
        "repeat": "",
        "crontab": "",
        "once": false,
        "onceDelay": 0.1,
        "topic": "",
        "payload": "5",
        "payloadType": "num",
        "x": 140,
        "y": 160,
        "wires": [
            [
                "change_num1"
            ]
        ]
    },
    {
        "id": "inject_num2",
        "type": "inject",
        "z": "72c82f5f84d17524",
        "name": "Inject Number 2",
        "props": [
            {
                "p": "payload"
            }
        ],
        "repeat": "",
        "crontab": "",
        "once": false,
        "onceDelay": 0.1,
        "topic": "",
        "payload": "7",
        "payloadType": "num",
        "x": 140,
        "y": 220,
        "wires": [
            [
                "change_num2"
            ]
        ]
    },
    {
        "id": "change_num1",
        "type": "change",
        "z": "72c82f5f84d17524",
        "name": "Set msg.num1",
        "rules": [
            {
                "t": "set",
                "p": "num1",
                "pt": "msg",
                "to": "payload",
                "tot": "msg"
            }
        ],
        "x": 330,
        "y": 160,
        "wires": [
            [
                "function_sum"
            ]
        ]
    },
    {
        "id": "change_num2",
        "type": "change",
        "z": "72c82f5f84d17524",
        "name": "Set msg.num2",
        "rules": [
            {
                "t": "set",
                "p": "num2",
                "pt": "msg",
                "to": "payload",
                "tot": "msg"
            }
        ],
        "x": 330,
        "y": 220,
        "wires": [
            [
                "function_sum"
            ]
        ]
    },
    {
        "id": "function_sum",
        "type": "function",
        "z": "72c82f5f84d17524",
        "name": "Add Two Numbers",
        "func": "var a = msg.num1 || flow.get(\"num1\");\nvar b = msg.num2 || flow.get(\"num2\");\n\nflow.set(\"num1\", a);\nflow.set(\"num2\", b);\n\nif (a !== undefined && b !== undefined) {\n    var s = a + b;\n    msg.payload = `sum of ${a} and ${b} is ${s}`;\n    return msg;\n}\nreturn null;\n",
        "outputs": 1,
        "timeout": "",
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 550,
        "y": 190,
        "wires": [
            [
                "debug_sum"
            ]
        ]
    },
    {
        "id": "debug_sum",
        "type": "debug",
        "z": "72c82f5f84d17524",
        "name": "Show Sum",
        "active": true,
        "tosidebar": true,
        "console": false,
        "tostatus": false,
        "complete": "payload",
        "targetType": "msg",
        "statusVal": "",
        "statusType": "auto",
        "x": 740,
        "y": 190,
        "wires": []
    }
]
import pandas as pd

# Load the source CSV into a DataFrame
df = pd.read_csv('input_data.csv')
print("Imported Data:")
print(df.head())

# Write the DataFrame back out to a fresh CSV, omitting the index column
df.to_csv('exported_data.csv', index=False)
print("\nData exported to 'exported_data.csv'")
$("div [widget='widget']").css("border", "1px solid red").css("padding-top", "20px").css("position", "relative").each(function(i, obj){
    var scope = angular.element(obj).scope();
    var widget = scope.widget;
    var elem = $("<div style='position: absolute; top: 1px; left: 1px'><a target='_blank' href='/$sp.do?id=widget_editor&sys_id="+ widget.sys_id+"'> "+ widget.name +"</a>&nbsp;&nbsp;</div>");
    var printScope = $("<a href='javascript:void(0);'>Print scope</a>").on('click', function(){ console.info(scope); });
    elem.append(printScope);
    $(this).append(elem);
    });
# Restart the local ADB server to clear any stale device connections
adb kill-server
adb start-server
# Put the device back into USB mode, then re-open ADB over TCP on port 5555
adb usb
adb tcpip 5555
# Connect over Wi-Fi — replace <DEVICE_LOCAL_IP> with the phone's LAN address
adb connect <DEVICE_LOCAL_IP>
Join the best Optometry College in Madurai! Boston Institute offers expert training, top placements, and modern facilities to shape your future in eye care. Enroll today for a brighter tomorrow!
Statistics is a fundamental subject in various academic disciplines, including mathematics, economics, business, and social sciences. It involves data collection, analysis, interpretation, and presentation, making it an essential skill for students pursuing research and analytical careers. However, many students struggle with statistics assignments due to the complexity of concepts such as probability distributions, hypothesis testing, regression analysis, and data visualization.

Challenges in Statistics Assignments

Statistics assignments require precision, critical thinking, and a deep understanding of formulas and methodologies. Some common challenges students face include:

Complex Theories and Formulas – Understanding statistical formulas like standard deviation, chi-square tests, and ANOVA can be daunting for many students.

Data Interpretation – Analyzing large datasets and deriving meaningful insights requires both technical knowledge and logical reasoning.

Software Proficiency – Many assignments require using software like SPSS, R, Python, or Excel, which can be overwhelming for beginners.

Time Constraints – Students often juggle multiple subjects and deadlines, making it difficult to allocate enough time for statistics assignments.

How Seeking Help Can Improve Academic Performance

Getting statistics assignment help (https://myassignmenthelp.com/statistics_assignment_help.html) allows students to enhance their learning experience and develop a structured approach to problem-solving. Some benefits of seeking help include:

Conceptual Clarity – Expert guidance helps in understanding statistical theories more effectively.

Error Reduction – Professional insights minimize calculation and interpretation errors.

Efficient Time Management – With proper guidance, students can complete assignments efficiently and focus on other academic responsibilities.

Exposure to Advanced Techniques – Learning from experts familiarizes students with modern statistical tools and methodologies.

Enhancing Your Statistical Skills

To excel in statistics, students should focus on strengthening their analytical skills, practicing regularly, and utilizing reliable resources. Engaging in online courses, academic forums, and practice exercises can significantly improve proficiency in statistics.

For those struggling with assignments, seeking assistance from platforms like Myassignmenthelp.com can be beneficial. While external support can provide clarity, developing a strong conceptual foundation remains crucial for long-term academic success.
20 NHÓM CÔNG CỤ AI NHẤT ĐỊNH CẦN BIẾT VÀ THÀNH THẠO TRONG 2025
A. NHÓM AI giúp Viết, nói & giao tiếp hiệu quả
1. Tư duy & giải quyết vấn đề:
ChatGPT, Gemini, Meta AI, DeepSeek, Copilot
2. Tóm tắt, dịch & xử lý văn bản dài:
Claude, Qwen, Wordtune, ChatGPT
3. Viết nội dung & chỉnh sửa:
Writesonic, Grammarly, DeepAI
4. Tạo slide & thuyết trình:
SlidesAI, Gamma.app, Copilot, SlideGo
5. Nâng cao năng suất cá nhân:
Copilot, ExcelGPT, Notion AI, Taskade AI
(Giúp tăng hiệu quả công việc, làm báo cáo, lập kế hoạch)
B. NHÓM Thiết kế hình ảnh, video & thương hiệu cá nhân
6. Tạo hình ảnh sáng tạo:
MidJourney, DALL·E 3, Diffusion, OpenArt
7. Thiết kế đồ họa chuyên nghiệp:
Leonardo AI, Adobe Firefly, Designs AI
8. Chỉnh sửa ảnh nâng cao:
Remini, Canva AI, DeepImage
9. Tạo avatar & hình cá nhân hóa:
StarryAI, Fotor, Creatify
10. Xây dựng thương hiệu hình ảnh:
Looka, Brandmark, Logo AI
(Giúp phụ nữ tự tạo logo, nhận diện thương hiệu)
C. Sản xuất video & nhạc AI
11. Tạo video AI chuyên nghiệp:
Synthesia, HeyGen, VideoGen, TopView, Pictory
12. Tạo video ngắn TikTok/Reels:
Fliki, Steve.ai, Veed.io, Short
13. Chỉnh sửa video đơn giản – dễ dùng:
Capcut, Pictory, VideoGen
14. Sáng tác nhạc AI:
Soundraw, Suno, iLoveSong
15. Tạo podcast & giọng nói ảo:
ElevenLabs, Play.ht, Voicemaker
(Tạo nội dung podcast, video thuyết trình)
D. Dành cho lập trình & hệ thống
16. Viết mã code & lập trình:
Replit, Github Copilot, Codeium
17. Tạo chatbot AI:
Manychat, Chatbase, Botpress
(Phù hợp làm chăm sóc khách hàng tự động)
18. Phân tích dữ liệu & AI cho Excel:
SheetAI, MonkeyLearn, ExcelGPT
(Áp dụng cho công việc văn phòng & kế toán)
E. Công cụ tổng hợp đa năng
19. Tạo trang web & landing page:
Durable AI, 10Web, Framer AI
(Giúp kinh doanh online nhanh chóng)
20.  Hệ sinh thái AI đa năng (All-in-one):
Notion AI, Taskade, FlowGPT, AIToolsKit
(Kết hợp nhiều công cụ trong một nền tảng)
const express = require('express');
const jwt = require('jsonwebtoken');

const app = express();
const PORT = 3000;
// SECURITY: never ship a hardcoded signing secret. Read it from the
// environment; the old literal remains only as a local-dev fallback.
const SECRET_KEY = process.env.SECRET_KEY || 'secret';

app.use(express.json());

// POST /login — demo credential check; returns a signed JWT on success.
app.post('/login', (req, res) => {
  // Guard: req.body is undefined when no JSON body was sent.
  const { username, password } = req.body || {};

  if (username === 'user' && password === '123') {
    const token = jwt.sign({ username }, SECRET_KEY);
    res.json({ token });
  } else {
    res.status(401).json({ message: 'Invalid credentials' });
  }
});

// Middleware: validates the "Authorization: Bearer <token>" header
// and attaches the decoded payload to req.user.
function auth(req, res, next) {
  const token = req.headers.authorization?.split(' ')[1];
  if (!token) return res.sendStatus(401); // no token supplied

  jwt.verify(token, SECRET_KEY, (err, user) => {
    if (err) return res.sendStatus(403); // invalid or tampered token
    req.user = user;
    next();
  });
}

// GET /protected — requires a valid JWT.
app.get('/protected', auth, (req, res) => {
  res.json({ message: 'Welcome!', user: req.user });
});

app.listen(PORT, () => {
  console.log(`Server running on http://localhost:${PORT}`);
});

# Install dependencies for the JWT authentication example
npm init -y
npm install express jsonwebtoken body-parser

# Install dependencies for the EJS + node-fetch rendering example
npm init -y
npm install express ejs node-fetch

project-folder/
├── views/
│   └── posts.ejs
├── app.js

app.js

const express = require('express');
const fetch = require('node-fetch');
const app = express();
const PORT = 3000;

// Set EJS as the view engine
app.set('view engine', 'ejs');

// Route to fetch API data and render it via the "posts" view
app.get('/', (req, res) => {
  fetch('https://jsonplaceholder.typicode.com/posts')
    .then((response) => response.json())
    .then((posts) => {
      res.render('posts', { posts: posts.slice(0, 5) }); // limit to 5 posts
    })
    .catch(() => {
      res.status(500).send('Error fetching data');
    });
});

app.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}`);
});


views/posts.ejs

<!DOCTYPE html>
<html>
<head>
  <title>Posts Table</title>
  <!-- Basic table styling: centered, 80% wide, light grid lines -->
  <style>
    table {
      width: 80%;
      border-collapse: collapse;
      margin: 20px auto;
    }
    th, td {
      padding: 10px;
      border: 1px solid #ccc;
      text-align: left;
    }
    th {
      background-color: #f4f4f4;
    }
  </style>
</head>
<body>
  <h2 style="text-align:center;">Posts from API</h2>
  <!-- "posts" is supplied by res.render(); assumed to be an array of
       objects with id/title/body fields — confirm against the route -->
  <table>
    <thead>
      <tr>
        <th>ID</th>
        <th>Title</th>
        <th>Body</th>
      </tr>
    </thead>
    <tbody>
      <!-- One table row per post -->
      <% posts.forEach(post => { %>
        <tr>
          <td><%= post.id %></td>
          <td><%= post.title %></td>
          <td><%= post.body %></td>
        </tr>
      <% }) %>
    </tbody>
  </table>
</body>
</html>
// Infinite generator: yields 0, 2, 4, ... on successive next() calls.
function* evenNumberGenerator() {
  for (let current = 0; ; current += 2) {
    yield current;
  }
}

const evenGen = evenNumberGenerator();

// Print the first five even numbers.
console.log(evenGen.next().value);
console.log(evenGen.next().value);
console.log(evenGen.next().value);
console.log(evenGen.next().value);
console.log(evenGen.next().value);
// Resolve after one second, logging "<stepName> completed" first.
function delayStep(stepName) {
  return new Promise(function (resolve) {
    setTimeout(function () {
      console.log(`${stepName} completed`);
      resolve();
    }, 1000);
  });
}

// Run the three steps strictly one after another, then report completion.
async function runSteps() {
  for (const label of ["Step 1", "Step 2", "Step 3"]) {
    await delayStep(label);
  }
  console.log("All steps completed");
}

runSteps();
// Step 1: logs after a 1-second delay, then hands control to the callback.
function step1(callback) {
  setTimeout(function () {
    console.log("Step 1 completed");
    callback();
  }, 1000);
}

// Step 2: same pattern as step 1.
function step2(callback) {
  setTimeout(function () {
    console.log("Step 2 completed");
    callback();
  }, 1000);
}

// Step 3: same pattern as step 1.
function step3(callback) {
  setTimeout(function () {
    console.log("Step 3 completed");
    callback();
  }, 1000);
}

// Classic "callback pyramid": each step starts only after the previous one.
step1(function () {
  step2(function () {
    step3(function () {
      console.log("All steps completed");
    });
  });
});
<!DOCTYPE html>
<html>
<head>
  <title>Fetch API Example</title>
</head>
<body>
  <h1>Posts</h1>
  <!-- Container filled in by the fetch script below -->
  <div id="posts"></div>

  <script>
    // Fetch sample posts and render the first five as <div> elements.
    fetch('https://jsonplaceholder.typicode.com/posts')
      .then(response => response.json())
      .then(data => {
        const postsDiv = document.getElementById('posts');
        data.slice(0, 5).forEach(post => {
          const postElement = document.createElement('div');
          // NOTE(review): title/body are injected via innerHTML without
          // escaping — fine for this trusted demo API, not for user content.
          postElement.innerHTML = `<h3>${post.title}</h3><p>${post.body}</p>`;
          postsDiv.appendChild(postElement);
        });
      })
      .catch(error => console.error('Error fetching data:', error));
  </script>
</body>
</html>
// Demonstrates the core helpers of Node's built-in path module.
const { dirname, basename, extname, join, resolve, isAbsolute } = require('path');

const filePath = '/users/student/projects/app/index.js';

console.log('Directory Name:', dirname(filePath));
console.log('Base Name:', basename(filePath));
console.log('Extension Name:', extname(filePath));
console.log('Join Paths:', join('/users', 'student', 'docs'));
console.log('Resolve Path:', resolve('app', 'index.js'));
console.log('Is Absolute:', isAbsolute(filePath));
const http = require('http');

// Serve a different Content-Type per path via a lookup table;
// unknown paths fall through to a 404.
const server = http.createServer((req, res) => {
  const { url } = req;

  const routes = {
    '/html': ['text/html', '<h1>Welcome to the HTML response</h1>'],
    '/json': ['application/json', JSON.stringify({ message: 'This is a JSON response', status: 'success' })],
    '/text': ['text/plain', 'This is a plain text response.'],
    '/js': ['application/javascript', 'console.log("JavaScript response from server");']
  };

  if (url in routes) {
    const [contentType, body] = routes[url];
    res.writeHead(200, { 'Content-Type': contentType });
    res.end(body);
  } else {
    res.writeHead(404, { 'Content-Type': 'text/plain' });
    res.end('Resource not found');
  }
});

server.listen(3000, () => {
  console.log('Server running at http://localhost:3000');
});
const fs = require('fs');

// Callback-style fs demo: write -> read -> append -> delete. Each operation
// starts inside the completion callback of the previous one, so the sequence
// is strictly ordered even though every call is asynchronous.
fs.writeFile('async.txt', 'This is written using writeFile (Async)', (err) => {
  if (err) throw err; // throwing inside the callback aborts the process
  console.log('File created and written successfully.');

  fs.readFile('async.txt', 'utf8', (err, data) => {
    if (err) throw err;
    console.log('File content:', data);

    fs.appendFile('async.txt', '\nThis is an additional line (Async)', (err) => {
      if (err) throw err;
      console.log('Content appended.');

      // Clean up: remove the demo file once all operations finish.
      fs.unlink('async.txt', (err) => {
        if (err) throw err;
        console.log('File deleted.');
      });
    });
  });
});
star

Fri Apr 11 2025 04:06:28 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Fri Apr 11 2025 04:05:59 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Fri Apr 11 2025 04:05:10 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Fri Apr 11 2025 04:03:22 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Fri Apr 11 2025 03:56:56 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Thu Apr 10 2025 22:47:23 GMT+0000 (Coordinated Universal Time)

@javacha

star

Thu Apr 10 2025 14:04:09 GMT+0000 (Coordinated Universal Time) https://www.coinsqueens.com/blog/bet365-clone-script

@athenapetridis #gaming #bet365clone #scinlinebetting

star

Thu Apr 10 2025 11:18:35 GMT+0000 (Coordinated Universal Time) https://www.kryptobees.com/blog/paxful-clone-script

@Franklinclas

star

Thu Apr 10 2025 08:41:04 GMT+0000 (Coordinated Universal Time)

@Pooja

star

Thu Apr 10 2025 08:35:14 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/power-bi

@tpointtechedu

star

Thu Apr 10 2025 08:34:17 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/power-bi

@tpointtechedu

star

Thu Apr 10 2025 07:12:01 GMT+0000 (Coordinated Universal Time)

@divyasoni23 #css #html

star

Thu Apr 10 2025 06:51:48 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/compiler/r

@tpointtechedu

star

Thu Apr 10 2025 06:50:52 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/compiler/r

@tpointtechedu

star

Thu Apr 10 2025 06:46:43 GMT+0000 (Coordinated Universal Time) https://www.addustechnologies.com/blog/mev-bot-development

@Seraphina

star

Thu Apr 10 2025 06:42:00 GMT+0000 (Coordinated Universal Time) https://maticz.com/best-p2p-crypto-exchanges

@jamielucas #bestp2pcryptoexchange

star

Thu Apr 10 2025 06:37:43 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/sql-tutorial

@elizabeth ##sql-tutorial ##sql-full-form ##education

star

Thu Apr 10 2025 06:36:49 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/how-to-create-a-thread-in-java

@jonhill #java #java-tutorial

star

Thu Apr 10 2025 05:38:13 GMT+0000 (Coordinated Universal Time)

@FOHWellington

star

Wed Apr 09 2025 16:14:38 GMT+0000 (Coordinated Universal Time)

@StephenThevar #css

star

Wed Apr 09 2025 08:48:18 GMT+0000 (Coordinated Universal Time) https://www.coinsqueens.com/blog/bet365-clone-script

@athenapetridis #gaming #bet365clone #scinlinebetting

star

Wed Apr 09 2025 07:50:04 GMT+0000 (Coordinated Universal Time) https://www.coinsclone.com/coinsmart-clone-script/

@janetbrownjb #coinsmartlikeexchange #cryptoexchangescript #coinsmartclone #startupcryptosolution #blockchaindevelopment

star

Wed Apr 09 2025 07:01:46 GMT+0000 (Coordinated Universal Time)

@codeing #javascript #react.js #nodejs

star

Wed Apr 09 2025 06:40:38 GMT+0000 (Coordinated Universal Time)

@zeinrahmad99

star

Wed Apr 09 2025 05:45:06 GMT+0000 (Coordinated Universal Time)

@divyasoni23 #css #html

star

Wed Apr 09 2025 04:18:21 GMT+0000 (Coordinated Universal Time) https://outlook.office365.com/owa/auth/errorfe.aspx?redirectType

@najeebemad

star

Wed Apr 09 2025 02:08:14 GMT+0000 (Coordinated Universal Time)

@sem

star

Tue Apr 08 2025 18:42:48 GMT+0000 (Coordinated Universal Time)

@salam123

star

Tue Apr 08 2025 18:42:14 GMT+0000 (Coordinated Universal Time)

@salam123

star

Tue Apr 08 2025 18:41:47 GMT+0000 (Coordinated Universal Time)

@salam123

star

Tue Apr 08 2025 18:40:52 GMT+0000 (Coordinated Universal Time)

@salam123

star

Tue Apr 08 2025 18:40:07 GMT+0000 (Coordinated Universal Time)

@salam123

star

Tue Apr 08 2025 16:44:11 GMT+0000 (Coordinated Universal Time)

@exam2

star

Tue Apr 08 2025 14:32:00 GMT+0000 (Coordinated Universal Time)

@awesomekite

star

Tue Apr 08 2025 13:49:09 GMT+0000 (Coordinated Universal Time)

@lewiseman #adb #flutter #android

star

Tue Apr 08 2025 12:20:39 GMT+0000 (Coordinated Universal Time) https://www.troniextechnologies.com/blog/how-dream11-makes-money

@karlpeterson #dream11 #ipl #sa20 #bbl #lpl #cpl #ilt20 #icc #bcci #wpl #wbbl #wcpl #tnpl #apl #rpl

star

Tue Apr 08 2025 11:17:39 GMT+0000 (Coordinated Universal Time) https://www.coinsclone.com/coinbase-wallet-clone/

@CharleenStewar

star

Tue Apr 08 2025 10:27:00 GMT+0000 (Coordinated Universal Time) https://www.tpointtech.com/compiler/java

@alisa #javacompiler #onlinejava compiler

star

Tue Apr 08 2025 06:25:33 GMT+0000 (Coordinated Universal Time)

@michaelhaydon

star

Tue Apr 08 2025 00:08:39 GMT+0000 (Coordinated Universal Time)

@p9876543

star

Tue Apr 08 2025 00:02:17 GMT+0000 (Coordinated Universal Time)

@p9876543

star

Tue Apr 08 2025 00:01:12 GMT+0000 (Coordinated Universal Time)

@p9876543

star

Tue Apr 08 2025 00:00:29 GMT+0000 (Coordinated Universal Time)

@p9876543

star

Mon Apr 07 2025 23:57:30 GMT+0000 (Coordinated Universal Time)

@p9876543

star

Mon Apr 07 2025 23:55:17 GMT+0000 (Coordinated Universal Time)

@p9876543

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension