Snippets Collections
class Solution {
public:
    int MOD = 1000000007;

    // Sum of pairwise distances between robots after d seconds (LC 2731).
    // Colliding robots swap directions, but since robots are interchangeable
    // we may pretend they pass through each other: just move everyone, sort
    // the final positions, and sum the pairwise gaps with a prefix sum.
    //
    // Fixes vs. original: positions are computed in long long (nums[i] +/- d
    // can overflow int for large inputs) and the unused variable `n` is gone.
    int sumDistance(vector<int>& nums, string s, int d) {
        // Final (collision-free) position of every robot, in 64-bit.
        vector<long long> pos(nums.size());
        for (size_t i = 0; i < nums.size(); ++i) {
            pos[i] = (s[i] == 'R') ? (long long)nums[i] + d
                                   : (long long)nums[i] - d;
        }
        sort(pos.begin(), pos.end());

        // For sorted p[0..n-1]: sum over i of (i*p[i] - sum(p[0..i-1]))
        // equals the sum of all pairwise distances. `pref` stays exact
        // (un-modded) so every per-i term is non-negative; `ans` is reduced
        // mod 1e9+7 each step and therefore never goes negative.
        long long ans = 0;
        long long pref = 0;
        for (long long i = 0; i < (long long)pos.size(); ++i) {
            ans = (ans + i * pos[i] - pref) % MOD;
            pref += pos[i];
        }
        return (int)ans;
    }
};
Code 1:
"""
# TODO: Implement this method
from typing import List


def findElement(n: int, arr: List[int], x: int) -> int:

# NOTE: Please do not modify this function
def main():
    n = int(input().strip())
    arr = list(map(int, input().strip().split(' ')))
    x = int(input().strip())

    xIndex = findElement(n, arr, x)
    print(xIndex)

if __name__=="__main__":
    main()
"""

Code 2:
"""
def find_index(n,arr,x):
    for i in range(n):
        if arr[i]==x:
            return i
    return -1
"""


Consider you are a Python developer.
Help me to integrate Code 2 into Code 1.
Return the updated version of the code.
import time
import requests
from bs4 import BeautifulSoup
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# URL of the webpage containing the chart
url = 'https://example.com/chart'

# Historical chart data accumulated across polling iterations.
historical_data = pd.DataFrame(columns=['Timestamp', 'Value'])

# Continuously monitor the website and extract chart data.
while True:
    # Fetch and parse the page.
    response = requests.get(url)
    soup = BeautifulSoup(response.content, 'html.parser')

    # Extract the chart data using site-specific HTML selectors.
    chart_data = []
    # ...

    # Append the freshly scraped rows to the history.
    df = pd.DataFrame(chart_data, columns=['Timestamp', 'Value'])
    historical_data = pd.concat([historical_data, df], ignore_index=True)

    # Perform data preprocessing (cleaning, feature engineering, etc.).
    # ...

    # Target: 1 when the value rose versus the previous sample, else 0
    # (the first row's diff is NaN and maps to 0).
    historical_data['Direction'] = historical_data['Value'].diff().apply(lambda x: 1 if x > 0 else 0)

    # Input features and target.  NOTE(review): until real feature
    # engineering is added above, dropping Timestamp/Value/Direction leaves
    # an EMPTY feature matrix — the guard below skips training in that case.
    X = historical_data.drop(columns=['Timestamp', 'Value', 'Direction'])
    y = historical_data['Direction']

    # Guard: fit() raises on no features, too few rows, or a single class.
    if X.empty or len(X) < 5 or y.nunique() < 2:
        time.sleep(30)
        continue

    # Split the dataset into training and testing sets.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    # Train a logistic-regression model on the historical data.
    model = LogisticRegression()
    model.fit(X_train, y_train)

    # Make predictions on the testing set.
    y_pred = model.predict(X_test)

    # Map the predicted classes to trading actions.
    predicted_actions = ['Buy' if pred == 1 else 'Sell' for pred in y_pred]

    # Actual price values for the rows that landed in the test split.
    actual_prices = historical_data.iloc[X_test.index]['Value']

    # A "mistake" is Buy before a drop or Sell before a rise.  Start at
    # i == 1: the original compared against iloc[i - 1] at i == 0, which
    # wraps around to the LAST element.
    mistakes = [i for i, (predicted, actual) in enumerate(zip(predicted_actions, actual_prices)) if
                i > 0 and
                ((predicted == 'Buy' and actual < actual_prices.iloc[i - 1]) or
                 (predicted == 'Sell' and actual > actual_prices.iloc[i - 1]))]

    # Print the predicted actions.
    print("Predicted actions:")
    print(predicted_actions)

    # Print the mistakes made in the predictions.
    print("Mistakes:")
    for idx in mistakes:
        print("Instance:", X_test.iloc[idx])
        print("Predicted action:", predicted_actions[idx])
        print("Actual price:", actual_prices.iloc[idx])
        print()

    # Poll again in 30 seconds.
    time.sleep(30)
import time
import requests
from bs4 import BeautifulSoup
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import MinMaxScaler

# URL of the webpage containing the chart
url = 'https://example.com/chart'

# Historical chart data accumulated across polling iterations.
historical_data = pd.DataFrame(columns=['Timestamp', 'Value'])

# Model is created once and re-trained on the full history every cycle.
# (RandomForestClassifier has NO partial_fit — the original call raised
# AttributeError; random forests cannot be updated incrementally.)
model = RandomForestClassifier()

# Continuously monitor the website and extract chart data.
while True:
    # Fetch and parse the page.
    response = requests.get(url)
    soup = BeautifulSoup(response.content, 'html.parser')

    # Extract the chart data using site-specific HTML selectors.
    chart_data = []
    # ...

    # Append the freshly scraped rows to the history.
    df = pd.DataFrame(chart_data, columns=['Timestamp', 'Value'])
    historical_data = pd.concat([historical_data, df], ignore_index=True)

    # Perform data preprocessing (cleaning, feature engineering, etc.).
    # ...

    # Moving-average feature; the first window_size-1 rows are NaN.
    window_size = 5
    historical_data['MovingAverage'] = historical_data['Value'].rolling(window=window_size).mean()

    # Target: 1 when the value rose versus the previous sample, else 0.
    historical_data['Direction'] = historical_data['Value'].diff().apply(lambda x: 1 if x > 0 else 0)

    # Drop rows whose moving average is still NaN — sklearn rejects NaNs.
    train_df = historical_data.dropna(subset=['MovingAverage'])

    # Features and target variable.
    X = train_df[['Value', 'MovingAverage']]
    y = train_df['Direction']

    # Guard: fit() raises on empty data or a single class.
    if X.empty or y.nunique() < 2:
        time.sleep(30)
        continue

    # Scale the features to [0, 1].
    scaler = MinMaxScaler()
    X_scaled = scaler.fit_transform(X)

    # Re-train from scratch on the full history.
    model.fit(X_scaled, y)

    # Make predictions on the current data.
    y_pred = model.predict(X_scaled)

    # Calculate the model's (in-sample) accuracy.
    accuracy = accuracy_score(y, y_pred)
    print("Model Accuracy:", accuracy)

    # Extract the actual price values.
    actual_prices = train_df['Value']

    # A "mistake" is Buy (1) before a drop or Sell (0) before a rise.
    # Skip i == 0: iloc[-1] would wrap around to the last element.
    mistakes = []
    for i, (predicted, actual, timestamp) in enumerate(zip(y_pred, actual_prices, train_df['Timestamp'])):
        if i == 0:
            continue
        if (predicted == 1 and actual < actual_prices.iloc[i - 1]) or (predicted == 0 and actual > actual_prices.iloc[i - 1]):
            mistakes.append((timestamp, predicted, actual))

    # Print the predicted actions.
    predicted_actions = ['Buy' if pred == 1 else 'Sell' for pred in y_pred]
    print("Predicted actions:")
    for timestamp, action in zip(train_df['Timestamp'], predicted_actions):
        print(timestamp, action)

    # Print the mistakes made in the predictions.
    print("Mistakes:")
    for timestamp, action, actual in mistakes:
        print("Timestamp:", timestamp)
        print("Action:", action)
        print("Actual price:", actual)
        print()

    # Poll again in 30 seconds.
    time.sleep(30)
import time
import requests
from bs4 import BeautifulSoup
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# URL of the webpage containing the chart
url = 'https://example.com/chart'

# Historical chart data accumulated across polling iterations.
historical_data = pd.DataFrame(columns=['Timestamp', 'Value'])

# Continuously monitor the website and extract chart data.
while True:
    # Fetch and parse the page.
    response = requests.get(url)
    soup = BeautifulSoup(response.content, 'html.parser')

    # Extract the chart data using site-specific HTML selectors.
    chart_data = []
    # ...

    # Append the freshly scraped rows to the history.
    df = pd.DataFrame(chart_data, columns=['Timestamp', 'Value'])
    historical_data = pd.concat([historical_data, df], ignore_index=True)

    # Perform data preprocessing (cleaning, feature engineering, etc.).
    # ...

    # Target: 1 when the value rose versus the previous sample, else 0
    # (the first row's diff is NaN and maps to 0).
    historical_data['Direction'] = historical_data['Value'].diff().apply(lambda x: 1 if x > 0 else 0)

    # Input features and target.  NOTE(review): until real feature
    # engineering is added above, dropping Timestamp/Value/Direction leaves
    # an EMPTY feature matrix — the guard below skips training in that case.
    X = historical_data.drop(columns=['Timestamp', 'Value', 'Direction'])
    y = historical_data['Direction']

    # Guard: fit() raises on no features, too few rows, or a single class.
    if X.empty or len(X) < 5 or y.nunique() < 2:
        time.sleep(30)
        continue

    # Split the dataset into training and testing sets.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    # Train a logistic-regression model on the historical data.
    model = LogisticRegression()
    model.fit(X_train, y_train)

    # Make predictions on the testing set.
    y_pred = model.predict(X_test)

    # Map the predicted classes to trading actions.
    predicted_actions = ['Buy' if pred == 1 else 'Sell' for pred in y_pred]

    # Actual price values for the rows that landed in the test split.
    actual_prices = historical_data.iloc[X_test.index]['Value']

    # A "mistake" is Buy before a drop or Sell before a rise.  Start at
    # i == 1: the original compared against iloc[i - 1] at i == 0, which
    # wraps around to the LAST element.
    mistakes = [i for i, (predicted, actual) in enumerate(zip(predicted_actions, actual_prices)) if
                i > 0 and
                ((predicted == 'Buy' and actual < actual_prices.iloc[i - 1]) or
                 (predicted == 'Sell' and actual > actual_prices.iloc[i - 1]))]

    # Print the predicted actions.
    print("Predicted actions:")
    print(predicted_actions)

    # Print the mistakes made in the predictions.
    print("Mistakes:")
    for idx in mistakes:
        print("Instance:", X_test.iloc[idx])
        print("Predicted action:", predicted_actions[idx])
        print("Actual price:", actual_prices.iloc[idx])
        print()

    # Poll again in 30 seconds.
    time.sleep(30)
By byText = MobileBy.AndroidUIAutomator("new UiSelector().text(\"" + elementText + "\")");
WebElement element = driver.findElement(byText);
element.click();

if (driver.getPageSource().contains("خرید")) {
       LOGGER.info("PASS");
} else {
     LOGGER.error("Test failed ");
         Assert.fail("Test failed ");
}
    }
////////////////////
WebElement element2 = driver.findElement(MobileBy.AndroidUIAutomator("new UiSelector().text(\"امکان فعالسازی همزمان دو بسته وجود ندارد.\")"));
////////////////////
WebElement element4 = driver.findElement(MobileBy.AndroidUIAutomator("new UiSelector().text(\"خرید مودم\")"));
/////////////////
    /**
     * Asserts that the snackbar currently shown in the app displays the
     * expected text. On a match it prints "Pass"; on a mismatch it captures a
     * screenshot, prints "Fail", and fails the running test.
     *
     * @param driver       active WebDriver session
     * @param expectedText text the snackbar is expected to show
     */
    public static void verifySnackbarText(WebDriver driver, String expectedText) {
        String actualText = driver.findElement(By.id("ir.mci.ecareapp:id/snackbar_text")).getText();

        // Happy path: text matches — report and return early.
        if (actualText.equals(expectedText)) {
            System.out.println("Pass");
            return;
        }

        // Mismatch: grab evidence before failing the test.
        String pageName = getExecutingClassName();
        Screenshot.takeScreenshotIfSnackbarVisible(driver, pageName);
        System.out.println("Fail");
        Assert.fail("Test failed");
    }
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<html>
<head>

<!-- Include: AddEvent theme css -->
<link rel="stylesheet" href="https://cdn.addevent.com/libs/atc/themes/fff-theme-6/theme.css" type="text/css" media="all" />

<!-- Include: AddEvent JS file.
     Loaded once — the original page included atc.min.js twice. -->
<script type="text/javascript" src="https://cdn.addevent.com/libs/atc/1.6.1/atc.min.js" async defer></script>

</head>
<body>

<!-- AddEvent button.
     Moved out of <head>: flow content (a <div>) inside <head> is invalid
     HTML and is re-parented into <body> by browsers anyway, which made the
     button appear twice in the original. -->
<div title="Add to Calendar" class="addeventatc" data-styling="none">
    Add to Calendar
    <span class="start">06/23/2023 08:00 AM</span>
    <span class="end">06/23/2023 10:00 AM</span>
    <span class="timezone">America/Los_Angeles</span>
    <span class="title">Summary of the event</span>
    <span class="description">Description of the event</span>
    <span class="location">Location of the event</span>
</div>

</body>
</html>
<!-- Include: AddEvent theme css -->
<link rel="stylesheet" href="https://cdn.addevent.com/libs/atc/themes/fff-theme-3/theme.css" type="text/css" media="all" />

<!-- Include: AddEvent JS file -->
<script type="text/javascript" src="https://cdn.addevent.com/libs/atc/1.6.1/atc.min.js"></script>

<!-- AddEvent button -->
<div class="addeventatc" data-styling="none">
    <div class="date">
        <span class="mon">Jun</span>
        <span class="day">22</span>
        <div class="bdr1"></div>
        <div class="bdr2"></div>
    </div>
    <div class="desc">
        <p>
            <strong class="hed">Prayer Gathering</strong>
            <span class="des">Grand Hall, Parliament<br />When: 7:20 PM - 9:30 PM - Signup or login to attend</span>
        </p>
    </div>
    <span class="start">06/22/2023 07:20 PM</span>
    <span class="end">06/22/2023 09:30 PM</span>
    <!-- Fixed: "New Zealand/Wellington" is not a valid IANA time-zone
         identifier; Wellington's zone is Pacific/Auckland. -->
    <span class="timezone">Pacific/Auckland</span>
    <span class="title">Prayer at Parliament Gathering</span>
    <span class="description">To pray for our leaders and nation <a href="http://prayeratparliament.org.nz/login" target="_blank">login at</a> and set your RSVP to YES</span>
    <span class="location">Grand Hall, Parliament Buildings, Wellington</span>
</div>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<html>
<head>

<!-- AddEvent script -->
<script type="text/javascript" src="https://cdn.addevent.com/libs/atc/1.6.1/atc.min.js" async defer></script>

</head>
<body>

<!-- Button code: the hidden <span> fields carry the event data the
     AddEvent widget uses to build the calendar entry. -->
<div title="Add to Calendar" class="addeventatc">
    Add to Calendar
    <span class="start">06/23/2023 08:00 AM</span>
    <span class="end">06/23/2023 10:00 AM</span>
    <span class="timezone">America/Los_Angeles</span>
    <span class="title">Summary of the event</span>
    <span class="description">Description of the event</span>
    <span class="location">Location of the event</span>
</div>

</body>
</html>
// Auto-fill the appointment end time: once both a start time and a service
// are chosen, look up the service's duration and add it to the start time.
if(input.Start_Date_Time != null && input.Service_Lookup != null)
{
	getServiceDetails = Add_Services[ID == input.Service_Lookup];
	serviceMinutes = getServiceDetails.Service_Time;	// duration — assumed minutes (addMinutes below); TODO confirm
	input.End_Date_Time = input.Start_Date_Time.addMinutes(serviceMinutes);
}
# Upload the local CSV into Databricks DBFS FileStore (Databricks CLI).
dbfs cp Downloads/InternetSales.csv dbfs:/FileStore/
# Open an interactive shell inside a running container (replace <container_id>).
docker exec -it <container_id> bash
# Start a kind (Kubernetes-in-Docker) node image detached; kind nodes need
# extended privileges, hence --privileged.
docker run --privileged -idt kindest/node:v1.21.2
/**
 * Minimal data holder: copies every own enumerable property of the given
 * object onto the new instance. `name` and `age` are declared so they exist
 * (as undefined) even when not supplied.
 */
class Person {
  name;
  age;
  constructor(props) {
    // Bulk-assign all provided properties onto this instance.
    Object.assign(this, props);
  }
}
class Solution {
    public int[][] matrixRankTransform(int[][] matrix) {
        /* LeetCode 1632 (Rank Transform of a Matrix), DSU approach:

        1) flatten every cell into a (value, row, col) pair so each element
        keeps its original position after sorting

        2) sort the pair array — smaller values must receive ranks first

        3) the shared rows[]/cols[] arrays store the maximum rank handed out
        so far in each row / column

        4) all cells holding the SAME value are grouped and processed
        together: equal values sharing a row or a column must get the same
        rank, so a DSU merges their rows/columns into one component
        */
        int m = matrix.length , n = matrix[0].length;
        pair arr[] = new pair[m*n];
        
        //making pair array 
        int k = 0 ;
        for(int i = 0 ;i < m ; i++){
            for(int j = 0 ;j < n ;j++){
                arr[k] = new pair(matrix[i][j] , i , j);
                k++;
            }
        }

        Arrays.sort(arr);
        
        //initialising shared rows and cols rank trackers
        rows = new int[m];
        cols = new int[n];

        //grouping and processing same-valued elements together
        ArrayList<pair> group = new ArrayList<>();
        int last = Integer.MIN_VALUE;

        for(int i =0 ; i < m*n ; i++){
            int val = arr[i].point;
            
            //a new value begins: rank the finished group first
            if(val != last){
                process(group , matrix);
                last = val;
                group = new ArrayList<>();
            }
            group.add(arr[i]);
        }

        //the final group is still pending after the loop ends
        process(group , matrix);
        
        //ranks were written straight into matrix, so return it
        return matrix;
    }
    
    //maximum rank assigned so far in each row / column
    int rows[];
    int cols[];


    //path-compressed find; a root is marked by a NEGATIVE parent entry
    public int findparent(int u , int[] parent){
        if(parent[u] < 0){
            return u;           //negative entry => u is its component's root
        }
        int temp = findparent(parent[u] , parent);
        parent[u] = temp;

        return temp;
    }

    //assigns one rank to every DSU component formed by this equal-value group
    public void process(ArrayList<pair> group , int[][] matrix){
        int m = matrix.length , n = matrix[0].length;

        //one parent array covers rows [0, m) and columns [m, m+n)
        int parent[] = new int[m+n]; 
        Arrays.fill(parent , -1);

        /* when p1 != p2 either side could absorb the other; we attach p2 under
        p1. parent[p1] stays negative, so it doubles as storage for the
        component's maximum rank, kept NEGATED so findparent still works
        */
        for(pair element : group){
            int x = element.x , y = element.y ;

            //leaders of the row (x) and of the column (y, offset by m)
            int p1 = findparent(x , parent);
            int p2 = findparent(y+m , parent);
            
            //grouping
            if(p1 != p2){
                /* the component's rank is one more than the best rank already
                present in this row/column; all three candidates are compared
                in negated form, hence Math.min
                
                -> rows[] and cols[] hold plain positive ranks
                */
                int maxRank = Math.min(parent[p1] , Math.min(parent[p2] ,-Math.max(rows[x],cols[y])-1));
                
                //storing max rank in p1 but in negative
                parent[p1] = maxRank;

                //grouping p2 to p1
                parent[p2] = p1;
                
            }  
        }
        //write the component's rank into every cell of the group
        for(pair element : group){
            int x = element.x , y = element.y , point = element.point;

            //p1 & p2 already grouped now , so find rank 
            int p1 = findparent(x , parent);
            int rank = -parent[p1];

            //update the per-row / per-column rank trackers
            rows[x] = rank;
            cols[y] = rank;
            
            matrix[x][y] = rank;
        } 
    }
    //(value, row, col) triple, ordered by value
    public class pair implements Comparable<pair>{
        int point , x , y;

        pair(){}

        pair(int point , int x , int y){
            this.point = point ; this.x = x ; this.y = y ; 
        }

        //ascending order by value
        //NOTE(review): subtraction can overflow for values near the int
        //extremes; Integer.compare(this.point, o.point) would be safer
        public int compareTo(pair o){
            return this.point - o.point;
        }
    }
}
Ctrl + Shift + T: abre una ventana del navegador que ya habías cerrado con anterioridad
Windows + L: bloquea el ordenador y guarda todo el trabajo
;MainWP Requirement - cURL timeout
default_socket_timeout = 300
;END MainWP Requirement
# Summarize (sum) several named columns at once, ignoring NAs.
# across() applies .fns to each selected column.
df %>% dplyr::summarize(dplyr::across(
  c(col1, col2),
  .fns = \(x) sum(x, na.rm = TRUE)
), .groups = "drop")

# Summarize every column.
# NOTE: the \(x) lambda shorthand requires R >= 4.1.
df %>% dplyr::summarize(dplyr::across(
  .cols = dplyr::everything(),
  .fns = \(x) sum(x, na.rm = TRUE)
), .groups = "drop")

# Summarize multiple columns with several functions; .names controls the
# output column names ("{.col}_{.fn}" -> col1_sum, col1_mean, ...).
df %>% dplyr::summarize(dplyr::across(
  .cols = c(col1, col2), .fns = list("sum" = sum, "mean" = mean),
  .names = "{.col}_{.fn}"
), .groups = "drop")
"""
 <Typography

              >
                {moment(dateOfBirth).format('DD/MM/YYYY')}

              </Typography>
"""

Consider you are a JavaScript developer.

I want the dateOfBirth to be shown in the `17 May 1998` format. Currently it is shown as `17/05/1998`.

I want you to change the format to the one I described. Here I used the moment package; you can use the same package.

Please update the code and return the updated version.


# Joins with base::merge, keyed on col1 + col2.

# inner join: keep only rows present in both tables
merge(dt1, dt2, by = c("col1", "col2"))

# left join: keep every row of dt1
merge(dt1, dt2, by = c("col1", "col2"), all.x = TRUE)

# right join: keep every row of dt2
merge(dt1, dt2, by = c("col1", "col2"), all.y = TRUE)
# rename column col1 to col2
names(df)[names(df) == "col1"] <- "col2"
# drop columns by name; drop = FALSE keeps the result a data.frame
df <- df[, !(names(df) %in% c("col1", "col2")), drop = FALSE]
"""
import React, { useState } from "react";
import "./style.scss";
import Typography from "@mui/material/Typography";
import WitnessTable from "@components/WitnessTable";
import { useForm } from "react-hook-form";
import Autocomplete from "@mui/material/Autocomplete";
import AutocompleteOption from "@mui/joy/AutocompleteOption";
import { DatePicker } from "@mui/x-date-pickers";
import { TextField } from "@mui/material";

// import custom hooks
import useNationalityList from "@utils/hooks/useNationalityList";
import useCountryList from "@utils/hooks/useCountryList";
import useCityList from "@utils/hooks/useCityList";
import useIsdCodesList from "@utils/hooks/useIsdCodesList";
import { useDispatch, useSelector } from "react-redux";

// selectors
import {
  dropDownSelector,
  fetchCityList,
  fetchCountryList,
  fetchNationalityList,
} from "@redux/slices/dropDowns";
import { willsValidatorSelector } from "@redux/slices/willsValidator";
import getGuid from "@utils/Wills/getGuid";
import axios from "axios";
import {
  submitWitnessDetails,
  willWitnessDetailsSelector,
  fetchWitnessList,
  fetchWitnessDetailsByGuid,
} from "@redux/slices/wills-witness-details";

import moment from "moment";

"""
I have given the imports above. What you have to do is put them in the correct order so they are easily readable — organize the imports. Please don't change any package name or switch between default and named imports; just reorder the imports for readability.



# data.frame: zero out col2 wherever col1 is negative
# NOTE(review): this modifies col2, while the data.table line below modifies
# col1 — the two snippets are NOT equivalent; confirm which column was meant.
df$col2[df$col1 < 0] <- 0

# data.table: zero out negative values of col1, by reference
dt[col1 < 0, col1 := 0]
/* Elementor custom CSS turning the page into a horizontal strip.
   NOTE(review): "selector" appears to be Elementor's custom-CSS placeholder
   keyword (replaced by the element's own selector at render time) — confirm
   this CSS is pasted into an Elementor "Custom CSS" field, otherwise the
   first rule will never match. */
selector .elementor {
display: inline-flex !important;
}

/* Lay sections out side by side instead of stacked. */
.elementor-section-wrap{
display: inline-flex;
}

/* Each section fills one viewport width. */
.elementor-section{
width:100vw;
}

.custom-width{
width: 100px;
}

/* Hide the vertical scrollbar; horizontal scrolling is driven by JS. */
body{
overflow-y: hidden;
overscroll-behavior-y: none;
}

/* On mobile, fall back to the normal vertical stacking. */
@media (max-width:768px){
.elementor-section-wrap{
display: block;
}
}



וזה הJS:

Javascript Code (remember to add the opening and closing script tags):

// Translates vertical mouse-wheel movement into horizontal page scrolling
// (companion script for the horizontal Elementor layout above).
function replaceVerticalScrollByHorizontal( event ) {
    if ( event.deltaY !== 0 ) {
        // Scroll sideways by twice the wheel delta; keep vertical position.
        window.scroll( window.scrollX + event.deltaY * 2, window.scrollY );
        event.preventDefault();
    }
}

// Only hijack the wheel on desktop-width viewports (mobile keeps normal
// vertical scrolling, matching the CSS media query).
// FIX: the original used typographic quotes (‘…’), which are a JavaScript
// syntax error — replaced with plain single quotes.
const mediaQuery = window.matchMedia( '(min-width: 770px)' );

if ( mediaQuery.matches ) {
    // { passive: false } is required: modern browsers register wheel
    // listeners on window as passive by default, which makes
    // event.preventDefault() a no-op.
    window.addEventListener( 'wheel', replaceVerticalScrollByHorizontal, { passive: false } );
}

payload for posting data:

"""
{
  "address": "binto",
  "bookedForProfileGUID": "05fa50b0-a450-437f-ade3-19bb128192a5",
  "city": "Wadi Zinati",
  "countryGUID": "07217663-f131-46b7-876b-8402f7a8312d",
  "dateOfBirth": "2023-06-06T18:30:00.000Z",
  "email": "gfdd@gmail.com",
  "emiratesId": "987978",
  "foreName": "test",
  "isdCode": "+81",
  "isdCodeGUID": "8841f91d-73b4-494c-b794-d8341faf80ab",
  "nationality": "33a784eb-6e88-43d6-8bf7-f41785851f25",
  "passportNo": "7456546",
  "phoneNumber": "76575675",
  "state": "rtfytrytr",
  "surName": "user",
  "zipCode": "654654"
}
"""

Data got while calling fetching:

"""
{
    "Output": {
        "profileguid": "34b4dd38-4c9c-42ee-9391-added811e2ad",
        "surName": "user",
        "foreName": "test",
        "address": "binto",
        "city": "Wadi Zinati",
        "countryName": null,
        "dateOfBirth": "2023-06-06T00:00:00",
        "passportNo": "7456546",
        "emiratesId": "",
        "phoneNumber": "76575675",
        "email": "gfdd@gmail.com",
        "nationality": "83",
        "isdCode": 106,
        "isdCodeGUID": "8841f91d-73b4-494c-b794-d8341faf80ab",
        "zipCode": "654654",
        "countryGUID": "07217663-f131-46b7-876b-8402f7a8312d",
        "state": "rtfytrytr",
        "dob": "2023-6-6",
        "emiratesIdNo": null,
        "serviceGUID": null,
        "bookedForProfileGUID": null
    },
    "status": true,
    "errorCode": "",
    "errorMessage": "",
    "statusCode": "200",
    "statusMessage": "Success",
    "responseDate": "08/Jun/2023 16:24:06",
    "version": "V1"
}

"""

Consider you are an API expert.

The emiratesId field is sent in the POST request, but when we call the GET method emiratesId comes back as an empty string.
The POST API is working, and the GET API also appears to work, yet the emiratesId field is returned as an empty string even though I inserted a value.

I have given the payload used while posting the data, and below it the response I got while fetching.

I want to know the reason behind this issue.

Please help me determine whether this is a problem on the API side.
class Solution {
    
    // Itinerary assembled back-to-front while the DFS unwinds (addFirst is O(1) on LinkedList).
    public LinkedList<String> ans = new LinkedList<>();
    // airport -> min-heap of destinations, so ties are used in lexical order.
    public HashMap<String , PriorityQueue<String>> graph = new HashMap<>();

    public List<String> findItinerary(List<List<String>> tickets) {
        /* LeetCode 332 (Reconstruct Itinerary) — Eulerian path / Hierholzer.

            Every ticket (edge) must be used exactly once, which is exactly an
            Eulerian path; the start airport is fixed at "JFK".

            1) build the graph as HM<string , PQ<string>> — the PQ hands out the
               lexicographically smallest destination first
            2) fire a DFS that consumes one ticket per step
            3) prepend each airport to the answer while backtracking

            -> LL is used as addfirst in LL is O(1)
        */

        //making graph
        for(List<String> nodes : tickets){
            String a = nodes.get(0) , b = nodes.get(1);

            graph.computeIfAbsent(a , k -> new PriorityQueue<>()).add(b);
        }

        //firing DFS from the fixed start airport
        DFS("JFK");
        return ans;
    }

    // Consumes every remaining ticket out of `source`, then prepends `source`
    // to the itinerary — post-order insertion yields the Eulerian path.
    public void DFS(String source){

        PriorityQueue<String> pq = graph.get(source);

        //an airport with no outgoing tickets never got a PQ, hence the null check
        while(pq != null && pq.size() > 0){
            String nbrs = pq.remove();
            DFS(nbrs);
        }

        //adding nodes while backtracking
        ans.addFirst(source);
    }
}
















' Opens the sample workbook, then closes it while saving any changes.
Sub Close_Workbook()

    Dim Wkb As Workbook
    ' FIX: the original path "D:Sample.xlsx" was missing the backslash after
    ' the drive letter, so it resolved relative to the current directory on
    ' D: instead of the drive root.
    Set Wkb = Workbooks.Open("D:\Sample.xlsx")
    Wkb.Close Savechanges:=True
    
End Sub
// MARK: - Category chips collection view.
// Shows one cell per category; the cell at `selectedCellIndex` is highlighted.
extension ServiceProviderProfileViewController : UICollectionViewDelegate, UICollectionViewDataSource {
    
    // One cell per category.
    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return categories.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        
        // NOTE(review): the force-cast assumes CategoryCollectionViewCell is
        // registered under this reuse identifier — confirm registration.
        let cell = categoryCollectionView.dequeueReusableCell(withReuseIdentifier: "CategoryCollectionViewCell", for: indexPath) as! CategoryCollectionViewCell

            cell.categoryLabel.text = categories[indexPath.row]
        
            // Blue border/text for the selected chip, gray for the rest.
            if indexPath.row == selectedCellIndex {
                cell.categoryView.layer.borderColor = UIColor.blue.cgColor
                cell.categoryLabel.textColor = UIColor.blue
            } else {
                cell.categoryView.layer.borderColor = UIColor.lightGray.cgColor
                cell.categoryLabel.textColor = UIColor.gray
            }
            return cell
    }
    
    // Remember the tapped chip and reload so the highlight moves to it.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        selectedCellIndex = indexPath.row
        self.categoryCollectionView.reloadData()
    }
    
}
// On DOM ready, swap the displayed country name whenever a <select> changes.
$(function () {
  $("select").change(function () {
    var selected = $(this).val();

    if (selected === "RU") {
      $(".country1").text("Россия");
    } else if (selected === "UZ") {
      $(".country1").text("Узбекистан");
    }
  });
});
class Solution {
    public int regionsBySlashes(String[] grid) {
        /* LeetCode 959, DSU (union-find) approach:
        1) treat the n x n grid of cells as an (n+1) x (n+1) lattice of corner points
        2) flatten a 2-D point (i, j) to the 1-D id i*(n+1)+j
        3) union all boundary points first — the border forms one closed loop,
           so the answer starts at 1 (the matrix interior itself)
        4) a '/' in cell (i,j) joins points (i+1, j) and (i, j+1);
           a '\' joins (i, j) and (i+1, j+1)
        5) Kruskal-style: union the two endpoints of every slash; if they were
           already connected the slash closes a cycle and adds one region
        */
        int n = grid.length;
        int dots = n+1;

        DSU temp = new DSU(dots * dots);

        //connect every boundary point to point 0 — the border is one loop
        for(int i = 0 ;i < dots ; i++){
            for(int j = 0 ;j < dots ; j++){
                
                if(i == 0 || i == dots-1 || j==0 || j== dots-1){
                    //convert to cell no.
                    int cellno = i * dots + j;

                    // 0 and 0 will have same leader
                    temp.union(0 , cellno , DSU.parent , DSU.rank);
                }
                
            }
        }
        int ans = 1;

        //union the endpoints of each slash; an already-connected pair => new region
        for(int i = 0 ;i < grid.length ; i++){
            String str = grid[i];
            
            for(int j = 0 ;j < str.length(); j++){

                //(i+1 , j) && (i , j+1);
                if(str.charAt(j) == '/'){
                    int a = (i+1) * dots + j , b = i * dots + (j+1);

                    if(temp.union(a , b , temp.parent , temp.rank) == false)
                    ans++;
                }
                //(i , j) && (i+1 , j+1);  ('\\' is an escaped single backslash)
                else if(str.charAt(j) == '\\'){
                    int a = (i) * dots + j , b = (i+1) * dots + (j+1);

                    if(temp.union(a , b , temp.parent , temp.rank) == false)
                    ans++;

                    // j++;    //as its given for / we have '//' therefore we skip 1 char
                }
            }
        }
        return ans;

    }
    //NOTE(review): parent/rank are STATIC, so every DSU instance shares them.
    //It works here only because the constructor re-initialises both arrays;
    //two live DSU objects would corrupt each other — consider instance fields.
    public static class DSU{
        static int parent[] , rank[];

        DSU(int n ){
            parent = new int[n];
            rank = new int[n];

            for(int i = 0 ;i < n ; i++){
                parent[i] = i;
                rank[i] = 1;
            }
        }

        //path-compressed find: returns the leader of `child`
        public int find(int child , int parent[]) {
            //if it is its own parent
            if(parent[child] == child){
                return parent[child];
            }

            int temp  = find(parent[child] , parent);    //optimization 1 (path compression)
            parent[child] = temp;

            return temp;
        }

        //union by rank; returns true when two components were merged,
        //false when x and y were already connected (i.e. a cycle)
        public boolean union(int x , int y , int parent[] , int rank[]){
            //finding leaders
            int lx = find(x , parent);
            int ly = find(y , parent);
            
            //combining based on ranks
            if(lx != ly){
                if(rank[lx] > rank[ly]){
                    parent[ly] = lx;
                }
                else if(rank[lx] < rank[ly]){
                    parent[lx] = ly;
                }

                else{
                    parent[lx] = ly;
                    rank[ly] ++ ;
                }
                return true;
            }
            return false;
        }
        
    }

}
# Untrack everything (working-tree files are kept) so a changed .gitignore
# takes effect on files that were already committed.
git rm -r --cached .

# Re-add; anything now matched by .gitignore stays untracked.
git add .

git commit -m 'update .gitignore'
class Solution
{
    public:
    // Function to find the maximum number of meetings that can
    // be performed in a meeting room.
    // Greedy: always take the meeting that finishes earliest among those
    // starting strictly after the last chosen one ends.
    int maxMeetings(int start[], int end[], int n)
    {
        // Store as (end, start) so the default pair ordering sorts by finish time.
        vector<pair<int,int>> meetings(n);
        for (int i = 0; i < n; ++i)
            meetings[i] = {end[i], start[i]};
        sort(meetings.begin(), meetings.end());

        int count = 0;
        int lastEnd = INT_MIN;   // finish time of the most recently chosen meeting
        for (const auto& m : meetings) {
            if (m.second > lastEnd) {   // starts strictly after the previous one ends
                ++count;
                lastEnd = m.first;
            }
        }
        return count;
    }
};
Could not find artifact com.aspose:aspose-cells:pom:20.7 in maven-public (http://10.30.154.118:8888/repository/maven-public/)
                                                                          
                                                                          https://lh4.googleusercontent.com/1Ju7yEQaVrd3e04fNRiI3iWMtlqJQUfJxK8cgTEWaNcxnB1VaJICzxIdhtZMv7zpZpCXrP1iyFp7qIPtbY0-U2Q65pSgF6VDOQG_HKgBK2RtX8paUMe4hzqE3JqYj4ClP8OsS2vf
                                                                          
                                                                          
                                                                          https://lh3.googleusercontent.com/tF7Km5-YwcQYBWKnSv62PHReo5MdsCDD6mcuXfxe-hge42rJyKQvKG4Vm_ZUqgntdtjZAVmO1rXJQpNKVP85mx1nm8rLByaC1n_vkLsms-1CZdlGsOjXWkveVR3addqFGk51BN7M
                                                                          
                                                                          https://lh5.googleusercontent.com/ksNIUAKmSvWG4dXOuNspGWz2hR1oZnjkFtglKdRwxjg--mdA_lKyA9tKumcYfQyjXINph8psn6STDYdGKjKyQi3ECy0pebxkYZGStd0U3bnWJM7dg9CmDYQ_VbhzCG64tqD58uLd
                                                                          
                                                                          http://10.121.43.43/api/chat-bot/pdf/download/receipt-a58303db-2013-431f-92fd-8d190b1416ce.pdf
{
    "Output": {
        "saveStatus": false,
        "editStatus": true,
        "deleteStatus": false,
        "itemGUID": null,
        "isNotFirstGuardianValue": false,
        "errorMessage": null,
        "isAlreadyExists": false,
        "status": false
    },
    "status": true,
    "errorCode": "",
    "errorMessage": "",
    "statusCode": "200",
    "statusMessage": "Success",
    "responseDate": "08/Jun/2023 12:02:32",
    "version": "V1"
}
{
  "address": "dfdfg dgdf",
  "bookedForProfileGUID": "05fa50b0-a450-437f-ade3-19bb128192a5",
  "city": "San Ignacio",
  "countryGUID": "b71f3fa5-a42b-4a0b-a6c3-8d956bb57757",
  "dateOfBirth": "2023-06-06T18:30:00.000Z",
  "email": "jgh@gmail.com",
  "emiratesId": "9897897",
  "foreName": "fgsdfsdf",
  "isdCode": "+591",
  "isdCodeGUID": "4d07418d-1d45-4500-ab49-0682eaae51b7",
  "nationality": "44d7c4c3-9690-4fe8-a733-bb9b1c1240e8",
  "passportNo": "67856876",
  "phoneNumber": "8567567567",
  "profileguid": "9294c7f8-8fb0-4e61-8f02-49db54ba7254",
  "state": "jmjhmh",
  "surName": "sdfsdfs",
  "zipCode": "6756765"
}
class Solution{
	/**
	 * Minimum spanning tree weight via Kruskal's algorithm: sort edges by
	 * weight ascending, then add each edge that connects two different
	 * components; the DSU rejects any edge that would close a cycle.
	 *
	 * @param V     number of vertices
	 * @param E     number of edges (unused: edges.length carries the count,
	 *              but the parameter is kept for the expected signature)
	 * @param edges each row is {u, v, weight}
	 * @return total weight of the MST
	 */
	static int spanningTree(int V, int E, int edges[][]){
	    //already submitted through prims , now through kruskal

	    int ans = 0;

	    // Integer.compare avoids the overflow that `a[2] - b[2]` can hit
	    // when weights have opposite signs or large magnitude.
	    Arrays.sort(edges , (a, b) -> Integer.compare(a[2], b[2]));

	    DSU dsu = new DSU(V);  //making DSU

	    //taking sorted edges and processing them 1 by 1
	    for(int i = 0 ; i < edges.length ; i++){
	        int u = edges[i][0] , v = edges[i][1] , wt = edges[i][2];

	        //union fails exactly when u and v already share a leader,
	        //i.e. the edge would form a cycle
	        if(DSU.union(u , v , dsu.parent , dsu.rank)){
	            ans += wt;
	        }
	    }
	    return ans;

	}
	//DSU template
	public static class DSU{
        // parent[i] = parent pointer of i; rank[i] = height upper bound
        int parent[] , rank[];

        /** Build a disjoint-set over n singleton elements 0..n-1. */
        DSU(int n ){
            parent = new int[n];
            rank = new int[n];

            for(int i = 0 ;i < n ; i++){
                parent[i] = i;
                rank[i] = 1;
            }
        }

        /** Return the leader of child's set, compressing the path on the way up. */
        public static int find(int child , int parent[]) {
            //if it is its own parent
            if(parent[child] == child){
                return parent[child];
            }

            int temp  = find(parent[child] , parent);    //optimization 1 (path compression)
            parent[child] = temp;

            return temp;
        }

        /** Merge the sets of x and y by rank; false if already joined. */
        public static boolean union(int x , int y , int parent[] , int rank[]){
            //finding leaders
            int lx = find(x , parent);
            int ly = find(y , parent);

            //combining based on ranks
            if(lx != ly){
                if(rank[lx] > rank[ly]){
                    parent[ly] = lx;
                }
                else if(rank[lx] < rank[ly]){
                    parent[lx] = ly;
                }
                else{
                    parent[lx] = ly;
                    rank[ly] ++ ;
                }
                return true;
            }
            return false;
        }

    }

}
2L266AP6-2UD8TFLB-DHCXZ2UQ-JTMHQK68-83KXZWQQ-4CR3AUB6-PKNEQT4K-F54UNXK4
    // Write the vector's elements to stdout, space-separated, then a newline.
    void print(vector<int>& v){
        for (size_t idx = 0; idx < v.size(); ++idx)
            cout << v[idx] << " ";
        cout << endl;
    }
    // Emit every permutation of `ans` by fixing one position at a time.
    // `ans` is taken by value, so each recursion level works on its own copy.
    void backtrack(int index, vector<int> ans){
        // Only the final slot remains: the permutation is fully decided.
        if(index == (ans.size() - 1)){
            print(ans);
            return;
        }
        // Try each remaining candidate at `index`, recurse, then undo.
        for(int candidate = index; candidate < ans.size(); candidate++){
            swap(ans[index], ans[candidate]);
            backtrack(index + 1, ans);
            swap(ans[index], ans[candidate]);   // restore before the next pick
        }
    }
// Phone Title and Address Link
$('.business-info-logo').attr('title', 'The Carr Shoppe LLC');
$('.business-info-phone').attr('title', 'Call Us: (660) 646-5999');
// Give the stacked address a hover title and make it link to the locations page.
$('.stacked-address')
	.attr('title', 'View Map & Hours')
	.wrap($('<a href="/locations"></a>'));

// CTA Component Title Attribute
// Title = "<h2 text> <p text>" when a heading exists, otherwise the link text.
$('.CTA_1-0-0 .dynamic-cta').attr('title', function(i, v){
	var heading = $(this).find('h2').text();
	if(heading){
		return heading.trim() + ' ' + $(this).find('p').text().trim();
	}
	return $(this).find('a').text().trim();
});

// CTAs Component With Alt Text Title Attribute
// Reuse the contained image's title attribute as the CTA's title.
$('.CTA-Component-with-Alt-Text_1-0-0 .dynamic-cta').attr('title', function(i, v){
	var img = $(this).find('img');
	return img.attr('title');
});
<script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
                                                                                   

                                                                                


// Flags the user has currently ticked.
var result = this.allDomicileFlags.filter((item) => item.isChecked);
// Flags whose checkbox is disabled — presumably ones incompatible with the
// current selection; NOTE(review): confirm against where these vars are used.
var incompatibleFlags = this.allDomicileFlags.filter(
  (item) => item.disableCheckbox === true
);
handleSearch(event) {
  this.searchTerm = event.detail.value;
  const letter = this.searchTerm.toLowerCase();
  this.allDomicileFlags = this.allDomicileFlagsOG.filter((item) => item.nm_card.toLowerCase().startsWith(letter));
}
star

Sat Jun 10 2023 18:23:27 GMT+0000 (Coordinated Universal Time) https://codepen.io/kevinpowell/pen/NWOgVga

@rstringa

star

Sat Jun 10 2023 18:16:11 GMT+0000 (Coordinated Universal Time) https://leetcode.com/problems/movement-of-robots/discuss/

@DxBros #biweekly #prefix_sum #ants_on_plank #collision

star

Sat Jun 10 2023 17:05:12 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Sat Jun 10 2023 14:56:30 GMT+0000 (Coordinated Universal Time)

@evrimuygar33

star

Sat Jun 10 2023 14:54:21 GMT+0000 (Coordinated Universal Time)

@evrimuygar33

star

Sat Jun 10 2023 14:51:38 GMT+0000 (Coordinated Universal Time)

@evrimuygar33

star

Sat Jun 10 2023 13:14:24 GMT+0000 (Coordinated Universal Time)

@mehran

star

Fri Jun 09 2023 05:27:47 GMT+0000 (Coordinated Universal Time) https://www.addevent.com/c/documentation/add-to-calendar-button

@Kiwifruit

star

Fri Jun 09 2023 05:22:55 GMT+0000 (Coordinated Universal Time) https://www.addevent.com/c/documentation/add-to-calendar-button

@Kiwifruit

star

Fri Jun 09 2023 05:10:08 GMT+0000 (Coordinated Universal Time) https://www.addevent.com/c/documentation/add-to-calendar-button

@Kiwifruit

star

Fri Jun 09 2023 04:52:48 GMT+0000 (Coordinated Universal Time) https://creatorapp.zoho.com/sapphireelitetech/datebook#Form:Book_an_Appointment

@SapphireElite #deluge #zoho #delugescript #creator #workflow

star

Thu Jun 08 2023 22:19:07 GMT+0000 (Coordinated Universal Time) https://www.nerdsgene.com/Article/BulkCopyToSQLDB

@knguyencookie

star

Thu Jun 08 2023 16:59:03 GMT+0000 (Coordinated Universal Time) https://chat.openai.com/

@orherzog #k8s #docker #bash

star

Thu Jun 08 2023 16:58:09 GMT+0000 (Coordinated Universal Time) https://chat.openai.com/

@orherzog #bash #k8s #docker

star

Thu Jun 08 2023 15:46:35 GMT+0000 (Coordinated Universal Time) https://vpn-expert.info/vpn-router-raspberry-pi-raspap-and-nordvpn-wi-fi-hotspot-access-point/

@orherzog #bash #linux #raspberry

star

Thu Jun 08 2023 15:45:57 GMT+0000 (Coordinated Universal Time) https://www.christopherlouvet.com/posts/raspberry-pi-dual-wifi-wireguard-vpn-apple-tv/

@orherzog #bash #linux

star

Thu Jun 08 2023 15:45:46 GMT+0000 (Coordinated Universal Time) https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes/Public_class_fields

@sabi_k

star

Thu Jun 08 2023 15:43:14 GMT+0000 (Coordinated Universal Time)

@orherzog #bash #linux #raspberry

star

Thu Jun 08 2023 15:20:01 GMT+0000 (Coordinated Universal Time) https://leetcode.com/problems/rank-transform-of-a-matrix/

@Ayush_dabas07

star

Thu Jun 08 2023 15:11:49 GMT+0000 (Coordinated Universal Time)

@jrg_300i #php #poo #mvc #jquery #postgresql

star

Thu Jun 08 2023 14:34:22 GMT+0000 (Coordinated Universal Time)

@fostira #wordpress #config #nginx #apache #php

star

Thu Jun 08 2023 14:06:23 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 13:37:38 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Thu Jun 08 2023 12:49:41 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 12:43:18 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 12:30:29 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 12:22:46 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 12:20:37 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 12:08:17 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Thu Jun 08 2023 12:01:51 GMT+0000 (Coordinated Universal Time)

@Abbyharbor

star

Thu Jun 08 2023 11:57:46 GMT+0000 (Coordinated Universal Time)

@vs #r

star

Thu Jun 08 2023 11:53:55 GMT+0000 (Coordinated Universal Time)

@odesign

star

Thu Jun 08 2023 11:40:20 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Thu Jun 08 2023 11:05:19 GMT+0000 (Coordinated Universal Time) https://leetcode.com/problems/reconstruct-itinerary/

@Ayush_dabas07

star

Thu Jun 08 2023 10:56:21 GMT+0000 (Coordinated Universal Time) https://analysistabs.com/vba-code/workbook/m/close/

@Walid

star

Thu Jun 08 2023 10:07:26 GMT+0000 (Coordinated Universal Time)

@Annie #jquery

star

Thu Jun 08 2023 09:52:13 GMT+0000 (Coordinated Universal Time) https://leetcode.com/problems/regions-cut-by-slashes/

@Ayush_dabas07

star

Thu Jun 08 2023 08:07:28 GMT+0000 (Coordinated Universal Time) https://developer.aliyun.com/article/634481

@adoin

star

Thu Jun 08 2023 07:52:02 GMT+0000 (Coordinated Universal Time) https://practice.geeksforgeeks.org/problems/n-meetings-in-one-room-1587115620/1

@DxBros #n_meetings #greedy #gfg #c++

star

Thu Jun 08 2023 06:56:22 GMT+0000 (Coordinated Universal Time)

@manhmd #java

star

Thu Jun 08 2023 06:37:12 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Thu Jun 08 2023 06:35:37 GMT+0000 (Coordinated Universal Time)

@JISSMONJOSE #react.js #css #javascript

star

Thu Jun 08 2023 06:22:34 GMT+0000 (Coordinated Universal Time) https://practice.geeksforgeeks.org/problems/minimum-spanning-tree/1

@Ayush_dabas07

star

Thu Jun 08 2023 05:13:50 GMT+0000 (Coordinated Universal Time) https://www.cleverbridge.com/882/p/122105102-sIJQM7G4sjDqcWkzQGV6?

@TheHutt

star

Thu Jun 08 2023 02:58:31 GMT+0000 (Coordinated Universal Time) https://practice.geeksforgeeks.org/problems/find-kth-permutation-0932/1

@DxBros #c++ #permutations #backtracking

star

Thu Jun 08 2023 01:59:45 GMT+0000 (Coordinated Universal Time)

@vishalsingh21

star

Wed Jun 07 2023 21:49:49 GMT+0000 (Coordinated Universal Time)

@tarahb

star

Wed Jun 07 2023 21:20:39 GMT+0000 (Coordinated Universal Time)

@gbritgs

star

Wed Jun 07 2023 21:19:52 GMT+0000 (Coordinated Universal Time)

@gbritgs

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension