Snippets Collections
// Function to fetch weather data from an API
const fetchWeather = () => {
  return fetch('https://api.example.com/weather')
    .then((response) => {
      if (!response.ok) {
        throw new Error('Failed to fetch weather data');
      }
      return response.json();
    })
    .then((data) => {
      return { temperature: data.temperature, condition: data.condition };
    });
};

// Function to fetch stock market data from an API
const fetchStocks = () => {
  return fetch('https://api.example.com/stocks')
    .then((response) => {
      if (!response.ok) {
        throw new Error('Failed to fetch stock data');
      }
      return response.json();
    })
    .then((data) => {
      return { symbol: data.symbol, price: data.price };
    });
};

// Function to fetch news headlines from an API
const fetchNews = () => {
  return fetch('https://api.example.com/news')
    .then((response) => {
      if (!response.ok) {
        throw new Error('Failed to fetch news data');
      }
      return response.json();
    })
    .then((data) => {
      return data.articles.map(article => ({ title: article.title }));
    });
};

// Fetch all data in parallel using Promise.all
Promise.all([fetchWeather(), fetchStocks(), fetchNews()])
  .then(([weatherData, stockData, newsData]) => {
    console.log('Weather:', weatherData);
    console.log('Stocks:', stockData);
    console.log('News:', newsData);

    // Now you can update the UI with all the data
    updateDashboard(weatherData, stockData, newsData);
  })
  .catch((error) => {
    console.error('Error fetching data:', error);
  });

// Example of how you could update the dashboard
function updateDashboard(weather, stocks, news) {
  console.log('Updating dashboard with all the data...');
  // Logic to update the UI with the fetched data
}
async function fetchData(url) {
  try {
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(`Error: ${response.status} ${response.statusText}`);
    }
    const data = await response.json(); // Parse and return JSON data
    return data;
  } catch (error) {
    console.error('Fetch failed:', error.message);
    throw error; // Re-throw the error if necessary for handling elsewhere
  }
}

async function useData() {
  try {
    const data1 = await fetchData('https://jsonplaceholder.typicode.com/posts/1');
    console.log('First Request Data:', data1);
  
    // If you need to fetch data from another endpoint:
    const data2 = await fetchData('https://jsonplaceholder.typicode.com/posts/2');
    console.log('Second Request Data:', data2);
  } catch (error) {
    console.error('Error in data retrieval:', error);
  }
}

useData();
# Analyzing Clicks: A Python Function for Extracting Weekly Stats

users_data = [
    {'day': 0, 'clicks': 100}, {'day': 1, 'clicks': 10},
    {'day': 2, 'clicks': 150}, {'day': 3, 'clicks': 1000},
    {'day': 4, 'clicks': 100}, {'day': 5, 'clicks': 190},
    {'day': 6, 'clicks': 150}, {'day': 7, 'clicks': 1000}
]

def get_stats(day):
    # day is 1-based, list indices are 0-based: shift by -1 and take a 7-day window
    return users_data[day - 1: day - 1 + 7]

print(get_stats(1))
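
# Optional follow-up sketch (not part of the original snippet): aggregate the
# 7-day window returned by get_stats into total and average clicks.
def summarize_week(day):
    week = get_stats(day)
    total = sum(d['clicks'] for d in week)
    return {'total_clicks': total, 'avg_clicks': total / len(week)}

print(summarize_week(1))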
from random import randint
from faker import Faker
from datetime import date
import pandas as pd

f = Faker()

s_date = date(2018, 5, 1)
e_date = date(2018, 5, 30)

dict_data = {'date': [], 'email': [], 'money': []}

for _date in pd.date_range(start=s_date, end=e_date):
    dict_data['date'].append(_date)
    dict_data['email'].append(f.email())
    dict_data['money'].append(randint(1, 100) * 0.99)

df = pd.DataFrame.from_dict(dict_data)
df.to_csv('out.csv', index=False)
from datetime import date
from pandas import date_range
from uuid import uuid4
from random import randint


s_date = date(2019, 1, 1)
e_date = date(2019, 1, 30)

stats = {}

for d in date_range(start=s_date, end=e_date):
    d = str(d.date())
    stats[d] = {
        'user_id': str(uuid4()),
        'clicks': randint(0, 1000)
    }


print(stats)
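
# Optional sketch (not in the original snippet): load the generated stats into a
# pandas DataFrame for inspection; assumes pandas is available in the environment.
import pandas as pd
stats_df = pd.DataFrame.from_dict(stats, orient='index')
print(stats_df.head())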
curl --request POST \
  --url 'https://api.apyhub.com/convert/rss-file/json?detailed=true' \
  --header 'apy-token: APY0BOODK2plpXgxRjezmBOXqID51DGpFq8QnHJeBQrrzuIBc25UIglN93bbwvnkBWlUia1' \
  --header 'content-type: multipart/form-data' \
  --form 'file=@"test.xml"'
curl --request POST \
  --url 'https://api.apyhub.com/convert/word-file/pdf-file?output=test-sample.pdf&landscape=false' \
  --header 'apy-token: APY0BOODK2plpXgxRjezmBOXqID51DGpFq8QnHJeBQrrzuIBc25UIglN93bbwvnkBWlUia1' \
  --header 'content-type: multipart/form-data' \
  --form 'file=@"test.doc"'
curl --request POST \
  --url 'https://api.apyhub.com/generate/charts/bar/file?output=sample.png' \
  --header 'Content-Type: application/json' \
  --header 'apy-token: APY0BOODK2plpXgxRjezmBOXqID51DGpFq8QnHJeBQrrzuIBc25UIglN93bbwvnkBWlUia1' \
  --data '{
    "title":"Simple Bar Chart",
    "theme":"Light",
    "data":[
        {
            "value":10,
            "label":"label a"
        },
        {
            "value":20,
            "label":"label b"
        },
        {
            "value":80,
            "label":"label c"
        },
        {
            "value":50,
            "label":"label d"
        },
        {
            "value":70,
            "label":"label e"
        },
        {
            "value":25,
            "label":"label f"
        },
        {
            "value":60,
            "label":"label g"
        }
    ]
}'
curl --request POST \
  --url 'https://api.apyhub.com/generate/qr-code/file?output=sample.png' \
  --header 'Content-Type: application/json' \
  --header 'apy-token: APY0BOODK2plpXgxRjezmBOXqID51DGpFq8QnHJeBQrrzuIBc25UIglN93bbwvnkBWlUia1' \
  --data '{
    "content": "https://apyhub.com",
    "logo": "https://apyhub.com/logo.svg",
    "background_color": "#000000",
    "foreground_color": ["#e8bf2a", "#e8732a"]
}'
// file data.json

{
  "localItems": {
    "items": [
      { "title": "Halibut", "type": "string", "status": false },
      { "title": "Taco",    "type": "string", "status": true  },
      { "title": "Fish",    "type": "string", "status": true  },
      { "title": "Pork",    "type": "string", "status": false }
    ]
  }
}




//js file must be a module
// <script src="scripts.js" defer type="module"></script>

// JS file 
import data from "./data.json" assert { type: "json" };
const {localItems} = data;
const JSONDATA = localItems['items'];

const waitForSiblingElementToBeRemoved = (form) => {
    // This element is where any errors would appear
    const input = form.querySelector(`input[type='submit']`);

    // This observer watches for the sibling of the submit input that appears
    // and is then removed when the request completes
    new MutationObserver((entries, observer) => {
      log('entries[0].removedNodes: ', entries[0].removedNodes);
      if (!entries[0].removedNodes[0]?.classList.contains('ajax-progress')) return; 
      observer.disconnect(); // should this be moved after sendEvent 
      window.setTimeout(() => {
        if (input.classList.contains('error')) return; 
        log('Email has been submitted');
        sendEvent('pjs_email_submitted');
      }, 0);
    }).observe(input.parentElement, { childList: true, subtree: true }); // should be actually have two separate observers for each input?
  };
window.optimizely.get('visitor')

// example of getting specific pjs segment (from tb440)
window.optimizely.get('visitor').custom['22521880007']
import logging

logging.basicConfig(filename='codelogging.log', level=logging.DEBUG,
                format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S')

a_variable = 200

# for loop with logging
for i in range(100):
    print(i)
    logging.info('Iterating')

logging.info('Script completed')
import dtale
from pandas_profiling import ProfileReport
from dataprep.eda import create_report

class AutomateEDA:
    
    def __init__(self, df):
        self.df = df
     
    def show_dtale(self):
        d = dtale.show(self.df)
        d.open_browser()
        print('dtale opened in browser!')
        
    def show_pandas_profile_report(self):
        profile = ProfileReport(self.df, title="Pandas Profiling Report", explorative=True)
        profile.to_file("pandas-profiling-report.html")
        print('pandas-profile-report created and saved in the project folder!')
        
    def show_dataprep(self):
        create_report(self.df).show_browser()
        print('dataprep opened in browser!')
        
    def show_all(self):
        self.show_dtale()
        self.show_dataprep()
        self.show_pandas_profile_report()


eda = AutomateEDA(df)
eda.show_all()
import pandas as pd
data = pd.read_csv('../input/udemy-courses/udemy_courses.csv')

# published_timestamp is an ISO 'YYYY-MM-DD...' string: [:4] is the year, [5:7] is the month
x = pd.Series([int(i[:4]) for i in data.published_timestamp])
xx = pd.DataFrame(x, columns=['published_year'])
z = pd.Series([int(i[5:7]) for i in data.published_timestamp])
zz = pd.DataFrame(z, columns=['published_month'])

data = pd.concat([data, xx, zz], axis=1)
data
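
# Alternative sketch: let pandas parse the timestamps instead of slicing strings
# (assumes published_timestamp holds parseable datetime strings; the new column
# names below are illustrative).
ts = pd.to_datetime(data['published_timestamp'])
data['published_year_dt'] = ts.dt.year
data['published_month_dt'] = ts.dt.month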
let user = Auth.auth().currentUser
if let user = user {
    let db = Firestore.firestore()
    db.collection("Appointments").whereField("patient_id", isEqualTo: user.uid).getDocuments { [self] (snapshot, error) in
        guard let documents = snapshot?.documents else {
            return
        }
        for d in documents {
            self.chatStatus = d.get("chat_status") as? String ?? ""
            self.startTime = d.get("start_time") as? String ?? ""
        }
        // updating the first matching appointment
        let document = documents.first
        document?.reference.updateData([
            "chat_status": "Started"
        ])
    }
}
@IBAction func saveChangesPressed(_ sender: UIButton) {
        
        let user = Auth.auth().currentUser
        if let user = user {
            let db = Firestore.firestore()
            let docRef = db.collection("Users").document(user.uid)
            docRef.updateData(["Name": self.nameField.text ?? "User","Email": self.emailField.text ?? "a@gmail.com", "MobileNumber": self.phoneNumberField.text ?? "123"])
        }
    }
func getDataFromFireStore() {
        print("getting data")
        let activityView = activityIndicatorView()
        activityView.startAnimating()
        let db = Firestore.firestore()
        db.collection("Activities").getDocuments() { (querySnapshot, error) in
            if error != nil {
                print("Error getting documents: \(error!)")
            } else {
                self.activitiesArray.removeAll()
                for document in querySnapshot!.documents {
                    print("for loop")
                    let activities = ActivitiesModel()
                    let data = document.data()
                    activities.title = data["title"] as! String
                    activities.description = data["description"] as! String
                    activities.image_url = data["image_url"] as! String
                    self.activitiesArray.append(activities)
                }
                activityView.stopAnimating()
                self.activitiesTableView.reloadData()
            }
        }
        
    }
import numpy as np

# Symmetric (signed) log transform: preserves sign while compressing magnitude
def log_scale(x):
    C = 1 / np.log(10)
    return np.sign(x) * np.log10(1 + np.abs(x / C))
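
# Usage sketch: the transform is element-wise, so it works on arrays or Series.
print(log_scale(np.array([-1000, -10, 0, 10, 1000])))
# For a DataFrame column (df and the column name are illustrative assumptions):
# df['amount_log'] = log_scale(df['amount'])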
df['Purchase'] = df['Purchase'].apply(lambda x: 1 if x=='Yes' else 0)
df2.loc[["one",'three'],['pop','state']]
df2.loc["one":'three'],['pop','state']]
df2.loc[:'three',['pop','state']]
df.apply(lambda x: x == ' ', axis = 1).mean()
df.loc[df.TotalCharges == ' ', :]
df.loc[df.TotalCharges == ' ', 'TotalCharges'] = 0
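
# Follow-up sketch (assumes TotalCharges is still stored as strings and pandas is
# imported as pd): convert the cleaned column to a numeric dtype.
df['TotalCharges'] = pd.to_numeric(df['TotalCharges'])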
import numpy as np
from collections import Counter

def detect_outliers(df, n, features):
    outlier_indices = []
    for col in features:
        Q1 = np.percentile(df[col],25)
        Q3 = np.percentile(df[col],75)
        IQR = Q3 - Q1
        outlier_step = 1.5 * IQR 
        outlier_list_col = df[(df[col] < Q1 - outlier_step) | (df[col] > Q3 + outlier_step)].index
        outlier_indices.extend(outlier_list_col)

    outlier_indices = Counter(outlier_indices)
    multiple_outliers = list(k for k, v in outlier_indices.items() if v>n)
    return multiple_outliers

Outliers_to_drop = detect_outliers(data1,2,['Age','Parch','Fare','SibSp'])
data1.iloc[Outliers_to_drop]
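
# Follow-up sketch: drop the detected outlier rows (keeps data1 from above intact).
data1_clean = data1.drop(Outliers_to_drop).reset_index(drop=True)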
import tkinter as tk
from tkinter import ttk

root = tk.Tk()

# Pack a big frame so it behaves like the window background
big_frame = ttk.Frame(root)
big_frame.pack(fill="both", expand=True)

# Set the initial theme
root.tk.call("source", "sun-valley.tcl")
root.tk.call("set_theme", "light")

def change_theme():
    # NOTE: The theme's real name is sun-valley-<mode>
    if root.tk.call("ttk::style", "theme", "use") == "sun-valley-dark":
        # Set light theme
        root.tk.call("set_theme", "light")
    else:
        # Set dark theme
        root.tk.call("set_theme", "dark")

# Remember, you have to use ttk widgets
button = ttk.Button(big_frame, text="Change theme!", command=change_theme)
button.pack()

root.mainloop()
from google.colab import drive
drive.mount('/content/drive/')
import sys
sys.path.insert(0, '/content/drive/MyDrive/')

import pandas as pd
train_df = pd.read_csv("/content/drive/MyDrive/train.csv")
var endExperiment = function() {

  var prompt_resubmit = function() {
    replaceBody(error_message);
    $("#resubmit").click(resubmit);
  };

  var resubmit = function() {
    replaceBody("<h1>Trying to resubmit...</h1>");
    var reprompt = setTimeout(prompt_resubmit, 10000);

    psiTurk.saveData({
      success: function() {
          clearTimeout(reprompt);
      },
      error: prompt_resubmit
    });
  };
  // Load the debriefing page 
  psiTurk.showPage('debriefing.html');

  //code for bonus??
  $("#next").click(function () {
      record_responses();
      psiTurk.saveData({
            success: function(){
                psiTurk.computeBonus('compute_bonus', function() { 
                  psiTurk.completeHIT(); // once the bonus is computed and data saved, quit the HIT
                }); 
            }, 
            error: prompt_resubmit});
  });
# importing the AutoViz class
from autoviz.AutoViz_Class import AutoViz_Class

# Instantiate the AutoViz class
AV = AutoViz_Class()

df = AV.AutoViz('car_design.csv')
# Discretization into quartiles
df3["Total_Amt_Chng_Q4_Q1_qcut"] = pd.qcut(df3["Total_Amt_Chng_Q4_Q1"], 4)
df3["Total_Trans_Amt_qcut"] = pd.qcut(df3["Total_Trans_Amt"], 4)
df3["Total_Ct_Chng_Q4_Q1_qcut"] = pd.qcut(df3["Total_Ct_Chng_Q4_Q1"], 4)