Snippets Collections
with t1(col) as (
  select 1    from dual union all
  select 2    from dual union all
  select null from dual union all
  select 3    from dual
)
select *
  from t1
 order by col asc nulls last;
select ord_num, agent_code, ord_date, ord_amount
from orders
where (agent_code, ord_amount) IN
(SELECT agent_code, MIN(ord_amount)
FROM orders 
GROUP BY agent_code);  
ALTER TABLE stars
ALTER COLUMN name TYPE varchar(50);
DECLARE @AnyDate DATETIME
SET @AnyDate = GETDATE()

SELECT @AnyDate AS 'Input Date',
  DATEADD(q, DATEDIFF(q, 0, @AnyDate), 0) 
                        AS 'Quarter Start Date',       
  DATEADD(d, -1, DATEADD(q, DATEDIFF(q, 0, @AnyDate) + 1, 0)) 
                        AS 'Quarter End Date'
DECLARE @Year DATE = '2013-01-01'
DECLARE @Quarter INT = 4;

SELECT  DATEADD(QUARTER, @Quarter - 1, @Year) ,
        DATEADD(DAY, -1, DATEADD(QUARTER,  @Quarter, @Year))
SELECT title, extract(year from current_date) - "year" AS age
  FROM films
  ORDER BY age ASC;
postgres=# (psql terminal commands)
- list all databases : \l
- list of all tables : \d
- list only the tables : \dt
- list all fields in a table : \d person (name of the table)
- import from external file : \i c:/postgrescourse/person.sql

- datatypes info:
bigserial : auto-incrementing integer backed by a sequence
date : year, month, day
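A minimal sketch of both types in action (table and values are made up):
create table demo_dates (
    id bigserial primary key,   -- auto-increments: 1, 2, 3, ...
    event_date date not null    -- stores year, month and day only
);
insert into demo_dates (event_date) values (date '1988-01-09');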



- create database : create database test;
- drop database : drop database test;
- drop table : drop table person;
- connect to a database ( two forms ) :
\c test (or) psql -h localhost -p 5432 -U postgres test
- create table syntax : create table table_name ( column name + data type + constraints if any )

- create table : 
create table person ( 
	id int,
	first_name varchar(50),
  	last_name varchar(50),
    gender varchar(6),
    date_of_birth date
    );
- create table w/constraints ( rules that must be satisfied ):
create table person (
 	id bigserial not null primary key,
 	first_name varchar(50) not null,
 	last_name varchar(50) not null,
    gender varchar(6) not null, 
    date_of_birth date not null
	);

- insert into table :
insert into person ( first_name, last_name, gender, date_of_birth) values ('Anne','Smith','Female',date '1988-01-09');

- Using OFFSET and LIMIT : select * from person OFFSET 5 LIMIT 5;
- Using OFFSET and FETCH : select * from person OFFSET 5 FETCH FIRST 5 ROWS ONLY;
- Using BETWEEN : select * from person where date_of_birth BETWEEN DATE '2000-01-01' AND DATE '2015-01-01';
- Differences between LIKE and ILIKE : ILIKE is case insensitive.
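For example, assuming the person table above:
select * from person where first_name LIKE 'anne%';   -- case sensitive: misses 'Anne'
select * from person where first_name ILIKE 'anne%';  -- also matches 'Anne', 'ANNE', ...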

- Using Group By : select country_of_birth, count(*) from person group by country_of_birth; ( will count how many people are from that country )
- Using Having with Group By : select country_of_birth, count(*) from person group by country_of_birth having count(*) > 5; ( only countries with a count above 5 are shown )





 USE Your_Database;
 GO
 EXECUTE AS USER = N'the_user_name';
 GO
 SELECT 
    s.name,
    o.name,
    p.[permission_name]
 FROM sys.objects AS o 
 INNER JOIN sys.schemas AS s
 ON o.[schema_id] = s.[schema_id]
 CROSS APPLY sys.fn_my_permissions(QUOTENAME(s.name) 
   + N'.' + QUOTENAME(o.name), N'OBJECT') AS p
   WHERE o.[type] IN (N'U', N'V') -- tables and views
   AND p.subentity_name = N''; -- ignore column permissions 
 GO
 REVERT;
SELECT setval(
        pg_get_serial_sequence('TABLE','COLUMN'), 
        max(id)) 
FROM TABLE;
<?php
$results = $wpdb->get_results( "SELECT * FROM $table_name" ); // Fetch all rows from the table and store them in $results
if ( ! empty( $results ) )                  // Only render the table if the query returned rows
{
    echo "<table width='100%' border='0'>"; // Open <table> and <tbody> outside the loop so they are created only once
    echo "<tbody>";
    foreach ( $results as $row ) {
        $userip = $row->user_ip;            // Keep the user_ip value for use later in an update query
        echo "<tr>";                        // Table rows are built inside the loop
        echo "<th>ID</th>" . "<td>" . $row->id . "</td>";
        echo "</tr>";
        echo "<tr><td colspan='2'><hr size='1'></td></tr>";
        echo "<tr>";
        echo "<th>User IP</th>" . "<td>" . $row->user_ip . "</td>"; // Value of the user_ip field
        echo "</tr>";
        echo "<tr><td colspan='2'><hr size='1'></td></tr>";
        echo "<tr>";
        echo "<th>Post ID</th>" . "<td>" . $row->post_id . "</td>";
        echo "</tr>";
        echo "<tr><td colspan='2'><hr size='1'></td></tr>";
        echo "<tr>";
        echo "<th>Time</th>" . "<td>" . $row->time . "</td>";
        echo "</tr>";
        echo "<tr><td colspan='2'><hr size='1'></td></tr>";
    }
    echo "</tbody>";
    echo "</table>";
}
?>
SELECT column-names
  FROM table-name1
 WHERE value IN (SELECT column-name
                   FROM table-name2
                  WHERE condition)
SELECT Fname, Lname
FROM Employee
ORDER BY Salary
OFFSET 2 ROWS;
select schema_name(tab.schema_id) as [schema_name], 
    tab.[name] as table_name
from sys.tables tab
    left outer join sys.indexes pk
        on tab.object_id = pk.object_id 
        and pk.is_primary_key = 1
where pk.object_id is null
order by schema_name(tab.schema_id),
    tab.[name]
DELETE FROM [dbo].[logs] WHERE date < DATEADD(DAY, -30, GETDATE())
DECLARE
  dates STRING;
SET
  dates = (
  SELECT
    CONCAT('("', STRING_AGG(DISTINCT REPLACE(LEFT(started_at,10),"-",""), '", "'), '")'),
  FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption` );

EXECUTE IMMEDIATE
  FORMAT("""
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in %s)""",dates)
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in (list_of_values))
CREATE OR REPLACE TABLE talentcards.users
AS
SELECT DISTINCT * FROM talentcards.users
-- replace "sale" with the column name and "sales" with the table name
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sale) FROM sales;
\d tablename # view table
\dt # show all tables
\q # exit psql console
\du # show all users
\l # show all databases
\conninfo # get connection info
psql -U username -d dbname
psql -h 127.0.0.1 -U postgres
sudo -u username createdb dbname
sudo -u postgres createuser --superuser username
sudo /etc/init.d/postgresql restart
DELETE FROM tbl_sku
WHERE sku <> 'hello';
INSERT INTO tbl_name (col1, col2, col3) VALUES ('03/03/2021', 13.76, 'MAS-465');
ALTER ROLE postgres
WITH PASSWORD 'newpassword';
ALTER TABLE tbl_sku
ALTER COLUMN col_name TYPE character(50);
ALTER TABLE tbl_sku
RENAME COLUMN old_name TO new_name;
ALTER TABLE tbl_sku
ADD COLUMN new_col character(15);
SELECT 
	t.col1,
	s.col3
FROM tbl1 AS t, tbl2 AS s
WHERE t.col2 = s.col1;
CREATE TEMPORARY TABLE __copy AS (SELECT * FROM tbl_sku LIMIT 0);

COPY __copy (col_1, col_2, col_3) FROM 'C:/Users/CB06/Downloads/upload5.csv' WITH (FORMAT csv, DELIMITER ',', FORCE_NULL (sku, title, supplier, ean), HEADER); -- import from csv file

UPDATE tbl_sku SET col1 = __copy.col1, col2 = __copy.col2 FROM __copy WHERE tbl_sku.sku = __copy.sku; -- update rows
           UPDATE table1 
           SET status = (SELECT t2.status FROM table2 t2 WHERE t2.trans_id = id) , 
               name = (SELECT t2.name FROM table2 t2 WHERE t2.trans_id = id)
           WHERE id IN (SELECT trans_id FROM table2 t2 WHERE t2.trans_id= id)
SELECT CompanyName, 
       ProductCount = (SELECT COUNT(P.id)
                         FROM [Product] P
                        WHERE P.SupplierId = S.Id)
  FROM Supplier S
DATE_SUB(CURRENT_DATE('UTC-3:00'), INTERVAL 1 DAY)
CURRENT_DATE('UTC-3:00')
SELECT
     *
     FROM
     EXTERNAL_QUERY("reby-cloud.eu.reby_prod",
     '''
     select 
     concat('cu_',id_to_text(cu.id)) as id,
     id_to_time(cu.id) as created_at,
     concat('acc_',id_to_text(cu.balance_account_id)) as account_id,
     concat('c_',id_to_text(cu.company_id)) as company_id,
     concat('usr_',id_to_text(cu.user_id)) as user_id,
     --id_to_time(cu.user_id) as user_first_created_at,
     firebase_user_token,
     is_deleted,
     updated_at
     from company_user cu 
     --where id >= id_from_time(NOW() - INTERVAL '1 DAY')
     --where date(id_to_time(id)) > now() - interval '3 day'
     where id < id_from_time(date('2020-06-01'))
     ''' )
     ;
# Packages
 
import os
from google.cloud import bigquery
from google.oauth2 import service_account
 
 
# Parameters (project, dataset and file list below are placeholders -- use your own)
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]='/Users/jmbenedetto/Library/Mobile Documents/com~apple~CloudDocs/#Formação/Dissertation/Research/02 Research v2/00 Base/key.json'
project_name = 'your-project-id'   # placeholder
dataset_name = 'your_dataset'      # placeholder
file_source = ['file_1.csv']       # placeholder values for the "file" query parameter

client = bigquery.Client(project=project_name)
dataset_ref = client.dataset(dataset_name)
load_config = bigquery.LoadJobConfig()
 
 
# Code
 
query = """
        DELETE 
        FROM dataset_name.table_name
        WHERE criteria;
"""
query_params = [
    bigquery.ArrayQueryParameter("file", "STRING", file_source)
]
 
job_config = bigquery.QueryJobConfig()
job_config.query_parameters = query_params
query_job = client.query(
    query,
    # Location must match that of the dataset(s) referenced in the query.
    location="EU",
    job_config=job_config,
)  # API request - starts the query
 
query_job.result(timeout=60.0)
assert query_job.state == "DONE"
--users with multiple cards
SELECT concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
       count(distinct(card__fingerprint))
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null
GROUP BY 1
having count(distinct(card__fingerprint))>5
ORDER BY 2 DESC
LIMIT 100;


--cards used by multiple accounts
SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
card__last_four,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null
GROUP BY 1,2
having count(distinct(ppg.user_id))>3
ORDER BY 3 DESC;

--users using the card with the fingerprint you enter:
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint='+7qy4w/5VsCCCtwf'   
ORDER BY 1 DESC
LIMIT 100


--users who have used cards that were used in more than 5 accounts
with data as (SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null and public.user.is_staff is false and public.user.is_deleted=false
GROUP BY 1
having count(distinct(ppg.user_id))>3
ORDER BY 2 DESC)
select
distinct(public.user.phone_number),
public.user.first_name,
public.user.last_name
--data.card__fingerprint
--distinct(ppg.user_id),
FROM data 
left join payment_processor_gateway ppg on ppg.card__fingerprint=data.card__fingerprint 
left join public.user on public.user.id=ppg.user_id
and public.user.is_staff is false and public.user.phone_number is not null
--order by data.card__fingerprint;

--users with devices on which more than 5 accounts have been registered
with data as (SELECT hash_device,
       count(hash_device) as number_accounts
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT distinct(public.user.phone_number),
public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       --data.hash_device,
       service_area.name,
       user_last_location.last_location_at
FROM DATA
LEFT JOIN user_device ud ON ud.hash_device=data.hash_device
LEFT JOIN public.user ON public.user.id=ud.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
where service_area.name='Terrassa'
ORDER BY user_last_location.last_location_at desc
;

--Analysis of user Alba
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint,
card__last_four
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where /* reason is null and*/ card__last_four in('1058','9219')  
ORDER BY 1 DESC
LIMIT 100

--users who have registered on more than 5 devices and have a negative balance
with data as (SELECT id_to_text(user_id) as user_id,
       count(hash_device) as number_devices
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       data.user_id,
       data.number_devices,
       service_area.name,
       user_last_location.last_location_at,
       account.balance__amount
FROM DATA
LEFT JOIN public.user ON id_to_text(public.user.id)=data.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
left join account on public.user.balance_account_id=account.id
where service_area.name='Terrassa' and account.balance__amount<0
ORDER BY user_last_location.last_location_at desc
;
postgresql://[user[:password]@][netloc][:port][/dbname][?param1=value1&...]
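A hypothetical example of that format (all values made up):
postgresql://postgres:secret@localhost:5432/test?connect_timeout=10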
--New user creation
with base_data as (
select 
    date(u.created_at) as date,
    date_trunc(date(u.created_at),MONTH) as month,
    mv.service_area,
    sum(if(mv.first_ride_date is not null,1,0)) as has_ride,
    count(*) users_created
from `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv
    on u.id = mv.user_id
where mv.service_area = 'Sevilla'
group by 1,2,3
--order by 1 desc
)
select 
    *
    --lag(users_created) over (partition by month order by month asc) as prev_month_users
from base_data
order by date desc
;
--individual ride metrics
select
    date(created_at) as date,
    r.id as ride_id,
    r.minutes,
    extract(hour from datetime(created_at,"Europe/Madrid")) as hour,
    if(plan_usage_id is null,0,1) as plan_usage,
    FORMAT_DATE('%A', date(created_at)) AS weekday_name,
    r.distance/1000 as distance_km,
    r.ride_cost_time as ride_cost,
    geo.Distri_11D as distrito_inicial,
    geo2.Distri_11D as distrito_final,
from analytics_reby_v1_eu.pg_rides r
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo
    on st_within(st_geogpoint(r.longitude_initial,r.latitude_initial),geo.geometry)
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo2
    on st_within(st_geogpoint(r.longitude_final,r.latitude_final),geo2.geometry)
where r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
order by 1 desc
;
--ride linestrings
with rides_data as (
select
	id,
  created_at,
  starting.latitude as lat_init,
  starting.longitude as long_init,
  ending.latitude as lat_fin,
  ending.longitude as long_fin,
  distance/1000 as dist_km,
  minutes,
	--CONCAT(starting.latitude,',',starting.longitude) AS marker_start,
	--CONCAT(ending.latitude,',',ending.longitude) AS marker_end
from (
select
  r.created_at,
  r.minutes,
  r.distance,
  r.id,
  --path,
  path[offset(0)] as starting,
  path[offset(array_length(path)-1)] as ending,
  --array_length(path)
from `analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_vehicles` v
  on r.vehicle_id = v.id
where array_length(path) > 3
and date(r.created_at) >= '2021-06-01'
and date(r.created_at) <= '2021-09-01'
and r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
--and v.vehicle_type = 'motorcycle'
--where id = 'r_3qm5ua4jymv1ta3tbmq1'
--group by 1
  )
),

linestrings as (
select 
  ri.id,
  --st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  concat("Linestring(",string_agg(concat(p.longitude," ",p.latitude), ","),")") as path
from `analytics_reby_v1_eu.pg_rides_json` ri, unnest(path) as p
where id in (select id from rides_data)
and date(created_at) >= '2021-06-01'
and date(created_at) <= '2021-09-01'
group by 1
),

linestrings_geo as (
select 
  rd.id,
  st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  st_asgeojson(st_geogpoint(rd.long_fin,rd.lat_fin)) as ending_point,
  st_asgeojson(SAFE.st_geogfromtext(path)) as paths
from linestrings ls
join rides_data rd on ls.id = rd.id
)

select * from linestrings_geo
with dte as (
    SELECT dte
    FROM unnest(generate_date_array(date('2021-03-11'), date('2021-07-31'))) dte
),
ids as (
    SELECT clientId,
    PARSE_DATE("%Y%m%d", date) as dte
    FROM `*table*`
)

SELECT dte.dte, count( distinct ids.clientId)
FROM dte, ids
WHERE ids.dte BETWEEN DATE_SUB(dte.dte, INTERVAL 7 DAY) and dte.dte
group by 1
order by 1 asc 
create or replace table `temp_eu.delete_gerardm_rcnp_` as (
select npr_rscb = RCnp as bool,* from (
SELECT 
        --vd.*,
        r.created_at,
        r.user_id,
        r.id as ride_id,
        SUM(rc.theoretical_cost)/100 AS RC, 
        SUM(rc.non_promoted)/100 AS RCnp, 
        SUM(rc.theoretical_cost-rc.non_promoted)/100 AS RCp,
        --ops_managers_dashboard.TRUNC_DATE_ISO('DAY', date(datetime(r.created_at,"Europe/Madrid"))) as week_at 
    FROM `reby-cloud.analytics_reby_v1_eu.pg_rides` AS r
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.vehicle_daily` AS vd
        ON r.vehicle_id = vd.vehicle_id AND date(datetime(r.created_at,"Europe/Madrid")) = vd.date
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` AS rc
        ON r.id = rc.ride_id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_vehicles` AS v
        ON r.vehicle_id = v.id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_service_area` AS sa
        ON r.service_area_id = sa.service_area_id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_tpl` AS tpl
        ON vd.tpl_provider_id = tpl.id
   -- WHERE 
        --date(datetime(r.created_at,"Europe/Madrid")) >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND ST_DISTANCE (ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GEOGPOINT(tpl.longitude, tpl.latitude)) > 200
        --AND ST_WITHIN(ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GeogFromText(sa.geometry))
        --AND v.printed_code IS NOT NULL AND vd.date >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND rc.ride_id IS NOT NULL
        --where r.id = 'r_3rytmr4br7g5qlu4jbqh'
        group by 1,2,3
    --GROUP BY week_at 
    --ORDER BY week_at DESC
) as a--where ride_id = 'r_3t26l79xmu2xa7nyu3n1'
full join (select ride_id ride_id_rscb,service_area, sum(non_promoted)/100 as npr_rscb
            from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` group by 1,2) b
    on a.ride_id = b.ride_id_rscb
)

/*
create or replace table `reby-cloud.analytics_reby_v1_eu.vehicle_daily` 
PARTITION BY date
CLUSTER BY service_area_id,company_id
AS
select * except(r_n) from (
select
    *,
    row_number() over (partition by date,vehicle_id order by created_at asc) as r_n
from `reby-cloud.analytics_reby_v1_eu.vehicle_daily`
) where r_n = 1
*/
WHERE DATE(cat_tbl.date)
    BETWEEN PARSE_DATE('%Y%m%d', @DS_START_DATE) AND
    PARSE_DATE('%Y%m%d', @DS_END_DATE)
select * from table where random() < 0.01 limit 1000; -- Postgres: roughly a 1% random sample, capped at 1000 rows
%sql SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';
INSERT `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
    (date, name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount)
SELECT date('2021-07-17'), name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount
FROM `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
where date = '2021-07-18'
--'usr_3t7nh9pkvax1j6412nv1'
select * from (
with promotions as (
SELECT * FROM EXTERNAL_QUERY("reby-cloud.eu.reby_prod", 
'''
SELECT
    id_to_time(id) as created_at,
    concat('acc_',id_to_text(account_id)) as account_id,
    balance__amount as balance_amount,
    balance__currency as balance_currency,
    concat('c_',id_to_text(company_id)) as company_id,
    free_unlocks,
    concat('cpua_',id_to_text(id)) as id,
    concat('tx_',id_to_text(last_promotion_transaction_id)) as transaction_id,
    concat('usr_',id_to_text(user_id)) as user_id
FROM company_user_promotion_account
--where user_id = id_from_text('3t7nh9pkvax1j6412nv1')
;
'''
    )
)
select
    *
from (
    SELECT
    --promotions.balance_amount,
    sa.service_area,
    cp.code,
    cpu.*
    FROM
    `reby-cloud.analytics_reby_v1_eu.pg_company_promotion_usage` cpu
    left join `reby-cloud.analytics_reby_v1_eu.pg_company_promotion` cp on cpu.company_promotion_id = cp.id
    left join `reby-cloud.analytics_reby_v1_eu.reby_users_servicearea` sa on sa.user_id = cpu.user_id
    --left join promotions on cpu.user_id =promotions.user_id
    --where sa.service_area in ('Zaragoza','Gijón')
    --and date(expiration_date) > current_date and date(cpu.created_at) > current_date -15
    --where cpu.user_id = 'usr_3t7nh9pkvax1j6412nv1'
    --order by cpu.created_at desc
)
where
    (service_area = 'Zaragoza' AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Sevilla' AND code in ('REBYBCN','REBYJON','REBYGONA','TOTAREBY','HOLAMOTOS','REBYGO')) OR
    (service_area = 'Gijón' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Terrassa' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES')) OR
    (service_area in ('Napoles','Lecce','Minturno','Bergamo','Grosseto','Caserta') AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','REBYGO','HOLAMOTOS'))
)
--where user_id = 'usr_3t7nh9pkvax1j6412nv1'
where value_remaining_amount > 0
and is_expired is false
with ads_data as (
SELECT
    ad_id,
    adset_id,
    campaign_id,
    status,
    sum(impressions) clicks,
    sum(spend) as investment
FROM `reby-cloud.facebook_ads.insights` ai
left join `reby-cloud.facebook_ads.ads` ads on ai.ad_id = ads.id

--WHERE DATE(_PARTITIONTIME) = "2021-07-06"
group by 1,2,3,4
)
select * from ads_data where ad_id = '23843560033720454' order by clicks desc
--with application_opened as (
select
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id
from(
SELECT
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id,
    row_number() over (partition by anonymous_id order by original_timestamp asc) as r_n
FROM
  `reby-safir.react_native.install_attributed`
WHERE
  context_app_name = 'Reby'
  --and user_id = 'usr_3t8h6t8c4ppubjp9vjvh'
  and anonymous_id = '1eafe4e3-f4d8-416d-80c1-db0a95191b55'
  /*
  and _PARTITIONTIME BETWEEN TIMESTAMP_TRUNC(TIMESTAMP_MICROS(UNIX_MICROS(CURRENT_TIMESTAMP()) - 60 * 60 * 60 * 24 * 1000000), DAY, 'UTC')
					AND TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY, 'UTC')
    
			
    */
    )
--where r_n = 1
order by r_n asc
--)
;
select *
from `reby-safir.react_native.identifies`
where user_id = 'usr_3t8cxanx6kehxg38n5m1'
order by original_timestamp asc
;
select *
from `reby-safir.react_native.finish_ride`
where user_id = 'usr_3t8ethqfzs32sp1mcu5h'
order by original_timestamp asc
;

SELECT
    id,
    stats[offset(0)].total_rides,
    mv.service_area
FROM
  `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv on u.id = mv.user_id
where date(created_at) = current_date -2
order by 2 desc
;
select
    money_in,
    money_out,
    count(*),
    sum(amount)
from (
select
 amount,
 date(date_ts) as date,
 case
    when amount between 0 and 20 then "0-20"
    when amount between 21 and 500 then "20-500"
    when amount between 21 and 500 then "500-1000"
    when amount >1000 then "+1000"
    else "other"
end as money_in,
case
    when amount between -20 and 0 then "0-20"
    when amount between -500 and -21 then "20-500"
    when amount between -1000 and -501 then "500-1000"
    when amount <-1000 then "+1000"
    else "other"
end as money_out,

from `reby-cloud.reby_fin_eu.payments_all_platforms`
where
  date(date_ts) BETWEEN '2021-04-01' and '2021-06-30'
  and payment_provider = 'kernel'
--order by date_ts desc
)
group by 1,2
with data as (
select
    date(created_at) as date,
    sum(if(app_promoting_co = 'Reby' and owner_co != 'Reby',1,0)) as hey,
    count(*) as all_
from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
where type like 'ride%'
    and date(created_at) > current_date - 50
    and service_area = 'Barcelona'
group by 1
--order by 1 desc
)
select hey / all_ from data order by 1 desc
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Search column</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
			</Declarations>
			<Code Language="SQL">
<![CDATA[SELECT s.name [Schema], tb.name [Table], c.name [Column], tp.name [Type]
FROM sys.schemas s
JOIN sys.tables tb ON tb.schema_id = s.schema_id
JOIN sys.columns c ON c.object_id = tb.object_id
JOIN sys.types tp ON tp.system_type_id = c.system_type_id
WHERE c.name LIKE N'$ColumnName$'
AND NOT tp.name = N'sysname'
ORDER BY [Schema], [Table], [Column];]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Check nr of fractional digits</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>Exponent</ID>
					<ToolTip>Exponentiation</ToolTip>
					<Default>2</Default>
				</Literal>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
				<Literal>
					<ID>TableName</ID>
					<ToolTip>Name of the table</ToolTip>
					<Default>TableName</Default>
				</Literal>
			</Declarations>
			<Code Language="SQL">
<![CDATA[DECLARE @power DECIMAL = POWER(10, $Exponent$);
SELECT Value
FROM (SELECT $ColumnName$ AS Value FROM dbo.$TableName$) AS Focus
WHERE ROUND(Value * @power, 0, 1) <> (Value * @power);]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
select pgc.conname as constraint_name,
       ccu.table_schema as table_schema,
       ccu.table_name,
       ccu.column_name,
       pg_get_constraintdef(pgc.oid) as definition
from pg_constraint pgc
join pg_namespace nsp on nsp.oid = pgc.connamespace
join pg_class  cls on pgc.conrelid = cls.oid
left join information_schema.constraint_column_usage ccu
          on pgc.conname = ccu.constraint_name
          and nsp.nspname = ccu.constraint_schema
where contype ='c'
order by pgc.conname;
CREATE TABLE test ( 
  id INT NOT NULL AUTO_INCREMENT,
  name VARCHAR(50) NOT NULL,
  email VARCHAR(100) NOT NULL,
  PRIMARY KEY (id)
);
import wrds
db = wrds.Connection(wrds_username='joe')
db.raw_sql('SELECT date,dji FROM djones.djdaily')
/* Gets reps */
SELECT fieldA, COUNT(*)
FROM tableA
GROUP BY fieldA
HAVING COUNT(*) > 1

/* Use reps to filter results */
SELECT a.*
FROM tableA a
JOIN (
	SELECT fieldA, COUNT(*) as 'count'
	FROM tableA
	GROUP BY fieldA
	HAVING COUNT(*) > 1
) b
ON a.fieldA = b.fieldA
CREATE TABLE Persons (
    ID int NOT NULL,
    LastName varchar(255) NOT NULL,
    FirstName varchar(255),
    Age int,
    CHECK (Age>=18)
);
select studentID, FirstName, LastName, FirstName + ' ' + LastName as FullName
from student;
select users.last_login,us.*,users.* from users left join user_school us on us.user_id = users.id where school_id is not null
and last_login between '2021-01-01'::date and '2021-12-31'::date
UPDATE public.users
SET name=concat('User',id), email=concat('user',id,'@email.com')
WHERE name not like '%Testbruker';
/*
This script is given "As Is" with no warranties and plenty of caveats. Use at your own risk!
For more on data profiling, see Chapter 10 in "SQL Server 2012 Data Integration Recipes", Apress, 2012
*/
-----------------------------------------------------------------------
-- User-defined variables
-----------------------------------------------------------------------
USE CarSales -- Your database here
GO
DECLARE @TABLE_SCHEMA NVARCHAR(128) = 'dbo'  -- Your schema here
DECLARE @TABLE_NAME NVARCHAR(128) = 'client' -- Your table here
DECLARE @ColumnListIN NVARCHAR(4000) = ''    -- Enter a comma-separated list of specific columns
                                                     -- to profile, or leave blank for all
DECLARE @TextCol BIT = 1  -- Analyse all text (char/varchar/nvarchar) data type columns
DECLARE @NumCol BIT = 1   -- Analyse all numeric data type columns
DECLARE @DateCol BIT = 1  -- Analyse all date data type data type columns
DECLARE @LobCol BIT = 1   -- Analyse all VAR(char/nchar/binary) MAX data type columns (potentially time-consuming)
DECLARE @AdvancedAnalysis BIT = 1 -- Perform advanced analysis (threshold counts/domain analysis) 
                                  --(potentially time-consuming)
DECLARE @DistinctValuesMinimum INT = 200 -- Minimum number of distinct values to suggest a reference 
                                         -- table and/or perform domain analysis
DECLARE @BoundaryPercent NUMERIC(3,2) = 0.57 -- Percent of records at upper/lower threshold to suggest
                                             -- a possible anomaly
DECLARE @NullBoundaryPercent NUMERIC(5,2) = 90.00 -- Percent of NULLs to suggest a possible anomaly
DECLARE @DataTypePercentage INT = 2 -- Percentage variance allowed when suggesting another data type 
                                    -- for a column
-----------------------------------------------------------------------
-- Process variables
-----------------------------------------------------------------------
DECLARE @DATA_TYPE VARCHAR(128) = ''
DECLARE @FULLSQL VARCHAR(MAX) = ''
DECLARE @SQLMETADATA VARCHAR(MAX) = ''
DECLARE @NUMSQL VARCHAR(MAX) = ''
DECLARE @DATESQL VARCHAR(MAX) = ''
DECLARE @LOBSQL VARCHAR(MAX) = ''
DECLARE @COLUMN_NAME VARCHAR(128)
DECLARE @CHARACTER_MAXIMUM_LENGTH INT
DECLARE @ROWCOUNT BIGINT = 0
DECLARE @ColumnList VARCHAR(4000) = ' '
DECLARE @TableCheck TINYINT
DECLARE @ColumnCheck SMALLINT
DECLARE @DataTypeVariance INT
-----------------------------------------------------------------------
-- Start the process:
BEGIN TRY
-- Test that the schema and table exist
SELECT
 @TableCheck = COUNT (*) 
   FROM INFORMATION_SCHEMA.TABLES 
   WHERE TABLE_SCHEMA = @TABLE_SCHEMA 
   AND TABLE_NAME = @TABLE_NAME
IF @TableCheck <> 1
 BEGIN
  RAISERROR ('The table does not exist',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Parse list of columns to process / get list of columns according to types required
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ColumnList') IS NOT NULL
 DROP TABLE tempdb..#ColumnList;
CREATE TABLE #ColumnList (COLUMN_NAME VARCHAR(128), DATA_TYPE VARCHAR(128), CHARACTER_MAXIMUM_LENGTH INT) -- Used to hold list of columns to process
IF @ColumnListIN <> '' -- See if there is a list of columns to process
BEGIN
 -- Process list
 SET @ColumnList = @ColumnListIN + ','
 DECLARE @CharPosition int
 WHILE CHARINDEX(',', @ColumnList) > 0
  BEGIN
   SET @CharPosition = CHARINDEX(',', @ColumnList)
   INSERT INTO #ColumnList (COLUMN_NAME) VALUES (LTRIM(RTRIM(LEFT(@ColumnList, @CharPosition - 1))))
   SET @ColumnList = STUFF(@ColumnList, 1, @CharPosition, '')
  END -- While loop
-- update with datatype and length
  UPDATE CL
   SET CL.CHARACTER_MAXIMUM_LENGTH = ISNULL(ISC.CHARACTER_MAXIMUM_LENGTH,0)
      ,CL.DATA_TYPE = ISC.DATA_TYPE
   FROM #ColumnList CL
   INNER JOIN INFORMATION_SCHEMA.COLUMNS ISC
     ON CL.COLUMN_NAME = ISC.COLUMN_NAME
  WHERE ISC.TABLE_NAME = @TABLE_NAME
  AND ISC.TABLE_SCHEMA = @TABLE_SCHEMA
 END
-- If test for list of column names
ELSE
 BEGIN
 -- Use all column names, to avoid filtering
  IF @TextCol = 1
   BEGIN
    INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
     SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
     WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar', 'binary')
     AND TABLE_NAME = @TABLE_NAME
     AND TABLE_SCHEMA = @TABLE_SCHEMA
     AND CHARACTER_MAXIMUM_LENGTH > 0
   END
 IF @NumCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
 IF @DateCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('Date', 'DateTime', 'SmallDateTime', 'DateTime2', 'time')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
IF @LobCol = 1
 BEGIN
  INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'varbinary', 'xml')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
   AND CHARACTER_MAXIMUM_LENGTH = -1
 END
END
-- Else test to get all column names
-----------------------------------------------------------------------
-- Test that there are columns to analyse
SELECT @ColumnCheck = COUNT (*) FROM #ColumnList WHERE DATA_TYPE IS NOT NULL
IF @ColumnCheck = 0
 BEGIN
  RAISERROR('The columns do not exist in the selected database or no columns are selected',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Create Temp table used to hold profiling data
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ProfileData') IS NOT NULL
 DROP TABLE tempdb..#ProfileData;
 CREATE TABLE #ProfileData
 (
  TABLE_SCHEMA NVARCHAR(128),
  TABLE_NAME NVARCHAR(128),
  COLUMN_NAME NVARCHAR(128),
  ColumnDataLength INT,
  DataType VARCHAR(128),
  MinDataLength BIGINT,
  MaxDataLength BIGINT,
  AvgDataLength BIGINT,
  MinDate SQL_VARIANT,
  MaxDate SQL_VARIANT,
  NoDistinct BIGINT,
  NoNulls NUMERIC(32,4),
  NoZeroLength NUMERIC(32,4),
  PercentageNulls NUMERIC(9,4),
  PercentageZeroLength NUMERIC(9,4),
  NoDateWithHourminuteSecond BIGINT NULL,
  NoDateWithSecond BIGINT NULL,
  NoIsNumeric BIGINT NULL,
  NoIsDate BIGINT NULL,
  NoAtLimit BIGINT NULL,
  IsFK BIT NULL DEFAULT 0,
  DataTypeComments NVARCHAR(1500)
 );
-- Get row count
DECLARE @ROWCOUNTTEXT NVARCHAR(1000) = ''
DECLARE @ROWCOUNTPARAM NVARCHAR(50) = ''
SET @ROWCOUNTTEXT = 'SELECT @ROWCOUNTOUT = COUNT (*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)'
SET @ROWCOUNTPARAM = '@ROWCOUNTOUT INT OUTPUT'
EXECUTE sp_executesql @ROWCOUNTTEXT, @ROWCOUNTPARAM, @ROWCOUNTOUT = @ROWCOUNT OUTPUT
-----------------------------------------------------------------------
-- Test that there are records to analyse
IF @ROWCOUNT = 0
 BEGIN
  RAISERROR('There is no data in the table to analyse',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Define the dynamic SQL used for each column to analyse
-----------------------------------------------------------------------
SET @SQLMETADATA = 'INSERT INTO #ProfileData (ColumnDataLength,COLUMN_NAME,TABLE_SCHEMA,TABLE_NAME,DataType,MaxDataLength,MinDataLength,AvgDataLength,MaxDate,MinDate,NoDateWithHourminuteSecond,NoDateWithSecond,NoIsNumeric,NoIsDate,NoNulls,NoZeroLength,NoDistinct)'
DECLARE SQLMETADATA_CUR CURSOR LOCAL FAST_FORWARD FOR 
 SELECT COLUMN_NAME, CHARACTER_MAXIMUM_LENGTH, DATA_TYPE FROM #ColumnList
OPEN SQLMETADATA_CUR 
FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
WHILE @@FETCH_STATUS = 0 
 BEGIN 
  SET @SQLMETADATA = @SQLMETADATA +'
  SELECT TOP 100 PERCENT ' + CAST(@CHARACTER_MAXIMUM_LENGTH AS VARCHAR(20)) + ' ,''' + QUOTENAME(@COLUMN_NAME) + '''
  ,''' + QUOTENAME(@TABLE_SCHEMA) + '''
  ,''' + QUOTENAME(@TABLE_NAME) + '''
  ,''' + @DATA_TYPE + ''''
   + CASE
      WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') 
	   AND @CHARACTER_MAXIMUM_LENGTH >= 0 
	     THEN + '
  , MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISNUMERIC(' + QUOTENAME(@COLUMN_NAME) + ') = 1) 
  ,(SELECT COUNT (*) from ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISDATE(' + QUOTENAME(@COLUMN_NAME) + ') = 1) '
  WHEN @DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN + '
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,AVG(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NUMERIC(36,2)))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('DateTime', 'SmallDateTime') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,(SELECT COUNT (*) from ' 
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE (CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ' ) - FLOOR(CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ')) <> 0))
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE DATEPART(ss,' + QUOTENAME(@COLUMN_NAME) + ') <> 0 OR DATEPART(mcs,' + QUOTENAME(@COLUMN_NAME) + ') <> 0) 
  ,NULL 
  ,NULL '
    WHEN @DATA_TYPE IN ('DateTime2') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL
  ,NULL
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('Date') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX('
   + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN('
  + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('xml') THEN + '
  ,MAX(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,MIN(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,AVG(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  WHEN @DATA_TYPE IN ('varbinary','varchar','nvarchar') AND  @CHARACTER_MAXIMUM_LENGTH = -1 THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('binary') THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('time') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   ELSE + '
  ,NULL 
  ,NULL
  ,NULL
  ,NULL
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  END + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NULL)'
   + CASE
   WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') THEN + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) +  ' WHERE LEN(LTRIM(RTRIM(' + QUOTENAME(@COLUMN_NAME) + '))) = '''')'
   ELSE + '
  ,NULL'
   END + '
  ,(SELECT COUNT(DISTINCT ' + QUOTENAME(@COLUMN_NAME) + ') FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NOT NULL )
  FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)
  UNION'
 FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
END 
CLOSE SQLMETADATA_CUR 
DEALLOCATE SQLMETADATA_CUR 
SET @SQLMETADATA = LEFT(@SQLMETADATA, LEN(@SQLMETADATA) -5)
EXEC (@SQLMETADATA)
-----------------------------------------------------------------------
-- Final Calculations
-----------------------------------------------------------------------
-- Indicate Foreign Keys
; WITH FK_CTE (FKColumnName)
AS
(
 SELECT
   DISTINCT CU.COLUMN_NAME
  FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
   INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CU
     ON TC.CONSTRAINT_NAME = CU.CONSTRAINT_NAME 
     AND TC.TABLE_SCHEMA = CU.TABLE_SCHEMA 
     AND TC.TABLE_NAME = CU.TABLE_NAME
     AND TC.TABLE_SCHEMA = @TABLE_SCHEMA
     AND TC.TABLE_NAME = @TABLE_NAME
     AND CONSTRAINT_TYPE = 'FOREIGN KEY'
)
UPDATE P
 SET P.IsFK = 1
 FROM #ProfileData P
  INNER JOIN FK_CTE CTE
   ON P.COLUMN_NAME = CTE.FKColumnName
-- Calculate percentages
UPDATE #ProfileData
 SET PercentageNulls = (NoNulls / @ROWCOUNT) * 100
    ,PercentageZeroLength = (NoZeroLength / @ROWCOUNT) * 100
-- Add any comments
-- Datatype suggestions
-- First get number of records where a variation could be an anomaly
SET @DataTypeVariance = ROUND((@ROWCOUNT * @DataTypePercentage) / 100, 0)
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the DATE types. '
 WHERE NoIsDate BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the NUMERIC types. '
 WHERE NoIsNumeric BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be INT type. '
 WHERE MinDataLength >= -2147483648
 AND MaxDataLength <= 2147483647
 AND DataType IN ('bigint')
 
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLINT type. '
 WHERE MinDataLength >= -32768
 AND MaxDataLength <= 32767
 AND DataType IN ('bigint','int')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be TINYINT type. '
 WHERE MinDataLength >= 0
 AND MaxDataLength <= 255
 AND DataType IN ('bigint','int','smallint')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLDATETIME type. '
 WHERE NoDateWithSecond = 0
 AND MinDate >= '19000101'
 AND MaxDate <= '20790606'
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATE type (SQL Server 2008 only). '
 WHERE NoDateWithHourminuteSecond = 0
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATETIME type. '
 WHERE MinDate >= '17530101'
 AND MaxDate <= '99991231'
 AND DataType IN ('datetime2')
-- Empty column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Seems empty - is it required? '
 WHERE (PercentageNulls = 100 OR PercentageZeroLength = 100)
 AND IsFK = 0
-- Null column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'There is a large percentage of NULLs - attention may be required. '
 WHERE PercentageNulls >= @NullBoundaryPercent
-- Distinct value suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for reference/lookup table (contains NULLs).'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND PercentageNulls <> 100
 AND NoNulls <> 0
-- FK suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for Foreign Key.'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND NoNulls = 0
 AND DataType NOT LIKE '%Date%'
 AND DataType <> 'Time'
-- Filestream suggestions
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength >= 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly not a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength < 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
-- Sparse Column Suggestions
IF OBJECT_ID('tempdb..#SparseThresholds') IS NOT NULL
  DROP TABLE tempdb..#SparseThresholds;
  CREATE TABLE #SparseThresholds (DataType VARCHAR(128), Threshold NUMERIC(9,4))
  INSERT INTO #SparseThresholds (DataType, Threshold)
   VALUES 
    ('tinyint',86),
    ('smallint',76),    
    ('int',64),    
    ('bigint',52),    
    ('real',64),    
    ('float',52),    
    ('money',64),    
    ('smallmoney',64),    
    ('smalldatetime',52),    
    ('datetime',52),    
    ('uniqueidentifier',43),    
    ('date',69),    
    ('datetime2',52),    
    ('decimal',42),    
    ('numeric',42),    
    ('char',60),    
    ('varchar',60),    
    ('nchar',60),    
    ('nvarchar',60),    
    ('binary',60),    
    ('varbinary',60),    
    ('xml',60)    
; WITH Sparse_CTE (COLUMN_NAME, SparseComment)
AS
(
SELECT
  P.COLUMN_NAME
 ,CASE
  WHEN P.PercentageNulls >= T.Threshold THEN 'Could benefit from sparse columns. '
  ELSE ''
  END AS SparseComment
FROM #ProfileData P
 INNER JOIN #SparseThresholds T
  ON P.DataType = T.DataType
)
UPDATE PT
  SET PT.DataTypeComments = 
      CASE WHEN PT.DataTypeComments IS NULL THEN CTE.SparseComment
           ELSE ISNULL(PT.DataTypeComments,'') + CTE.SparseComment + '. '
      END
 FROM #ProfileData PT
  INNER JOIN Sparse_CTE CTE
   ON PT.COLUMN_NAME = CTE.COLUMN_NAME
-----------------------------------------------------------------------
-- Optional advanced analysis
-----------------------------------------------------------------------
IF @AdvancedAnalysis = 1
 BEGIN
-----------------------------------------------------------------------
-- Data at data boundaries
-----------------------------------------------------------------------
  IF OBJECT_ID('tempdb..#LimitTest') IS NOT NULL
    DROP TABLE tempdb..#LimitTest;
    CREATE TABLE #LimitTest (COLUMN_NAME VARCHAR(128), NoAtLimit BIGINT);
    DECLARE @advancedtestSQL VARCHAR(MAX) = 'INSERT INTO #LimitTest (COLUMN_NAME, NoAtLimit)' + CHAR(13)
    SELECT @advancedtestSQL = @advancedtestSQL + 'SELECT '''+ COLUMN_NAME + ''', COUNT('+ COLUMN_NAME + ') FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + 
     CASE
       WHEN DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN ' WHERE '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
       ELSE ' WHERE LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
     END
    FROM #ProfileData 
    WHERE DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real','varchar', 'nvarchar', 'char', 'nchar', 'binary')
    SET @advancedtestSQL = LEFT(@advancedtestSQL,LEN(@advancedtestSQL) -6) 
    EXEC (@advancedtestSQL)
    UPDATE M
      SET M.NoAtLimit = T.NoAtLimit
         ,M.DataTypeComments = 
           CASE
             WHEN CAST(T.NoAtLimit AS NUMERIC(36,2)) / CAST(@ROWCOUNT AS NUMERIC(36,2)) >= @BoundaryPercent THEN ISNULL(M.DataTypeComments,'') + 'Large numbers of data elements at the max/minvalues. '
             ELSE M.DataTypeComments
           END
    FROM #ProfileData M
     INNER JOIN #LimitTest T
      ON M.COLUMN_NAME = T.COLUMN_NAME
   -----------------------------------------------------------------------
   -- Domain analysis
   -----------------------------------------------------------------------
   IF OBJECT_ID('tempdb..#DomainAnalysis') IS NOT NULL
     DROP TABLE tempdb..#DomainAnalysis;
   CREATE TABLE #DomainAnalysis
   (
    DomainName NVARCHAR(128)
   ,DomainElement NVARCHAR(4000)
   ,DomainCounter BIGINT
   ,DomainPercent NUMERIC(7,4)
   );
   DECLARE @DOMAINSQL VARCHAR(MAX) = 'INSERT INTO #DomainAnalysis (DomainName, DomainElement, DomainCounter) '
   DECLARE SQLDOMAIN_CUR CURSOR LOCAL FAST_FORWARD FOR 
     SELECT COLUMN_NAME, DataType 
	  FROM #ProfileData 
	   WHERE NoDistinct < @DistinctValuesMinimum
   OPEN SQLDOMAIN_CUR 
   FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   WHILE @@FETCH_STATUS = 0 
    BEGIN 
     SET @DOMAINSQL = @DOMAINSQL + 'SELECT ''' + @COLUMN_NAME + ''' AS DomainName, CAST( '+ @COLUMN_NAME + ' AS VARCHAR(4000)) AS DomainElement, COUNT(ISNULL(CAST(' + @COLUMN_NAME + ' AS NVARCHAR(MAX)),'''')) AS DomainCounter FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + ' GROUP BY ' + @COLUMN_NAME + ''
     + ' UNION '
     FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   END 
  CLOSE SQLDOMAIN_CUR 
  DEALLOCATE SQLDOMAIN_CUR 
  SET @DOMAINSQL = LEFT(@DOMAINSQL, LEN(@DOMAINSQL) -5) + ' ORDER BY DomainName ASC, DomainCounter DESC '
   EXEC (@DOMAINSQL)
   -- Now calculate percentages (this approach is faster than doing it when performing the domain analysis)
   ; WITH DomainCounter_CTE (DomainName, DomainCounterTotal)
   AS
  (
   SELECT DomainName, SUM(ISNULL(DomainCounter,0)) AS DomainCounterTotal
    FROM #DomainAnalysis 
    GROUP BY DomainName
  )
  UPDATE D
    SET D.DomainPercent = (CAST(D.DomainCounter AS NUMERIC(36,4)) / CAST(CTE.DomainCounterTotal AS NUMERIC(36,4))) * 100
   FROM #DomainAnalysis D
    INNER JOIN DomainCounter_CTE CTE
     ON D.DomainName = CTE.DomainName
   WHERE D.DomainCounter <> 0
 END
-- Advanced analysis
-----------------------------------------------------------------------
-- Output results from the profile and domain data tables
-----------------------------------------------------------------------
select
   *
 from #ProfileData
IF @AdvancedAnalysis = 1
 BEGIN
  select
    *
   from #DomainAnalysis
 END
END TRY
BEGIN CATCH
 SELECT
  ERROR_NUMBER() AS ErrorNumber
 ,ERROR_SEVERITY() AS ErrorSeverity
 ,ERROR_STATE() AS ErrorState
 ,ERROR_PROCEDURE() AS ErrorProcedure
 ,ERROR_LINE() AS ErrorLine
 ,ERROR_MESSAGE() AS ErrorMessage;
 
END CATCH
--this will update only one row that matches id = 1
update sessions
set start_date = '2020-04-20 10:12:15.653',
    end_date = '2020-04-22 15:40:30.123'
where id = 1;

--this will update multiple rows that match category = 1
update sessions
set end_date = null
where category = 1;
SELECT u.id, u.name as user_name, u.email as user_email, st.name as street_name, scr.id as scr_id, scr.name as scr_name, scr.orientation, scr.url_token, c.id AS clip_id, c.title AS clip_title FROM users AS u
JOIN city_user AS cu ON cu.user_id = u.id
JOIN streets AS st ON st.city_id = cu.city_id
JOIN screens AS scr ON scr.street_id = st.id
JOIN clip_screen as cs ON cs.screen_id = scr.id
JOIN clips AS c ON c.id = cs.clip_id
JOIN templates AS t ON t.id = c.template_id
WHERE c.id = 217130
GROUP BY u.id, scr.id, c.id
with tc as (
    select user_id, count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined`
    where date(created_at) < current_date
    group by 1
),

trf as (
    select user_id, count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
    where date(created_at) < current_date
    group by 1
)
select * from (
select  
    tc.user_id,
    tc.cnt as tc_cnt,
    trf.cnt as trf_cnt
from tc left join trf on tc.user_id = trf.user_id
--where tc.user_id = 'usr_3sqh76qtht1s97xa1qu1'
)
where tc_cnt > trf_cnt
-- these scripts will delete the tables if they already exist

DROP TABLE IF EXISTS StudentEnrollments;
DROP TABLE IF EXISTS Students;
DROP TABLE IF EXISTS Classrooms;


-- create and populate the students table
CREATE TABLE Students
(
	StudentId INTEGER PRIMARY KEY,
	FirstName VARCHAR(200) NOT NULL,
	LastName VARCHAR(200) NOT NULL,
	Nationality VARCHAR(100) NOT NULL,
	DateOfBirth DATETIME NULL
);


INSERT INTO Students
	(StudentId, FirstName, LastName, Nationality, DateOfBirth)
VALUES
	('1','Mickey', 'Mouse', 'American', '1991-05-02'),
	('2','Donald', 'Duck', 'Japanese', '1992-11-12'),
	('3','Goofy', 'Goof', 'American', '1980-04-15'),
	('4','Daisy', 'Duck', 'French', '1985-02-16'),
	('5','Huey', 'Duck', 'French', '1986-05-19'),
	('6','Scrooge', 'McDuck', 'Japanese', '1983-11-11'),
	('7','Minnie', 'Mouse', 'Canadian', '1983-11-30'),
	('8','Louie', 'Duck', 'French', '1985-09-09');

-- create and populate the classroom table
CREATE TABLE Classrooms
(
	ClassroomId INTEGER PRIMARY KEY,
	ClassName VARCHAR(200) NOT NULL,
	Weight DECIMAL NOT NULL
);

INSERT INTO Classrooms
	(ClassroomId, ClassName, Weight)
VALUES
	(1, 'Public Interaction', 0.10),
	(2, 'Pranks', 0.15),
	(3, 'Running', 0.15),
	(4, 'Acting', 0.30),
	(5, 'Making Jokes', 0.30);

-- create and populate the student enrollment table
CREATE TABLE StudentEnrollments
(
	StudentEnrollmentId INTEGER PRIMARY KEY,
	StudentId INTEGER NOT NULL,
	ClassroomId INTEGER NOT NULL,
	Grade DECIMAL NOT NULL,
	FOREIGN KEY(StudentId) REFERENCES Students(StudentId),
	FOREIGN KEY(ClassroomId) REFERENCES Classrooms(ClassroomId)
);

INSERT INTO StudentEnrollments
	(StudentEnrollmentId, StudentId, ClassroomId, Grade)
VALUES
	(1, 1, 1, 91),
	(2, 1, 2, 68),
	(3, 1, 3, 89),
	(4, 1, 4, 60),
	(5, 1, 5, 65),
	(6, 2, 1, 79),
	(7, 2, 2, 85),
	(8, 2, 3, 68),
	(9, 2, 4, 89),
	(10, 2, 5, 80),
	(11, 3, 1, 96),
	(12, 3, 2, 62),
	(13, 3, 3, 78),
	(14, 3, 4, 100),
	(15, 3, 5, 64),
	(16, 4, 1, 81),
	(17, 4, 2, 90),
	(18, 4, 3, 85),
	(19, 4, 4, 95),
	(20, 4, 5, 64),
	(21, 5, 1, 81),
	(22, 5, 2, 73),
	(23, 5, 3, 60),
	(24, 5, 4, 99),
	(25, 5, 5, 70),
	(26, 6, 1, 75),
	(27, 6, 2, 74),
	(28, 6, 3, 69),
	(29, 6, 4, 79),
	(30, 6, 5, 88),
	(31, 7, 1, 60),
	(32, 7, 2, 75),
	(33, 7, 3, 82),
	(34, 7, 4, 66),
	(35, 7, 5, 65),
	(36, 8, 1, 69),
	(37, 8, 2, 81),
	(38, 8, 3, 100),
	(39, 8, 4, 63),
	(40, 8, 5, 62);
CREATE TABLE tbl_price_history (
	pk_price_history SERIAL PRIMARY KEY,
	fk_sku character(10) REFERENCES tbl_sku (sku),
	date_of_change date,
	retail_price numeric
);
SELECT (DATE '2011-01-28', DATE '2011-02-01' + 1) OVERLAPS
       (DATE '2011-02-01', DATE '2011-02-01'    )
select
    v.printed_code,
    id_to_text(vd.id) as id_,
    id_to_time(vd.id) as created_at,
    vd.*
from vehicle_diagnostic vd 
left join vehicle v on vd.vehicle_id = v.id
where v.printed_code = '999007'
order by 3 desc
----
select
    v.printed_code,
    id_to_text(vr.id) as id_,
    id_to_time(vr.id) as created_at,
    vr.*
from vehicle_repair vr
left join vehicle v on vr.vehicle_id = v.id
where v.printed_code = '999007'
order by 3 desc
----
----
select
    *
from vehicle_distribution v
order by id desc
Better: COPY {table_name}({table_columns}) FROM STDIN WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE);


Also possible, but worse: COPY r FROM 'file.csv' WITH NULL AS 'NULL' DELIMITER ';' CSV HEADER;
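A concrete sketch of the preferred form, using a hypothetical prices(sku, retail_price) table; from psql, \copy runs the same statement and feeds the file to STDIN:

-- hypothetical table and file names
-- from psql: \copy prices(sku, retail_price) FROM 'prices.csv' WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE)
COPY prices(sku, retail_price) FROM STDIN WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE);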
-- updating a table during runtime of the script with a join

ALTER TABLE shops_per_city ADD city_latitude DOUBLE;
/*ALTER TABLE shops_per_city ADD merchant_type DOUBLE*/

UPDATE shops_per_city AS shop
INNER JOIN city_list AS city
ON shop.city_id = city.city_id
SET shop.city_latitude = city.city_latitude;


-- selecting the list for a where statement with a subquery

SELECT * 
FROM shopstation
WHERE Dealer_ID_Long IN 
	(select Dealer_ID_Long
	FROM shop_list);
	
-- also possible to join on a subquery (note that there is no ; inside the subquery)
SELECT * -- this select needs to have all columns of the result table
FROM shopcounter_category_daily_v2 AS shop
INNER JOIN 
	(SELECT *
	FROM weather_owm_daily_2014
	WHERE cityId = 2761369) AS weather
ON shop.RECORD_DATE = weather.DATE;

-- update a column using CASE
UPDATE creditcard_merchtype 
SET online =
	(CASE
    	WHEN Merchantname LIKE "%.at%" THEN 1
    	WHEN Merchantname LIKE "%.com%" THEN 1
    	ELSE 0
	END);
	
-- alter column name	
ALTER TABLE creditcard_at_v1 CHANGE cityId city_id INT;


-- creating temporary table for later use:

CREATE TEMPORARY TABLE id_list AS
SELECT COUNT(*) AS days, dealer_name, DEALER_ID, Dealer_ID_Long
FROM shopcounter
WHERE YEAR(RECORD_DATE) > 2017
GROUP BY dealer_name, DEALER_ID, Dealer_ID_Long
HAVING days > 752;

CREATE TABLE shopcounter_stable AS
SELECT *
FROM shopcounter
WHERE Dealer_ID_Long IN 
	(SELECT Dealer_ID_Long
	FROM id_list) AND YEAR(RECORD_DATE) > 2017;
#!/bin/bash -v

hadoop fs -put ./AllstarFull.csv /user/bigdataproject/AllstarFull.csv
hive -f ./AllstarFull.hive

hadoop fs -put ./Appearances.csv /user/bigdataproject/Appearances.csv
hive -f ./Appearances.hive

hadoop fs -put ./AwardsManagers.csv /user/bigdataproject/AwardsManagers.csv
hive -f ./AwardsManagers.hive
insert into table db_name.table_name
select 'ALL','Done';
-- create the databases
CREATE DATABASE IF NOT EXISTS projectone;

-- create the users for each database
CREATE USER 'projectoneuser'@'%' IDENTIFIED BY 'somepassword';
GRANT CREATE, ALTER, INDEX, LOCK TABLES, REFERENCES, UPDATE, DELETE, DROP, SELECT, INSERT ON `projectone`.* TO 'projectoneuser'@'%';

FLUSH PRIVILEGES;
RUN sed -Ei 's/^(bind-address|log)/#&/' /etc/mysql/my.cnf
select * 
from folder f
  join uploads u ON u.id = f.folderId 
where '8' = ANY (string_to_array(some_column,','))
SELECT string_to_array('xx~^~yy~^~zz', '~^~', 'yy');


ALTER TABLE test1 ADD COLUMN id SERIAL PRIMARY KEY;

This is all you need to:

- add the id column,
- populate it with a sequence from 1 to count(*),
- set it as primary key / not null.
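For reference, a sketch of the explicit steps the one-line ALTER above performs, assuming the same table test1 (the sequence name mirrors what SERIAL would create):

-- hypothetical spelled-out equivalent of the single ALTER
CREATE SEQUENCE test1_id_seq;
ALTER TABLE test1 ADD COLUMN id integer NOT NULL DEFAULT nextval('test1_id_seq');
ALTER SEQUENCE test1_id_seq OWNED BY test1.id;
ALTER TABLE test1 ADD PRIMARY KEY (id);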
SELECT POSITION('our' in 'w3resource');


SELECT id, account_id, amount, created_at, null as type
 	FROM subscription_invoice
UNION ALL
SELECT id, account_id, amount, created_at, 
	`type` as type
FROM subscription_payment
ORDER BY created_at DESC
// `batch` (a queue of task functions executed later) and `client` (a connected
// node-postgres client with the old copyFrom streaming API) are assumed to be
// defined by the surrounding script.
var fs = require('fs');

function processSQLFile(fileName) {

  // Extract SQL queries from files. Assumes no ';' in the fileNames
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm," ") // remove newlines
    .replace(/\s+/g, ' ') // excess white space
    .split(";") // split into all statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function(el) {return el.length != 0}); // remove any empty ones

  // Execute each SQL query sequentially
  queries.forEach(function(query) {
    batch.push(function(done) {
      if (query.indexOf("COPY") === 0) { // COPY - needs special treatment
        var regexp = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/gmi;
        var matches = regexp.exec(query);
        var table = matches[1];
        var fileName = matches[2];
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + fileName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else { // Other queries don't need special treatment
        client.query(query, function(err, result) {
          done();
        });
      }
    });
  });
}
SELECT 
    orderNumber,
    orderDate,
    customerName,
    orderLineNumber,
    productName,
    quantityOrdered,
    priceEach
FROM
    orders
INNER JOIN orderdetails 
    USING (orderNumber)
INNER JOIN products 
    USING (productCode)
INNER JOIN customers 
    USING (customerNumber)
ORDER BY 
    orderNumber, 
    orderLineNumber;
ALTER PARTITION SCHEME [sh_trans_date]
 NEXT USED [PRIMARY]
 ALTER PARTITION FUNCTION [pf_trans_date_byhour]() SPLIT RANGE('2019/03/29')
ALTER DATABASE database CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE USER 'user'@'localhost' IDENTIFIED BY 'pass';
GRANT ALL PRIVILEGES ON database.* TO 'user'@'localhost';
SELECT 
   SS.SEC_NAME,
   STUFF((SELECT '; ' + US.USR_NAME 
          FROM USRS US
          WHERE US.SEC_ID = SS.SEC_ID
          ORDER BY USR_NAME
          FOR XML PATH('')), 1, 1, '') [SECTORS/USERS]
FROM SALES_SECTORS SS
GROUP BY SS.SEC_ID, SS.SEC_NAME
ORDER BY 1
SELECT
    TableName = tbl.table_schema + '.' + tbl.table_name, 
    TableDescription = tableProp.value,
    ColumnName = col.column_name, 
    ColumnDataType = col.data_type,
    ColumnDescription = colDesc.ColumnDescription
FROM information_schema.tables tbl
INNER JOIN information_schema.columns col 
    ON col.table_name = tbl.table_name
LEFT JOIN sys.extended_properties tableProp 
    ON tableProp.major_id = object_id(tbl.table_schema + '.' + tbl.table_name) 
        AND tableProp.minor_id = 0
        AND tableProp.name = 'MS_Description' 
LEFT JOIN (
    SELECT sc.object_id, sc.column_id, sc.name, colProp.[value] AS ColumnDescription
    FROM sys.columns sc
    INNER JOIN sys.extended_properties colProp
        ON colProp.major_id = sc.object_id
            AND colProp.minor_id = sc.column_id
            AND colProp.name = 'MS_Description' 
) colDesc
    ON colDesc.object_id = object_id(tbl.table_schema + '.' + tbl.table_name)
        AND colDesc.name = col.COLUMN_NAME
WHERE tbl.table_type = 'base table'
--AND tableProp.[value] IS NOT NULL OR colDesc.ColumnDescription IS NOT null
mysql > SET GLOBAL sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
SELECT column_name as 'Column Name', data_type as 'Data Type'
FROM information_schema.columns
WHERE table_name = 'hrStaff' 
-- From https://github.com/bertwagner/SQLServer/blob/master/Non-SARGable%20Execution%20Plans.sql
-- This script will check the execution plan cache for any queries that are non-SARGable.
-- It does this by finding table and index scans that contain scalar operators

SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED
 
DECLARE @dbname SYSNAME
SET @dbname = QUOTENAME(DB_NAME());
 
WITH XMLNAMESPACES (DEFAULT 'http://schemas.microsoft.com/sqlserver/2004/07/showplan')

SELECT
   stmt.value('(@StatementText)[1]', 'varchar(max)') AS [Query],
   query_plan AS [QueryPlan],
   sc.value('(.//Identifier/ColumnReference/@Schema)[1]', 'varchar(128)') AS [Schema], 
   sc.value('(.//Identifier/ColumnReference/@Table)[1]', 'varchar(128)') AS [Table], 
   sc.value('(.//Identifier/ColumnReference/@Column)[1]', 'varchar(128)') AS [Column] ,
   CASE WHEN s.exist('.//TableScan') = 1 THEN 'TableScan' ELSE 'IndexScan' END AS [ScanType],
   sc.value('(@ScalarString)[1]', 'varchar(128)') AS [ScalarString]
FROM 
	sys.dm_exec_cached_plans AS cp
	CROSS APPLY sys.dm_exec_query_plan(cp.plan_handle) AS qp
	CROSS APPLY query_plan.nodes('/ShowPlanXML/BatchSequence/Batch/Statements/StmtSimple') AS batch(stmt)
	CROSS APPLY stmt.nodes('.//RelOp[TableScan or IndexScan]') AS scan(s)
	CROSS APPLY s.nodes('.//ScalarOperator') AS scalar(sc)
WHERE
    s.exist('.//ScalarOperator[@ScalarString]!=""') = 1 
    AND sc.exist('.//Identifier/ColumnReference[@Database=sql:variable("@dbname")][@Schema!="[sys]"]') = 1
	AND sc.value('(@ScalarString)[1]', 'varchar(128)') IS NOT NULL
select 
				   a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 day'::interval, 'DD-MM-YYYY')  jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
				   a.plafond_or,
				   a.penjaminan nilai_penjaminan,
				   c.is_konvensional,
				   0 nilai_asuransi_1,
					0 nilai_asuransi_2,
				  (a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
				 CASE
				        when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
				 END umur,
				  '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
							--(co-guarantee IJP - co-guarantee fee base) x commission percentage

                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_obligee f on a.id_obligee = f.id_obligee
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where 
				--( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND
				--d.id_reference is null and
				--a.flag_deletion is null and
	              -- d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				  a.flag_deletion is null and 		
					d.flag_delete is null and 
					( 
						( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
							( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
						) OR  
						
						( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
					)  
					
	          
				 union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					0 nilai_asuransi_1,
					0 nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship != 9
	              
	                union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					nilai_asuransi_1,
					nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    a.catatan note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							nilai_asuransi_2 * 5/100 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship_asuransi a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	          left join (
                    SELECT id_transaksi_suretyship, 
                            sum(nilai_asuransi_1)  nilai_asuransi_1, 
                            sum(nilai_asuransi_2)  nilai_asuransi_2 FROM (
                                SELECT
                                    id_transaksi_suretyship,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 1
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_1,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 2
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_2 
                                FROM
                                tr_transaksi_suretyship_ext	 
                            ) ss GROUP BY id_transaksi_suretyship
                ) i ON a.id_transaksi_suretyship = i.id_transaksi_suretyship
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship = 9
	               
ALTER TABLE provider DROP PRIMARY KEY, ADD PRIMARY KEY(person, place, thing);
Use the SUM aggregate function with a CASE statement, as below. If you want the sum of t.value broken down by t.clock, group by that column so you get each clock value alongside the summed values.

select sum(case when  i.hostid='223344'   and t.itemid = '0223344' then t.value end) as FirstValue,sum(case when  i.hostid='112233' and t.itemid = '0112233' then t.value end) as SecondValue
from hosts h, items i,history_uint t
where i.hostid=h.hostid and t.itemid=i.itemid
If it should be grouped by t.clock, then:

   select t.clock, sum(case when  i.hostid='223344'   and t.itemid = '0223344' then t.value end) as FirstValue,sum(case when  i.hostid='112233' and t.itemid = '0112233' then t.value end) as SecondValue
    from hosts h, items i,history_uint t
    where i.hostid=h.hostid and t.itemid=i.itemid
    group by t.clock

You can also try the following approach:

SELECT i.hostid,t.itemid,t.value as OneValues, t.clock as time
FROM hosts h 
INNER JOIN items i 
    ON i.hostid=h.hostid
    AND i.hostid IN ('223344','112233')
    AND i.itemid IN ('0223344','0112233')
INNER JOIN  history_uint t 
    ON t.itemid=i.itemid
Dim tempo = listbox1.SelectedItems
Dim students As String = ""
For Each selected In tempo
    students = selected.ToString() & "," & students
Next