Snippets Collections
-- Find dates in ManImp that occur more than once (duplicate report).
SELECT DT as TheDate, Count(*) as TheCount
From ManImp
Group By DT 
Having count(*)>1
Order by TheCount DESC
-- De-duplicate: keep only the highest ID per DT and delete the rest.
-- NOTE(review): NOT IN is safe here only while ID is NOT NULL --
-- MAX(ID) per group never yields NULL in that case; confirm the schema.
delete
FROM ManImp
WHERE ID NOT IN
(
    SELECT MAX(ID)
	FROM ManImp
	GROUP BY DT
)
-- SQL Server: load a CSV file into ManualImport.
BULK INSERT ManualImport
FROM 'C:\temp\Second_Data-2022-09-15.csv'
WITH
(
    FIRSTROW = 2, -- as 1st one is header
    FIELDTERMINATOR = ',',  --CSV field delimiter
    ROWTERMINATOR = '\n',   --Use to shift the control to next row
    TABLOCK
)

-- Sanity check: date range and row count of the freshly imported data.
SELECT MIN(DT) as MinDate, MAX(DT) as MaxDate, Count(DT) as [RowCount]
FROM ManualImport
-- Add a composite foreign key from DopingTest to Results.
-- Fix: ALTER TABLE ... ADD CONSTRAINT takes no parentheses around the
-- constraint clause -- the original `ADD( CONSTRAINT ... )` form is a
-- syntax error in both SQL Server and Oracle.
ALTER TABLE DopingTest
ADD CONSTRAINT fk_RaceIDDT
    FOREIGN KEY (RaceID, HorseID)
    REFERENCES Results(RaceID, HorseID);
/*
Show all connections to the database
*/

-- PostgreSQL 9.2+ (uses the pid/state/query column names).
SELECT
	datname, pid, usename, client_addr, client_port,
    query_start, state_change, state, query
FROM
	pg_stat_activity
WHERE
	datname = '<database name HERE>'
ORDER BY
	state_change DESC;
-- List every user table, skipping the PostgreSQL system schemas.
SELECT table_schema,
       table_name
FROM information_schema.tables
WHERE table_type = 'BASE TABLE'
  AND table_schema NOT IN ('pg_catalog', 'information_schema');
-- Count the rows of Vwbckup through dynamic SQL, returning the value in
-- @cnt. Fixes vs. original: (1) stray comma after the first DECLARE was
-- a syntax error; (2) @cnt was concatenated into the SQL text while
-- still NULL, which made the whole command string NULL -- the count
-- must come back through an sp_executesql OUTPUT parameter instead.
DECLARE @selectsql nvarchar(4000)
DECLARE @cnt int

SET @selectsql = N'SELECT @cnt = COUNT(*) FROM Vwbckup'
EXEC sp_executesql @selectsql, N'@cnt int OUTPUT', @cnt = @cnt OUTPUT
-- Cash vs. check totals for active payments dated up to now.
SELECT SUM(CASE WHEN CPayment = 'Cash'  THEN CAmount ELSE 0 END) AS CashPaymentAmount,
       SUM(CASE WHEN CPayment = 'Check' THEN CAmount ELSE 0 END) AS CheckPaymentAmount
FROM TableOrderPayment
WHERE CPayment IN ('Cash', 'Check')
  AND CDate <= SYSDATETIME()
  AND CStatus = 'Active';
-- Report which column holds the greatest value. Ties resolve to the
-- first matching WHEN; the result is NULL when GREATEST yields NULL.
select 
  case greatest(col1,col2,col3,col4) 
    when col1 then 'col1:' || col1
    when col2 then 'col2:' || col2
    when col3 then 'col3:' || col3
    when col4 then 'col4:' || col4
    else null
  end as greatestcolumnname
from mytable;
-- Oracle: largest numeric suffix among matching code mnemonics.
-- NOTE(review): LIKE 'ClgSys' has no wildcard, so it is plain equality.
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
  FROM v_codevalue_web_codes_full cv
 WHERE cv.CODEVALUENAME LIKE 'Alipay%'
   AND cv.DOMAINNAME LIKE 'ClgSys';
# MySQL Workbench connection setup notes:
# connection name == any name we can give
#   ex: connection name = Mysql_connection
# connection_method = standard (TCP/IP)
# host name = localhost , port = 3306
# username = root (works like this only for a local server)
# password = root (for a local server we may use root as the password)
# test connection
-- Excel serial-date demo: serial 44652 == 2022-04-01
-- (Excel's day-zero epoch is 1899-12-30).
select 44652, '4/1/2022', dateadd(d,44652,'1899-12-30'), DateDiff(dd, '1899-12-30', '4/1/2022')
/* Firebird: format TDATE according to a picture FORMAT with tokens
   YYYY YY MM DD HH24 HH12 HH MI SS AM.
   Works by slicing the default CAST(timestamp AS VARCHAR) layout
   'YYYY-MM-DD HH:MI:SS....' and substituting the slices into FORMAT.
   Fix vs. original: the AM/PM indicator was computed but never
   substituted, so formats like 'HH12:MI AM' kept the literal 'AM'. */
CREATE OR ALTER FUNCTION DATE_TO_CHAR(TDATE TIMESTAMP, FORMAT VARCHAR(100))
RETURNS VARCHAR(100)
AS
DECLARE SDATE VARCHAR(100);
DECLARE DD VARCHAR(2);
DECLARE MM VARCHAR(2);
DECLARE YY VARCHAR(4);
DECLARE HH VARCHAR(2);
DECLARE MI VARCHAR(2);
DECLARE SS VARCHAR(2);
DECLARE XFORMAT VARCHAR(100);
DECLARE AM VARCHAR(2);
BEGIN
	XFORMAT = UPPER(FORMAT);
	SDATE = CAST(TDATE AS VARCHAR(100));
	-- slice the fixed positions of 'YYYY-MM-DD HH:MI:SS'
	YY = SUBSTRING(SDATE FROM 1 FOR 4);
	MM = SUBSTRING(SDATE FROM 6 FOR 2);
	DD = SUBSTRING(SDATE FROM 9 FOR 2);
	HH = SUBSTRING(SDATE FROM 12 FOR 2);
	MI = SUBSTRING(SDATE FROM 15 FOR 2);
	SS = SUBSTRING(SDATE FROM 18 FOR 2);
	-- replace order matters: YYYY before YY, HH24 before the 12-hour
	-- conversion below rewrites HH
	XFORMAT = REPLACE(XFORMAT, 'YYYY', YY);
	XFORMAT = REPLACE(XFORMAT, 'MM', MM);
	XFORMAT = REPLACE(XFORMAT, 'DD', DD);
	XFORMAT = REPLACE(XFORMAT, 'YY', SUBSTRING(YY FROM 3 FOR 2));
	XFORMAT = REPLACE(XFORMAT, 'HH24', HH);
	AM = 'AM';

	-- 12-hour conversion (keeps the original's '12:00 M' noon quirk)
	IF (HH='12') THEN
	BEGIN
		AM = 'M';
		IF (MI > '00') THEN 
		BEGIN
			AM='PM';
		END
	END
	
	IF (HH='00') THEN 
	BEGIN
		HH='12';
		AM='AM';
	END
	
	IF (HH>'12') THEN
	BEGIN
		HH = TRIM(CAST(CAST(HH AS INTEGER)-12 AS VARCHAR(2)));
		IF (CHAR_LENGTH(HH)<2) THEN 
		BEGIN
			HH='0'||HH;
		END
		AM='PM';
	END
	XFORMAT = REPLACE(XFORMAT, 'HH12', HH);
	XFORMAT = REPLACE(XFORMAT, 'HH', HH);
	XFORMAT = REPLACE(XFORMAT, 'MI', MI);
	XFORMAT = REPLACE(XFORMAT, 'SS', SS);
	-- fix: substitute the computed meridian indicator
	XFORMAT = REPLACE(XFORMAT, 'AM', AM);
	
	RETURN XFORMAT;
END;

/* Examples:

    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH24:MI:SS') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH12:MI AM') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YY HH24:MI') FROM RDB$DATABASE;
    -- still incomplete...
*/
/* Firebird selectable procedure: emit timestamps from STARTDATE up to
   (but excluding) ENDDATE, stepping INTERV units of UNIT (default DAY),
   optionally restricted to the FROMTIME..TOTIME window of each day.
   The SELECT ... FROM RDB$DATABASE INTO trick evaluates the CASE once
   per iteration to advance :dateval. */
CREATE OR ALTER PROCEDURE date_range(
    startdate date, 
    enddate date, 
    interv integer DEFAULT 1, 
	unit varchar(6) DEFAULT 'DAY', 
	fromtime time DEFAULT null, 
	totime time DEFAULT null
) RETURNS (dateval timestamp)
AS
BEGIN
    dateval = startdate;
    while (dateval < enddate) do
    BEGIN
	    IF ((:fromtime IS NULL OR cast(:dateval AS time) >= :fromtime)
		  AND (:totime IS NULL OR cast(:dateval AS time) < :totime)) THEN 
		BEGIN 
			suspend;
		END 
		
  		SELECT 
	  		CASE  
		  		WHEN :unit = 'MINUTE' THEN dateadd(MINUTE, :interv, :dateval)
		  		WHEN :unit = 'HOUR' THEN dateadd(HOUR, :interv, :dateval)
		  		WHEN :unit = 'YEAR' THEN dateadd(YEAR, :interv, :dateval)
		  		WHEN :unit = 'MONTH' THEN dateadd(MONTH, :interv, :dateval)
		  		else dateadd(DAY, :interv, :dateval)
			END 
		FROM RDB$DATABASE 
		INTO :dateval;
    END
END;

/*
Example: ranges from today to next 10 days, from 8:00am to 6:00pm, in fraction of 30 minutes:

SELECT 
    cast(dateval AS date) TheDate, 
    CAST(dateval AS time) FromTime, 
    CAST(dateadd(MINUTE, 30, dateval) AS time) ToTime 
FROM date_range(
    current_date, ---- initial date
    current_date + 10, ---- next 10 days
    30, 'MINUTE', ---- fraction
    time '08:00', time '18:00') --- from time, to time
*/
-- PostgreSQL: break an interval into year/month/day parts with AGE().
select 
  DATE_PART('year', AGE('2012-03-05', '2010-04-01')) AS years,
  DATE_PART('month', AGE('2012-03-05', '2010-04-01')) AS months,
  DATE_PART('day', AGE('2012-03-05', '2010-04-01')) AS days;
/* Firebird: age between two timestamps, returned as 'Yy Mm Dd', or as
   zero-padded 'YYY-MM-DD' when CODED is TRUE. The "birthday not yet
   reached" detection compares datediff(year) to datediff(day)/365.
   NOTE(review): the /365 heuristic ignores leap years -- verify the
   boundary dates (Feb 29, anniversaries) before relying on it. */
CREATE OR ALTER FUNCTION AGE(DFROM TIMESTAMP, DTO TIMESTAMP, CODED BOOLEAN = FALSE)
RETURNS VARCHAR(30)
AS
DECLARE y varchar(3);
DECLARE m varchar(2);
DECLARE d varchar(2);
BEGIN
	y = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				THEN datediff(year, :DFROM, :DTO)-1
			ELSE datediff(year, :DFROM, :DTO)
		END;
	m = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)-1
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)>=0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)
			WHEN datediff(year, :DFROM, :DTO) = datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)-1
			ELSE datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)
		END;
	d = CASE 
			WHEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO)-1, :DFROM), :DTO)
			ELSE datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)
		END;
	RETURN 
		CASE 
			WHEN :CODED THEN lpad(Y,3,'0')||'-'||lpad(m,2,'0')||'-'||lpad(d,2,'0')
			ELSE Y||'y '||m||'m '||d||'d' 
		END;
END;
#Backup
# Firebird gbak: back up database.FDB into the portable .fbk format.

gbak -b -v -user SYSDBA -password "masterkey" D:\database.FDB E:\database.fbk

#Restore

gbak -c -user SYSDBA -password masterkey E:\database.fbk E:\database_restore.fdb
# SQL Server: script schema+data of CMUCE via mssql-scripter in Docker.
docker run -v /home/marco:/backup --rm svarcoe/mssql-scripter mssql-scripter -S 172.18.0.3 -d CMUCE -U sa -P CMuce1970@ --schema-and-data -f /backup/mssql-scripter-CMUCE.sql

# SQL Server: transaction-log backup, then full restore (NORECOVERY).
sqlcmd -S localhost -U SA -Q "BACKUP LOG [demodb] TO DISK = N'/var/opt/mssql/data/demodb_LogBackup.bak' WITH NOFORMAT, NOINIT, NAME = N'demodb_LogBackup', NOSKIP, NOREWIND, NOUNLOAD, STATS = 5"

sqlcmd -S localhost -U SA -Q "RESTORE DATABASE [demodb] FROM DISK = N'/var/opt/mssql/data/demodb.bak' WITH FILE = 1, NOUNLOAD, REPLACE, NORECOVERY, STATS = 5"
-- Oracle: top 10 statements by total elapsed time (from v$sql).
-- ROWNUM is applied after the inner ORDER BY, hence the subquery.
select * from 
( 
    select 
        sql_id, 
        elapsed_time, 
        executions, 
        sql_text 
    from v$sql  
    ORDER BY elapsed_time desc
) 
where rownum < 11;
docker run -d -e ACCEPT_EULA=Y -e "SA_PASSWORD=P@ssW0rd" -p 1433:1433 \
  --restart unless-stopped \
  -v /var/opt/mssql/data:/var/opt/mssql/data \
  -v /tmp/:/backups/ \
  --name sqlserver \
  mcr.microsoft.com/mssql/server

#backup:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP DATABASE [dbname] TO DISK = N'/tmp/dbname-full.bak' WITH NOFORMAT, NOINIT, NAME = 'dbname-bak-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP LOG [dbname] TO DISK = N'/tmp/dbname-log.bak' WITH NOFORMAT, NOINIT, NAME = N'dbname-bak-log', NOSKIP, NOREWIND, NOUNLOAD, STATS = 5"

#restore:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE DATABASE [dbname] FROM DISK = N'/tmp/dbname-full.bak' WITH FILE = 1, NOUNLOAD, REPLACE, NORECOVERY, STATS = 5"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE LOG [dbname] FROM DISK = N'/var/opt/mssql/data/dbname-log.bak'"


#create login myuser with password ='strongPass';
#create user myuser for login myuser;
#ALTER LOGIN [myuser] enable;
#Increase the timeout and max_children:

/etc/php/7.0/fpm/php.ini  =>   default_socket_timeout = 60000
/etc/php/7.0/fpm/php.ini  =>   pm.max_children = 20
/etc/php/7.0/fpm/pool.d/www.conf  =>   request_terminate_timeout = 60000

#Increase the timeout in /etc/nginx/nginx.conf:
keepalive_timeout 65000;

#After Restart php-fpm and nginx:

sudo service php7.0-fpm restart
sudo service nginx restart
# Oracle full Data Pump export; first argument is the ORACLE_SID.
export ORACLE_SID=$1
export NLS_LANG=AMERICAN_AMERICA.WE8ISO8859P9
export USUARIO=system/org24h
export PATHBACKUP=/respaldo/o24/export
export FILENAME=CMLGDB`date +%d%m%Y%H%M`.DMP
export FILENAMELOG=CMLGDB`date +%d%m%Y%H%M`.log
echo "$PATHBACKUP"

# Fixes: use -d (must be an existing DIRECTORY; -a was true for any
# path), quote expansions, and delete old dumps only AFTER the target
# directory is validated -- the original ran rm before the check.
if [ -d "$PATHBACKUP" ] ; then
	rm -rf "$PATHBACKUP"/*
	expdp $USUARIO FULL=yes DUMPFILE=dpump_dir1:$FILENAME LOGFILE=dpump_dir1:$FILENAMELOG
	#exp $USUARIO file=$PATHBACKUP/$FILENAME full=yes compress=yes indexes=no consistent=yes log=$PATHBACKUP/$FILENAMELOG
else
	echo "ERROR: Export no encontro el directorio de Respaldo"
	exit 1
fi
-- Oracle 12c+: allow creating a user without the C## prefix.
-- NOTE(review): "_ORACLE_SCRIPT" is an undocumented parameter.
alter session set "_ORACLE_SCRIPT"=true;

CREATE USER SISTEMAS IDENTIFIED BY las36horas;

GRANT CREATE TABLE TO SISTEMAS;
GRANT CONNECT TO SISTEMAS;
GRANT CTXAPP TO SISTEMAS;
GRANT RESOURCE TO SISTEMAS;
GRANT CREATE ANY CONTEXT TO SISTEMAS;
GRANT CREATE ANY SYNONYM TO SISTEMAS;
GRANT CREATE VIEW TO SISTEMAS;
GRANT DROP ANY CONTEXT TO SISTEMAS;
GRANT QUERY REWRITE TO SISTEMAS;
-- Fix: the next three grants were missing the GRANT verb and the
-- grantee, so they were not executable statements.
GRANT EXECUTE ON SYS.DBMS_AQ_BQVIEW TO SISTEMAS;
GRANT EXECUTE ON SYS.DBMS_LOB TO SISTEMAS;
GRANT EXECUTE ON SYS.UTL_RAW TO SISTEMAS;
-- Oracle: recreate a tablespace -- drop it with its segments, create it
-- autoextending, then coalesce adjacent free extents.
DROP TABLESPACE MYSPACENAME INCLUDING contents;

CREATE TABLESPACE MYSPACENAME 
   DATAFILE 'ts1_filename.dbf' 
   SIZE 2000m 
   autoextend on NEXT 1000m maxsize unlimited;
   
 alter tablespace MYSPACENAME coalesce;
-- Reset SYS/SYSTEM passwords from an OS-authenticated session.
#use oracle user from system:

sqlplus "/ as sysdba"

SQL> ALTER USER SYS IDENTIFIED BY [password]; 
SQL> ALTER USER SYSTEM IDENTIFIED BY [password];
/* Firebird selectable procedure: split a comma-separated string and
   return one trimmed, non-empty token per row in STR. */
CREATE OR ALTER PROCEDURE SPLIT(ASTRLIST VARCHAR(32000))
returns (
  STR VARCHAR(255)
)
as
  declare variable StrList varchar(32000);
  declare variable CommaPos integer;
  declare variable StrVal varchar(10);
begin
  StrList = AStrList || '';
  CommaPos = Position(',', StrList);

  -- emit every token before the last comma
  while (CommaPos > 0) do
  begin
    StrVal = Trim(SubString(StrList from 1 for CommaPos - 1));

    if (Char_Length(StrVal) > 0) then
    begin
      STR = StrVal;
      suspend;
    end

    if (Char_Length(StrList) > CommaPos) then
      StrList = SubString(StrList from CommaPos + 1);
    else
      StrList = '';

    CommaPos = Position(',', StrList);
  end

  StrList = Trim(StrList);

  -- emit the trailing token after the last comma, if any
  if (Char_Length(StrList) > 0) then
  begin
    begin
      STR = StrList;
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE COD_CITY IN (SELECT STR FROM Split('ABC, DEF, GH, IJK, LM, NOP'))

*/
/* Firebird selectable procedure: parse a comma-separated list and
   return each purely-numeric token as an integer row in ID; non-numeric
   tokens are silently skipped (the SIMILAR TO guard). */
CREATE OR ALTER PROCEDURE GETINTEGERLIST(AINTEGERLIST VARCHAR(32000))
returns (
  ID integer
)
as
  declare variable IntegerList varchar(32000);
  declare variable CommaPos integer;
  declare variable IntegerVal varchar(10);
begin
  IntegerList = AIntegerList || ' ';
  CommaPos = Position(',', IntegerList);

  -- emit every token before the last comma
  while (CommaPos > 0) do
  begin
    IntegerVal = Trim(SubString(IntegerList from 1 for CommaPos - 1));

    if (Char_Length(IntegerVal) > 0) then
    begin
      if (IntegerVal similar to '[0-9]*') then
      begin
        ID = Cast(IntegerVal as integer);
        suspend;
      end
    end

    if (Char_Length(IntegerList) > CommaPos) then
      IntegerList = SubString(IntegerList from CommaPos + 1);
    else
      IntegerList = '';

    CommaPos = Position(',', IntegerList);
  end

  IntegerList = Trim(IntegerList);

  -- emit the trailing token after the last comma, if any
  if (Char_Length(IntegerList) > 0) then
  begin
    if (IntegerList similar to '[0-9]*') then
    begin
      ID = Cast(IntegerList as integer);
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE ID_CITY IN (SELECT ID FROM GetIntegerList('1, 2, 12, 45, 75, 45'))

*/
sudo mkdir -p /your/custom/path/oracle-19c/oradata/
sudo chmod -R 777 /your/custom/path/

docker run -d --name oracle19db \
  -p 1521:1521 \
  -e ORACLE_SID=ORCL \
  -e ORACLE_PDB=ORCLDB \
  -e ORACLE_PWD=Oracle123 \
  -e ORACLE_CHARSET=AL32UTF8 \
  -v /your/custom/path/oracle-19c/oradata:/opt/oracle/oradata \
  banglamon/oracle193db:19.3.0-ee

# Charset Value: WE8MSWIN1252, AL16UTF8

# ALTER SESSION SET NLS_DATE_FORMAT = 'RRRR-MM-DD';
# ALTER SESSION SET NLS_TIME_FORMAT = 'HH24:MI:SS';
# ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'RRRR-MM-DD HH24:MI:SS';
# ALTER SESSION SET NLS_TIME_TZ_FORMAT = 'HH24:MI:SS TZR';
# ALTER SESSION SET NLS_TIMESTAMP_TZ_FORMAT = 'RRRR-MM-DD HH24:MI:SS TZR';

# docker exec -it oracle19db bash -c "source /home/oracle/.bashrc; sqlplus /nolog”
# connect sys as sysdba;

# alter session set "_ORACLE_SCRIPT"=true;
# create user sistemas identified by las36horas;
# GRANT CONNECT, RESOURCE, DBA TO sistemas;
# GRANT UNLIMITED TABLESPACE TO sistemas;
-- Demo table: a sequence with deliberate holes at 3, 4 and 7.
CREATE TABLE GAP(
    COUNTER INTEGER PRIMARY KEY
);

insert into gap (counter) values (1);
insert into gap (counter) values (2);
--::::: 3
--::::: 4
insert into gap (counter) values (5);
insert into gap (counter) values (6);
--::::: 7
insert into gap (counter) values (8);
insert into gap (counter) values (9);
insert into gap (counter) values (10);



--===== here the trick :

-- Firebird: pair each counter with the next existing counter ("ROWS 1")
-- and report the missing range between them, aggregated with LIST().
SELECT 
	list(CASE 
		WHEN N2-N1-2 > 0 THEN (N1+1) || '-'|| (N2-1)
		ELSE CAST(N1+1 AS VARCHAR(10))
	END) Missing
FROM (
SELECT a.counter N1, 
  (SELECT counter FROM gap WHERE counter > a.counter rows 1) N2 
FROM gap a
) B
WHERE N2-N1 > 1;



-- results:

Missing
=======
3-4,7
alter role <username> set search_path = <schema1>, ..., <scheman>, public;
-- Firebird recursive CTE: walk a chart-of-accounts prefix hierarchy.
-- POSITION(e.COD_CTA, p.COD_CTA) = 1 means "p.COD_CTA starts with
-- e.COD_CTA", i.e. e is an ancestor account of p.
-- NOTE(review): "ROWS 1" limits each recursion step to a single parent
-- row -- confirm this matches the intended one-parent-per-level walk.
WITH RECURSIVE PADRE AS (
	SELECT
		COD_CTA,
		DES_CTA
	FROM
		CATALOGO
	WHERE
		COD_CTA=:COD_CTA
	UNION ALL
		SELECT
			e.COD_CTA,
			e.DES_CTA
		FROM
			CATALOGO e
		INNER JOIN padre p ON POSITION(e.COD_CTA, p.COD_CTA) = 1
		  AND p.COD_CTA > COD_CTA
		ROWS 1
) SELECT
	DISTINCT cod_cta
FROM
	PADRE
WHERE COD_CTA < :COD_CTA
<?php

//===========notifier.sql:
    
/*
CREATE OR REPLACE FUNCTION public.notify_channel()
RETURNS trigger
AS $function$
  BEGIN
	  PERFORM pg_notify('channel_name', row_to_json(NEW)::text);
	  RETURN NULL;
  END;
$function$
LANGUAGE plpgsql;

CREATE TRIGGER trigger_on_insert AFTER INSERT ON mytable
FOR EACH ROW EXECUTE PROCEDURE notify_channel();
*/

set_time_limit(0);

//-- using PDO:

$db = new PDO(
    'pgsql:dbname=dbname host=host port=5432;options=--application_name=APPLICATION_NAME',
    'user',
    'password',
    [
        PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
        PDO::ATTR_DEFAULT_FETCH_MODE => PDO::FETCH_ASSOC,
    ]
);

$db->exec('LISTEN channel_name');

while (true) {
    // fix: capture the notification payload -- the original echoed an
    // undefined $result variable
    while ($result = $db->pgsqlGetNotify(PDO::FETCH_ASSOC, 30000)) {
        echo json_encode($result).PHP_EOL;
    }
}

//-- using pg_connect:
//<?php

include '../conn.php';
set_time_limit(0);
ob_end_clean();
pg_query($conn, 'LISTEN table_changed;');

$pid = pg_get_pid($conn); // fix: $pid was echoed below but never defined

while(true){
    
    $notify = pg_get_notify($conn);
    
	if (!$notify) {
        echo json_encode(array('result'=>false, 'data'=>'No messages')).PHP_EOL;
        ob_flush();
        flush();
        sleep(1);
	} else {
        echo json_encode(array('result'=>true, 'process_id'=>$pid , 'pid' => pg_get_pid($conn), 'data' => $notify)).PHP_EOL;
	}
}

---$> psql: the user my_user and the database my_original_db must exist:

-- Close every other connection first: CREATE DATABASE ... TEMPLATE
-- fails while anyone else is connected to the template database.
SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity 
WHERE pg_stat_activity.datname = 'my_original_db' AND pid <> pg_backend_pid();

CREATE DATABASE my_new_db WITH TEMPLATE my_original_db OWNER my_user;
-- Example table:

create table patient (
    id serial primary key,
    firstname varchar(100),
    lastname varchar(100)
);

insert into patient(firstname, lastname) values ('MARCO ANTONIO', 'PEREZ SANDERS');

-- Create field tsvector field with text columns (preferable using insert and update trigger);
alter table patient add tsv_name tsvector;

-- initially fill field.
update patient set tsv_name = to_tsvector(firstname || ' ' || lastname);

-- create index for fast search:
create index tsv_name_idx on patient using gin(tsv_name);

-- then you can search as:
select * from patient where tsv_name @@ to_tsquery('MARCO & PEREZ');
select * from patient where tsv_name @@ to_tsquery('MARCO & SANDERS');
select * from patient where tsv_name @@ to_tsquery('ANTONIO & PEREZ');

select * from m_paciente 
where tsv_nombre @@ to_tsquery(array_to_string(string_to_array('LEIRY SEVERINO',' '),':* & ')||':*');

-- other:

select * from patient where fullname ~ all(array['(?=^LEI.*|\s+LEI\w*)','(?=^ME.*|\s+ME\w*)']);
-- One page of results plus the total filtered count in a single query.
-- The RIGHT JOIN guarantees the count row comes back even when the
-- requested page is past the end. ("TABLE cte" = "SELECT * FROM cte".)
WITH cte AS (
   SELECT *
   FROM   m_paciente mp 
   WHERE  apellido like 'P%'
   )
SELECT *
FROM  (
   TABLE  cte
   ORDER  BY apellido, nombre
   LIMIT  10
   OFFSET 190
   ) sub
RIGHT  JOIN (SELECT count(*) FROM cte) c(full_count) ON true;
-- Running balance (saldo): the master row contributes its amount as the
-- opening debit (id2=0 sorts first), detail rows follow; a window SUM
-- ordered by (id, id2) accumulates debit-credit per master id.
select id, id2, debito, credito, 
  sum(debito-credito) over (partition BY id ORDER BY id, id2) saldo
from 
(
	select m.id, 0 id2, importe debito, 0 credito
		from m_cxc m 
		where m.id=:id
	union all 
	select c.id_m_cxc id, c.id id2, debito, credito
		from d_cxc c 
  		where c.id_m_cxc=:id
) a
-- show running queries (pre-9.2: procpid / current_query column names)
SELECT procpid, age(clock_timestamp(), query_start), usename, current_query 
FROM pg_stat_activity 
WHERE current_query != '<IDLE>' AND current_query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- show running queries (9.2)
SELECT pid, age(clock_timestamp(), query_start), usename, query 
FROM pg_stat_activity 
WHERE query != '<IDLE>' AND query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- kill running query (cancels the statement; backend stays alive)
SELECT pg_cancel_backend(procpid);

-- kill idle query (terminates the whole backend process)
SELECT pg_terminate_backend(procpid);

-- vacuum command
VACUUM (VERBOSE, ANALYZE);

-- non-idle sessions (pre-9.2 column names)
select * from pg_stat_activity where current_query not like '<%';

-- all database users
select * from pg_user;

-- all databases and their sizes
select datname, pg_size_pretty(pg_database_size(datname))
from pg_database
order by pg_database_size(datname) desc;

-- cache hit rates (should not be less than 0.99)
SELECT sum(heap_blks_read) as heap_read, sum(heap_blks_hit)  as heap_hit, (sum(heap_blks_hit) - sum(heap_blks_read)) / sum(heap_blks_hit) as ratio
FROM pg_statio_user_tables;

-- table index usage rates (should not be less than 0.99)
SELECT relname, 100 * idx_scan / (seq_scan + idx_scan) percent_of_times_index_used, n_live_tup rows_in_table
FROM pg_stat_user_tables 
ORDER BY n_live_tup DESC;

-- how many indexes are in cache
SELECT sum(idx_blks_read) as idx_read, sum(idx_blks_hit)  as idx_hit, (sum(idx_blks_hit) - sum(idx_blks_read)) / sum(idx_blks_hit) as ratio
FROM pg_statio_user_indexes;

-- Dump database on remote host to file
$ pg_dump -U username -h hostname databasename > dump.sql

-- Import dump into existing database
$ psql -d newdb -f dump.sql

--(On 9.2+): Queries running more than 2 minutes
SELECT now() - query_start as "runtime", usename, datname, waiting, state, query
  FROM  pg_stat_activity
  WHERE now() - query_start > '2 minutes'::interval
 ORDER BY runtime DESC;

-- 
select relname,last_vacuum, last_autovacuum, last_analyze, last_autoanalyze from pg_stat_user_tables;

-- dead tuples per table, most bloated first
-- (fix: the 'table1' literal previously opened with a typographic
-- quote, which made the statement unparsable)
select relname, n_dead_tup, last_vacuum, last_autovacuum from 
pg_catalog.pg_stat_all_tables
where n_dead_tup > 0 and relname = 'table1' order by n_dead_tup desc;

--Tables and views used by a given view:
with recursive view_tree(parent_schema, parent_obj, child_schema, child_obj, ind, ord) as 
(
  select vtu_parent.view_schema, vtu_parent.view_name, 
    vtu_parent.table_schema, vtu_parent.table_name, 
    '', array[row_number() over (order by view_schema, view_name)]
  from information_schema.view_table_usage vtu_parent
  where vtu_parent.view_schema = '<SCHEMA NAME>' and vtu_parent.view_name = '<VIEW NAME>'
  union all
  select vtu_child.view_schema, vtu_child.view_name, 
    vtu_child.table_schema, vtu_child.table_name, 
    vtu_parent.ind || '  ', 
    vtu_parent.ord || (row_number() over (order by view_schema, view_name))
  from view_tree vtu_parent, information_schema.view_table_usage vtu_child
  where vtu_child.view_schema = vtu_parent.child_schema 
  and vtu_child.view_name = vtu_parent.child_obj
) 
select tree.ind || tree.parent_schema || '.' || tree.parent_obj 
  || ' depends on ' || tree.child_schema || '.' || tree.child_obj txt, tree.ord
from view_tree tree
order by ord;


--Check the size (as in disk space) of all databases:
SELECT d.datname AS Name, pg_catalog.pg_get_userbyid(d.datdba) AS Owner,
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT')
    THEN pg_catalog.pg_size_pretty(pg_catalog.pg_database_size(d.datname)) 
    ELSE 'No Access' 
  END AS SIZE 
FROM pg_catalog.pg_database d 
ORDER BY 
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT') 
    THEN pg_catalog.pg_database_size(d.datname)
    ELSE NULL 
  END;

--Check the size (as in disk space) of each table:
SELECT nspname || '.' || relname AS "relation",
   pg_size_pretty(pg_total_relation_size(C.oid)) AS "total_size"
 FROM pg_class C
 LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace)
 WHERE nspname NOT IN ('pg_catalog', 'information_schema')
   AND C.relkind <> 'i'
   AND nspname !~ '^pg_toast'
 ORDER BY pg_total_relation_size(C.oid) DESC;

--Script to kill all running connections of a current database
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE datname = current_database()  
  AND pid <> pg_backend_pid();
<?php

/*==========  install at ubuntu: ==============

sudo apt install php-sybase


//==================================

append this into /etc/freetds/freetds.conf:
(192.168.0.250:2638 is an example:)

//==================================

sudo nano /etc/freetds/freetds.conf

[connName]
    host = 192.168.0.250  
    port = 2638
    tds version = 7.4
    database = MYDB
    
//======  php: */

$connName = "connName";
$user = "myuser";
$pass = "mypass";

$st="dblib:host=$connName;charset=iso8859-1";
$conn = new PDO($st,$user,$pass);

/* $conn->prepare($sql_query);
...etc
*/
-- Extract every {placeholder} token from a document-number template.
select regexp_matches('FV{YY}{MM}{DD}-{UU}-{###}', '\{([A-Za-z#]+)\}', 'g');
-- Audit-log table template: replace <tablename> before running.
-- antes/despues hold the row as JSON before/after the change.
create table <tablename>_log(
    id serial primary key,
    idx integer,
    antes json,
    despues json,
    id_m_usuario integer,
    fecha_registro timestamp default now(),
    tipo char(1) default 'U',
    unico varchar(36)
);

-- Audit trigger function template (replace <tablename> and the UPDATE
-- condition placeholder before running). Logs I/U/D row images as JSON.
create function f_<tablename>_log()
returns trigger
LANGUAGE plpgsql as
$body$
begin
    case tg_op
        when 'INSERT' then
            insert into <tablename>_log (idx, despues, tipo)
            select new.id, row_to_json(new), 'I';
            return new;
        when 'UPDATE' then
            if (.. <condiciones new.campo1 <> new.campo2>... ) then
                insert into <tablename>_log (idx, antes, despues, tipo)
                select new.id, row_to_json(old), row_to_json(new), 'U';
            end if;
            return new;
        when 'DELETE' then
            -- fix: NEW is NULL inside a DELETE trigger -- the original
            -- read new.id here and would have raised at runtime
            insert into <tablename>_log (idx, antes, tipo)
            select old.id, row_to_json(old), 'D';
            return old;
    end case;
end;
$body$;

-- Attach the audit trigger (template: replace <tablename>).
create trigger t_<tablename>_log
before insert or update or delete
on <tablename>
for each row execute procedure f_<tablename>_log();
-- Word-wise ILIKE search: every word of the input must appear somewhere
-- in name, in any order.
select id, name from table 
where name ilike all(string_to_array(replace('%'||TRIM('JOHN DOE')||'%',' ','%,%'), ','))

-- 1) it converts spaces into commas (,)
-- 2) it converts the string to an array using the comma separated string
-- 3) it applies ILIKE to each word in the search string

-- It finds (for example) name = 'DOE, JHON'  in table
<?php 

// Demo input: a UNION ALL whose final ORDER BY is to be inverted.
$sql = 'select fielda, fieldb, fieldc from table1
order by field, fieldb, fieldc
union all
select field1, field2, field3 from table2
order by field1, field2 desc, field3 asc';


print_r($sql2 = invert_order($sql));


// Rewrite the LAST "order by" clause of $str with every sort direction
// flipped (asc->desc, desc->asc, none->desc). Earlier ORDER BYs in the
// statement are left untouched.
function invert_order($str){
    // capture everything following each "order by " (case-insensitive)
    $re = '/(?<=order\sby\s)(.*)(?=)/mi';

    preg_match_all($re, $str, $matches, PREG_SET_ORDER, 0);
    
    $mat = $matches[sizeof($matches)-1]; //-- get only last order field list
    $mat=$mat[sizeof($mat)-1];
    $mat=explode(",",$mat);
    
    
    for($i=0; $i<sizeof($mat); $i++){   //-- reverse each pair of order field/direction 
        $duet = preg_split("/\s+/", trim($mat[$i]));
        if (sizeof($duet)<2) $duet[1]="";
        switch(strtolower($duet[1])) {
            case "desc":
                $duet[1] = "asc";
                break;
            default:
                // covers both explicit "asc" and no direction at all
                $duet[1] = "desc";
                break;
        }
        $mat[$i] = implode(" ",$duet);
    }
    
    $re2 = '/(order\s+by\s+.*)*$/i';    //-- replace last order by with new inverted order by:
    $subst = "order by ".implode(", ",$mat);
    $result = preg_replace($re2, "", $str);
    return $result . " $subst";
}


?>
-- Page of rows plus the total filtered count via a window COUNT(*).
SELECT *, count(*) OVER() AS full_count
FROM   tbl
WHERE  /* whatever */
ORDER  BY col1
LIMIT  ?
OFFSET ?
-- Skeleton/pseudocode for a PostgreSQL row trigger: choose one of the
-- "before/after" and "new/old" alternatives before running.
create function f_thetable_biud()
returns trigger as
$body$
--declare vars
begin
    -- new, old
    -- tg_op = INSERT,DELETE,UPDATE
    return new/old;
end;
$body$ language plpgsql;

create trigger t_thetable_biud
before/after insert or update or delete
on thetable
for each row execute procedure f_thetable_biud();
<?php

// sudo apt install php7.0-sybase


header("content-type: text/plain; charset=iso-8859-1");
ini_set("display_errors","1");
error_reporting(E_ALL);

$host="localhost";
$db="test";
$uid="sa";
$pwd="mypassword";

$query = "select top 10 * from testtable";

$conn = new PDO( "dblib:host=$host:1433;dbname=$db;", $uid, $pwd);
$stmt = $conn->prepare( $query );
$stmt->execute();

while ($r=$stmt->fetch(PDO::FETCH_ASSOC)){
	print_r($r);
}

?>
-- Schema, source, arguments and result type of every function whose
-- name matches the given prefix (case-insensitive).
SELECT ns.nspname                         AS schema_name,
       pr.proname                         AS function_name,
       pg_get_functiondef(pr.oid)         AS func_def,
       pg_get_function_arguments(pr.oid)  AS args,
       pg_get_function_result(pr.oid)     AS result
FROM pg_proc pr
JOIN pg_namespace ns ON ns.oid = pr.pronamespace
WHERE pr.proname ILIKE 'func_name%';
-- Template: serialize a query's rows to JSON and read them back as a
-- typed recordset. The trailing ':' line is an ellipsis placeholder --
-- list every column with its type before running.
select * from json_to_recordset( (
  select array_to_json(array_agg(row_to_json(t)))
    from (
      select field1, field2, field3,... from mytable
    ) t
)) as x(
  "field1" integer,
  "field2" integer,
  "field3" text,
  :
)

--- JSON text column to recordset example (ejemplo de texto json a recordset):
select * from json_to_recordset((select detalles from d_factura_caja where id=4216)::json) as x(
  "seq" integer,
  "padre" integer,
  "tipo" text,
  "idx" integer,
  "nivel" integer,
  "descripcion" text,
  "cantidad" numeric(6,2),
  "presentacion" text,
  "precio_unitario" numeric(15,2),
  "precio" numeric(15,2),
  "porc_impuesto" numeric(6,2),
  "impuesto" numeric,
  "neto" numeric  -- fix: space was missing between name and type
)
-- Oracle row generator: the previous :ndates calendar dates.
SELECT TRUNC (SYSDATE - ROWNUM) dt
  FROM DUAL CONNECT BY ROWNUM < :ndates;
  
/* from now on */
-- (starts at yesterday's date + 1 = today because of the "- 2")
SELECT TRUNC (SYSDATE + ROWNUM - 2) dt
  FROM DUAL CONNECT BY ROWNUM < 100
#just install samba with

$ sudo apt install samba

#and go to this file:

$ sudo nano /etc/samba/smb.conf

#and just at the bottom add these lines:

    [share]
    comment = Ubuntu File Server Share
    path = /path/to/the/folder  #for example /home/user_name/public
    browsable = yes
    guest ok = yes
    read only = no
    create mask = 0755

#restart the samba service

$ sudo service smbd restart
$ sudo service nmbd restart
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requires:

1) Install Linux dependences (search for specific linux distro instructions for this):
python-dev, python-pip, freetds-dev, freetds-bin, libaio1

2) Install instantclient-basic-lite
follow these instructions:

http://www.oracle.com/technetwork/database/features/linuxx86-64soft-092277.html?printOnly=1
(go to "Installation of ZIP files" section at the bottom of document)

3) Install python includes:

sudo -H pip install cx_Oracle
sudo -H pip install pymssql

"""

import cx_Oracle
import pymssql

""" ====== let's connect to Oracle DB Server ====="""

orcl_host = "host1"
orcl_port = 1521
orcl_user = "user1"
orcl_pwd  = "password1"
orcl_dbn  = "service_name"

connstr = orcl_user+"/"+orcl_pwd+"@"+orcl_host+":"+str(orcl_port)+"/"+orcl_dbn
orcl = cx_Oracle.connect(connstr)

#If all is correct we will see and object print:

print(orcl)

"""===== let's connect to sqlsvr: ====="""

sql_host = "host2"
sql_user = "user2"
sql_pwd  = "password2"
sql_dbn  = "database_name"

conexion_sql = pymssql.connect(sql_host, sql_user, sql_pwd, sql_dbn)

#If all is correct we will see and object print:

print(conexion_sql)

---Sql:
    
CREATE EXTENSION dblink;

---Then you can use it like this:
    
select * from dblink('dbname=otherdbname','select field1, field2 from otherdb_table') as foreign_table(field1 varchar, field2 integer)
# let's create a backup from remote postgresql database using pg_dump:
#
#   pg_dump -h [host address] -Fc -o -U [database user] <database name> > [dump file]
#
# later it could be restored at the same remote server using:
#
#   sudo -u postgres pg_restore -C -d postgres mydb_backup.dump
#
#Ex:

pg_dump -h 67.8.78.10 -p 5432 -Fc -o -U myuser mydb > mydb_backup.dump

# fix: -C only takes effect when pg_restore is CONNECTED via -d; without
# -d it merely prints SQL to stdout instead of recreating the database
pg_restore -C -d postgres mydb_backup.dump



#complete (all databases and objects)

pg_dumpall -U myuser -h 67.8.78.10 -p 5432 --clean --file=mydb_backup.dump


#restore from pg_dumpall --clean:

psql -f mydb_backup.dump postgres #it doesn't matter which db you select here
-- Oracle: check whether :mnu_code already exists in the lookup table.
select mnu_code from lu_mnu_code where mnu_code = :mnu_code

-- NOTE(review): RETURN FALSE/TRUE implies this block is the body of a
-- BOOLEAN function (e.g. an Apex validation); it is not valid as a
-- stand-alone anonymous block. l_code_change is declared but unused.
DECLARE
   l_exist        CHAR (1);
   l_code_change   CHAR (1);
BEGIN
   SELECT CASE
             WHEN EXISTS (SELECT *
                            FROM lu_mnu_code
                           WHERE NVL (mnu_code, 'null') = :mnu_code
                         ) THEN 'Y'
             ELSE 'N'
          END
     INTO l_exist
     FROM DUAL;

   IF l_exist = 'Y' THEN
      RETURN FALSE;
   ELSE
      RETURN TRUE;
   END IF;
END;
/* SQL Command within postgres: */

-- server uptime: elapsed time since the postmaster started
SELECT now() - pg_postmaster_start_time() uptime

/* using psql command line utility deployed with Postgres: */
 
$> psql -c "SELECT now() - pg_postmaster_start_time() uptime"
-- T-SQL: fetch the @sayac-th row by numbering rows in a subquery.
SELECT tablo.columnName FROM 
(SELECT ROW_NUMBER()OVER(Order By columnName)indexer, 
 * FROM tabloName)tablo WHERE tablo.indexer = @sayac
-- Oracle: reformat a date string from mm/dd/yyyy to mm-dd-yyyy.
select to_char(to_date('1/10/2011','mm/dd/yyyy'),'mm-dd-yyyy') from dual
-- Count rows matching a condition (template).
SELECT COUNT(*) 
FROM <Table_Name>
WHERE SITETYPE = 'condition'
-- Distinct values of a column (template).
SELECT DISTINCT <Column_name>
FROM <table_name>
-- Duplicate detector: Id_card values appearing more than once.
SELECT Id_card, COUNT(Id_card)
FROM user_application_tbl
GROUP BY Id_card
HAVING COUNT(Id_card) > 1
-- BigQuery: rebuild pg_topups_json from staging, packing the amount/currency
-- pairs into single-element ARRAY<STRUCT<amount, currency>> columns.
create or replace table `reby-cloud.analytics_reby_v1_eu.pg_topups_json`
as

select
  id,
  created_at,
  gateway,
  gateway_transaction_id,
  stripe_charge_id,
  -- one-element struct arrays mirroring the original JSON value shapes
  array(select as struct value_amount as amount, value_currency as currency) as value,
  array(select as struct original_value_amount as amount, original_value_currency as currency) as original_value,
  user_id,
  company_id,
  braintree_transaction_id
from `reby-cloud.staging_eu.pg_topups_stg`
-- Oracle: make V_TOT_REVN a computed (VIRTUAL) column — the rounded sum of
-- the revenue components, treating NULLs as 0.
ALTER TABLE DAYSUM
  MODIFY V_TOT_REVN NUMBER GENERATED ALWAYS AS (ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)) VIRTUAL
-- NOTE(review): the next two lines are stray fragments (no FROM clause /
-- bare expression) — kept for reference only, not runnable as-is.
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)
-- APEX client-side JavaScript: hide the P368_DATE page item.
apex.item( "P368_DATE" ).hide();
-- ==================================================================
-- Report user tables missing a primary key, a clustered index, or any
-- non-clustered index (sys.indexes.type: 0 = heap, 1 = clustered,
-- 2 = non-clustered).
-- ==================================================================
SELECT
    t.object_id
    , schemaname = OBJECT_SCHEMA_NAME(t.object_id)
    , tablename = t.name
    , PrimaryKey = CASE WHEN pkx.object_id IS NULL THEN 'No Primary Key' ELSE '' END
    , ClusteredIndexes = CASE WHEN cix.object_id IS NULL THEN 'No Clustered Index' ELSE '' END
    , NonClusteredIndexes = CASE WHEN ncix.object_id IS NULL THEN 'No Non-Clustered Index' ELSE '' END
    -- the partition-row total repeats once per joined index row, so divide it back out
    , [RowCount] = (SELECT SUM(p.rows) FROM sys.partitions AS p WHERE p.object_id = t.object_id)
                   / (1 + COUNT(cix.object_id) + COUNT(ncix.object_id))
    , [IndexCount] = COUNT(cix.object_id) + COUNT(ncix.object_id)
FROM sys.objects AS t
LEFT OUTER JOIN sys.indexes AS ncix ON ncix.object_id = t.object_id AND ncix.type = 2
LEFT OUTER JOIN sys.indexes AS cix ON cix.object_id = t.object_id AND cix.type = 1
LEFT OUTER JOIN sys.indexes AS pkx ON pkx.object_id = t.object_id AND pkx.is_primary_key = 1
WHERE t.type = 'U'
  AND (pkx.object_id IS NULL OR cix.object_id IS NULL OR ncix.object_id IS NULL)
GROUP BY
    t.object_id
    , OBJECT_SCHEMA_NAME(t.object_id)
    , t.name
    , CASE WHEN pkx.object_id IS NULL THEN 'No Primary Key' ELSE '' END
    , CASE WHEN ncix.object_id IS NULL THEN 'No Non-Clustered Index' ELSE '' END
    , CASE WHEN cix.object_id IS NULL THEN 'No Clustered Index' ELSE '' END
ORDER BY tablename ASC
-- User tables that are pure heaps: no clustered and no non-clustered
-- index, i.e. no sys.indexes row with index_id > 0.
SELECT
    OBJECT_SCHEMA_NAME(tbl.object_id) AS schemaname,
    tbl.name AS tablename
FROM sys.objects AS tbl
INNER JOIN sys.indexes AS ix
    ON ix.object_id = tbl.object_id
WHERE tbl.type = 'U'
  AND NOT EXISTS (
        SELECT 1
        FROM sys.indexes AS anyix
        WHERE anyix.object_id = tbl.object_id
          AND anyix.index_id > 0
      )
-- A disabled variant in the original also listed tables having only a
-- clustered index (same shape with index_id > 1); re-add if needed.
-- MySQL (unrelated to the query below): enforce strict/ANSI-style modes globally.
set global sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';
-- BigQuery: build user_360 — one event row per user action, with a packed
-- "details" string, unioning ride events, CX conversations and sign-ups.
create or replace table `reby-cloud.reby_marketing_eu.user_360` as 
select
  created_at,
  meta_user_id,
  type,
  -- key:value pairs, 'na' standing in for NULL
  concat(
    'amount:',if(amount is null,'na',cast(amount as string)),'; ',
    'minutes:',if(minutes is null,'na',cast(minutes as string)),'; ',
    'service_area:',if(service_area is null, 'na',service_area),'; ',
    'vehicle_type:',if(vehicle_type is null, 'na',vehicle_type),'; ',
    'app_used:',if(app_promoting_co is null, 'na',app_promoting_co),'; ',
    'vehicle_co:',if(owner_co is null,'na',owner_co),'; ',
    'topup_through:',if(topup_trough_co is null, 'na',topup_trough_co)
  ) as details
from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` pyrsc
where type not in ('vehicle_reservation')

union all

-- customer-support conversations (source columns are spelled "cantact_reason_*")
SELECT
  timestamp(created_at) as created_at,
  user_id as meta_user_id,
  'cx-conversation' as type,
  concat (
    'conversation_id:',conversation_id,'; ',
    'conact_reason1:',if(cantact_reason_level1 is null, 'na',cantact_reason_level1),'; ',
    'conact_reason2:',if(cantact_reason_level2 is null, 'na',cantact_reason_level2)
  ) as details
FROM `reby-cloud.kustomer_eu.conversation_messages_combination` 

union all

-- account creations, labelled with the company the user signed up through
select
  created_at,
  user_id as meta_user_id,
  'user-created' as type,
  concat ('company:',co.name) as details
from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on cu.company_id = co.id
-- All step-field values for the workflow execution whose order number
-- matches '002619' (drill down: execution -> definition -> steps -> fields).
select w2.id,w2.value, w.name from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%002619%' limit 1)));
-- Registration details of the sender account behind order '010598'.
select registrationnum,idcpicregistration from professionaldata where id =
(select  professionaldata_id from account where id =
(select senderaccount_id from workflowexecution where ordernumber like '%010598%' limit 1));
-- Same drill-down as above, but only fields named like 'MATRICULA'.
select w2.id,w2.value from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%MATRICULA%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%010598%' limit 1)));
-- BigQuery: table freshness across several datasets — hours/minutes since
-- each table's last modification, via the __TABLES__ metadata views.
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `analytics_reby_v1_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_demand_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `ridestatus_post_gdpr.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_fin_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_marketing_eu.__TABLES__` --where table_id = 'table_id'
-- Multi-column sort example.
SELECT col1, col2
FROM table
ORDER BY col1, col2;
-- WARNING(review): MAX(ID)+1 is race-prone under concurrency (two sessions
-- can pick the same ID) and yields NULL on an empty table — an identity /
-- sequence column would be the safe replacement. Confirm before relying on it.
insert into WORKQUEUE (ID, facilitycode, workaction, description)
  values ((select max(ID)+1 from WORKQUEUE), 'J', 'II', 'TESTVALUES')
# from google.cloud import bigquery
# client = bigquery.Client()
# project = client.project
# dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
# filepath = 'path/to/your_file.csv'

# Appends a local CSV to an existing BigQuery table, allowing new NULLABLE
# columns to be added to the table schema during the load.

# Retrieves the destination table and checks the length of the schema
table_id = "my_table"
table_ref = dataset_ref.table(table_id)
table = client.get_table(table_ref)
print("Table {} contains {} columns.".format(table_id, len(table.schema)))

# Configures the load job to append the data to the destination table,
# allowing field addition
job_config = bigquery.LoadJobConfig()
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
job_config.schema_update_options = [
    bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION
]
# In this example, the existing table contains only the 'full_name' column.
# 'REQUIRED' fields cannot be added to an existing schema, so the
# additional column must be 'NULLABLE'.
job_config.schema = [
    bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
    bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"),
]
job_config.source_format = bigquery.SourceFormat.CSV
job_config.skip_leading_rows = 1

with open(filepath, "rb") as source_file:
    job = client.load_table_from_file(
        source_file,
        table_ref,
        location="US",  # Must match the destination dataset location.
        job_config=job_config,
    )  # API request

job.result()  # Waits for table load to complete.
# BUG FIX: the original printed `dataset_id`, which is never defined and
# raised NameError; the dataset id lives on the table reference.
print(
    "Loaded {} rows into {}:{}.".format(
        job.output_rows, table_ref.dataset_id, table_ref.table_id
    )
)

# Checks the updated length of the schema
table = client.get_table(table)
print("Table {} now contains {} columns.".format(table_id, len(table.schema)))
-- Snapshot sp_who2 into a temp table so its output can be filtered/sorted,
-- then clean up. Column list matches sp_who2's result shape.
CREATE TABLE #Procesos (
    SPID        INT,
    Status      VARCHAR(255),
    Login       VARCHAR(255),
    HostName    VARCHAR(255),
    BlkBy       VARCHAR(255),
    DBName      VARCHAR(255),
    Command     VARCHAR(255),
    CPUTime     INT,
    DiskIO      INT,
    LastBatch   VARCHAR(255),
    ProgramName VARCHAR(255),
    SPID2       INT,
    REQUESTID   INT
)

INSERT INTO #Procesos (SPID, Status, Login, HostName, BlkBy, DBName, Command, CPUTime, DiskIO, LastBatch, ProgramName, SPID2, REQUESTID)
EXEC SP_WHO2

SELECT  *
    FROM #Procesos

DROP TABLE #Procesos
-- Per-table disk usage (KB/MB, total vs used vs unused), computed from
-- allocation units; 8 = KB per page. Excludes system tables.
SELECT 
    t.NAME AS TableName,
    s.Name AS SchemaName,
    p.rows,
    SUM(a.total_pages) * 8 AS TotalSpaceKB, 
    CAST(ROUND(((SUM(a.total_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS TotalSpaceMB,
    SUM(a.used_pages) * 8 AS UsedSpaceKB, 
    CAST(ROUND(((SUM(a.used_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS UsedSpaceMB, 
    (SUM(a.total_pages) - SUM(a.used_pages)) * 8 AS UnusedSpaceKB,
    CAST(ROUND(((SUM(a.total_pages) - SUM(a.used_pages)) * 8) / 1024.00, 2) AS NUMERIC(36, 2)) AS UnusedSpaceMB
FROM 
    sys.tables t
INNER JOIN      
    sys.indexes i ON t.OBJECT_ID = i.object_id
INNER JOIN 
    sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id
INNER JOIN 
    sys.allocation_units a ON p.partition_id = a.container_id
LEFT OUTER JOIN 
    sys.schemas s ON t.schema_id = s.schema_id
WHERE 
    t.NAME NOT LIKE 'dt%' 
    AND t.is_ms_shipped = 0
    AND i.OBJECT_ID > 255 
GROUP BY 
    t.Name, s.Name, p.Rows
ORDER BY 
    TotalSpaceMB DESC, t.Name
-- List every column whose name contains 'MyName', with its table.
SELECT
    COLUMN_NAME AS ColumnName,
    TABLE_NAME AS TableName
FROM INFORMATION_SCHEMA.COLUMNS
WHERE COLUMN_NAME LIKE '%MyName%'
ORDER BY
    TableName,
    ColumnName;
# Connect to a remote MySQL instance via DBI/RMySQL and run one query.
# All connection values below are placeholders — fill in real credentials
# (ideally from environment variables, not source).
library(DBI)
connection <- dbConnect(RMySQL::MySQL(),
                 dbname = "name",
                 host = "adress.amazonaws.com",
                 port = number,
                 user = "id",
                 password = "pw")
info <- dbGetQuery(connection, "SELECT column1 FROM database WHERE argument = something")

# Print the fetched data frame.
info
-- Recursively flatten a JSON document (@json_doc, declared elsewhere) into
-- one row per node with a JSON-path-style Path, its key/value/type and depth.
-- OPENJSON Type codes 4 (array) and 5 (object) are the ones recursed into;
-- integer keys render as [i], string keys as .key.
WITH
T AS (   SELECT CAST(CONCAT ('$', IIF(TRY_CAST([Key] AS int) IS NOT NULL, CONCAT ('[', [Key], ']'), '.' + [Key])) AS nvarchar(MAX)) AS Path
              , [Key]
              , Value
              , Type
              , 1                                                                                                                   Lvl
         FROM OPENJSON (@json_doc)
         UNION ALL
         -- recursive member: expand each container node one level deeper
         SELECT CAST(CONCAT (T.Path, IIF(TRY_CAST(O.[Key] AS int) IS NOT NULL, CONCAT ('[', O.[Key], ']'), '.' + O.[Key])) AS nvarchar(MAX))
              , O.[Key]
              , O.Value
              , O.Type
              , T.Lvl + 1
         FROM T
             CROSS APPLY OPENJSON (T.Value) O
         WHERE T.Type IN ( 4, 5 ))
SELECT Path, T.[Key], T.Value, T.Type, T.Lvl FROM T;
-- Join the same-named table across two databases on a shared column.
select
  *
from
  db1.SomeTable a
    inner join 
  db2.SomeTable b on b.SomeColumn = a.SomeColumn;
-- Match a value list AND NULL rows: IN never matches NULL, so the explicit
-- IS NULL branch is required.
SELECT *
FROM tbl_name
WHERE 
(id_field IN ('value1', 'value2', 'value3') OR id_field IS NULL)
-- Append BigQuery audit-log rows to the usage table: per job, the query
-- text, billed bytes, an estimated cost (bytes / 1 TiB * $6), a Query/ETL
-- classification, plus tables and filter columns scraped from the SQL text
-- with regexes (best-effort parsing).
INSERT `reby-cloud.bq_logs.bigquery_usage_datepart` 
SELECT
    timestamp AS Date,
    resource.labels.project_id AS ProjectId,
    protopayload_auditlog.serviceName AS ServiceName,
    protopayload_auditlog.methodName AS MethodName,
    protopayload_auditlog.status.code AS ErrorCode,
    protopayload_auditlog.status.message AS ErrorMessage,
    protopayload_auditlog.authenticationInfo.principalEmail AS UserId,
    logName AS JobId,
    JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query") AS Query,
     JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") AS DestinationTableId,
    CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) AS BillableBytes,
    (CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) / 1099511627776) * 6 AS TotalCost,
    1 AS QueryCount,
     -- anonymous destination table => ad-hoc query; named table => ETL
     CASE
    WHEN JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") LIKE '%anon%' THEN 'Query'
    ELSE 'ETL'
  END AS JobType,
  -- distinct FROM/JOIN targets mentioned in the query text
  ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"),r"(?i)\s+(?:FROM|JOIN)\s+([^\s\(]+\.[^\s]+)") ) ) AS x
    ORDER BY
      x),', ') AS QueryTables,
      -- distinct columns referenced in WHERE/AND/OR/ON clauses (pivot noise removed)
      ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL( REGEXP_REPLACE( JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"), r"(?i)\s+(z_+pivot_[a-z0-9_.]+)", ""),r"(?i)\s+(?:WHERE|AND|OR|ON)\s+(?:\s|\(|CAST|`)*([a-z0-9_.]+)(?:AND)?") ) ) AS x
    ORDER BY
      x),', ') AS QueryWhereColumns
  FROM
    `reby-cloud.bq_logs.cloudaudit_googleapis_com_data_access`
  WHERE
    protopayload_auditlog.serviceName = 'bigquery.googleapis.com' and DATE(timestamp) >= '2021-11-28' # Change date to start of gap
  and DATE(timestamp) <= DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)
-- Replace every double quote in captions with a single quote.
-- WARNING(review): no WHERE clause — this intentionally touches EVERY row
-- of photos; confirm that is wanted before running.
UPDATE photos
   SET caption = REPLACE(caption,'"','\'')
-- Sanitise invheader.note: strip the listed punctuation characters, turn
-- spaces into hyphens, then collapse any resulting '--' into a single '-'.
-- Replacement order matches the original exactly (removals first, then
-- ' ' -> '-', then '--' -> '-'); MySQL escapes \" and \' kept as-is.
SELECT note as note_original,
    REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
    REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
    REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
    REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
    REPLACE(
        note,
        '\"', ''), '.', ''), '?', ''), '`', ''), '<', ''), '=', ''),
        '{', ''), '}', ''), '[', ''), ']', ''), '|', ''), '\'', ''),
        ':', ''), ';', ''), '~', ''), '!', ''), '@', ''), '#', ''),
        '$', ''), '%', ''), '^', ''), '&', ''), '*', ''), '_', ''),
        '+', ''), ',', ''), '/', ''), '(', ''), ')', ''), '-', ''),
        '>', ''),
        ' ', '-'),
        '--', '-') as note_changed
FROM invheader
-- SQL Server: rename a database (bracketed names are placeholders).
ALTER DATABASE [servername/databasename] MODIFY NAME = [servername/newdatabasename]
/* AQUI CREARE LA TABLA */
-- MySQL/InnoDB schema: lessons, their questions, and lesson sections.
CREATE DATABASE fejesus;

CREATE TABLE leccion (
    id_leccion INT NOT NULL AUTO_INCREMENT, 
    titulo VARCHAR(30),
    instructor VARCHAR(50),
    no_leccion INT,

    PRIMARY KEY(id_leccion),
    INDEX(no_leccion)
    
) ENGINE=INNODB;

DESCRIBE leccion;

-- NOTE(review): id_seccion is indexed but has no FOREIGN KEY to seccion
-- (seccion is created later in this script) — confirm whether the
-- constraint was intentionally omitted.
CREATE TABLE preguntas (
    id_pregunta INT NOT NULL AUTO_INCREMENT,
    pregunta VARCHAR(200),
    verso VARCHAR(40),
    id_leccion INT NOT NULL,
    id_seccion INT NOT NULL,

    PRIMARY KEY (id_pregunta),
    INDEX(id_leccion),
    INDEX(id_seccion),

    FOREIGN KEY (id_leccion)
        REFERENCES leccion(id_leccion)
        ON UPDATE CASCADE ON DELETE RESTRICT
    
) ENGINE=INNODB;

DESCRIBE preguntas;

CREATE TABLE seccion ( 
    id_seccion INT NOT NULL AUTO_INCREMENT, 
    leccion_id INT NOT NULL, 
    titulo VARCHAR(100), 

    PRIMARY KEY(id_seccion), 
    INDEX (leccion_id), 
        
    FOREIGN KEY (leccion_id) 
        REFERENCES leccion(id_leccion) ON UPDATE CASCADE ON DELETE RESTRICT

) ENGINE=INNODB;

DESCRIBE seccion;
-- Postgres: sort rows in an explicit custom order ('f','p','i','a').
ORDER BY array_position(ARRAY['f', 'p', 'i', 'a']::varchar[], x_field)
-- TimescaleDB: refresh the continuous aggregate hourly over a sliding
-- window from 1 month ago up to 1 day ago.
SELECT add_continuous_aggregate_policy('conditions_summary_daily',
     start_offset => INTERVAL '1 month',
     end_offset => INTERVAL '1 day',
     schedule_interval => INTERVAL '1 hour');
-- TimescaleDB continuous aggregate: daily avg/max/min temperature per device.
CREATE MATERIALIZED VIEW conditions_summary_daily
WITH (timescaledb.continuous) AS
SELECT device,
   time_bucket(INTERVAL '1 day', time) AS bucket,
   AVG(temperature),
   MAX(temperature),
   MIN(temperature)
FROM conditions
GROUP BY device, bucket;
-- Sequence expected by Hibernate's default id generator.
CREATE SEQUENCE public.hibernate_sequence INCREMENT 1 START 1 MINVALUE 1;
-- Cancel (state = 4) an entire workflow execution tree by id.
-- Look the id up by subject first, then apply it below.
select id from workflowexecution where subject like '%FOZG-069338%';

update workflowexecution set state = 4, cancellationDate=CURRENT_TIMESTAMP where id = 34470;

-- FIX: removed the redundant "(select id from workflowexecution where id = N)"
-- wrappers — they always evaluate to N itself.
update workflowdefinitionexecution set state = 4 , cancellationDate=CURRENT_TIMESTAMP where workflowexecution_id = 34470;

update workflowstepexecution set state = 4 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = 34470);
-- Mark an entire workflow execution tree as finished (state = 1).
select id from workflowexecution where subject like '%OZLO-0907%';

update workflowexecution set state = 1, enddate = CURRENT_TIMESTAMP where id = 34168;

update workflowdefinitionexecution set state = 1, enddate = CURRENT_TIMESTAMP where workflowexecution_id = 34168;

update workflowstepexecution set state = 1 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = 34168); 
-- All column metadata for the Customers table.
SELECT *
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = N'Customers'
 // EF Core: declare the one-to-many relation Branch -> InternetUsersLog
 // via the Branch_Id foreign key.
 protected override void OnModelCreating(ModelBuilder builder)
        {
            base.OnModelCreating(builder);

            builder.Entity<InternetUsersLog>()
           .HasOne<AspNetBranchs>(sc => sc.Branch)
           .WithMany(s => s.Users)
           .HasForeignKey(sc => sc.Branch_Id);


        }
// Reverse-engineer the database into entity classes + DbContext.
// WARNING(review): connection string embeds a plaintext password in source,
// and combines User ID/Password with Integrated Security=True — confirm.
Scaffold-DbContext "Data Source=.;Initial Catalog=Name;Persist Security Info=True;User ID=Username;Password=@!n$p!r3;Integrated Security=True" Microsoft.EntityFrameWorkCore.SqlServer -outputdir Repository/Models -context AccDbContext -contextdir Repository -DataAnnotations -Force
-- BigQuery: total estimated cost per query/user in March 2022, most
-- expensive first.
SELECT Query, UserId, SUM(TotalCost) FROM `reby-cloud.bq_logs.bigquery_usage_datepart` 
WHERE DATE(Date) >= "2022-03-01" and DATE(Date) <= "2022-03-30" 
GROUP BY 1,2
ORDER BY 3 DESC
-- Find columns containing 'cost' in the EDM-PRD catalog.
SELECT Table_Name, 
    Column_Name
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_CATALOG = 'EDM-PRD'
   AND COLUMN_NAME LIKE '%cost%';
-- Postgres: do two date ranges overlap? (OVERLAPS treats ranges as
-- half-open, so ranges merely touching at 2022-02-10 do not overlap.)
SELECT (date '2022-01-09', date '2022-02-10') OVERLAPS
       (date '2022-02-10', date '2022-03-10');
-- BigQuery script: rebuild misc_eu.user_consent — for each user migrated
-- from a franchise app to the Reby app, derive WHEN consent was obtained
-- (consent_timestamp) and from WHICH app (consent_from), relative to the
-- migration cutoff below.
-- NOTE(review): depends on temp_eu.delete_first_ride_reserve being built
-- first by the companion script.
DECLARE fecha_migracion TIMESTAMP;
SET fecha_migracion = '2021-06-16 00:52:53.131944 UTC';

drop table if exists misc_eu.user_consent;
create or replace table `reby-cloud.misc_eu.user_consent` as 
with 
-- earliest session per company_user
first_user_session as (
    SELECT
    us.*,
    cu.id as cu_id,
    co.name as app_company_name,
    co.id as company_id,
    row_number() over(partition by cu.id order by us.created_at asc) as rank_
FROM `reby-cloud.analytics_reby_v1_eu.pg_user_session` us
LEFT JOIN `reby-cloud.reby_marketing_eu.pg_company_user` cu
    on us.company_id = cu.company_id and us.user_id = cu.user_id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
    on cu.company_id = co.id
QUALIFY rank_ = 1
),
-- franchise company ids (names kept in the commented variants)
--t1 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t1 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
--t2 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t2 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
all_users as (select id from `reby-cloud.analytics_reby_v1_eu.pg_users_json`),

-- each company_user row enriched with its company name and consent flag
company_users as (
  select
    cu.id,
    cu.user_id,
    co.name as from_company,
    cu.created_at,
    cu.company_id,
    cu.is_deleted,
    consent.consent_from_reby
  from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_consent` consent
      on cu.id = consent.company_user_id
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
      on cu.company_id = co.id
),
-- every (from_company, to_company, user) combination
cross_joins as (
SELECT t1.franquicias as from_co_id, t2.franquicias as to_co_id, all_users.id as user_id
FROM t1 CROSS JOIN t2 CROSS JOIN all_users group by 1,2,3),

-- attach the user's company_user row on both sides of the migration
cross_cus as (
select
  from_co_id,
  to_co_id,
  cu1.consent_from_reby,
  cj.user_id,
  cu1.id as from_co_cu_id,
  cu1.created_at as from_co_cu_id_created,
  cu2.id as to_co_cu_id,
  cu2.created_at as to_co_cu_id_created,
from cross_joins cj
left join company_users cu1 on cj.from_co_id = cu1.company_id and cj.user_id = cu1.user_id
left join company_users cu2 on cj.to_co_id = cu2.company_id and cj.user_id = cu2.user_id
),

-- keep only users migrating from a franchise into Reby who exist on both sides
filtered_users as (
select *
from cross_cus
where to_co_id = 'c_3qteetfhxjshx4j54111'
  and from_co_cu_id is not null
  and from_co_id != 'c_3qteetfhxjshx4j54111'
  and to_co_cu_id is not null
),

-- add first ride/reservation timestamp and first session in the source app
join_first_ride_reserve_session as (
select
  fu.*,
  frr.created_at as ts_frr,
  fs1.created_at as first_session_from_co,
--  fs2.created_at as first_session_to_co,
  if(from_co_cu_id_created <= fecha_migracion,'pre-migracion','post-migracion') as pre_post
from filtered_users fu
left join temp_eu.delete_first_ride_reserve frr 
  on frr.vehicle_company_id = fu.from_co_id and frr.app_co_id = fu.to_co_id and fu.user_id = frr.user_id
left join first_user_session fs1 on fs1.cu_id = fu.from_co_cu_id 
--left join first_user_session fs2 on fs2.cu_id = fu.to_co_cu_id
),

-- pick the consent timestamp depending on pre/post-migration evidence
consent_ts as (
select
  jfrrs.*,
  case
    when pre_post = 'pre-migracion' then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is null  and consent_from_reby is true then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is not null and ts_frr is not null then ts_frr
    when pre_post = 'post-migracion' and first_session_from_co is not null and consent_from_reby is true and ts_frr is null then timestamp_add(greatest(to_co_cu_id_created,from_co_cu_id_created),INTERVAL 3 HOUR)
    else null
  end as consent_timestamp,
  row_number() over (partition by from_co_id,to_co_id,consent_from_reby,user_id) as rn_
from join_first_ride_reserve_session jfrrs
QUALIFY rn_ = 1
)

select
  *,
  case
    when consent_timestamp is not null and pre_post = 'pre-migracion' and abs(timestamp_diff(from_co_cu_id_created,first_session_from_co,HOUR))<3 then 'consent-from-co-app'
    when consent_timestamp is not null and pre_post = 'pre-migracion' then 'consent-from-reby-app'
    when consent_timestamp is not null and pre_post = 'post-migracion' then 'consent-from-reby-app'
  else 'other'
  end as consent_from
from consent_ts

--GET THE FIRST VEHICLE RESERVATION PER USER/APP/VEHICLE
-- Builds temp_eu.delete_first_ride_reserve: for each (user, vehicle company,
-- app company), the earliest timestamp among the user's first reservation
-- and first ride — consumed by the user_consent script.
create or replace table temp_eu.delete_first_ride_reserve as 

with first_reservations as (
    select
        vrh.created_at,
        vrh.user_id,
        vrh.company_vehicle_id as vehicle_company_id,
        vrh.company_id as app_co_id,
        co1.name as vehicle_company_name,
        co2.name as app_company_name,
        row_number() over (partition by vrh.user_id,vrh.company_vehicle_id,vrh.company_id order by vrh.created_at asc) as r_n
    from `reby-cloud.analytics_reby_v1_eu.pg_vehicle_reservation_history` vrh
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co1
    on vrh.company_vehicle_id = co1.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2
    -- BUG FIX: app_company_name must come from the app's company_id; the
    -- original joined co2 on company_vehicle_id, duplicating the vehicle
    -- owner's name into app_company_name.
    on vrh.company_id = co2.id
    QUALIFY r_n = 1
),
--FINISH

--GET THE FIRST RIDE PER USER/APP/VEHICLE
-- NULL company ids default to Reby's company id
rides as (
    select 
        id,
        created_at,
        user_id,
        if(vehicle_company_id is null,'c_3qteetfhxjshx4j54111',vehicle_company_id) as vehicle_company_id,
        if(company_id is null,'c_3qteetfhxjshx4j54111',company_id) as company_id
    from analytics_reby_v1_eu.pg_rides_json
),

first_ride_pre as (
    select
        r.id,
        r.user_id,
        r.created_at as ride_date,
        r.vehicle_company_id,
        r.company_id as app_co_id,
        co.name as vehicle_company_name,
        co2.name as app_company_name,
        cu.id as cu_id_vehicle,
        cu2.id as cu_id_app,
        --row_number() over (partition by r.user_id,r.vehicle_company_id order by r.created_at asc) as rank_
    from rides r
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.user_id = cu.user_id and r.vehicle_company_id = cu.company_id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu2 on r.user_id = cu2.user_id and r.company_id = cu2.company_id
),

first_ride_per_app_vehicle as (
select *, row_number() over (partition by user_id, app_company_name, vehicle_company_name order by ride_date asc) as rank_ from first_ride_pre qualify rank_ = 1
),
--FINISH

--GET FIRST DATE BETWEEN FIRST RIDE AND/OR FIRST RESERVATION
first_date_ride_reserv as (
    select
        user_id,
        vehicle_company_name,
        app_company_name,
        created_at,
        vehicle_company_id,
        app_co_id,
        row_number() over (partition by user_id, app_company_name, vehicle_company_name order by created_at asc) as rank_
        from(
            --UNION BETWEEN RIDES AND RESERVATIONS
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                created_at,
                vehicle_company_id,
                app_co_id
            from first_reservations 
            UNION ALL
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                ride_date as created_at,
                vehicle_company_id,
                app_co_id
            from first_ride_per_app_vehicle
        ) QUALIFY rank_=1
)

select * from first_date_ride_reserv
--FINISH
;
DECLARE fecha_migracion TIMESTAMP;
SET fecha_migracion = '2021-06-16 00:52:53.131944 UTC';

-- Rebuild misc_eu.user_consent: for each user of the listed franchise
-- companies, derive when (and through which app) they granted marketing
-- consent to Reby around the franchise -> Reby migration.
drop table if exists misc_eu.user_consent;
create or replace table `reby-cloud.misc_eu.user_consent` as 
with 
-- Earliest app session per company_user (QUALIFY rank_ = 1 keeps the first).
first_user_session as (
    SELECT
    us.*,
    cu.id as cu_id,
    co.name as app_company_name,
    co.id as company_id,
    row_number() over(partition by cu.id order by us.created_at asc) as rank_
FROM `reby-cloud.analytics_reby_v1_eu.pg_user_session` us
LEFT JOIN `reby-cloud.reby_marketing_eu.pg_company_user` cu
    on us.company_id = cu.company_id and us.user_id = cu.user_id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
    on cu.company_id = co.id
QUALIFY rank_ = 1
),
-- Company ids (per the commented names: Avant, IberScot, TuCycle, Oiz, Reby).
-- The same list is declared twice so "from" and "to" companies can be crossed.
--t1 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t1 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
--t2 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t2 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
all_users as (select id from `reby-cloud.analytics_reby_v1_eu.pg_users_json`),

-- company_user rows enriched with the company name and the consent flag.
company_users as (
  select
    cu.id,
    cu.user_id,
    co.name as from_company,
    cu.created_at,
    cu.company_id,
    cu.is_deleted,
    consent.consent_from_reby
  from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_consent` consent
      on cu.id = consent.company_user_id
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
      on cu.company_id = co.id
),
-- Every (from_company, to_company, user) combination.
cross_joins as (
SELECT t1.franquicias as from_co_id, t2.franquicias as to_co_id, all_users.id as user_id
FROM t1 CROSS JOIN t2 CROSS JOIN all_users group by 1,2,3),

-- Attach the user's company_user row (id + creation time) on both sides of
-- each from/to pair; NULL when the user never belonged to that company.
cross_cus as (
select
  from_co_id,
  to_co_id,
  cu1.consent_from_reby,
  cj.user_id,
  cu1.id as from_co_cu_id,
  cu1.created_at as from_co_cu_id_created,
  cu2.id as to_co_cu_id,
  cu2.created_at as to_co_cu_id_created,
from cross_joins cj
left join company_users cu1 on cj.from_co_id = cu1.company_id and cj.user_id = cu1.user_id
left join company_users cu2 on cj.to_co_id = cu2.company_id and cj.user_id = cu2.user_id
),

-- Keep only users migrated INTO Reby (to_co = c_3qte...) from a different
-- company, where both memberships actually exist.
filtered_users as (
select *
from cross_cus
where to_co_id = 'c_3qteetfhxjshx4j54111'
  and from_co_cu_id is not null
  and from_co_id != 'c_3qteetfhxjshx4j54111'
  and to_co_id is not null
),

-- Add first ride/reserve time and first session in the origin company.
-- NOTE(review): fecha_migracion is not defined in this statement; it
-- presumably is a column of temp_eu.delete_first_ride_reserve -- confirm.
join_first_ride_reserve_session as (
select
  fu.*,
  frr.created_at as ts_frr,
  fs1.created_at as first_session_from_co,
--  fs2.created_at as first_session_to_co,
  if(from_co_cu_id_created <= fecha_migracion,'pre-migracion','post-migracion') as pre_post
from filtered_users fu
left join temp_eu.delete_first_ride_reserve frr 
  on frr.vehicle_company_id = fu.from_co_id and frr.app_co_id = fu.to_co_id and fu.user_id = frr.user_id
left join first_user_session fs1 on fs1.cu_id = fu.from_co_cu_id 
--left join first_user_session fs2 on fs2.cu_id = fu.to_co_cu_id
),

-- Derive the consent timestamp per migration scenario and dedupe.
-- NOTE(review): the ROW_NUMBER has no ORDER BY, so which duplicate row
-- survives is nondeterministic -- confirm that is acceptable.
consent_ts as (
select
  jfrrs.*,
  case
    when pre_post = 'pre-migracion' then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is null  and consent_from_reby is true then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is not null and ts_frr is not null then ts_frr
    when pre_post = 'post-migracion' and first_session_from_co is not null and consent_from_reby is true and ts_frr is null then timestamp_add(greatest(to_co_cu_id_created,from_co_cu_id_created),INTERVAL 3 HOUR)
    else null
  end as consent_timestamp,
  row_number() over (partition by from_co_id,to_co_id,consent_from_reby,user_id) as rn_
from join_first_ride_reserve_session jfrrs
QUALIFY rn_ = 1
)

-- Final labelling: which app the consent came from.
select
  *,
  case
    when consent_timestamp is not null and pre_post = 'pre-migracion' and abs(timestamp_diff(from_co_cu_id_created,first_session_from_co,HOUR))<3 then 'consent-from-co-app'
    when consent_timestamp is not null and pre_post = 'pre-migracion' then 'consent-from-reby-app'
    when consent_timestamp is not null and pre_post = 'post-migracion' then 'consent-from-reby-app'
  else 'other'
  end as consent_from
from consent_ts

-- Generic SELECT statement template showing the REQUIRED clause order.
-- The original listed ORDER BY / LIMIT before GROUP BY / HAVING, which is
-- invalid SQL: grouping and group filtering come before ordering and
-- row limiting.
SELECT DISTINCT column_list
FROM table_list
  JOIN table ON join_condition
WHERE row_filter        -- filters rows before grouping
GROUP BY column         -- groups the filtered rows
HAVING group_filter     -- filters groups after aggregation
ORDER BY column         -- orders the final result
LIMIT count OFFSET offset;
-- Code language: SQL (Structured Query Language)
-- Split a comma-separated [City] column into one row per city using the
-- XML .nodes() technique (for SQL Server versions without STRING_SPLIT).
IF object_id('tempdb..#table') IS NOT NULL
BEGIN
   DROP TABLE #table
END

Create Table #table(
	[State] varchar(500),
	[City] varchar(500)
);


Insert into #table ([State], [City]) values ('OH', 'Toledo,Columbus');
Insert into #table ([State], [City]) values ('TN', 'Nashville,Memphis');



-- Wrap each city in <M> elements, cast to XML, then shred with .nodes().
-- NOTE(review): values containing XML special characters (&, <, >) would
-- make the CAST fail -- confirm input is plain text.
SELECT A.[State]
	,Split.a.value('.', 'VARCHAR(100)') AS String
FROM (
	SELECT [State]
		,CAST('<M>' + REPLACE([City], ',', '</M><M>') + '</M>' AS XML) AS String
	FROM #table
	) AS A
CROSS APPLY String.nodes('/M') AS Split(a);
-- Fragment: one-year look-back filter on o.createdate.
-- NOTE(review): CURRENT_DATE is not valid in T-SQL (use CAST(GETDATE() AS
-- date) there); valid in Postgres/MySQL -- confirm the target dialect.
where o.createdate >= dateadd(year, -1, current_date)
-- Camper counts per age, most common age first.
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*) DESC;
-- Camper counts per age, least common age first.
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*);
-- Campers assigned to one counselor. Fixed: string literals take single
-- quotes; double quotes ("Ashley") denote an identifier in standard SQL
-- and only work as a string under MySQL's non-ANSI default mode.
SELECT COUNT(*) FROM campers WHERE counselor='Ashley';
-- COUNT(column_name) counts only non-NULL values of that column
-- (COUNT(*) counts all rows).
SELECT COUNT(column_name) FROM table_name;
-- Last known location for the 200 users in the AEPD/Avant extract.
-- Coordinates older than 190 days are redacted (returned as NULL) while
-- the timestamp itself is kept.
SELECT 
    cu.id as user_id,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,longitude) longitude,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,latitude) latitude,
    last_location_at
FROM `reby-cloud.alvaro_misc.pg_user_last_location` ull
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on cu.company_id = ull.company_id and cu.user_id = ull.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
;
-- Consent timestamp and source for the 200 users in the AEPD/Avant extract
-- (inner join restricts misc_eu.user_consent to that cohort).
select
    uc.from_co_cu_id as user_id,
    uc.consent_timestamp,
    uc.consent_from
from `reby-cloud.misc_eu.user_consent` uc
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = uc.from_co_cu_id
-- Ride details (time, duration, distance, cost, start/end points, battery)
-- for the AEPD/Avant cohort, restricted to rides on Avant vehicles.
select * from (
select
    r.id,
    cu.id as user_id,
    r.created_at as tiempo_utc,
    r.minutes,
    r.distance/1000 as distancia_km,
    r.cost[offset(0)].amount as coste,
    -- First and last points of the GPS path; safe_offset returns NULL for
    -- empty paths instead of erroring.
    r.path[safe_offset(0)].latitude as latitude_initial,
    r.path[safe_offset(0)].longitude as longitude_initial,
    r.path[safe_offset(array_length(path)-1)].latitude as latitude_final,
    r.path[safe_offset(array_length(path)-1)].longitude as longitude_final,
    r.starting_battery_level,
    r.ending_battery_level,
    co.name as vehicle_company,
    co2.name as app_company
from `reby-cloud.analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.company_id = cu.company_id and r.user_id = cu.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
--order by r.created_at asc
) where vehicle_company = 'Avant'
-- Count rows where the concatenation of the listed columns contains the
-- search text ('?' here is a literal character, not a wildcard).
-- Fixed: each cast is wrapped in IsNull so a single NULL column no longer
-- NULLs the whole concatenation and silently excludes the row.
Select Count(*)
From   mySchema.myTable
Where  IsNull(Cast(Col1 As NVarChar(128)), N'') +
       IsNull(Cast(Col2 As NVarChar(128)), N'') +
       IsNull(Cast(Coln As NVarChar(128)), N'') Like '%?%'
-- INSERT template: always pair an explicit column list with VALUES so the
-- statement keeps working when columns are added or reordered.
INSERT INTO TableName (
  Field1,
  Field2,
  Field3
) VALUES (
  'Value1',
  22,
  GETDATE()
)
-- DateFrom rendered in short GB format (dd/MM/yyyy) for rows from 2020 on.
-- Fixed: the literal '01/01/2020' is interpreted per the session's
-- DATEFORMAT/language (could be Jan 1 or a conversion error); 'yyyymmdd'
-- is unambiguous in T-SQL regardless of settings.
SELECT Suref, FORMAT(DateFrom, 'd', 'en-gb') AS 'Great Britain English format'
FROM [dbo].[BOCClientIndex - AdultServiceDetails]
WHERE [DateFrom] >= '20200101'
Order by DateFrom
INNER JOIN:
is used when retrieving data from multiple
tables and will return only matching data.

LEFT OUTER JOIN:
is used when retrieving data from
multiple tables and will return
left table and any matching right table records.

RIGHT OUTER JOIN:
is used when retrieving data from
multiple tables and will return right
table and any matching left table records

FULL OUTER JOIN:
is used when retrieving data from
multiple tables and will return both
table records, matching and non-matching.



INNER JOIN :
SELECT select_list From TableA A
Inner Join TableB B
On A.Key = B.Key


LEFT OUTER JOIN :
SELECT select_list From TableA A
Left Join TableB B
On A.Key = B.Key

(add WHERE B.Key IS NULL to keep only the rows in A that have no match in B)



RIGHT OUTER JOIN :
SELECT select_list From TableA A
Right Join TableB B
On A.Key = B.Key


FULL JOIN :
SELECT select_list From TableA A
FULL OUTER Join TableB B
On A.Key = B.Key

-- Vehicle log points within 1/10 mile of any school (spatial join via
-- ST_DWithin). NOTE(review): the original header mentioned zip codes, but
-- this query actually joins the fleet logs against school_locations.
-- The comma join is BigQuery's idiom for spatial joins with ST_DWithin.
SELECT * 
FROM
 (SELECT
        vehicle,
        date1,
        code,
        metric,
        safe_cast(lon as FLOAT64) as lon,
        safe_cast(lat as FLOAT64) as lat,
        ST_GeogPoint(safe_cast(lon as FLOAT64), safe_cast(lat as FLOAT64)) AS point,
    FROM 
        `nytint-prd.gfx_nyc_fleet_logs.test_log_subset_3mo_geo`
    -- Discard rows with out-of-range coordinates before building points.
    WHERE ABS(safe_cast(lat as FLOAT64)) < 90
    AND ABS(safe_cast(lon as FLOAT64)) < 180) as logs,
    `nytint-prd.gfx_nyc_fleet_logs.school_locations` as schools
WHERE ST_DWithin(
        ST_GeogPoint(safe_cast(schools.LONGITUDE as FLOAT64), safe_cast(schools.LATITUDE as FLOAT64)),
        logs.point,
        1609.34/10)  -- 1609.34 m = 1 mile; radius is 1/10 mile in metres
limit 100
-- how many stations within 1 mile range of each zip code?
-- Counts citibike stations whose point lies within 1 mile (1609.34 m) of
-- each zip-code polygon; comma join + ST_DWithin is BigQuery's spatial
-- join idiom. ANY_VALUE keeps the polygon without grouping by it.
SELECT
    zip_code AS zip,
    ANY_VALUE(zip_code_geom) AS polygon,
    COUNT(*) AS bike_stations
FROM
    `bigquery-public-data.new_york.citibike_stations` AS bike_stations,
    `bigquery-public-data.geo_us_boundaries.zip_codes` AS zip_codes
WHERE ST_DWithin(
         zip_codes.zip_code_geom,
         ST_GeogPoint(bike_stations.longitude, bike_stations.latitude),
         1609.34)
GROUP BY zip
ORDER BY bike_stations DESC
-- Add a GEOGRAPHY point column built from the lon/lat columns.
-- NOTE: ST_GeogPoint takes (longitude, latitude) in that order.
SELECT
  *,
  ST_GeogPoint(pLongitude, pLatitude) AS p
FROM
  mytable
<?xml version="1.0" encoding="utf-8"?>
<!-- SSMS code snippet "bese": prints the execution-summary footer (elapsed
     and completion times). Relies on a @started DATETIME declared by the
     companion execution-summary start snippet. -->
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bese - BuildExectionSummaryEnd</Title>
      <Description>execution plan end</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
endofscript:

PRINT '--------------------------------------------------';
PRINT ' ';
PRINT 'elapsed time '+CONVERT(VARCHAR, GETDATE() - @started, 108);
PRINT ' ';
PRINT 'completed '+CONVERT(VARCHAR, GETDATE(), 108);
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<!-- SSMS code snippet "best": prints a banner block for a script title
     (fill in the blank PRINT line with the title text). -->
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>best - BuildExectionSummaryTitle</Title>
      <Description>add title to execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '     '
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<!-- SSMS code snippet "bess" (step): a numbered, timed step template.
     Relies on @step, @stepdescription, @executionsummary and @started from
     the start snippet. The "IF @step = @step" guard is an always-true
     placeholder; edit the condition to make a step skippable. -->
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummaryStep</Title>
      <Description>executionplan step</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT ' ';
SET @stepdescription = '';
SET @executionsummary = @executionsummary + CHAR(13) + ' - ' + @stepdescription;
PRINT CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription;
PRINT ' ';
IF @step = @step
BEGIN
	--------------------------------------------------------
	--START

	SELECT 1;

	--END
	--------------------------------------------------------
END
PRINT ' ';
PRINT 'END ' + CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription + ' : ' + CONVERT(VARCHAR, GETDATE() - @started, 108);
SET @step += 1;
PRINT ' ';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<!-- SSMS code snippet: execution-summary START. Declares and initialises
     the @started/@step/@stepdescription/@executionsummary variables used
     by the step and footer snippets. NOTE(review): the Title prefix "bess"
     duplicates the step snippet's shortcut -- confirm intended. -->
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummary</Title>
      <Description>execution plan start</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '--------------------------------------------';
PRINT '--Execution Summary-------------------------';
PRINT '--------------------------------------------';
PRINT '
-  
';
PRINT '--------------------------------------------';
PRINT '--END Execution Summary---------------------';
PRINT '--------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
DECLARE
       @started          DATETIME
     , @step             INT
     , @stepdescription  VARCHAR(500)
	 , @executionsummary VARCHAR(max);

SET @started = GETDATE();
SET @step = 1;
SET @stepdescription='';
SET @executionsummary='';
PRINT 'started '+CONVERT(VARCHAR, @started, 108);

PRINT ' ';
PRINT '--------------------------------------------------';


]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<!-- SSMS code snippet "besg": prints the @executionsummary text accumulated
     by the step snippets. -->
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>besg - BuildExecutionSummaryGenerate</Title>
      <Description>prints out the generated execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '-------------------------------------------------------';
PRINT '--Generated Execution Summary--------------------------';
PRINT '-------------------------------------------------------';
PRINT @executionsummary;
PRINT '----------------------------------------------------------';
PRINT '--END Generated Execution Summary-------------------------';
PRINT '----------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
-- Drop-and-recreate dbo.Split_Int: splits a delimited VARCHAR list into a
-- table of INT values with a 0-based [Index] identity column.
IF EXISTS (
    SELECT * FROM sysobjects WHERE id = object_id(N'Split_Int') 
    AND xtype IN (N'FN', N'IF', N'TF')
)
    DROP FUNCTION [dbo].[Split_Int]
GO

SET ANSI_NULLS ON
GO

SET QUOTED_IDENTIFIER ON
GO

-- @InputList: delimited list, e.g. '1,2,3,4'. @Delimiter: single character.
-- NOTE(review): items are inserted into an INT column via implicit cast, so
-- non-numeric items raise a conversion error -- confirm inputs are numeric.
CREATE FUNCTION [dbo].[Split_Int]
       (
         @InputList VARCHAR(8000) = '',
         @Delimiter NVARCHAR(1) = ','
       )
RETURNS @List TABLE
       (
         [Index] INT NOT NULL
                        IDENTITY(0, 1)
                        PRIMARY KEY,
         [Value] INT NULL
       )
AS BEGIN




	DECLARE @Item varchar(8000)
	-- Peel off one delimited, trimmed item per iteration until no
	-- delimiter remains in @InputList.
    WHILE CHARINDEX(@Delimiter, @InputList, 0) <> 0
          BEGIN
                SELECT  @Item = RTRIM(LTRIM(SUBSTRING(@InputList, 1,
                                                       CHARINDEX(@Delimiter, @InputList, 0)
                                                       - 1))) ,
                        @InputList = RTRIM(LTRIM(SUBSTRING(@InputList,
                                                            CHARINDEX(@Delimiter, @InputList, 0)
                                                            + LEN(@Delimiter),
                                                            LEN(@InputList))));

							
 
                -- Skip empty items (e.g. produced by ',,').
                IF LEN(@Item) > 0 
                   INSERT   INTO @List
                            SELECT  @Item
          END

    IF LEN(@InputList) > 0 
       INSERT   INTO @List
                SELECT  @InputList -- Put the last item in
   

RETURN
END



GO


-- Example: four comma-separated items -> four rows.
select * from dbo.Split_Int('1,2,3,4',',')

-- Example: tab delimiter with whitespace-only input -> no rows.
select * from dbo.Split_Int('	','	')
-- Find every column whose name contains 'MyName', with its table.
-- Fixed: single-quoted column aliases ('ColumnName') are a deprecated
-- T-SQL form; use plain identifiers with AS instead.
SELECT      c.name  AS ColumnName
            ,t.name AS TableName
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%MyName%'
ORDER BY    TableName
            ,ColumnName;
-- Round-trip a Clarion "standard date" (an INT day count where day 4 =
-- 1801-01-01) to an SQL DATETIME and back.
DECLARE @ClarionDate INT = 47563
DECLARE @SqlDateTime DATETIME 

-- Convert the clarion DATE into and SQL DateTime
SET @SqlDateTime = DateAdd(day, @ClarionDate  - 4, '1801-01-01') 

SELECT @SqlDateTime AS 'SQL Date Time'

-- Now convert it back from and SQL DateTime to a Clarion Date
SET @ClarionDate = DateDiff(day, DateAdd(day, -4, '1801-01-01'), @SqlDateTime)
SELECT @ClarionDate AS 'Clarion Date'
-- POST a JSON body over HTTP from T-SQL via the MSXML2.XMLHTTP COM object.
-- NOTE(review): requires 'Ole Automation Procedures' to be enabled; this
-- widens the server's attack surface -- confirm it is acceptable here.
-- NOTE(review): @ResponseText is VARCHAR(8000); longer responses will be
-- truncated. The 'false' async argument is passed as a string -- confirm
-- the COM call coerces it as intended.
DECLARE @Object AS INT;
DECLARE @ResponseText AS VARCHAR(8000);
DECLARE @Body AS VARCHAR(8000) = 
'{
    "what": 1,
    "ever": "you",
    "need": "to send as the body"
}'  

EXEC sp_OACreate 'MSXML2.XMLHTTP', @Object OUT;
EXEC sp_OAMethod @Object, 'open', NULL, 'post','http://requestb.in/1h83e3n1', 'false'

EXEC sp_OAMethod @Object, 'setRequestHeader', null, 'Content-Type', 'application/json'
EXEC sp_OAMethod @Object, 'send', null, @body

-- Read back the response body into the output variable.
EXEC sp_OAMethod @Object, 'responseText', @ResponseText OUTPUT
SELECT @ResponseText

-- Always release the COM object to avoid leaking it.
EXEC sp_OADestroy @Object
-- (Re)create the calendar dimension table. The TRY/CATCH swallows the
-- error raised when [Dim].[Date] does not exist yet.
BEGIN TRY
    DROP TABLE [Dim].[Date]
END TRY

BEGIN CATCH
    /*No Action*/
END CATCH

CREATE TABLE [Dim].[Date]
(
    [DateKey] INT primary key, -- yyyymmdd surrogate key
    [Date] DATETIME,
    [FullDate] CHAR(10),-- Date in MM-dd-yyyy format
    [DayOfMonth] VARCHAR(2), -- Field will hold day number of Month
    [DaySuffix] VARCHAR(4), -- Apply suffix as 1st, 2nd ,3rd etc
    [DayName] VARCHAR(9), -- Contains name of the day, Sunday, Monday 
    [DayOfWeek] CHAR(1),-- First Day Sunday=1 and Saturday=7
    [DayOfWeekInMonth] VARCHAR(2), --1st Monday or 2nd Monday in Month
    [DayOfWeekInYear] VARCHAR(2),
    [DayOfQuarter] VARCHAR(3), 
    [DayOfYear] VARCHAR(3),
    [WeekOfMonth] VARCHAR(1),-- Week Number of Month 
    [WeekOfQuarter] VARCHAR(2), --Week Number of the Quarter
    [WeekOfYear] VARCHAR(2),--Week Number of the Year
    [Month] VARCHAR(2), --Number of the Month 1 to 12
    [MonthName] VARCHAR(9),--January, February etc
    [MonthOfQuarter] VARCHAR(2),-- Month Number belongs to Quarter
    [Quarter] CHAR(1),
    [QuarterName] VARCHAR(9),--First,Second..
    [Year] CHAR(4),-- Year value of Date stored in Row
    [YearName] CHAR(7), --CY 2012,CY 2013
    [MonthYear] CHAR(10), --Jan-2013,Feb-2013
    [MMYYYY] CHAR(6),
    [FirstDayOfMonth] DATE,
    [LastDayOfMonth] DATE,
    [FirstDayOfQuarter] DATE,
    [LastDayOfQuarter] DATE,
    [FirstDayOfYear] DATE,
    [LastDayOfYear] DATE,
    [IsHoliday] BIT,-- Flag 1=National Holiday, 0-No National Holiday
    [IsWeekday] BIT,-- 0=Week End ,1=Week Day
    -- Fixed: removed the trailing comma after the last column -- a comma
    -- before the closing parenthesis is a syntax error in T-SQL.
    [HolidayName] VARCHAR(50)--Name of Holiday in US
)
GO

--=========================================================================================
--Specify Start Date and End date here
--Value of Start Date Must be Less than Your End Date 
--=========================================================================================

-- NOTE(review): 'MM/dd/yyyy' literals depend on the session DATEFORMAT;
-- 'yyyymmdd' would be unambiguous -- confirm the session settings.
DECLARE @StartDate DATETIME = '12/29/2014' --Starting value of Date Range
DECLARE @EndDate DATETIME = '01/01/2100' --End Value of Date Range

--Temporary Variables To Hold the Values During Processing of Each Date of Year
DECLARE
    @DayOfWeekInMonth INT,
    @DayOfWeekInYear INT,
    @DayOfQuarter INT,
    @WeekOfMonth INT,
    @CurrentYear INT,
    @CurrentMonth INT,
    @CurrentQuarter INT

/*Table Data type to store the day of week count for the month and year*/
DECLARE @DayOfWeek TABLE
(
    DOW INT,
    MonthCount INT,
    QuarterCount INT,
    YearCount INT
)

-- One counter row per weekday (DATEPART(DW): 1=Sunday .. 7=Saturday);
-- the counts track the Nth occurrence of each weekday in month/quarter/year.
INSERT INTO @DayOfWeek VALUES (1, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (2, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (3, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (4, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (5, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (6, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (7, 0, 0, 0)
--Extract and assign various parts of Values from Current Date to Variable

DECLARE @CurrentDate AS DATETIME = @StartDate
SET @CurrentMonth = DATEPART(MM, @CurrentDate)
SET @CurrentYear = DATEPART(YY, @CurrentDate)
SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)

/********************************************************************************************/
--Proceed only if Start Date(Current date) is less than End date you specified above

-- One iteration per calendar day in [@StartDate, @EndDate).
WHILE @CurrentDate < @EndDate
/*Begin day of week logic*/
BEGIN
    /*Check for Change in Month of the Current date if Month changed then 
    Change variable value*/
    -- Reset the per-month weekday counters at a month boundary.
    IF @CurrentMonth != DATEPART(MM, @CurrentDate) 
    BEGIN
        UPDATE @DayOfWeek
        SET [MonthCount] = 0
        SET @CurrentMonth = DATEPART(MM, @CurrentDate)
    END

    /* Check for Change in Quarter of the Current date if Quarter changed then change 
        Variable value*/
    IF @CurrentQuarter != DATEPART(QQ, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET [QuarterCount] = 0
        SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)
    END

    /* Check for Change in Year of the Current date if Year changed then change 
        Variable value*/
    IF @CurrentYear != DATEPART(YY, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET YearCount = 0
        SET @CurrentYear = DATEPART(YY, @CurrentDate)
    END

    -- Set values in table data type created above from variables
    -- Bump the occurrence counters for today's weekday.
    UPDATE @DayOfWeek
    SET 
        MonthCount = MonthCount + 1,
        QuarterCount = QuarterCount + 1,
        YearCount = YearCount + 1
    WHERE DOW = DATEPART(DW, @CurrentDate)

    SELECT
        @DayOfWeekInMonth = MonthCount,
        @DayOfQuarter = QuarterCount,
        @DayOfWeekInYear = YearCount
    FROM @DayOfWeek
    WHERE DOW = DATEPART(DW, @CurrentDate)
    
/*End day of week logic*/


/* Populate Your Dimension Table with values*/
    
    -- NOTE(review): INSERT without a column list -- the SELECT below must
    -- stay in exact CREATE TABLE column order.
    INSERT INTO [Dim].[Date]
    SELECT
        
        CONVERT (char(8),@CurrentDate,112) as 'DateKey',
        @CurrentDate AS 'Date',
        CONVERT (char(10),@CurrentDate,101) as 'FullDate',
        DATEPART(DD, @CurrentDate) AS 'DayOfMonth',
        --Apply Suffix values like 1st, 2nd 3rd etc..
        CASE 
            WHEN DATEPART(DD,@CurrentDate) IN (11,12,13) THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 1 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'st'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 2 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'nd'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 3 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'rd'
            ELSE CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th' 
        END AS 'DaySuffix',
        
        DATENAME(DW, @CurrentDate) AS 'DayName',
        DATEPART(DW, @CurrentDate) AS 'DayOfWeek',
        @DayOfWeekInMonth AS 'DayOfWeekInMonth',
        @DayOfWeekInYear AS 'DayOfWeekInYear',
        @DayOfQuarter AS 'DayOfQuarter',
        DATEPART(DY, @CurrentDate) AS 'DayOfYear',
        DATEPART(WW, @CurrentDate) + 1 - DATEPART(WW, CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)) + '/1/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'WeekOfMonth',
        (DATEDIFF(DD, DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0), @CurrentDate) / 7) + 1 AS 'WeekOfQuarter',
        DATEPART(WW, @CurrentDate) AS 'WeekOfYear',
        DATEPART(MM, @CurrentDate) AS 'Month',
        DATENAME(MM, @CurrentDate) AS 'MonthName',
        CASE
            WHEN DATEPART(MM, @CurrentDate) IN (1, 4, 7, 10) THEN 1
            WHEN DATEPART(MM, @CurrentDate) IN (2, 5, 8, 11) THEN 2
            WHEN DATEPART(MM, @CurrentDate) IN (3, 6, 9, 12) THEN 3
        END AS 'MonthOfQuarter',
        DATEPART(QQ, @CurrentDate) AS 'Quarter',
        CASE DATEPART(QQ, @CurrentDate)
            WHEN 1 THEN 'First'
            WHEN 2 THEN 'Second'
            WHEN 3 THEN 'Third'
            WHEN 4 THEN 'Fourth'
        END AS 'QuarterName',
        DATEPART(YEAR, @CurrentDate) AS 'Year',
        'CY ' + CONVERT(VARCHAR, DATEPART(YEAR, @CurrentDate)) AS 'YearName',
        LEFT(DATENAME(MM, @CurrentDate), 3) + '-' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MonthYear',
        RIGHT('0' + CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)),2) + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MMYYYY',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, @CurrentDate) - 1), @CurrentDate))) AS 'FirstDayOfMonth',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, (DATEADD(MM, 1, @CurrentDate)))), DATEADD(MM, 1, @CurrentDate)))) AS 'LastDayOfMonth',
        DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0) AS 'FirstDayOfQuarter',
        DATEADD(QQ, DATEDIFF(QQ, -1, @CurrentDate), -1) AS 'LastDayOfQuarter',
        CONVERT(DATETIME, '01/01/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'FirstDayOfYear',
        CONVERT(DATETIME, '12/31/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'LastDayOfYear',
        -- Holiday columns are filled in by the UPDATE pass below.
        NULL AS 'IsHoliday',
        CASE DATEPART(DW, @CurrentDate)
            WHEN 1 THEN 0
            WHEN 2 THEN 1
            WHEN 3 THEN 1
            WHEN 4 THEN 1
            WHEN 5 THEN 1
            WHEN 6 THEN 1
            WHEN 7 THEN 0
        END AS 'IsWeekday',
        NULL AS 'HolidayName'

    SET @CurrentDate = DATEADD(DD, 1, @CurrentDate)
END
        
--============================================================================
-- Step 3.
-- Update Values of Holiday as per USA Govt. Declaration for National Holiday.
--============================================================================

/*Update HOLIDAY Field of USA In dimension*/
-- BUG FIX throughout this section: [DayOfWeek] stores DATEPART(DW) digits
-- (1-7, CHAR(1)); weekday NAMES live in [DayName]. The original compared
-- [DayOfWeek] to 'Monday'/'Sunday'/'Thursday', which never matches, so no
-- weekday-based holiday was ever set. All such predicates now use [DayName].

    /* New Years Day - January 1 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'New Year''s Day'
    WHERE [Month] = 1 AND [DayOfMonth] = 1

    /* Martin Luther King, Jr. Day - Third Monday in January starting in 1983 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Martin Luther King, Jr. Day'
    WHERE
        [Month] = 1 AND
        [DayName] = 'Monday' AND
        [Year] >= 1983 AND
        DayOfWeekInMonth = 3

    /* Valentine's Day - February 14 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Valentine''s Day'
    WHERE
        [Month] = 2 AND
        [DayOfMonth] = 14

    /* President's Day - Third Monday in February */
    UPDATE [Dim].[Date]
        SET HolidayName = 'President''s Day'
    WHERE
        [Month] = 2 AND
        [DayName] = 'Monday' AND
        [DayOfWeekInMonth] = 3

    /* Saint Patrick's Day */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Saint Patrick''s Day'
    WHERE
        [Month] = 3 AND
        [DayOfMonth] = 17

    /* Memorial Day - Last Monday in May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Memorial Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MAX(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'May' AND
            [DayName] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Mother's Day - Second Sunday of May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Mother''s Day'
    WHERE
        [Month] = 5 AND
        [DayName] = 'Sunday' AND
        [DayOfWeekInMonth] = 2

    /* Father's Day - Third Sunday of June */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Father''s Day'
    WHERE
        [Month] = 6 AND
        [DayName] = 'Sunday' AND
        [DayOfWeekInMonth] = 3

    /* Independence Day */
    -- Fixed spelling of the stored holiday name ('Independance' -> 'Independence').
    UPDATE [Dim].[Date]
        SET HolidayName = 'Independence Day'
    WHERE [Month] = 7 AND [DayOfMonth] = 4

    /* Labor Day - First Monday in September */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Labor Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MIN(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'September' AND
            [DayName] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Columbus Day - Second MONDAY in October */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Columbus Day'
    WHERE
        [Month] = 10 AND
        [DayName] = 'Monday' AND
        [DayOfWeekInMonth] = 2

    /* Halloween - 10/31 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Halloween'
    WHERE
        [Month] = 10 AND
        [DayOfMonth] = 31

    /* Veterans Day - November 11 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Veterans Day'
    WHERE
        [Month] = 11 AND
        [DayOfMonth] = 11
    
    /* Thanksgiving - Fourth THURSDAY in November */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Thanksgiving Day'
    WHERE
        [Month] = 11 AND
        [DayName] = 'Thursday' AND
        [DayOfWeekInMonth] = 4

    /* Christmas */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Christmas Day'
    WHERE [Month] = 12 AND
          [DayOfMonth]  = 25
    
    /* Election Day - The first Tuesday after the first Monday in November */
    BEGIN
    DECLARE @Holidays TABLE
    (
        [ID] INT IDENTITY(1,1),
        [DateID] INT,
        [Week] TINYINT,
        [Year] CHAR(4),
        [Day] CHAR(2)
    )

        -- Collect every November Monday.
        -- BUG FIX: weekday names live in [DayName]; [DayOfWeek] holds the
        -- DATEPART(DW) digit 1-7, so the original predicate never matched.
        INSERT INTO @Holidays([DateID], [Year], [Day])
            SELECT
                [DateKey],
                [Year],
                [DayOfMonth] 
            FROM [Dim].[Date]
            WHERE
                [Month] = 11 AND 
                [DayName] = 'Monday'
            ORDER BY
                [Year],
                [DayOfMonth]

        DECLARE @CNTR INT,
                @POS INT,
                @STARTYEAR INT,
                @ENDYEAR INT,
                @MINDAY INT

        -- NOTE(review): @CURRENTYEAR resolves to the @CurrentYear declared
        -- earlier in this script (T-SQL variable names are case-insensitive).
        SELECT @CURRENTYEAR = MIN([Year])
             , @STARTYEAR = MIN([Year])
             , @ENDYEAR = MAX([Year])
        FROM @Holidays

        -- Rank each year's Mondays 1..N into [Week].
        WHILE @CURRENTYEAR <= @ENDYEAR
        BEGIN
            SELECT @CNTR = COUNT([Year])
            FROM @Holidays
            WHERE [Year] = @CURRENTYEAR

            SET @POS = 1

            WHILE @POS <= @CNTR
            BEGIN
                -- BUG FIX: [Day] is CHAR(2), so MIN([Day]) compared strings
                -- lexicographically ('16' < '2'), mis-ranking the Mondays.
                -- Cast to INT so the true earliest unranked Monday is picked.
                SELECT @MINDAY = MIN(CAST([Day] AS INT))
                FROM @Holidays
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Week] IS NULL

                UPDATE @Holidays
                    SET [Week] = @POS
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Day] = @MINDAY

                SELECT @POS = @POS + 1
            END

            SELECT @CURRENTYEAR = @CURRENTYEAR + 1
        END

        -- Election Day = the day after each year's first Monday
        -- (DateID + 1 works because DateKey is a contiguous yyyymmdd
        -- sequence within a month and the first Monday is at most day 7).
        UPDATE [Dim].[Date]
            SET HolidayName  = 'Election Day'
        FROM [Dim].[Date] DT
            JOIN @Holidays HL ON (HL.DateID + 1) = DT.DateKey
        WHERE
            [Week] = 1
    END
    --set flag for USA holidays in Dimension
    UPDATE [Dim].[Date]
        SET IsHoliday = CASE WHEN HolidayName IS NULL THEN 0
                                WHEN HolidayName IS NOT NULL THEN 1 END

/*****************************************************************************************/

/* Add Fiscal Calendar columns into table DimDate */
-- These columns are populated by the fiscal (4-4-5) calendar loop that
-- follows this statement.

ALTER TABLE [Dim].[Date] ADD
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] CHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] CHAR(4),
    [FiscalYearName] CHAR(7),
    [FiscalMonthYear] CHAR(10),
    [FiscalMMYYYY] CHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE

GO

/***************************************************************************
The following section needs to be populated for defining the fiscal calendar
***************************************************************************/

DECLARE
    @dtFiscalYearStart SMALLDATETIME = 'December 29, 2014',
    @FiscalYear INT = 2015,
    @LastYear INT = 2100,
    @FirstLeapYearInPeriod INT = 2012

/*****************************************************************************************/

-- Working variables for the fiscal-calendar generation loop.
DECLARE
    @iTemp INT,
    @LeapWeek INT,
    @CurrentDate DATETIME,
    @FiscalDayOfYear INT,
    @FiscalWeekOfYear INT,
    @FiscalMonth INT,
    @FiscalQuarter INT,
    @FiscalQuarterName VARCHAR(10),
    @FiscalYearName VARCHAR(7),
    @LeapYear INT,
    @FiscalFirstDayOfYear DATE,
    @FiscalFirstDayOfQuarter DATE,
    @FiscalFirstDayOfMonth DATE,
    @FiscalLastDayOfYear DATE,
    @FiscalLastDayOfQuarter DATE,
    @FiscalLastDayOfMonth DATE

/*Holds the years that have 455 in last quarter*/

DECLARE @LeapTable TABLE (leapyear INT)

/*TABLE to contain the fiscal year calendar*/

DECLARE @tb TABLE
(
    [PeriodDate] DATETIME,
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] VARCHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] VARCHAR(4),
    [FiscalYearName] VARCHAR(7),
    [FiscalMonthYear] VARCHAR(10),
    [FiscalMMYYYY] VARCHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE
)

/*Populate the table with all leap years*/
-- NOTE(review): fiscal "leap" (53-week) years are assumed to occur every
-- 6 years starting from @FirstLeapYearInPeriod -- confirm against the
-- organisation's actual fiscal calendar.

SET @LeapYear = @FirstLeapYearInPeriod
WHILE (@LeapYear < @LastYear)
    BEGIN
        INSERT INTO @leapTable VALUES (@LeapYear)
        SET @LeapYear = @LeapYear + 6
    END

/*Initiate parameters before loop*/

SET @CurrentDate = @dtFiscalYearStart
SET @FiscalDayOfYear = 1
SET @FiscalWeekOfYear = 1
SET @FiscalMonth = 1
SET @FiscalQuarter = 1
SET @FiscalWeekOfYear = 1

-- @LeapWeek = 1 when the starting fiscal year is a 53-week year.
IF (EXISTS (SELECT * FROM @LeapTable WHERE @FiscalYear = leapyear))
    BEGIN
        SET @LeapWeek = 1
    END
    ELSE
    BEGIN
        SET @LeapWeek = 0
    END
/*******************************************************************************************/

/* Loop on days in interval*/
-- Walk one day at a time from @dtFiscalYearStart through @LastYear,
-- deriving the fiscal week/month/quarter from the running day counter
-- and inserting one @tb row per day.

WHILE (DATEPART(yy,@CurrentDate) <= @LastYear)
BEGIN
    
/*SET fiscal Month*/
    -- The first WHEN-list below is fully commented out (T-SQL block
    -- comments nest); only the 4-4-5 section is active. The CASE has no
    -- ELSE, but the week counter is reset before it can exceed 52+@LeapWeek.
    SELECT @FiscalMonth = CASE
        /* 
        /*Use this section for a 4-5-4 calendar.  
        Every leap year the result will be a 4-5-5*/
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 9 THEN 2 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 10 AND 13 THEN 3 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 22 THEN 5 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 23 AND 26 THEN 6 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 35 THEN 8 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 36 AND 39 THEN 9 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (48+@LeapWeek) THEN 11 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN (49+@LeapWeek) AND (52+@LeapWeek) THEN 12 /*4 weeks (5 weeks on leap year)*/
        */

        /*Use this section for a 4-4-5 calendar.  
        Every leap year the result will be a 4-5-5*/
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 8 THEN 2 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 9 AND 13 THEN 3 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 21 THEN 5 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 22 AND 26 THEN 6 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 34 THEN 8 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 35 AND 39 THEN 9 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (47+@leapWeek) THEN 11 /*4 weeks (5 weeks on leap year)*/
        WHEN @FiscalWeekOfYear BETWEEN (48 + @leapWeek) AND (52 + @leapWeek) THEN 12 /*5 weeks*/
        
    END

    /*SET Fiscal Quarter*/
    SELECT @FiscalQuarter = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 1
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 2
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 3
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 4
    END
    
    SELECT @FiscalQuarterName = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 'First'
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 'Second'
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 'Third'
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 'Fourth'
    END
    
    /*Set Fiscal Year Name*/
    SELECT @FiscalYearName = 'FY ' + CONVERT(VARCHAR, @FiscalYear)

    -- Day-level attributes only; month/quarter/year boundary dates are
    -- back-filled by the UPDATEs after the loop.
    INSERT INTO @tb
    (
        PeriodDate,
        FiscalDayOfYear,
        FiscalWeekOfYear,
        FiscalMonth,
        FiscalQuarter,
        FiscalQuarterName,
        FiscalYear,
        FiscalYearName
    ) VALUES (
        @CurrentDate,
        @FiscalDayOfYear,
        @FiscalWeekOfYear,
        @FiscalMonth,
        @FiscalQuarter,
        @FiscalQuarterName,
        @FiscalYear,
        @FiscalYearName
    )

    /*SET next day*/
    SET @CurrentDate = DATEADD(dd, 1, @CurrentDate)
    SET @FiscalDayOfYear = @FiscalDayOfYear + 1
    -- Week number derives from the day counter: days 1-7 -> week 1, etc.
    SET @FiscalWeekOfYear = ((@FiscalDayOfYear-1) / 7) + 1


    IF (@FiscalWeekOfYear > (52+@LeapWeek))
    BEGIN
        /*Reset a new year*/
        SET @FiscalDayOfYear = 1
        SET @FiscalWeekOfYear = 1
        SET @FiscalYear = @FiscalYear + 1
        IF (EXISTS (SELECT * FROM @leapTable WHERE @FiscalYear = leapyear))
        BEGIN
            SET @LeapWeek = 1
        END
        ELSE
        BEGIN
            SET @LeapWeek = 0
        END
    END
END

/********************************************************************************************/

/*Set first and last days of the fiscal months*/
-- Implicit comma joins replaced with explicit INNER JOIN .. ON; semantics
-- unchanged, but a join condition can no longer be dropped silently.
UPDATE t
SET
    FiscalFirstDayOfMonth = minmax.StartDate,
    FiscalLastDayOfMonth = minmax.EndDate
FROM @tb AS t
INNER JOIN
    (
        SELECT
            FiscalMonth,
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) AS StartDate,
            MAX(PeriodDate) AS EndDate
        FROM @tb
        GROUP BY
            FiscalMonth,
            FiscalQuarter,
            FiscalYear
    ) AS minmax
    ON  t.FiscalMonth = minmax.FiscalMonth
    AND t.FiscalQuarter = minmax.FiscalQuarter
    AND t.FiscalYear = minmax.FiscalYear

/*Set first and last days of the fiscal quarters*/

UPDATE t
SET
    FiscalFirstDayOfQuarter = minmax.StartDate,
    FiscalLastDayOfQuarter = minmax.EndDate
FROM @tb AS t
INNER JOIN
    (
        SELECT
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) AS StartDate,
            MAX(PeriodDate) AS EndDate
        FROM @tb
        GROUP BY
            FiscalQuarter,
            FiscalYear
    ) AS minmax
    ON  t.FiscalQuarter = minmax.FiscalQuarter
    AND t.FiscalYear = minmax.FiscalYear

/*Set first and last days of the fiscal years*/

UPDATE t
SET
    FiscalFirstDayOfYear = minmax.StartDate,
    FiscalLastDayOfYear = minmax.EndDate
FROM @tb AS t
INNER JOIN
    (
        SELECT FiscalYear, MIN(PeriodDate) AS StartDate, MAX(PeriodDate) AS EndDate
        FROM @tb
        GROUP BY FiscalYear
    ) AS minmax
    ON t.FiscalYear = minmax.FiscalYear

/*Set FiscalMonthYear, e.g. 'Jan-2015'*/
UPDATE @tb
SET
    FiscalMonthYear = 
        CASE FiscalMonth
        WHEN 1 THEN 'Jan'
        WHEN 2 THEN 'Feb'
        WHEN 3 THEN 'Mar'
        WHEN 4 THEN 'Apr'
        WHEN 5 THEN 'May'
        WHEN 6 THEN 'Jun'
        WHEN 7 THEN 'Jul'
        WHEN 8 THEN 'Aug'
        WHEN 9 THEN 'Sep'
        WHEN 10 THEN 'Oct'
        WHEN 11 THEN 'Nov'
        WHEN 12 THEN 'Dec'
        END + '-' + CONVERT(VARCHAR, FiscalYear)

/*Set FiscalMMYYYY, e.g. '012015'*/
UPDATE @tb
SET
    FiscalMMYYYY = RIGHT('0' + CONVERT(VARCHAR, FiscalMonth),2) + CONVERT(VARCHAR, FiscalYear)

/********************************************************************************************/

-- Copy the computed fiscal attributes onto the permanent date dimension,
-- matching on the calendar date. Rows of [Dim].[Date] outside the generated
-- range are left untouched.
UPDATE [Dim].[Date]
    SET FiscalDayOfYear = a.FiscalDayOfYear
      , FiscalWeekOfYear = a.FiscalWeekOfYear
      , FiscalMonth = a.FiscalMonth
      , FiscalQuarter = a.FiscalQuarter
      , FiscalQuarterName = a.FiscalQuarterName
      , FiscalYear = a.FiscalYear
      , FiscalYearName = a.FiscalYearName
      , FiscalMonthYear = a.FiscalMonthYear
      , FiscalMMYYYY = a.FiscalMMYYYY
      , FiscalFirstDayOfMonth = a.FiscalFirstDayOfMonth
      , FiscalLastDayOfMonth = a.FiscalLastDayOfMonth
      , FiscalFirstDayOfQuarter = a.FiscalFirstDayOfQuarter
      , FiscalLastDayOfQuarter = a.FiscalLastDayOfQuarter
      , FiscalFirstDayOfYear = a.FiscalFirstDayOfYear
      , FiscalLastDayOfYear = a.FiscalLastDayOfYear
FROM @tb a
    INNER JOIN [Dim].[Date] b ON a.PeriodDate = b.[Date]

/********************************************************************************************/

-- Visual inspection only: dumps the whole dimension after the update.
SELECT * FROM [Dim].[Date]
-- rebuild all indexes online
-- (ONLINE = ON requires Enterprise/Developer edition or Azure SQL)
ALTER INDEX ALL ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- rebuild single index online
ALTER INDEX IX_IndexName ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- all fragmented indexes on current db, % fragmentation > 30
-- (30% is the conventional threshold at which REBUILD is preferred
-- over REORGANIZE)
SELECT a.index_id, OBJECT_NAME(a.object_id), name, avg_fragmentation_in_percent  
FROM sys.dm_db_index_physical_stats (DB_ID(DB_NAME()), 
      NULL, NULL, NULL, NULL) AS a  
     JOIN sys.indexes AS b 
     ON a.object_id = b.object_id AND a.index_id = b.index_id
where avg_fragmentation_in_percent > 30
order by avg_fragmentation_in_percent desc
GO 
-- to understand who is doing what, alternative view/representation:
-- one row per currently-executing request, joined to its session, with the
-- full statement text (as XML so long statements are clickable in SSMS)
-- and the live query plan. System/background commands are filtered out.
SELECT
	CAST((SELECT qt.text FROM sys.dm_exec_sql_text(qs.sql_handle) AS qt FOR XML PATH('')) as xml) as query_text,
	qs.blocking_session_id,
	qs.start_time, 
	datediff(ss, qs.start_time, getdate()) as ExecutionTime_Seconds,
	getdate() as  CurrentDate,
	datediff(MINUTE, qs.start_time, getdate()) as ExecutionTime_Minutes,
	qs.session_id,
	qs.command,
	qs.status,
	qs.cpu_time, 
	qs.reads, 
	qs.writes, 
	qs.plan_handle,
	qp.query_plan,
	s.host_name, s.login_name, s.program_name,
	qs.wait_type, qs.open_transaction_count, qs.open_resultset_count, qs.row_count, qs.granted_query_memory, qs.transaction_isolation_level
	--,qs.*
FROM sys.dm_exec_requests AS qs
left join sys.dm_exec_sessions s on s.session_id = qs.session_id ---OUTER APPLY sys.dm_exec_sql_text(qs.sql_handle) AS qt
OUTER APPLY sys.dm_exec_query_plan(qs.plan_handle) AS qp
WHERE 	qs.session_id <> @@SPID
	-- (a duplicated 'TASK MANAGER' entry was removed from this list)
	and qs.command not in ('RESOURCE MONITOR', 'XE TIMER', 'XE DISPATCHER', 'LOG WRITER', 'LOCK MONITOR', 'TASK MANAGER', 'CHECKPOINT', 'BRKR TASK', 'LAZY WRITER', 'SIGNAL HANDLER', 'TRACE QUEUE TASK', 'BRKR EVENT HNDLR', 'GHOST CLEANUP', 'RECOVERY WRITER', 'SYSTEM_HEALTH_MONITOR', 'RECEIVE', 'UNKNOWN TOKEN', 'FT FULL PASS', 'FT CRAWL MON')
	and isnull(s.program_name, '') <> 'SQL diagnostic manager Collection Service'
ORDER BY ExecutionTime_Minutes DESC;
/* CHECK SIZE OF DB OBJECTS */
-- Top 10 tables/views/system objects by allocated size (MB). total_rows
-- counts only in-row data of the heap/clustered index (index_id 0 or 1).
SELECT TOP(10)
      o.[object_id]
    , obj = SCHEMA_NAME(o.[schema_id]) + '.' + o.name
    , o.[type]
    , i.total_rows
    , i.total_size
FROM sys.objects o
JOIN (
    SELECT
          i.[object_id]
        -- pages are 8 KB, hence * 8 / 1024 for MB
        , total_size = CAST(SUM(a.total_pages) * 8. / 1024 AS DECIMAL(18,2))
        , total_rows = SUM(CASE WHEN i.index_id IN (0, 1) AND a.[type] = 1 THEN p.[rows] END)
    FROM sys.indexes i
    JOIN sys.partitions p ON i.[object_id] = p.[object_id] AND i.index_id = p.index_id
    JOIN sys.allocation_units a ON p.[partition_id] = a.container_id
    WHERE i.is_disabled = 0
        AND i.is_hypothetical = 0
    GROUP BY i.[object_id]
) i ON o.[object_id] = i.[object_id]
WHERE o.[type] IN ('V', 'U', 'S')
ORDER BY i.total_size DESC;
/* Monitor query plans */
-- Top 50 cached plans by cumulative worker (CPU) time, with their SQL text.
SELECT
    highest_cpu_queries.plan_handle,  
    highest_cpu_queries.total_worker_time, 
    q.dbid, 
    q.objectid, 
    q.number, 
    q.encrypted, 
    q.[text] 
FROM 
    (SELECT TOP 50  
        qs.plan_handle,  
        qs.total_worker_time 
     FROM 
        sys.dm_exec_query_stats qs 
     ORDER BY qs.total_worker_time desc) AS highest_cpu_queries 
     CROSS APPLY sys.dm_exec_sql_text(plan_handle) AS q 
ORDER BY highest_cpu_queries.total_worker_time desc;
/* Find top 10 queries */
-- Top 10 query shapes (grouped by query_hash) by average CPU per execution.
-- The SUBSTRING/offset arithmetic extracts the individual statement out of
-- the containing batch text (offsets are byte positions, hence /2).
SELECT TOP 10 query_stats.query_hash AS "Query Hash", 
    SUM(query_stats.total_worker_time) / SUM(query_stats.execution_count) AS "Avg CPU Time",
    MIN(query_stats.statement_text) AS "Statement Text"
FROM 
    (SELECT QS.*, 
    SUBSTRING(ST.text, (QS.statement_start_offset/2) + 1,
    ((CASE statement_end_offset 
        WHEN -1 THEN DATALENGTH(st.text)  -- -1 means "to end of batch"
        ELSE QS.statement_end_offset END 
            - QS.statement_start_offset)/2) + 1) AS statement_text
     FROM sys.dm_exec_query_stats AS QS
     CROSS APPLY sys.dm_exec_sql_text(QS.sql_handle) as ST) as query_stats
GROUP BY query_stats.query_hash
ORDER BY 2 DESC;
GO
/* DMV to find useful indexes: */
-- Generates CREATE INDEX statements for indexes the optimizer wished it had,
-- ranked by an estimated "improvement_measure". Stats reset on instance
-- restart, so treat suggestions as hints, not commands.
PRINT 'Missing Indexes: '
PRINT 'The "improvement_measure" column is an indicator of the (estimated) improvement that might '
PRINT 'be seen if the index was created. This is a unitless number, and has meaning only relative '
PRINT 'the same number for other indexes. The measure is a combination of the avg_total_user_cost, '
PRINT 'avg_user_impact, user_seeks, and user_scans columns in sys.dm_db_missing_index_group_stats.'
PRINT ''
PRINT '-- Missing Indexes --'
SELECT CONVERT (varchar, getdate(), 126) AS runtime, 
  mig.index_group_handle, mid.index_handle, 
  CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) AS improvement_measure, 
  'CREATE INDEX missing_index_' + CONVERT (varchar, mig.index_group_handle) + '_' + CONVERT (varchar, mid.index_handle) 
  + ' ON ' + mid.statement 
  + ' (' + ISNULL (mid.equality_columns,'') 
    + CASE WHEN mid.equality_columns IS NOT NULL AND mid.inequality_columns IS NOT NULL THEN ',' ELSE '' END + ISNULL (mid.inequality_columns, '')
  + ')' 
  + ISNULL (' INCLUDE (' + mid.included_columns + ')', '') AS create_index_statement, 
  migs.*, mid.database_id, mid.[object_id]
FROM sys.dm_db_missing_index_groups mig
INNER JOIN sys.dm_db_missing_index_group_stats migs ON migs.group_handle = mig.index_group_handle
INNER JOIN sys.dm_db_missing_index_details mid ON mig.index_handle = mid.index_handle
WHERE CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) > 10
ORDER BY migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans) DESC
PRINT ''
GO
/* FIND UNUSED INDEXES - MIGHT AFFECT LOG WRITES */
-- Fixed: the original predicate was "... AND (s.index_id IS NULL) OR (...)".
-- AND binds tighter than OR, so the user_seeks/user_scans/user_lookups
-- branch bypassed the o.type = 'u' and i.type IN (1, 2) filters entirely.
-- Parentheses now group the two "unused" conditions together.
SELECT o.name Object_Name,
       i.name Index_name,
       i.Type_Desc
FROM sys.objects AS o
JOIN sys.indexes AS i
    ON o.object_id = i.object_id
LEFT OUTER JOIN sys.dm_db_index_usage_stats AS s
    ON i.object_id = s.object_id
    AND i.index_id = s.index_id
WHERE o.type = 'u'
-- Clustered and Non-Clustered indexes
AND i.type IN (1, 2)
AND (
    -- Indexes without stats (never touched since last restart)
    (s.index_id IS NULL) OR
    -- Indexes that have been updated but not used
    (s.user_seeks = 0 AND s.user_scans = 0 AND s.user_lookups = 0)
);
/* STORED PROCEDURES AND FUNCTIONS EXECUTION TIME, COUNT AND AVERAGE */
-- Per cached procedure: execution count, total CPU, and average CPU per
-- call (the * 1.0 forces decimal division).
SELECT DB_NAME(st.dbid) DBName
      ,OBJECT_SCHEMA_NAME(st.objectid,dbid) SchemaName
      ,OBJECT_NAME(st.objectid,dbid) StoredProcedure
      ,max(cp.usecounts) Execution_count
      ,sum(qs.total_worker_time) total_cpu_time
      ,sum(qs.total_worker_time) / (max(cp.usecounts) * 1.0)  avg_cpu_time
 
FROM sys.dm_exec_cached_plans cp join sys.dm_exec_query_stats qs on cp.plan_handle = qs.plan_handle
     CROSS APPLY sys.dm_exec_sql_text(cp.plan_handle) st
where DB_NAME(st.dbid) is not null and cp.objtype = 'proc'
group by DB_NAME(st.dbid),OBJECT_SCHEMA_NAME(objectid,st.dbid), OBJECT_NAME(objectid,st.dbid) 
order by sum(qs.total_worker_time) desc;
-- Cached statements ranked by average elapsed time per execution, with the
-- statement text carved out of its batch via the byte-offset arithmetic.
SELECT  creation_time 
        ,last_execution_time
        ,total_physical_reads
        ,total_logical_reads 
        ,total_logical_writes
        , execution_count
        , total_worker_time
        , total_elapsed_time
        , (total_elapsed_time / execution_count) avg_elapsed_time
        ,SUBSTRING(st.text, (qs.statement_start_offset/2) + 1,
         ((CASE statement_end_offset
          WHEN -1 THEN DATALENGTH(st.text)  -- -1 means "to end of batch"
          ELSE qs.statement_end_offset END
            - qs.statement_start_offset)/2) + 1) AS statement_text
FROM sys.dm_exec_query_stats AS qs
CROSS APPLY sys.dm_exec_sql_text(qs.sql_handle) st
WHERE execution_count > 10 -- filter out rare requests
ORDER BY total_elapsed_time / execution_count DESC;
-- Open connection count per database and login.
-- NOTE(review): sys.sysprocesses is a deprecated compatibility view;
-- sys.dm_exec_sessions is the modern equivalent.
SELECT DB_NAME(dbid) as "Database", COUNT(dbid) as "Number Of Open Connections",
loginame as LoginName
FROM sys.sysprocesses
WHERE dbid > 0
GROUP BY dbid, loginame;
-- Rows whose ID exists in table1, table2 and table3 alike
-- (bare JOIN and INNER JOIN are the same operation)
SELECT
    t1.ID,
    t1.NAME,
    t2.VALUE1,
    t3.VALUE1
FROM table1 AS t1
INNER JOIN table2 AS t2 ON t1.ID = t2.ID
INNER JOIN table3 AS t3 ON t1.ID = t3.ID
WHERE t1.ID >= 1000;
-- (Originally linked to an online SQL fiddle for trying the query above.)
-- Joining two tables in SQL:

-- Template: inner join two tables on a primary-key/foreign-key pair.
SELECT X.Column_Name , Y.Column_Name2
FROM TABLES1_NAME X 
INNER JOIN TABLES2_NAME Y ON X.Primary_key = Y.Foreign_key;


--FOR EXAMPLE
--GET THE FIRST_NAME AND JOB_TITLE
--USE EMPLOYEES AND JOBS TABLE
--THE RELATIONSHIP IS JOB_ID

SELECT E.FIRST_NAME , J.JOB_TITLE
FROM EMPLOYEES E
INNER JOIN JOBS J ON J.JOB_ID = E.JOB_ID;

/*
For this challenge you need to create a simple SELECT statement that will return all columns from the products table, and join to the companies table so that you can return the company name.

products table schema:
id
name
isbn
company_id
price

companies table schema:
id
name

You should return all product fields as well as the company name as "company_name".
*/

-- Every product column plus the producing company's name as "company_name".
SELECT p.*, c.name AS company_name
FROM products AS p
INNER JOIN companies AS c ON p.company_id = c.id
-- Find tables whose name contains 'sequence'.
SELECT * FROM INFORMATION_SCHEMA.Tables WHERE Table_Name LIKE '%sequence%'
-- Find every table containing a column named like 'formulatypeid'.
SELECT      c.name  AS 'ColumnName'
            ,t.name AS 'TableName'
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%formulatypeid%'
ORDER BY    TableName
            ,ColumnName;
-- NOTE(review): the statements below are an Oracle materialized-view
-- cheat-sheet / template, not runnable code - <table>, (...) and the empty
-- trigger body are placeholders to be filled in. A worked example follows
-- further down in this file.
create materialized view MV1    
build immediate    
refresh fast on commit    
as     
     
create materialized view log on <table>    
with sequence, rowId (...)    
including new values    
     
create table <table_name> (    
        DateMonth Date not null,    
        DateYear Date not null,    
        Name varchar(256) not null,    
        Total integer not null);    
     
insert into <table_name> () (select ...);    
     
create trigger trigger    
after insert or insert of on <table>    
for each row     
declare     
     
begin       
end    


-- Materialized-view logs must exist BEFORE a FAST REFRESH ON COMMIT view can
-- be created on the base tables, so the two CREATE ... LOG statements were
-- moved ahead of the CREATE MATERIALIZED VIEW.
CREATE MATERIALIZED VIEW LOG ON museums_tickets
WITH SEQUENCE, ROWID (id_time, ticket_type, num_tickets, Revenue)
INCLUDING NEW VALUES;
CREATE MATERIALIZED VIEW LOG ON TIMEDIM
WITH SEQUENCE, ROWID (id_time, Month, Year)
INCLUDING NEW VALUES;

-- Monthly ticket counts and revenue per ticket type.
-- NOTE(review): Oracle fast-refreshable join+aggregate MVs also require
-- COUNT(*) (and COUNT(expr) alongside each SUM) in the select list -
-- confirm against the Oracle fast-refresh requirements before relying on
-- ON COMMIT refresh.
create materialized view GROUPBYMonthYearMuseum
build immediate
refresh FAST ON COMMIT
--enable query rewrite
as
SELECT Month, Year, ticket_type , SUM(num_tickets) as NumTickets,
 SUM(revenue) as TotRevenue
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, ticket_type;
---- Trigger Manually
-- Hand-maintained equivalent of the materialized view above.
-- Fixed: the original DDL had a stray ')' after the first column
-- ("DateMonth DATE NOT NULL)," closed the column list prematurely).
CREATE TABLE VM1 (
DateMonth DATE NOT NULL,
DateYear INTEGER NOT NULL,
Ticket_Type VARCHAR(20) NOT NULL,
TOT_NumberOfTickets INTEGER,
TOT_Revenue INTEGER);

-- Initial population from the base tables.
INSERT INTO VM1 (DateMonth, DateYear, Ticket_Type, TOT_NumberOfTickets,
TOT_Revenue)
(SELECT Month, Year, Ticket_Type,
SUM(NumberOfTickets), SUM(Revenue)
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, Ticket_Type);
-- NOTE(review): incomplete fragment - this PL/SQL trigger was presumably
-- meant to keep VM1 in sync on inserts into museums_tickets, but the body
-- after BEGIN is missing from this snippet.
create TRIGGER TriggerForViewVM1
AFTER INSERT ON museums_tickets
FOR EACH ROW
DECLARE
N NUMBER;
VAR_DateMonth DATE;
VAR_DateYear NUMBER;
BEGIN
-- SQL Server: update local table [dbo].[TEST1] with names pulled from a
-- linked server (WZ12) via OPENQUERY, matching on EMP.
UPDATE MS 
    SET    MS.NAME = OC.NAME
    FROM   OPENQUERY(WZ12,
      'select EMP, NAME from Test') OC 
   INNER JOIN [dbo].[TEST1] MS 
     ON OC.EMP = MS.EMP
-- BigQuery SCD-style upsert of a dimension table from a staging table.
-- Fixed: hyphenated dataset names must be backtick-quoted in BigQuery, and
-- the stray spaces after "TEMP." were removed.
MERGE `my-dataset.dimension_table` AS MAIN
USING `my-dataset.temporary_table` AS TEMP
ON MAIN.PRD_SK = TEMP.PRD_SK
WHEN MATCHED THEN
    UPDATE SET MAIN.PRD_CATEGORY = TEMP.PRD_CATEGORY
WHEN NOT MATCHED THEN
    -- NOTE(review): TEMP.PRD_SK appeared twice in the original VALUES list;
    -- kept as-is - verify against the dimension table's column order.
    INSERT VALUES(TEMP.PRD_SK, TEMP.PRD_ID, TEMP.PRD_SK, TEMP.PRD_CATEGORY)
-- BigQuery / GA export: hits on the sign-in hostname from sessions of a
-- single day (4 days ago, via the _table_suffix wildcard filter).
with account_session_ste as (
SELECT 
*
FROM `table`
-- keep only sessions containing at least one hit on the sign-in host
where exists(select 1 from unnest(hits) h 
            where regexp_contains(h.page.hostname, r'signin.account.gov.uk'))
and _table_suffix = FORMAT_DATE('%Y%m%d', DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY))
)
-- group by 1,2)
-- Flatten: one output row per hit of each qualifying session.
select 
fullVisitorId AS user_id,
hits.page.hostname as hostname,   
hits.page.pagePath as pagePath,
hits.hitNumber as hit_number 
-- *
from account_session_ste, unnest(hits) as hits
-- BigQuery scripting: for each window length 0..27 days, count distinct
-- non-iOS clients active within that trailing window for every date in
-- 2021, then pivot so each window length becomes a column.
DECLARE Counter INT64;

BEGIN TRANSACTION;
    SET Counter = 0;
    CREATE TEMP TABLE tmp
    (
    dte date,
    days INT64,   -- trailing window length in days
    users INT64   -- distinct clientIds active in that window
    );
    
    WHILE Counter < 28 DO
        INSERT INTO tmp

        -- every report date in the range
        with dte as (
            SELECT dte
            FROM unnest(generate_date_array(date('2021-02-01'), date('2021-12-30'))) dte
        ),
        -- (clientId, session date) pairs, excluding iOS devices
        ids as (
            SELECT clientId,
            PARSE_DATE("%Y%m%d", date) as dte
            FROM `govuk-bigquery-analytics.87773428.ga_sessions_2021*`
        WHERE NOT device.operatingSystem = "iOS"
        )

        SELECT dte.dte as dte, max(Counter) as days, count( distinct ids.clientId) AS users
        FROM dte, ids
        WHERE ids.dte BETWEEN DATE_SUB(dte.dte, INTERVAL Counter DAY) and dte.dte
        group by 1
        order by 1 asc;

        SET Counter = Counter + 1;

    END WHILE;
COMMIT TRANSACTION;
--select * from tmp


-- One row per date, one column per trailing-window length.
SELECT * FROM
  (SELECT * FROM tmp)
  PIVOT(SUM(users) FOR days IN (0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27))
  order by 1 asc
-- Combinations of (field1, field2) that occur more than once, with their count.
SELECT
    "field1",
    "field2",
    COUNT(*)
FROM "tableName"
GROUP BY
    "field1",
    "field2"
HAVING COUNT(*) > 1
### MAKE DIR
# Working directory for the chunked output.
mkdir split && cd split

### SPLIT FILE
# Split the dump into 1000-line chunks named split-aa, split-ab, ...
split -l 1000 /<path>/output-google.sql /<path>/split/split-

### CHANGE EXTENTION
# Append .sql to every chunk so the files are recognised as SQL scripts.
ls | xargs -I % mv % %.sql
-- MySQL: full 4-byte UTF-8 storage with case-insensitive Unicode collation.
CREATE DATABASE mydatabase CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
-- Simple SQL Server work-queue table.
-- NOTE(review): popped_at is NOT NULL, which prevents representing
-- "enqueued but not yet popped" items as NULL - confirm this is intended.
CREATE TABLE dbo.my_queue
(
    id UNIQUEIDENTIFIER NOT NULL PRIMARY KEY,
    created_at SMALLDATETIME NOT NULL,
    popped_at SMALLDATETIME NOT NULL,
    errors VARCHAR(MAX) NULL,       -- failure details, if processing failed
    payload NVARCHAR(MAX) NOT NULL  -- the queued message body
);
-- Median via the sort-twice trick (take the last row of the first half).
-- NOTE(review): most engines (including MySQL) do not accept an aggregate
-- expression like count(*)/2 in LIMIT, and "table" is a reserved word -
-- this sketch needs a precomputed row count (or window functions) to run.
select * from (select * from table order by data_column asc limit count(*)/2) 
order by data_column desc limit 1;
' Rewrites the criteria embedded in saved query qry_Select_Sector so that the
' hard-coded status indicator and sector code match the values currently
' selected on the MainMenu_Services form, then (for testing) dumps the rows.
Private Sub Select_Sector()

Dim rs As DAO.Recordset

Dim RegEx As Object
Set RegEx = CreateObject("vbscript.regexp")

Dim qdef As QueryDef
' NOTE(review): getCurrentDb is presumably a project helper wrapping
' CurrentDb - confirm it exists; DAO itself only provides CurrentDb.
Set qdef = getCurrentDb.QueryDefs("qry_Select_Sector")
qdef.Connect = CurrentDb.TableDefs("BOCClientIndex").Connect


' Replace the numeric literal after the IIf(...) status expression.
RegEx.Pattern = "IIf\(\[ServiceStatus\]=3,30,20\)\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "IIf([ServiceStatus]=3,30,20))=" & [Forms]![MainMenu_Services]![SelectedStatusIndicator])

' Replace the numeric literal after the sector-code expression.
RegEx.Pattern = "\(View_qryServiceProviderOrganisationalStructure\.SectorCode\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "(View_qryServiceProviderOrganisationalStructure.SectorCode)=" & [Forms]![MainMenu_Services]![SectorCode])


'For Testing purposes only - Do not use in production code
Set rs = qdef.OpenRecordset

' Print the header row, then every record, to the Immediate window.
Dim i As Long
For i = 0 To rs.Fields.Count - 1
        Debug.Print rs.Fields(i).Name,
    Next
    rs.MoveFirst
    Do Until rs.EOF
        Debug.Print
        For i = 0 To rs.Fields.Count - 1
            Debug.Print rs.Fields(i).value,
        Next
        rs.MoveNext
    Loop
End Sub
  
-- Percent-decodes a URL-encoded string (e.g. '%20' -> ' ', UTF-8 aware).
-- Returns NULL for NULL input, and the original string unchanged if
-- decoding fails (e.g. invalid hex or broken UTF-8 sequences).
create or replace function decode_url(url text)
  returns text as
$BODY$
DECLARE result text;
BEGIN
    if url isnull then
        return null;
    end if;

    BEGIN
        -- Split the input into the plain segments and the %XX.. encoded runs,
        -- then interleave: plain[i] || decode(encoded[i]).
        with str AS (
            select
                   -- a leading %XX run needs an empty plain segment in front
                   case when url ~ '^%[0-9a-fA-F][0-9a-fA-F]'
                   then array['']
                   end
            || regexp_split_to_array(url, '(%[0-9a-fA-F][0-9a-fA-F])+', 'i') plain,

            array(select (regexp_matches(url, '((?:%[0-9a-fA-F][0-9a-fA-F])+)', 'gi'))[1]) encoded
            )

        -- Each %XX run is stripped of '%' and hex-decoded as UTF-8 bytes.
        select string_agg(plain[i] || coalesce(convert_from(decode(replace(encoded[i], '%',''), 'hex'), 'utf8'),''),'')
        from str, (select generate_series(1, array_upper(encoded, 1) + 2) i FROM str) serie
        into result;

    EXCEPTION WHEN OTHERS THEN
        -- best effort: on any decode error return the input untouched
        raise notice 'failed: %', url;
        return url;
    END;

    return coalesce(result, url);

END;

$BODY$
  LANGUAGE plpgsql IMMUTABLE STRICT;
-- NOTE(review): despite its name, this 1-argument overload checks whether
-- the table has a PRIMARY KEY constraint - it duplicates table_has_pk
-- below rather than testing for a column. Likely a copy-paste slip; kept
-- as-is because callers may already rely on the name.
CREATE OR REPLACE
FUNCTION table_has_column(tablename TEXT,
OUT res boolean)
 RETURNS boolean

AS $func$
-- DECLARE res boolean DEFAULT FALSE;

BEGIN
SELECT
    (count(constraint_name)>0)::boolean AS res
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

-- Returns TRUE when the named table has a PRIMARY KEY constraint
-- (per information_schema, any schema on the search path).
-- Fixed: the original declared "res boolean DEFAULT FALSE;" without the
-- DECLARE keyword, which is invalid plpgsql; with the OUT parameter the
-- extra local variable is unnecessary, so it was dropped entirely.
CREATE OR REPLACE
FUNCTION table_has_pk(tablename TEXT,
OUT res boolean) RETURNS boolean AS $func$

BEGIN
SELECT
    (count(constraint_name)>0)::boolean
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

-- Returns TRUE when the named table has the named column
-- (per information_schema.columns, any schema on the search path).
-- EXISTS replaces (count(..) > 0): it stops at the first match instead of
-- counting every qualifying row. Result is identical.
CREATE OR REPLACE
FUNCTION table_has_column(tablename TEXT,
columnname TEXT,
OUT res boolean)
 RETURNS boolean

AS $func$

BEGIN
SELECT EXISTS (
    SELECT 1
    FROM information_schema.columns
    WHERE table_name = tablename
      AND column_name = columnname
)
INTO res;
END;

$func$ LANGUAGE plpgsql;

-- Returns TRUE only when the table has EVERY listed column; short-circuits
-- on the first missing one. Callers still invoke it variadically, e.g.
-- table_has_columns('t', 'a', 'b', 'c').
-- Fixes vs original: VARIADIC parameters must be arrays (TEXT[], not TEXT);
-- 'res' was declared twice (duplicate-variable error); the loop was closed
-- with END instead of END LOOP; and no value was ever returned.
CREATE OR REPLACE
FUNCTION table_has_columns(tablename TEXT,
VARIADIC columnname TEXT[]) RETURNS boolean AS $func$

DECLARE
  x TEXT;
  res boolean DEFAULT TRUE;

BEGIN
  FOREACH x IN ARRAY columnname LOOP
      EXIT WHEN res = FALSE;
      SELECT table_has_column(tablename, x) INTO res;
  END LOOP;
  RETURN res;
END;

$func$ LANGUAGE plpgsql;
-- Builds a random string of n_length characters drawn from the
-- identifier-safe alphabet below (letters, digits, '_', '-').
-- Fixed: with n_length <= 0 generate_series yields no rows and string_agg
-- returned NULL; COALESCE now yields the saner empty string instead.
CREATE OR REPLACE FUNCTION
random_text(n_length INTEGER default 50)
RETURNS TEXT
LANGUAGE SQL
AS $$

select
coalesce(
    -- (random() * len + 0.5)::integer rounds into 1..len, indexing one char
    string_agg(substr(characters, (random() * length(characters) + 0.5)::integer, 1), ''),
    ''
) as random_word
from (values('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-')) as symbols(characters)
-- one joined row per output character
join generate_series(1, n_length) on 1 = 1;

$$;
user = User.first

result = user.user_books.in_order_of(:status, %w[to_read currently_reading read])

=> #<ActiveRecord::Relation [#<UserBook id: 3, user_id: 1, status: "to_read">, #<UserBook id: 4, user_id: 1, status: "to_read">, #<UserBook id: 5, user_id: 1, status: "currently_reading">, #<UserBook id: 6, user_id: 1, status: "read">]>
ASCII -- Returns the equivalent ASCII value for a specific character.

CHAR_LENGTH -- Returns the character length of a string.

CHARACTER_LENGTH -- Same as CHAR_LENGTH.

CONCAT -- Adds expressions together, with a minimum of 2.

CONCAT_WS -- Adds expressions together, but with a separator between each value.

FIELD -- Returns an index value relative to the position of a value within a list of values.

FIND_IN_SET -- Returns the position of a string within a comma-separated list of strings.

FORMAT -- When passed a number, returns that number formatted to include commas (eg 3,400,000).

INSERT -- Allows you to insert one string into another at a certain point, for a certain number of characters.

INSTR -- Returns the position of the first time one string appears within another.

LCASE -- Converts a string to lowercase.

LEFT -- Starting from the left, extracts the given number of characters from a string and returns them as another.

LENGTH -- Returns the length of a string, but in bytes.

LOCATE -- Returns the first occurrence of one string within another,

LOWER -- Same as LCASE.

LPAD -- Left pads one string with another, to a specific length.

LTRIM -- Removes any leading spaces from the given string.

MID -- Extracts one string from another, starting from any position.

POSITION -- Returns the position of the first time one substring appears within another.

REPEAT -- Allows you to repeat a string

REPLACE -- Allows you to replace any instances of a substring within a string, with a new substring.

REVERSE	-- Reverses the string.

RIGHT -- Starting from the right, extracts the given number of characters from a string and returns them as another.

RPAD -- Right pads one string with another, to a specific length.

RTRIM -- Removes any trailing spaces from the given string.

SPACE -- Returns a string full of spaces equal to the amount you pass it.

STRCMP -- Compares 2 strings for differences

SUBSTR -- Extracts one substring from another, starting from any position.

SUBSTRING -- Same as SUBSTR

SUBSTRING_INDEX	-- Returns a substring from a string before the passed substring is found the number of times equals to the passed number.

TRIM --	Removes trailing and leading spaces from the given string. Same as if you were to run LTRIM and RTRIM together.

UCASE -- Converts a string to uppercase.

UPPER -- Same as UCASE.
BIT(size) -- A bit-value type with a default of 1. The allowed number of bits in a value is set via the size parameter, which can hold values from 1 to 64.

TINYINT(size) -- A very small integer with a signed range of -128 to 127, and an unsigned range of 0 to 255. Here, the size parameter specifies the maximum allowed display width, which is 255.

BOOL -- Essentially a quick way of setting the column to TINYINT with a size of 1. 0 is considered false, whilst 1 is considered true.

BOOLEAN	-- Same as BOOL.

SMALLINT(size) -- A small integer with a signed range of -32768 to 32767, and an unsigned range from 0 to 65535. Here, the size parameter specifies the maximum allowed display width, which is 255.

MEDIUMINT(size) -- A medium integer with a signed range of -8388608 to 8388607, and an unsigned range from 0 to 16777215. Here, the size parameter specifies the maximum allowed display width, which is 255.

INT(size) -- A medium integer with a signed range of -2147483648 to 2147483647, and an unsigned range from 0 to 4294967295. Here, the size parameter specifies the maximum allowed display width, which is 255.

INTEGER(size) -- Same as INT.

BIGINT(size) -- A large integer with a signed range of -9223372036854775808 to 9223372036854775807, and an unsigned range from 0 to 18446744073709551615. Here, the size parameter specifies the maximum allowed display width, which is 255.

FLOAT(p) -- A floating point number value. If the precision (p) parameter is between 0 to 24, then the data type is set to FLOAT(), whilst if it's from 25 to 53, the data type is set to DOUBLE(). This behaviour is to make the storage of values more efficient.

DOUBLE(size, d) -- A floating point number value where the total digits are set by the size parameter, and the number of digits after the decimal point is set by the d parameter.

DECIMAL(size, d) -- An exact fixed point number where the total number of digits is set by the size parameters, and the total number of digits after the decimal point is set by the d parameter.

DEC(size, d) -- Same as DECIMAL.
% -- Equates to zero or more characters.
-- Example: Find all customers with surnames ending in ‘ory’.
SELECT * FROM customers
WHERE surname LIKE '%ory';

_ -- Equates to any single character.
-- Example: Find all customers living in cities beginning with any 3 characters, followed by ‘vale’.
SELECT * FROM customers
WHERE city LIKE '___vale';

[charlist] -- Equates to any single character in the list.
-- Example: Find all customers with first names beginning with J, K or T.
SELECT * FROM customers
WHERE first_name LIKE '[jkt]%';
BIN -- Returns the given number in binary.

BINARY -- Returns the given value as a binary string.

CAST -- Converts one type into another.

COALESCE -- From a list of values, returns the first non-null value.

CONNECTION_ID -- For the current connection, returns the unique connection ID.

CONV -- Converts the given number from one numeric base system into another.

CONVERT -- Converts the given value into the given datatype or character set.

CURRENT_USER -- Returns the user and hostname which was used to authenticate with the server.

DATABASE -- Gets the name of the current database.

GROUP BY -- Used alongside aggregate functions (COUNT, MAX, MIN, SUM, AVG) to group the results.

HAVING -- Used in the place of WHERE with aggregate functions.

IF -- If the condition is true it returns a value, otherwise it returns another value.

IFNULL -- If the given expression equates to null, it returns the given value.

ISNULL -- If the expression is null, it returns 1, otherwise returns 0.

LAST_INSERT_ID -- For the last row which was added or updated in a table, returns the auto increment ID.

NULLIF -- Compares the 2 given expressions. If they are equal, NULL is returned, otherwise the first expression is returned.

SESSION_USER -- Returns the current user and hostnames.

SYSTEM_USER -- Same as SESSION_USER.

USER -- Same as SESSION_USER.

VERSION -- Returns the current version of the MySQL powering the database.
ADDDATE -- Adds a date interval (eg: 10 DAY) to a date (eg: 20/01/20) and returns the result (eg: 20/01/30).

ADDTIME -- Adds a time interval (eg: 02:00) to a time or datetime (05:00) and returns the result (07:00).

CURDATE -- Gets the current date.

CURRENT_DATE -- Same as CURDATE.

CURRENT_TIME -- Gets the current time.

CURRENT_TIMESTAMP -- Gets the current date and time.

CURTIME -- Same as CURRENT_TIME.

DATE -- Extracts the date from a datetime expression.

DATEDIFF -- Returns the number of days between the 2 given dates.

DATE_ADD -- Same as ADDDATE.

DATE_FORMAT -- Formats the date to the given pattern.

DATE_SUB -- Subtracts a date interval (eg: 10 DAY) to a date (eg: 20/01/20) and returns the result (eg: 20/01/10).

DAY -- Returns the day for the given date.

DAYNAME -- Returns the weekday name for the given date.

DAYOFWEEK -- Returns the index for the weekday for the given date.

DAYOFYEAR -- Returns the day of the year for the given date.

EXTRACT -- Extracts from the date the given part (eg MONTH for 20/01/20 = 01).

FROM_DAYS -- Returns the date from the given numeric date value.

HOUR -- Returns the hour from the given date.

LAST_DAY -- Gets the last day of the month for the given date.

LOCALTIME -- Gets the current local date and time.

LOCALTIMESTAMP -- Same as LOCALTIME.

MAKEDATE -- Creates a date and returns it, based on the given year and number of days values.

MAKETIME -- Creates a time and returns it, based on the given hour, minute and second values.

MICROSECOND -- Returns the microsecond of a given time or datetime.

MINUTE -- Returns the minute of the given time or datetime.

MONTH -- Returns the month of the given date.

MONTHNAME -- Returns the name of the month of the given date.

NOW -- Same as LOCALTIME.

PERIOD_ADD -- Adds the given number of months to the given period.

PERIOD_DIFF -- Returns the difference between 2 given periods.

QUARTER -- Returns the year quarter for the given date.

SECOND -- Returns the second of a given time or datetime.

SEC_TO_TIME -- Returns a time based on the given seconds.

STR_TO_DATE -- Creates a date and returns it based on the given string and format.

SUBDATE -- Same as DATE_SUB.

SUBTIME -- Subtracts a time interval (eg: 02:00) to a time or datetime (05:00) and returns the result (03:00).

SYSDATE -- Same as LOCALTIME.

TIME -- Returns the time from a given time or datetime.

TIME_FORMAT -- Returns the given time in the given format.

TIME_TO_SEC -- Converts and returns a time into seconds.

TIMEDIFF -- Returns the difference between 2 given time/datetime expressions.

TIMESTAMP -- Returns the datetime value of the given date or datetime.

TO_DAYS -- Returns the total number of days that have passed from ‘00-00-0000’ to the given date.

WEEK -- Returns the week number for the given date.

WEEKDAY -- Returns the weekday number for the given date.

WEEKOFYEAR -- Returns the week number for the given date.

YEAR -- Returns the year from the given date.

YEARWEEK -- Returns the year and week number for the given date.
ABS -- Returns the absolute value of the given number.

ACOS -- Returns the arc cosine of the given number.

ASIN -- Returns the arc sine of the given number.

ATAN -- Returns the arc tangent of one or 2 given numbers.

ATAN2 -- Returns the arc tangent of 2 given numbers.

AVG -- Returns the average value of the given expression.

CEIL -- Returns the closest whole number (integer) upwards from a given decimal point number.

CEILING -- Same as CEIL.

COS -- Returns the cosine of a given number.

COT -- Returns the cotangent of a given number.

COUNT -- Returns the amount of records that are returned by a SELECT query.

DEGREES -- Converts a radians value to degrees.

DIV -- Allows you to divide integers.

EXP -- Returns e to the power of the given number.

FLOOR -- Returns the closest whole number (integer) downwards from a given decimal point number.

GREATEST -- Returns the highest value in a list of arguments.

LEAST -- Returns the smallest value in a list of arguments.

LN -- Returns the natural logarithm of the given number.

LOG -- Returns the natural logarithm of the given number, or the logarithm of the given number to the given base.

LOG10 -- Does the same as LOG, but to base 10.

LOG2 -- Does the same as LOG, but to base 2.

MAX -- Returns the highest value from a set of values.

MIN -- Returns the lowest value from a set of values.

MOD -- Returns the remainder of the given number divided by the other given number.

PI -- Returns PI.

POW -- Returns the value of the given number raised to the power of the other given number.

POWER -- Same as POW.

RADIANS -- Converts a degrees value to radians.

RAND -- Returns a random number.

ROUND -- Rounds the given number to the given amount of decimal places.

SIGN -- Returns the sign of the given number.

SIN -- Returns the sine of the given number.

SQRT -- Returns the square root of the given number.

SUM -- Returns the value of the given set of values combined.

TAN -- Returns the tangent of the given number.

TRUNCATE -- Returns a number truncated to the given number of decimal places.
-- BigQuery: collapse email-comms events to one row per ride/user/cost
-- combination, collecting each ride's (time_publish, phase) history into a
-- chronologically ordered STRUCT array.
with agg_ as (
SELECT
    ec.ride_id, 
    ev.ride_id as segment_received_ride,
    ev.user_id as segment_received_user_id,
    ev.full_cost,
    ev.service_area_name,
    ev.minutes,
    ev.company_id,
    ec.user_id,
    array_agg(struct(ec.time_publish,ec.phase) ORDER BY time_publish asc) as status
FROM `reby-cloud.temp_eu.email_comms` ec
LEFT JOIN `reby-safir.data_python.end_of_ride_email_v4_view` ev
    on ec.ride_id = ev.ride_id
group by 1,2,3,4,5,6,7,8
)

-- For each aggregated row, pull out the EARLIEST 'sent-to-segment' entry and
-- expand its fields as top-level columns (BigQuery allows selecting a scalar
-- STRUCT subquery and flattening it with ".*"). Rows with no such entry get
-- NULLs, which sort last in the final ORDER BY ... DESC.
select *
    , (
        select as struct 
            time_publish,
            phase
        from unnest(status) 
        where phase = 'sent-to-segment'
        order by time_publish asc
        limit 1
    ).*
from agg_
order by time_publish desc
left_df.merge(right_df, on='user_id', how='left', indicator=True)
create or replace table `temp_eu.delete_stripe_refunds` as (
select * from (
with pre_process as (
    SELECT
		*
		FROM
		`reby-cloud.reby_stripe.py_stripe_preprocessed`
		--where id in (select id from`reby-cloud.temp_eu.delete_stripe` where amount_refunded > 0 )
		--where customer = 'cus_GorMSGP2VREHQs'
		where rn = 1
		order by customer,id,rn asc
)

select
    case when refunds.charge is not null and charges.type ='refund' then refunds.balance_transaction else charges.balance_transaction
        end as balance_transaction,
    case when refunds.charge is not null and charges.type ='refund' then refunds.created_at else charges.created_at
        end as created_at,
    charges.charge_id,
    charges.type,
    charges.paid,
    charges.amount as charge_amount,
    case when refunds.charge is not null and charges.type ='refund' then refunds.amount else null
        end as refund_amount,
    charges.customer,
    charges.user_id
from (
    select
        spp.balance_transaction,
        s.*,
        u.id as user_id,
        sa.service_area
    from `reby-cloud.reby_stripe.py_stripe_processed` s
    left join `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
        on s.customer = u.payment_processor[safe_offset(0)].token
    left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` sa
        on u.id = sa.user_id
    left join pre_process spp
            on spp.id = s.charge_id
    --order by 1,2,3,4,5,6,7,8,9,10,11,12,13
    where date(s.created_at) between '2021-02-01' and '2021-09-03'
    --order by balance_transaction desc
    ) as charges
    left join `reby-cloud.reby_stripe.stripe_refunds_api` refunds on charges.charge_id = refunds.charge
    --where refunds.status = 'succeeded'
)
)
;
-- Attach refund metadata (creation time, balance transaction, user and
-- charge amount) to every downloaded Stripe transaction; transactions with
-- no matching refund keep NULLs in the joined columns.
SELECT
  st.*,
  str.created_at,
  str.balance_transaction,
  str.user_id,
  str.charge_amount
FROM `reby-cloud.temp_eu.delete_stripe_downloaded_transactions` AS st
LEFT JOIN `reby-cloud.temp_eu.delete_stripe_refunds` AS str
  ON st.balance_transaction_id = str.balance_transaction
-- SQL Server: list stored procedures with the time they were last executed,
-- most recent first. Fill the empty string in the WHERE clause with the
-- target database name.
-- NOTE(review): sys.dm_exec_procedure_stats only covers procedures whose
-- plans are still in cache, so rarely-used procedures may not appear.
SELECT o.name, 
       ps.last_execution_time 
FROM   sys.dm_exec_procedure_stats ps 
INNER JOIN 
       sys.objects o 
       ON ps.object_id = o.object_id 
WHERE  DB_NAME(ps.database_id) = '' 
ORDER  BY 
       ps.last_execution_time DESC  
-- Postgres SIMILAR TO pattern match: '%' is the LIKE wildcard and (a|b|c)
-- is alternation, so this returns companies whose name contains 'apple',
-- 'google' or 'microsoft' (case-sensitive).
SELECT * FROM companies 
WHERE company_name SIMILAR TO '%(apple|google|microsoft)%';
SQL> with t1(col) as(
  2    select 1    from dual union all
  3    select 2    from dual union all
  4    select null from dual union all
  5    select 3    from dual
  6  )
  7  select *
  8    from t1
  9  order by col asc nulls last
  10  ;
-- For each agent, return the order(s) whose amount equals that agent's
-- minimum order amount, using a row-value IN against a per-agent aggregate.
SELECT ord_num, agent_code, ord_date, ord_amount
FROM orders
WHERE (agent_code, ord_amount) IN (
    SELECT agent_code, MIN(ord_amount)
    FROM orders
    GROUP BY agent_code
);
-- Postgres: change the "name" column's type to varchar(50).
-- NOTE(review): shrinking below the longest existing value would fail;
-- verify current max length before running.
ALTER TABLE stars
ALTER COLUMN name TYPE varchar(50);
-- T-SQL: compute the first and last day of the quarter containing @AnyDate.
DECLARE @AnyDate DATETIME
SET @AnyDate = GETDATE()

-- DATEDIFF(q, 0, @AnyDate) counts whole quarters since 1900-01-01 ("date 0");
-- adding them back onto date 0 snaps to the quarter's first day. Adding one
-- extra quarter and subtracting a day gives the quarter's last day.
SELECT @AnyDate AS 'Input Date',
  DATEADD(q, DATEDIFF(q, 0, @AnyDate), 0) 
                        AS 'Quarter Start Date',       
  DATEADD(d, -1, DATEADD(q, DATEDIFF(q, 0, @AnyDate) + 1, 0)) 
                        AS 'Quarter End Date'
DECLARE @Year DATE = '2013-01-01'
DECLARE @Quarter INT = 4;

SELECT  DATEADD(QUARTER, @Quarter - 1, @Year) ,
        DATEADD(DAY, -1, DATEADD(QUARTER,  @Quarter, @Year))
SELECT title, extract("year" from current_date) - "year" AS age
  FROM films
  ORDER BY age ASC;
postgres-# ( terminal commands)
- list all databases : \l
- list of all tables : \d
- list only the tables : \dt
- list all fields in a table : \d person (name of the table)
- import from external file : \i c:/postgrescourse/person.sql

- connect to database psql and import .sql file
> psql -h localhost -U postgres -d forgeprod -f C:\forgeprod.sql
https://kb.objectrocket.com/postgresql/how-to-run-an-sql-file-in-postgres-846

- access to database to query 
> psql -U postgres -d forgeprod

- datatypes info:
bigserial : increment int in sequence
date : year,month,day



- create database : create database test;
- drop database : drop database test;
- drop table : drop table person;
- connect to a database ( two forms ) :  
\c test (or) -h localhost -p 5432 -U postgres test
- create table syntax : create table table_name ( column name + data type + constraints if any ) 

- create table : 
create table person ( 
	id int,
	first_name varchar(50),
  	last_name varchar(50),
    gender varchar(6),
    date_of_birth date
    );
- create table w/constraints ( need to satisfied rules): 
create table person (
 	id bigserial not null primary key,
 	first_name varchar(50) not null,
 	last_name varchar(50) not null,
    gender varchar(6) not null, 
    date_of_birth date not null
	);

- insert a row : 
insert into person ( first_name, last_name, gender, date_of_birth) values ('Anne','Smith','Female',date '1988-01-09');

- Using OFFSET and LIMIT : select * from person OFFSET 5 LIMIT 5;
- Using OFFSET and FETCH : select * from person OFFSET 5 FETCH FIRST 5 ROW ONLY;
- Using BETWEEN : select * from person where date_of_birth BETWEEN DATE '2000-01-01' and '2015-01-01';
- Differences between LIKE and ILIKE : ILIKE is case insensitive. 

- Using Group By : select country_of_birth, count(*) from person group by country_of_birth; ( will count how many people are from that country )
- Using Having with Group By : select country_of_birth, count(*) from person group by country_of_birth having count(*) > 5; ( HAVING must come after GROUP BY; only groups with more than 5 people are shown )





 -- SQL Server: impersonate a user and list every table/view they can access,
 -- with the specific permission names.
 USE Your_Database;
 GO
 -- Switch the security context to the user being audited.
 EXECUTE AS USER = N'the_user_name';
 GO
 SELECT 
    s.name,
    o.name,
    p.[permission_name]
 FROM sys.objects AS o 
 INNER JOIN sys.schemas AS s
 ON o.[schema_id] = s.[schema_id]
 CROSS APPLY sys.fn_my_permissions(QUOTENAME(s.name) 
   + N'.' + QUOTENAME(o.name), N'OBJECT') AS p
   WHERE o.[type] IN (N'U', N'V') -- tables and views
   AND p.subentity_name = N''; -- ignore column permissions 
 GO
 -- Drop back to the original security context.
 REVERT;
-- Postgres: reset a serial column's sequence to the current MAX(id) so the
-- next INSERT gets a non-colliding value (useful after bulk loads).
-- Replace TABLE/COLUMN with the real table and serial column names.
SELECT setval(
        pg_get_serial_sequence('TABLE','COLUMN'), 
        max(id)) 
FROM TABLE;
<?php
// Fetch every row from $table_name via the WordPress $wpdb API and render
// each record as rows of an HTML table.
// NOTE(review): $table_name is interpolated directly into the SQL string;
// this is safe only if it is a trusted, hard-coded table name — never pass
// user input here.
$results = $wpdb->get_results( "SELECT * FROM $table_name"); // Query to fetch data from database table and storing in $results
if(!empty($results))                        // Checking if $results have some values or not
{    
    echo "<table width='100%' border='0'>"; // Adding <table> and <tbody> tag outside foreach loop so that it wont create again and again
    echo "<tbody>";      
    foreach($results as $row){   
    $userip = $row->user_ip;               //putting the user_ip field value in variable to use it later in update query
    echo "<tr>";                           // Adding rows of table inside foreach loop
    echo "<th>ID</th>" . "<td>" . $row->id . "</td>";
    echo "</tr>";
    // NOTE(review): the <td colspan='2'> separator cells below are emitted
    // outside any <tr>; browsers tolerate it but the markup is invalid —
    // consider wrapping each one in its own <tr>.
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>User IP</th>" . "<td>" . $row->user_ip . "</td>";   //fetching data from user_ip field
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Post ID</th>" . "<td>" . $row->post_id . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Time</th>" . "<td>" . $row->time . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    }
    echo "</tbody>";
    echo "</table>"; 

}
?>
SELECT column-names
  FROM table-name1
 WHERE value IN (SELECT column-name
                   FROM table-name2
                  WHERE condition)
SELECT Fname, Lname
FROM Employee
ORDER BY Salary
OFFSET 2 ROWS;
-- SQL Server: list user tables that have NO primary key, via an anti-join
-- against sys.indexes (pk.object_id IS NULL means no PK index matched).
select schema_name(tab.schema_id) as [schema_name], 
    tab.[name] as table_name
from sys.tables tab
    left outer join sys.indexes pk
        on tab.object_id = pk.object_id 
        and pk.is_primary_key = 1
where pk.object_id is null
order by schema_name(tab.schema_id),
    tab.[name]
DELETE FROM [dbo].[logs] WHERE date < DATEADD(DAY, -30, GETDATE())
DECLARE
  dates STRING;
SET
  dates = (
  SELECT
    CONCAT('("', STRING_AGG(DISTINCT REPLACE(LEFT(started_at,10),"-",""), '", "'), '")'),
  FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption` );

EXECUTE IMMEDIATE
  FORMAT("""
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in %s)""",dates)
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in (list_of_values))
CREATE OR REPLACE TABLE talentcards.users
AS
SELECT DISTINCT * FROM talentcards.users
CREATE OR REPLACE TABLE talentcards.users
AS
SELECT
## replace "sale" with variable name and "sales" with table name
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sale) FROM sales;
\d tablename # view table
\dt # show all tables
\q # exit psql console
\du # show all users
\l # show all databases
\conninfo # get connection info
psql -U username -d dbname
psql -h 127.0.0.1 -U postgres
sudo -u username createdb dbname
sudo -u postgres createuser --superuser username
sudo /etc/init.d/postgresql restart
DELETE FROM tbl_sku
WHERE sku <> 'hello';
INSERT INTO tbl_name (col1, col2, col3) VALUES ('03/03/2021', 13.76, 'MAS-465');
ALTER ROLE postgres
WITH PASSWORD 'newpassword';
ALTER TABLE tbl_sku
ALTER COLUMN col_name TYPE character(50);
ALTER TABLE tbl_sku
RENAME COLUMN old_name TO new_name;
-- Add a fixed-width text column to tbl_sku.
-- Fix: ALTER TABLE does not accept a table alias — the original "AS ts"
-- was a syntax error in Postgres.
ALTER TABLE tbl_sku
ADD COLUMN new_col character(15);
-- Join tbl1 to tbl2 on t.col2 = s.col1 and project one column from each.
-- Rewritten from an implicit comma join to an explicit INNER JOIN so the
-- join condition lives in ON and cannot be accidentally dropped from WHERE.
SELECT 
	t.col1,
	s.col3
FROM tbl1 AS t
INNER JOIN tbl2 AS s
	ON t.col2 = s.col1;
CREATE TEMPORARY TABLE __copy AS (SELECT * FROM tbl_sku LIMIT 0);

COPY __copy (col_1, col_2, col_3) FROM 'C:/Users/CB06/Downloads/upload5.csv' WITH (FORMAT csv, DELIMITER ',', FORCE_NULL (sku, title, supplier, ean), HEADER); # import from csv file

UPDATE tbl_sku SET col1 = __copy.col1, col2 = __copy.col2 FROM __copy WHERE tbl_sku.sku = __copy.sku # update rows
           UPDATE table1 
           SET status = (SELECT t2.status FROM table2 t2 WHERE t2.trans_id = id) , 
               name = (SELECT t2.name FROM table2 t2 WHERE t2.trans_id = id)
           WHERE id IN (SELECT trans_id FROM table2 t2 WHERE t2.trans_id= id)
SELECT CompanyName, 
       ProductCount = (SELECT COUNT(P.id)
                         FROM [Product] P
                        WHERE P.SupplierId = S.Id)
  FROM Supplier S
DATE_SUB(CURRENT_DATE('UTC-3:00'), INTERVAL 1 DAY)
CURRENT_DATE('UTC-3:00')
SELECT
     *
     FROM
     EXTERNAL_QUERY("reby-cloud.eu.reby_prod",
     '''
     select 
     concat('cu_',id_to_text(cu.id)) as id,
     id_to_time(cu.id) as created_at,
     concat('acc_',id_to_text(cu.balance_account_id)) as account_id,
     concat('c_',id_to_text(cu.company_id)) as company_id,
     concat('usr_',id_to_text(cu.user_id)) as user_id,
     --id_to_time(cu.user_id) as user_first_created_at,
     firebase_user_token,
     is_deleted,
     updated_at
     from company_user cu 
     --where id >= id_from_time(NOW() - INTERVAL '1 DAY')
     --where date(id_to_time(id)) > now() - interval '3 day'
     where id < id_from_time(date('2020-06-01'))
     ''' )
     ;
# Packages
 
import os
from google.cloud import bigquery
from google.oauth2 import service_account
 
 
# Parameters
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]='/Users/jmbenedetto/Library/Mobile Documents/com~apple~CloudDocs/#Formação/Dissertation/Research/02 Research v2/00 Base/key.json'
 
client = bigquery.Client(project=project_name)
dataset_ref = client.dataset(dataset_name)
load_config = bigquery.LoadJobConfig()
 
 
# Code
 
query = """
        DELETE 
        FROM dataset_name.table_name
        WHERE criteria;
"""
query_params = [
    bigquery.ArrayQueryParameter("file", "STRING", file_source)
]
 
job_config = bigquery.QueryJobConfig()
job_config.query_parameters = query_params
query_job = client.query(
    query,
    # Location must match that of the dataset(s) referenced in the query.
    location="EU",
    job_config=job_config,
)  # API request - starts the query
 
query_job.result(timeout=60.0)
assert query_job.state == "DONE"
You are able to search only 15 private snippets.  to search them all.
-- Users with several distinct cards: admin links for users with more than 5
-- distinct card fingerprints, excluding users that have a ban reason set.
SELECT concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
       count(distinct(card__fingerprint))
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null
GROUP BY 1
having count(distinct(card__fingerprint))>5
ORDER BY 2 DESC
LIMIT 100;


--tarjetas utilizadas por varias cuentas
SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
card__last_four,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null
GROUP BY 1,2
having count(distinct(ppg.user_id))>3
ORDER BY 3 DESC;

--usuarios que utilizan la tarjeta con código que introduzcáis:
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint='+7qy4w/5VsCCCtwf'   
ORDER BY 1 DESC
LIMIT 100


--usuarios que han utilizado tarjetas que se han utilizado en más de 5 cuentas
with data as (SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null and public.user.is_staff is false and public.user.is_deleted=false
GROUP BY 1
having count(distinct(ppg.user_id))>3
ORDER BY 2 DESC)
select
distinct(public.user.phone_number),
public.user.first_name,
public.user.last_name
--data.card__fingerprint
--distinct(ppg.user_id),
FROM data 
left join payment_processor_gateway ppg on ppg.card__fingerprint=data.card__fingerprint 
left join public.user on public.user.id=ppg.user_id
and public.user.is_staff is false and public.user.phone_number is not null
--order by data.card__fingerprint;

--usuarios con dispositivos en los que se han registrado más de 5 cuentas
with data as (SELECT hash_device,
       count(hash_device) as number_accounts
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT distinct(public.user.phone_number),
public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       --data.hash_device,
       service_area.name,
       user_last_location.last_location_at
FROM DATA
LEFT JOIN user_device ud ON ud.hash_device=data.hash_device
LEFT JOIN public.user ON public.user.id=ud.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
where service_area.name='Terrassa'
ORDER BY user_last_location.last_location_at desc
;

--Análisis usuario Alba
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint,
card__last_four
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where /* reason is null and*/ card__last_four in('1058','9219')  
ORDER BY 1 DESC
LIMIT 100

--usuarios que se han registrado en más de 5 dispositivos y balance negativo
with data as (SELECT id_to_text(user_id) as user_id,
       count(hash_device) as number_devices
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       data.user_id,
       data.number_devices,
       service_area.name,
       user_last_location.last_location_at,
       account.balance__amount
FROM DATA
LEFT JOIN public.user ON id_to_text(public.user.id)=data.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
left join account on public.user.balance_account_id=account.id
where service_area.name='Terrassa' and account.balance__amount<0
ORDER BY user_last_location.last_location_at desc
;
--usuarios con varias tarjetas
SELECT concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
       count(distinct(card__fingerprint))
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null
GROUP BY 1
having count(distinct(card__fingerprint))>5
ORDER BY 2 DESC
LIMIT 100;


--tarjetas utilizadas por varias cuentas
SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
card__last_four,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null
GROUP BY 1,2
having count(distinct(ppg.user_id))>3
ORDER BY 3 DESC;

--usuarios que utilizan la tarjeta con código que introduzcáis:
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint='+7qy4w/5VsCCCtwf'   
ORDER BY 1 DESC
LIMIT 100


--usuarios que han utilizado tarjetas que se han utilizado en más de 5 cuentas
with data as (SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null and public.user.is_staff is false and public.user.is_deleted=false
GROUP BY 1
having count(distinct(ppg.user_id))>3
ORDER BY 2 DESC)
select
distinct(public.user.phone_number),
public.user.first_name,
public.user.last_name
--data.card__fingerprint
--distinct(ppg.user_id),
FROM data 
left join payment_processor_gateway ppg on ppg.card__fingerprint=data.card__fingerprint 
left join public.user on public.user.id=ppg.user_id
and public.user.is_staff is false and public.user.phone_number is not null
--order by data.card__fingerprint;

--usuarios con dispositivos en los que se han registrado más de 5 cuentas
with data as (SELECT hash_device,
       count(hash_device) as number_accounts
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT distinct(public.user.phone_number),
public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       --data.hash_device,
       service_area.name,
       user_last_location.last_location_at
FROM DATA
LEFT JOIN user_device ud ON ud.hash_device=data.hash_device
LEFT JOIN public.user ON public.user.id=ud.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
where service_area.name='Terrassa'
ORDER BY user_last_location.last_location_at desc
;

--Análisis usuario Alba
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint,
card__last_four
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where /* reason is null and*/ card__last_four in('1058','9219')  
ORDER BY 1 DESC
LIMIT 100

--usuarios que se han registrado en más de 5 dispositivos y balance negativo
with data as (SELECT id_to_text(user_id) as user_id,
       count(hash_device) as number_devices
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       data.user_id,
       data.number_devices,
       service_area.name,
       user_last_location.last_location_at,
       account.balance__amount
FROM DATA
LEFT JOIN public.user ON id_to_text(public.user.id)=data.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
left join account on public.user.balance_account_id=account.id
where service_area.name='Terrassa' and account.balance__amount<0
ORDER BY user_last_location.last_location_at desc
;
postgresql://[user[:password]@][netloc][:port][/dbname][?param1=value1&...]
-- New user sign-ups per day (Sevilla): users created and how many of them
-- have taken at least one ride.
with base_data as (
select 
    date(u.created_at) as date,
    date_trunc(date(u.created_at),MONTH) as month,
    mv.service_area,
    -- 1 per user who already has a first ride recorded
    sum(if(mv.first_ride_date is not null,1,0)) as has_ride,
    count(*) users_created
from `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv
    on u.id = mv.user_id
-- NOTE(review): filtering the left-joined table in WHERE makes this an
-- effective INNER JOIN — users with no mv row are excluded
where mv.service_area = 'Sevilla'
group by 1,2,3
--order by 1 desc
)
select 
    *
    --lag(users_created) over (partition by month order by month asc) as prev_month_users
from base_data
order by date desc
;
-- Per-ride metrics for Sevilla: duration, distance, cost, local hour/weekday,
-- and starting/ending district via spatial joins against district polygons.
select
    date(created_at) as date,
    r.id as ride_id,
    r.minutes,
    -- hour of day in local (Madrid) time
    extract(hour from datetime(created_at,"Europe/Madrid")) as hour,
    if(plan_usage_id is null,0,1) as plan_usage,
    FORMAT_DATE('%A', date(created_at)) AS weekday_name,
    -- FIX: divide by 1000, not 100 — distance appears to be stored in metres
    -- (elsewhere in this file: dist_km = distance/1000)
    r.distance/1000 as distance_km,
    r.ride_cost_time as ride_cost,
    geo.Distri_11D as distrito_inicial,
    geo2.Distri_11D as distrito_final
from analytics_reby_v1_eu.pg_rides r
-- INNER joins: rides starting or ending outside every district polygon are dropped
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo
    on st_within(st_geogpoint(r.longitude_initial,r.latitude_initial),geo.geometry)
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo2
    on st_within(st_geogpoint(r.longitude_final,r.latitude_final),geo2.geometry)
where r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
order by 1 desc
;
--ride linestrings
-- Builds GeoJSON start/end points and a WKT->GeoJSON linestring for each ride
-- in the date window. Three steps: (1) rides_data: first/last GPS sample per
-- ride, (2) linestrings: WKT "Linestring(...)" text from the full GPS path,
-- (3) linestrings_geo: convert everything to GeoJSON.
with rides_data as (
select
	id,
  created_at,
  starting.latitude as lat_init,
  starting.longitude as long_init,
  ending.latitude as lat_fin,
  ending.longitude as long_fin,
  distance/1000 as dist_km,
  minutes,
	--CONCAT(starting.latitude,',',starting.longitude) AS marker_start,
	--CONCAT(ending.latitude,',',ending.longitude) AS marker_end
from (
select
  r.created_at,
  r.minutes,
  r.distance,
  r.id,
  --path,
  -- first and last GPS sample of the ride's path array
  path[offset(0)] as starting,
  path[offset(array_length(path)-1)] as ending,
  --array_length(path)
from `analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_vehicles` v
  on r.vehicle_id = v.id
-- rides with fewer than 4 GPS samples are not worth drawing
where array_length(path) > 3
and date(r.created_at) >= '2021-06-01'
and date(r.created_at) <= '2021-09-01'
and r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
--and v.vehicle_type = 'motorcycle'
--where id = 'r_3qm5ua4jymv1ta3tbmq1'
--group by 1
  )
),

-- one WKT linestring per ride: "Linestring(lon lat, lon lat, ...)"
linestrings as (
select 
  ri.id,
  --st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  concat("Linestring(",string_agg(concat(p.longitude," ",p.latitude), ","),")") as path
from `analytics_reby_v1_eu.pg_rides_json` ri, unnest(path) as p
where id in (select id from rides_data)
and date(created_at) >= '2021-06-01'
and date(created_at) <= '2021-09-01'
group by 1
),

-- SAFE.st_geogfromtext returns NULL (instead of erroring) on malformed WKT
linestrings_geo as (
select 
  rd.id,
  st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  st_asgeojson(st_geogpoint(rd.long_fin,rd.lat_fin)) as ending_point,
  st_asgeojson(SAFE.st_geogfromtext(path)) as paths
from linestrings ls
join rides_data rd on ls.id = rd.id
)

select * from linestrings_geo
-- Rolling 7-day distinct active clients: for every calendar day in the
-- window, count distinct clientIds seen in the preceding 7 days (inclusive).
with dte as (
    select dte
    from unnest(generate_date_array(date('2021-03-11'), date('2021-07-31'))) as dte
),
ids as (
    select
        clientId,
        parse_date("%Y%m%d", date) as dte
    from `*table*`
)
select
    dte.dte,
    count(distinct ids.clientId)
from dte
inner join ids
    -- event day falls in the 7-day window ending on the calendar day
    on ids.dte between date_sub(dte.dte, interval 7 day) and dte.dte
group by dte.dte
order by dte.dte asc
-- Scratch reconciliation table: per-ride revenue aggregates from pg_rides
-- joined (full outer) against per-ride sums from py_ridestatus_combined, with
-- a boolean flag marking rides where the two non-promoted totals agree.
create or replace table `temp_eu.delete_gerardm_rcnp_` as (
-- bool column: does side b's non-promoted sum equal side a's RCnp?
select npr_rscb = RCnp as bool,* from (
SELECT 
        --vd.*,
        r.created_at,
        r.user_id,
        r.id as ride_id,
        -- amounts are stored in cents; /100 converts to currency units
        SUM(rc.theoretical_cost)/100 AS RC, 
        SUM(rc.non_promoted)/100 AS RCnp, 
        SUM(rc.theoretical_cost-rc.non_promoted)/100 AS RCp,
        --ops_managers_dashboard.TRUNC_DATE_ISO('DAY', date(datetime(r.created_at,"Europe/Madrid"))) as week_at 
    FROM`reby-cloud.analytics_reby_v1_eu.pg_rides` AS r
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.vehicle_daily` AS vd
        ON r.vehicle_id = vd.vehicle_id AND date(datetime(r.created_at,"Europe/Madrid")) = vd.date
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` AS rc
        ON r.id = rc.ride_id
    -- NOTE(review): v, sa and tpl are only used by the commented-out filters
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_vehicles` AS v
        ON r.vehicle_id = v.id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_service_area` AS sa
        ON r.service_area_id = sa.service_area_id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_tpl` AS tpl
        ON vd.tpl_provider_id = tpl.id
   -- WHERE 
        --date(datetime(r.created_at,"Europe/Madrid")) >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND ST_DISTANCE (ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GEOGPOINT(tpl.longitude, tpl.latitude)) > 200
        --AND ST_WITHIN(ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GeogFromText(sa.geometry))
        --AND v.printed_code IS NOT NULL AND vd.date >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND rc.ride_id IS NOT NULL
        --where r.id = 'r_3rytmr4br7g5qlu4jbqh'
        group by 1,2,3
    --GROUP BY week_at 
    --ORDER BY week_at DESC
) as a--where ride_id = 'r_3t26l79xmu2xa7nyu3n1'
-- FULL JOIN keeps rides present on only one of the two sides
full join (select ride_id ride_id_rscb,service_area, sum(non_promoted)/100 as npr_rscb
            from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` group by 1,2) b
    on a.ride_id = b.ride_id_rscb
)

/*
create or replace table `reby-cloud.analytics_reby_v1_eu.vehicle_daily` 
PARTITION BY date
CLUSTER BY service_area_id,company_id
AS
select * except(r_n) from (
select
    *,
    row_number() over (partition by date,vehicle_id order by created_at asc) as r_n
from `reby-cloud.analytics_reby_v1_eu.vehicle_daily`
) where r_n = 1
*/
-- Data Studio date-range filter fragment: @DS_START_DATE / @DS_END_DATE are
-- injected by the BI tool as YYYYMMDD strings.
WHERE DATE(cat_tbl.date)
    BETWEEN PARSE_DATE('%Y%m%d', @DS_START_DATE) AND
    PARSE_DATE('%Y%m%d', @DS_END_DATE)
-- Approx. 1% random sample, capped at 1000 rows (engines with random()).
select * from table where random() < 0.01 limit 1000;
-- Jupyter %sql magic: list user tables in a SQLite database.
%sql SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';
-- Backfill the missing 2021-07-17 pricing snapshot by cloning the
-- 2021-07-18 rows; only the date column is overridden.
INSERT `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
    (date, name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount)
SELECT date('2021-07-17'), name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount
FROM `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
where date = '2021-07-18'
--'usr_3t7nh9pkvax1j6412nv1'
-- Audit: unexpired promotion usages with remaining value where the promo code
-- does not belong to the user's service area (per-area whitelists below).
select * from (
-- NOTE(review): this CTE is defined but never referenced (its join below is
-- commented out); it federates live Postgres data via EXTERNAL_QUERY
with promotions as (
SELECT * FROM EXTERNAL_QUERY("reby-cloud.eu.reby_prod", 
'''
SELECT
    id_to_time(id) as created_at,
    concat('acc_',id_to_text(account_id)) as account_id,
    balance__amount as balance_amount,
    balance__currency as balance_currency,
    concat('c_',id_to_text(company_id)) as company_id,
    free_unlocks,
    concat('cpua_',id_to_text(id)) as id,
    concat('tx_',id_to_text(last_promotion_transaction_id)) as transaction_id,
    concat('usr_',id_to_text(user_id)) as user_id
FROM company_user_promotion_account
--where user_id = id_from_text('3t7nh9pkvax1j6412nv1')
;
'''
    )
)
select
    *
from (
    SELECT
    --promotions.balance_amount,
    sa.service_area,
    cp.code,
    cpu.*
    FROM
    `reby-cloud.analytics_reby_v1_eu.pg_company_promotion_usage` cpu
    left join `reby-cloud.analytics_reby_v1_eu.pg_company_promotion` cp on cpu.company_promotion_id = cp.id
    left join `reby-cloud.analytics_reby_v1_eu.reby_users_servicearea` sa on sa.user_id = cpu.user_id
    --left join promotions on cpu.user_id =promotions.user_id
    --where sa.service_area in ('Zaragoza','Gijón')
    --and date(expiration_date) > current_date and date(cpu.created_at) > current_date -15
    --where cpu.user_id = 'usr_3t7nh9pkvax1j6412nv1'
    --order by cpu.created_at desc
)
-- NOTE(review): 'SEVILLAES' appears twice in several IN lists below —
-- harmless but worth cleaning up
where
    (service_area = 'Zaragoza' AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Sevilla' AND code in ('REBYBCN','REBYJON','REBYGONA','TOTAREBY','HOLAMOTOS','REBYGO')) OR
    (service_area = 'Gijón' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Terrassa' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES')) OR
    (service_area in ('Napoles','Lecce','Minturno','Bergamo','Grosseto','Caserta') AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','REBYGO','HOLAMOTOS'))
)
--where user_id = 'usr_3t7nh9pkvax1j6412nv1'
where value_remaining_amount > 0
and is_expired is false
-- Facebook ads: total impressions and spend per ad, filtered to one ad id.
with ads_data as (
SELECT
    ad_id,
    adset_id,
    campaign_id,
    status,
    -- NOTE(review): this sums IMPRESSIONS but is aliased "clicks" — looks
    -- like a misnamed alias or the wrong source column; confirm intent
    sum(impressions) clicks,
    sum(spend) as investment
FROM `reby-cloud.facebook_ads.insights` ai
left join `reby-cloud.facebook_ads.ads` ads on ai.ad_id = ads.id

--WHERE DATE(_PARTITIONTIME) = "2021-07-06"
group by 1,2,3,4
)
select * from ads_data where ad_id = '23843560033720454' order by clicks desc
--with application_opened as (
-- Install-attribution events for one anonymous_id, with a per-anonymous_id
-- sequence number (r_n). The "where r_n = 1" dedup filter is currently
-- commented out, so ALL events are returned in attribution order.
select
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id
from(
SELECT
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id,
    -- 1 = earliest event for this anonymous_id
    row_number() over (partition by anonymous_id order by original_timestamp asc) as r_n
FROM
  `reby-safir.react_native.install_attributed`
WHERE
  context_app_name = 'Reby'
  --and user_id = 'usr_3t8h6t8c4ppubjp9vjvh'
  and anonymous_id = '1eafe4e3-f4d8-416d-80c1-db0a95191b55'
  /*
  and _PARTITIONTIME BETWEEN TIMESTAMP_TRUNC(TIMESTAMP_MICROS(UNIX_MICROS(CURRENT_TIMESTAMP()) - 60 * 60 * 60 * 24 * 1000000), DAY, 'UTC')
					AND TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY, 'UTC')
    
			
    */
    )
--where r_n = 1
order by r_n asc
--)
;
-- All "identify" events recorded for one user, oldest first.
SELECT *
FROM `reby-safir.react_native.identifies`
WHERE user_id = 'usr_3t8cxanx6kehxg38n5m1'
ORDER BY original_timestamp ASC
;
-- All "finish ride" events recorded for one user, oldest first.
SELECT *
FROM `reby-safir.react_native.finish_ride`
WHERE user_id = 'usr_3t8ethqfzs32sp1mcu5h'
ORDER BY original_timestamp ASC
;

-- Users created two days ago, with their total ride count and service area,
-- most active first.
SELECT
    id,
    -- first element of the stats array
    -- NOTE(review): offset(0) raises an error if stats is empty;
    -- stats[SAFE_OFFSET(0)] would yield NULL instead — confirm which is wanted
    stats[offset(0)].total_rides,
    mv.service_area
FROM
  `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv on u.id = mv.user_id
where date(created_at) = current_date -2
order by 2 desc
;
-- Kernel payments Apr-Jun 2021: row count and total amount per
-- (inflow bucket, outflow bucket) combination.
select
    money_in,
    money_out,
    count(*),
    sum(amount)
from (
select
 amount,
 date(date_ts) as date,
 -- inflow buckets (positive amounts)
 case
    when amount between 0 and 20 then "0-20"
    when amount between 21 and 500 then "20-500"
    -- FIX: this branch previously repeated "between 21 and 500" and could
    -- never fire; 501-1000 was clearly intended
    when amount between 501 and 1000 then "500-1000"
    when amount >1000 then "+1000"
    else "other"
end as money_in,
-- outflow buckets (negative amounts)
case
    when amount between -20 and 0 then "0-20"
    -- FIX: was "between -500 and -1000", which never matches because
    -- BETWEEN requires low <= high; -500..-21 was intended
    when amount between -500 and -21 then "20-500"
    -- FIX: upper bound tightened to -501 so -500 is not claimed twice
    when amount between -1000 and -501 then "500-1000"
    when amount <-1000 then "+1000"
    else "other"
end as money_out,

from `reby-cloud.reby_fin_eu.payments_all_platforms`
where
  date(date_ts) BETWEEN '2021-04-01' and '2021-06-30'
  and payment_provider = 'kernel'
--order by date_ts desc
)
group by 1,2
-- Daily share of Barcelona rides (last 50 days) taken through the Reby app
-- on vehicles owned by another company.
with daily as (
    select
        date(created_at) as date,
        -- rides promoted by Reby but on a non-Reby vehicle
        countif(app_promoting_co = 'Reby' and owner_co != 'Reby') as hey,
        count(*) as all_
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
    where type like 'ride%'
        and date(created_at) > current_date - 50
        and service_area = 'Barcelona'
    group by date
)
select hey / all_ from daily order by 1 desc
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Search column</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
			</Declarations>
			<!-- Finds every table column whose name matches the pattern, with
			     its schema, table and type name. -->
			<Code Language="SQL">
<![CDATA[SELECT s.name [Schema], tb.name [Table], c.name [Column], tp.name [Type]
FROM sys.schemas s
JOIN sys.tables tb ON tb.schema_id = s.schema_id
JOIN sys.columns c ON c.object_id = tb.object_id
-- FIX: join sys.types on user_type_id; joining on system_type_id duplicates
-- rows for alias/user-defined types (every such column also matched sysname,
-- which is why the filter below was needed)
JOIN sys.types tp ON tp.user_type_id = c.user_type_id
WHERE c.name LIKE N'$ColumnName$'
AND NOT tp.name = N'sysname'
ORDER BY [Schema], [Table], [Column];]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Check nr of fractional digits</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>Exponent</ID>
					<ToolTip>Exponentiation</ToolTip>
					<Default>2</Default>
				</Literal>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
				<Literal>
					<ID>TableName</ID>
					<ToolTip>Name of the table</ToolTip>
					<Default>TableName</Default>
				</Literal>
			</Declarations>
			<!-- Returns values with more than $Exponent$ fractional digits:
			     ROUND(x, 0, 1) truncates, so truncated != original exactly
			     when digits survive beyond the scaled integer part. -->
			<Code Language="SQL">
<![CDATA[DECLARE @power DECIMAL = POWER(10, $Exponent$);
SELECT Value
FROM (SELECT $ColumnName$ AS Value FROM dbo.$TableName$) AS Focus
WHERE ROUND(Value * @power, 0, 1) <> (Value * @power);]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
-- List all CHECK constraints (contype = 'c') with the schema/table/column
-- they apply to and their full definition text.
select pgc.conname as constraint_name,
       ccu.table_schema as table_schema,
       ccu.table_name,
       ccu.column_name,
       -- FIX: "pgc.* as definition" is invalid — a ".*" expansion cannot be
       -- aliased; pg_get_constraintdef() returns the constraint's DDL text
       pg_get_constraintdef(pgc.oid) as definition
from pg_constraint pgc
join pg_namespace nsp on nsp.oid = pgc.connamespace
join pg_class  cls on pgc.conrelid = cls.oid
-- LEFT JOIN: constraints without column usage rows are still listed
left join information_schema.constraint_column_usage ccu
          on pgc.conname = ccu.constraint_name
          and nsp.nspname = ccu.constraint_schema
where contype ='c'
order by pgc.conname;
-- Minimal demo table: auto-incrementing surrogate key plus name and email.
CREATE TABLE test (
  id    INT          NOT NULL AUTO_INCREMENT PRIMARY KEY,
  name  VARCHAR(50)  NOT NULL,
  email VARCHAR(100) NOT NULL
);
# Query the WRDS (Wharton Research Data Services) Dow Jones daily series.
import wrds
# Connect as user 'joe'; credentials are resolved by the wrds library
# (NOTE(review): typically a password prompt or .pgpass — see wrds docs).
db = wrds.Connection(wrds_username='joe')
# raw_sql runs the query against WRDS and returns the result set.
db.raw_sql('SELECT date,dji FROM djones.djdaily')
/* Gets reps: values of fieldA that occur more than once */
SELECT fieldA, COUNT(*)
FROM tableA
GROUP BY fieldA
HAVING COUNT(*) > 1;

/* Use reps to filter results: full rows for the duplicated values.
   FIX: dropped the unused COUNT(*) column from the derived table — its
   string-literal alias ('count') is a deprecated T-SQL form — and
   terminated both statements. */
SELECT a.*
FROM tableA a
JOIN (
	SELECT fieldA
	FROM tableA
	GROUP BY fieldA
	HAVING COUNT(*) > 1
) b
ON a.fieldA = b.fieldA;
-- Demo table with an adults-only age rule.
CREATE TABLE Persons (
    ID int NOT NULL,
    LastName varchar(255) NOT NULL,
    FirstName varchar(255),
    Age int,
    -- FIX: name the constraint explicitly so violation errors and future
    -- migrations are greppable (unnamed CHECKs get auto-generated names)
    CONSTRAINT persons_age_check CHECK (Age>=18)
);
-- Student list with a derived FullName column.
-- NOTE(review): '+' concatenation makes FullName NULL whenever FirstName or
-- LastName is NULL — use CONCAT() if that is not intended; confirm nullability.
select studentID, FirstName, LastName, FirstName + ' ' + LastName as FullName
from student;
-- Users linked to a school who logged in during 2021, with their school rows.
-- (school_id IS NOT NULL turns the LEFT JOIN into an effective INNER JOIN.)
select users.last_login,us.*,users.* from users left join user_school us on us.user_id = users.id where school_id is not null
-- FIX: half-open range — BETWEEN ... '2021-12-31' excluded logins after
-- midnight on Dec 31 when last_login is a timestamp
and last_login >= '2021-01-01'::date and last_login < '2022-01-01'::date
-- Anonymise user PII: overwrite name/email with synthetic values derived
-- from id. Rows whose name ends in 'Testbruker' are left untouched.
-- NOTE(review): destructive bulk update — run inside a transaction and
-- verify the WHERE clause against a SELECT count first.
UPDATE public.users
SET name=concat('User',id), email=concat('user',id,'@email.com')
WHERE name not like '%Testbruker';
/*
This script is given "As Is" with no warranties and plenty of caveats. Use at your own risk!
For more on data profiling, see Chapter 10 in "SQL Server 2012 Data Integration Recipes", Apress, 2012
*/
-----------------------------------------------------------------------
-- User-defined variables
-----------------------------------------------------------------------
USE CarSales -- Your database here
GO
DECLARE @TABLE_SCHEMA NVARCHAR(128) = 'dbo'  -- Your schema here
DECLARE @TABLE_NAME NVARCHAR(128) = 'client' -- Your table here
DECLARE @ColumnListIN NVARCHAR(4000) = ''    -- Enter a comma-separated list of specific columns
                                                     -- to profile, or leave blank for all
DECLARE @TextCol BIT = 1  -- Analyse all text (char/varchar/nvarchar) data type columns
DECLARE @NumCol BIT = 1   -- Analyse all numeric data type columns
DECLARE @DateCol BIT = 1  -- Analyse all date data type data type columns
DECLARE @LobCol BIT = 1   -- Analyse all VAR(char/nchar/binary) MAX data type columns (potentially time-consuming)
DECLARE @AdvancedAnalysis BIT = 1 -- Perform advanced analysis (threshold counts/domain analysis) 
                                  --(potentially time-consuming)
DECLARE @DistinctValuesMinimum INT = 200 -- Minimum number of distinct values to suggest a reference 
                                         -- table and/or perform domain analysis
DECLARE @BoundaryPercent NUMERIC(3,2) = 0.57 -- Percent of records at upper/lower threshold to suggest
                                             -- a possible anomaly
DECLARE @NullBoundaryPercent NUMERIC(5,2) = 90.00 -- Percent of NULLs to suggest a possible anomaly
DECLARE @DataTypePercentage INT = 2 -- Percentage variance allowed when suggesting another data type 
                                    -- for a column
-----------------------------------------------------------------------
-- Process variables
-----------------------------------------------------------------------
DECLARE @DATA_TYPE VARCHAR(128) = ''
DECLARE @FULLSQL VARCHAR(MAX) = ''
DECLARE @SQLMETADATA VARCHAR(MAX) = ''
DECLARE @NUMSQL VARCHAR(MAX) = ''
DECLARE @DATESQL VARCHAR(MAX) = ''
DECLARE @LOBSQL VARCHAR(MAX) = ''
DECLARE @COLUMN_NAME VARCHAR(128)
DECLARE @CHARACTER_MAXIMUM_LENGTH INT
DECLARE @ROWCOUNT BIGINT = 0
DECLARE @ColumnList VARCHAR(4000) = ' '
DECLARE @TableCheck TINYINT
DECLARE @ColumnCheck SMALLINT
DECLARE @DataTypeVariance INT
-----------------------------------------------------------------------
-- Start the process:
BEGIN
TRY
-- Test that the schema and table exist
SELECT
 @TableCheck = COUNT (*) 
   FROM INFORMATION_SCHEMA.TABLES 
   WHERE TABLE_SCHEMA = @TABLE_SCHEMA 
   AND TABLE_NAME = @TABLE_NAME
IF @TableCheck <> 1
 BEGIN
  RAISERROR ('The table does not exist',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Parse list of columns to process / get list of columns according to types required
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ColumnList') IS NOT NULL
 DROP TABLE tempdb..#ColumnList;
CREATE TABLE #ColumnList (COLUMN_NAME VARCHAR(128), DATA_TYPE VARCHAR(128), CHARACTER_MAXIMUM_LENGTH INT) -- Used to hold list of columns to process
IF @ColumnListIN <> '' -- See if there is a list of columns to process
BEGIN
 -- Process list
 SET @ColumnList = @ColumnListIN + ','
 DECLARE @CharPosition int
 WHILE CHARINDEX(',', @ColumnList) > 0
  BEGIN
   SET @CharPosition = CHARINDEX(',', @ColumnList)
   INSERT INTO #ColumnList (COLUMN_NAME) VALUES (LTRIM(RTRIM(LEFT(@ColumnList, @CharPosition - 1))))
   SET @ColumnList = STUFF(@ColumnList, 1, @CharPosition, '')
  END -- While loop
-- update with datatype and length
  UPDATE CL
   SET CL.CHARACTER_MAXIMUM_LENGTH = ISNULL(ISC.CHARACTER_MAXIMUM_LENGTH,0)
      ,CL.DATA_TYPE = ISC.DATA_TYPE
   FROM #ColumnList CL
   INNER JOIN INFORMATION_SCHEMA.COLUMNS ISC
     ON CL.COLUMN_NAME = ISC.COLUMN_NAME
  WHERE ISC.TABLE_NAME = @TABLE_NAME
  AND ISC.TABLE_SCHEMA = @TABLE_SCHEMA
 END
-- If test for list of column names
ELSE
 BEGIN
 -- Use all column names, to avoid filtering
  IF @TextCol = 1
   BEGIN
    INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
     SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
     WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar', 'binary')
     AND TABLE_NAME = @TABLE_NAME
     AND TABLE_SCHEMA = @TABLE_SCHEMA
     AND CHARACTER_MAXIMUM_LENGTH > 0
   END
 IF @NumCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
 IF @DateCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('Date', 'DateTime', 'SmallDateTime', #39;DateTime2', 'time')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
IF @LOBCol = 1
 BEGIN
  INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'varbinary', 'xml')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
   AND CHARACTER_MAXIMUM_LENGTH = -1
 END
END
-- Else test to get all column names
-----------------------------------------------------------------------
-- Test that there are columns to analyse
SELECT @ColumnCheck = COUNT (*) FROM #ColumnList WHERE DATA_TYPE IS NOT NULL
IF @ColumnCheck = 0
 BEGIN
  RAISERROR('The columns do not exist in the selected database or no columns are selected',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Create Temp table used to hold profiling data
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ProfileData') IS NOT NULL
 DROP TABLE tempdb..#ProfileData;
 CREATE TABLE #ProfileData
 (
  TABLE_SCHEMA NVARCHAR(128),
  TABLE_NAME NVARCHAR(128),
  COLUMN_NAME NVARCHAR(128),
  ColumnDataLength INT,
  DataType VARCHAR(128),
  MinDataLength BIGINT,
  MaxDataLength BIGINT,
  AvgDataLength BIGINT,
  MinDate SQL_VARIANT,
  MaxDate SQL_VARIANT,
  NoDistinct BIGINT,
  NoNulls NUMERIC(32,4),
  NoZeroLength NUMERIC(32,4),
  PercentageNulls NUMERIC(9,4),
  PercentageZeroLength NUMERIC(9,4),
  NoDateWithHourminuteSecond BIGINT NULL,
  NoDateWithSecond BIGINT NULL,
  NoIsNumeric BIGINT NULL,
  NoIsDate BIGINT NULL,
  NoAtLimit BIGINT NULL,
  IsFK BIT NULL DEFAULT 0,
  DataTypeComments NVARCHAR(1500)
 );
-- Get row count
DECLARE @ROWCOUNTTEXT NVARCHAR(1000) = ''
DECLARE @ROWCOUNTPARAM NVARCHAR(50) = ''
SET @ROWCOUNTTEXT = 'SELECT @ROWCOUNTOUT = COUNT (*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)'
SET @ROWCOUNTPARAM = '@ROWCOUNTOUT INT OUTPUT'
EXECUTE sp_executesql @ROWCOUNTTEXT, @ROWCOUNTPARAM, @ROWCOUNTOUT = @ROWCOUNT OUTPUT
-----------------------------------------------------------------------
-- Test that there are records to analyse
IF @ROWCOUNT = 0
 BEGIN
  RAISERROR('There is no data in the table to analyse',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Define the dynamic SQL used for each column to analyse
-----------------------------------------------------------------------
SET @SQLMETADATA = 'INSERT INTO #ProfileData (ColumnDataLength,COLUMN_NAME,TABLE_SCHEMA,TABLE_NAME,DataType,MaxDataLength,MinDataLength,AvgDataLength,MaxDate,MinDate,NoDateWithHourminuteSecond,NoDateWithSecond,NoIsNumeric,NoIsDate,NoNulls,NoZeroLength,NoDistinct)'
DECLARE SQLMETADATA_CUR CURSOR LOCAL FAST_FORWARD FOR 
 SELECT COLUMN_NAME, CHARACTER_MAXIMUM_LENGTH, DATA_TYPE FROM #ColumnList
OPEN SQLMETADATA_CUR 
FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
WHILE @@FETCH_STATUS = 0 
 BEGIN 
  SET @SQLMETADATA = @SQLMETADATA +'
  SELECT TOP 100 PERCENT ' + CAST(@CHARACTER_MAXIMUM_LENGTH AS VARCHAR(20)) + ' ,''' + QUOTENAME(@COLUMN_NAME) + '''
  ,''' + QUOTENAME(@TABLE_SCHEMA) + '''
  ,''' + QUOTENAME(@TABLE_NAME) + '''
  ,''' + @DATA_TYPE + ''''
   + CASE
      WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') 
	   AND @CHARACTER_MAXIMUM_LENGTH >= 0 
	     THEN + '
  , MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISNUMERIC(' + QUOTENAME(@COLUMN_NAME) + ') = 1) 
  ,(SELECT COUNT (*) from ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISDATE(' + QUOTENAME(@COLUMN_NAME) + ') = 1) '
  WHEN @DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN + '
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,AVG(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NUMERIC(36,2)))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('DateTime', 'SmallDateTime') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,(SELECT COUNT (*) from ' 
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE (CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ' ) - FLOOR(CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ')) <> 0))
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE DATEPART(ss,' + QUOTENAME(@COLUMN_NAME) + ') <> 0 OR DATEPART(mcs,' + QUOTENAME(@COLUMN_NAME) + ') <> 0) 
  ,NULL 
  ,NULL '
    WHEN @DATA_TYPE IN ('DateTime2') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL
  ,NULL
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('Date') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX('
   + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN('
  + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NLL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('xml') THEN + '
  ,MAX(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,MIN(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,AVG(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  WHEN @DATA_TYPE IN ('varbinary','varchar','nvarchar') AND  @CHARACTER_MAXIMUM_LENGTH = -1 THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('binary') THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('time') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   ELSE + '
  ,NULL 
  ,NULL
  ,NULL
  ,NULL
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  END + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NULL)'
   + CASE
   WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') THEN + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) +  ' WHERE LEN(LTRIM(RTRIM(' + QUOTENAME(@COLUMN_NAME) + '))) = '''')'
   ELSE + '
  ,NULL'
   END + '
  ,(SELECT COUNT(DISTINCT ' + QUOTENAME(@COLUMN_NAME) + ') FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NOT NULL )
  FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)
  UNION'
 FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
END 
CLOSE SQLMETADATA_CUR 
DEALLOCATE SQLMETADATA_CUR 
SET @SQLMETADATA = LEFT(@SQLMETADATA, LEN(@SQLMETADATA) -5)
EXEC (@SQLMETADATA)
-----------------------------------------------------------------------
-- Final Calculations
-----------------------------------------------------------------------
-- Indicate Foreign Keys
; WITH FK_CTE (FKColumnName)
AS
(
 SELECT
   DISTINCT CU.COLUMN_NAME
  FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
   INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CU
     ON TC.CONSTRAINT_NAME = CU.CONSTRAINT_NAME 
     AND TC.TABLE_SCHEMA = CU.TABLE_SCHEMA 
     AND TC.TABLE_NAME = CU.TABLE_NAME
     AND TC.TABLE_SCHEMA = @TABLE_SCHEMA
     AND TC.TABLE_NAME = @TABLE_NAME
     AND CONSTRAINT_TYPE = 'FOREIGN KEY'
)
UPDATE P
 SET P.IsFK = 1
 FROM #ProfileData P
  INNER JOIN FK_CTE CTE
   ON P.COLUMN_NAME = CTE.FKColumnName
-- Calculate percentages
UPDATE #ProfileData
 SET PercentageNulls = (NoNulls / @ROWCOUNT) * 100
    ,PercentageZeroLength = (NoZeroLength / @ROWCOUNT) * 100
-- Add any comments
-- Datatype suggestions
-- First get number of records where a variation could be an anomaly
SET @DataTypeVariance = ROUND((@ROWCOUNT * @DataTypePercentage) / 100, 0)
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the DATE types. '
 WHERE NoIsDate BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the NUMERIC types. '
 WHERE NoIsNumeric BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be INT type. '
 WHERE MinDataLength >= -2147483648
 AND MaxDataLength <= 2147483648
 AND DataType IN ('bigint')
 
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLINT type. '
 WHERE MinDataLength >= -32768
 AND MaxDataLength <= 32767
 AND DataType IN ('bigint','int')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be TINYINT type. '
 WHERE MinDataLength >= 0
 AND MaxDataLength <= 255
 AND DataType IN ('bigint','int','smallint')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLDATE type. '
 WHERE NoDateWithSecond = 0
 AND MinDate >= '19000101'
 AND MaxDate <= '20790606'
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATE type (SQL Server 2008 only). '
 WHERE NoDateWithHourminuteSecond = 0
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATETIME type. '
 WHERE MinDate >= '17530101'
 AND MaxDate <= '99991231'
 AND DataType IN ('datetime2')
-- Empty column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Seems empty - is it required? '
 WHERE (PercentageNulls = 100 OR PercentageZeroLength = 100)
 AND IsFK = 0
-- Null column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'There is a large percentage of NULLs - attention may be required. '
 WHERE PercentageNulls >= @NullBoundaryPercent
-- Distinct value suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for reference/lookup table (contains NULLs).'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND PercentageNulls <> 100
 AND NoNulls <> 0
-- FK suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for Foreign Key.'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND NoNulls = 0
 AND DataType NOT LIKE '%Date%'
 AND DataType <> 'Time'
-- Filestream suggestions
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength >= 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly not a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength < 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
-- Sparse Column Suggestions
IF OBJECT_ID('tempdb..#SparseThresholds') IS NOT NULL
  DROP TABLE tempdb..#SparseThresholds;
  CREATE TABLE #SparseThresholds (DataType VARCHAR(128), Threshold NUMERIC(9,4))
  INSERT INTO #SparseThresholds (DataType, Threshold)
   VALUES 
    ('tinyint',86),
    ('smallint',76),    
    ('int',64),    
    ('bigint',52),    
    ('real',64),    
    ('float',52),    
    ('money',64),    
    ('smallmoney',64),    
    ('smalldatetime',52),    
    ('datetime',52),    
    ('uniqueidentifier',43),    
    ('date',69),    
    ('datetime2',52),    
    ('decimal',42),    
    ('nuumeric',42),    
    ('char',60),    
    ('varchar',60),    
    ('nchar',60),    
    ('nvarchar',60),    
    ('binary',60),    
    ('varbinary',60),    
    ('xml',60)    
; WITH Sparse_CTE (COLUMN_NAME, SparseComment)
AS
(
SELECT
  P.COLUMN_NAME
 ,CASE
  WHEN P.PercentageNulls >= T.Threshold THEN 'Could benefit from sparse columns. '
  ELSE ''
  END AS SparseComment
FROM #ProfileData P
 INNER JOIN #SparseThresholds T
  ON P.DataType = T.DataType
)
UPDATE PT
  SET PT.DataTypeComments = 
      CASE WHEN PT.DataTypeComments IS NULL THEN CTE.SparseComment
           ELSE ISNULL(PT.DataTypeComments,'') + CTE.SparseComment + '. '
      END
 FROM #ProfileData PT
  INNER JOIN Sparse_CTE CTE
   ON PT.COLUMN_NAME = CTE.COLUMN_NAME
-----------------------------------------------------------------------
-- Optional advanced analysis
-----------------------------------------------------------------------
IF @AdvancedAnalysis = 1
 BEGIN
-----------------------------------------------------------------------
-- Data at data boundaries
-----------------------------------------------------------------------
  IF OBJECT_ID('tempdb..#LimitTest') IS NOT NULL
    DROP TABLE tempdb..#LimitTest;
    CREATE TABLE #LimitTest (COLUMN_NAME VARCHAR(128), NoAtLimit BIGINT);
    DECLARE @advancedtestSQL VARCHAR(MAX) = 'INSERT INTO #LimitTest (COLUMN_NAME, NoAtLimit)' + CHAR(13)
    SELECT @advancedtestSQL = @advancedtestSQL + 'SELECT '''+ COLUMN_NAME + ''', COUNT('+ COLUMN_NAME + ') FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + 
     CASE
       WHEN DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN ' WHERE '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
       ELSE ' WHERE LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
     END
    FROM #ProfileData 
    WHERE DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real','varchar', 'nvarchar', 'char', 'nchar', 'binary')
    SET @advancedtestSQL = LEFT(@advancedtestSQL,LEN(@advancedtestSQL) -6) 
    EXEC (@advancedtestSQL)
    UPDATE M
      SET M.NoAtLimit = T.NoAtLimit
         ,M.DataTypeComments = 
           CASE
             WHEN CAST(T.NoAtLimit AS NUMERIC(36,2)) / CAST(@ROWCOUNT AS NUMERIC(36,2)) >= @BoundaryPercent THEN ISNULL(M.DataTypeComments,'') + 'Large numbers of data elements at the max/minvalues. '
             ELSE M.DataTypeComments
           END
    FROM #ProfileData M
     INNER JOIN #LimitTest T
      ON M.COLUMN_NAME = T.COLUMN_NAME
   -----------------------------------------------------------------------
   -- Domain analysis
   -----------------------------------------------------------------------
   IF OBJECT_ID('tempdb..#DomainAnalysis') IS NOT NULL
     DROP TABLE tempdb..#DomainAnalysis;
   CREATE TABLE #DomainAnalysis
   (
    DomainName NVARCHAR(128)
   ,DomainElement NVARCHAR(4000)
   ,DomainCounter BIGINT
   ,DomainPercent NUMERIC(7,4)
   );
   DECLARE @DOMAINSQL VARCHAR(MAX) = 'INSERT INTO #DomainAnalysis (DomainName, DomainElement, DomainCounter) '
   DECLARE SQLDOMAIN_CUR CURSOR LOCAL FAST_FORWARD FOR 
     SELECT COLUMN_NAME, DataType 
	  FROM #ProfileData 
	   WHERE NoDistinct < @DistinctValuesMinimum
   OPEN SQLDOMAIN_CUR 
   FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   WHILE @@FETCH_STATUS = 0 
    BEGIN 
     SET @DOMAINSQL = @DOMAINSQL + 'SELECT ''' + @COLUMN_NAME + ''' AS DomainName, CAST( '+ @COLUMN_NAME + ' AS VARCHAR(4000)) AS DomainElement, COUNT(ISNULL(CAST(' + @COLUMN_NAME + ' AS NVARCHAR(MAX)),'''')) AS DomainCounter FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + ' GROUP BY ' + @COLUMN_NAME + ''
     + ' UNION '
     FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   END 
  CLOSE SQLDOMAIN_CUR 
  DEALLOCATE SQLDOMAIN_CUR 
  SET @DOMAINSQL = LEFT(@DOMAINSQL, LEN(@DOMAINSQL) -5) + ' ORDER BY DomainName ASC, DomainCounter DESC '
   EXEC (@DOMAINSQL)
   -- Now calculate percentages (this appraoch is faster than doing it when performing the domain analysis)
   ; WITH DomainCounter_CTE (DomainName, DomainCounterTotal)
   AS
  (
   SELECT DomainName, SUM(ISNULL(DomainCounter,0)) AS DomainCounterTotal
    FROM #DomainAnalysis 
    GROUP BY DomainName
  )
  UPDATE D
    SET D.DomainPercent = (CAST(D.DomainCounter AS NUMERIC(36,4)) / CAST(CTE.DomainCounterTotal AS NUMERIC(36,4))) * 100
   FROM #DomainAnalysis D
    INNER JOIN DomainCounter_CTE CTE
     ON D.DomainName = CTE.DomainName
   WHERE D.DomainCounter <> 0
 END
-- Advanced analysis
-----------------------------------------------------------------------
-- Output results from the profile and domain data tables
-----------------------------------------------------------------------
select
   *
 from #ProfileData
IF @AdvancedAnalysis = 1
 BEGIN
  select
    *
   from #DomainAnalysis
 END
END TRY
BEGIN CATCH
 SELECT
  ERROR_NUMBER() AS ErrorNumber
 ,ERROR_SEVERITY() AS ErrorSeverity
 ,ERROR_STATE() AS ErrorState
 ,ERROR_PROCEDURE() AS ErrorProcedure
 ,ERROR_LINE() AS ErrorLine
 ,ERROR_MESSAGE() AS ErrorMessage;
 
END CATCH
-- Single-row update: the primary-key predicate matches exactly one session.
UPDATE sessions
SET start_date = '2020-04-20 10:12:15.653',
    end_date   = '2020-04-22 15:40:30.123'
WHERE id = 1;

-- Multi-row update: clears end_date for every session in category 1.
UPDATE sessions
SET end_date = NULL
WHERE category = 1;
-- All screens (and their street/user context) showing clip 217130.
-- NOTE(review): selected columns like u.name and st.name are not in GROUP BY;
-- this relies on MySQL functional-dependency handling (or ONLY_FULL_GROUP_BY
-- being off) -- confirm the target engine accepts it.
-- NOTE(review): the GROUP BY appears to exist only to collapse duplicates from
-- the clip_screen fan-out; the templates join (t) is never selected -- verify
-- it is needed.
SELECT u.id, u.name as user_name, u.email as user_email, st.name as street_name, scr.id as scr_id, scr.name as scr_name, scr.orientation, scr.url_token, c.id AS clip_id, c.title AS clip_title FROM users AS u
JOIN city_user AS cu ON cu.user_id = u.id
JOIN streets AS st ON st.city_id = cu.city_id
JOIN screens AS scr ON scr.street_id = st.id
JOIN clip_screen as cs ON cs.screen_id = scr.id
JOIN clips AS c ON c.id = cs.clip_id
JOIN templates AS t ON t.id = c.template_id
WHERE c.id = 217130
GROUP BY u.id, scr.id, c.id
-- Per-user transaction counts from the raw combined table.
with tc as (
    select
        user_id,
        count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined`
    where date(created_at) < current_date
    group by user_id
),

-- Per-user transaction counts after reassignment.
trf as (
    select
        user_id,
        count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
    where date(created_at) < current_date
    group by user_id
)

-- Users whose raw count exceeds their reassigned count.
select *
from (
    select
        tc.user_id,
        tc.cnt as tc_cnt,
        trf.cnt as trf_cnt
    from tc
    left join trf
        on tc.user_id = trf.user_id
)
where tc_cnt > trf_cnt
-- Reset: drop the child table first so its foreign keys
-- do not block dropping Students / Classrooms.
DROP TABLE IF EXISTS StudentEnrollments;
DROP TABLE IF EXISTS Students;
DROP TABLE IF EXISTS Classrooms;

-- create and populate the students table
CREATE TABLE Students
(
	StudentId INTEGER PRIMARY KEY,
	FirstName VARCHAR(200) NOT NULL,
	LastName VARCHAR(200) NOT NULL,
	Nationality VARCHAR(100) NOT NULL,
	DateOfBirth DATETIME NULL
);

-- StudentId values are numeric literals; the original quoted them ('1', '2', ...)
-- and relied on implicit string-to-INTEGER coercion, which varies by engine.
INSERT INTO Students
	(StudentId, FirstName, LastName, Nationality, DateOfBirth)
VALUES
	(1, 'Mickey', 'Mouse', 'American', '1991-05-02'),
	(2, 'Donald', 'Duck', 'Japanese', '1992-11-12'),
	(3, 'Goofy', 'Goof', 'American', '1980-04-15'),
	(4, 'Daisy', 'Duck', 'French', '1985-02-16'),
	(5, 'Huey', 'Duck', 'French', '1986-05-19'),
	(6, 'Scrooge', 'McDuck', 'Japanese', '1983-11-11'),
	(7, 'Minnie', 'Mouse', 'Canadian', '1983-11-30'),
	(8, 'Louie', 'Duck', 'French', '1985-09-09');

-- create and populate the classroom table
CREATE TABLE Classrooms
(
	ClassroomId INTEGER PRIMARY KEY,
	ClassName VARCHAR(200) NOT NULL,
	-- Explicit precision/scale: a bare DECIMAL defaults to scale 0 on several
	-- engines (MySQL, SQL Server), which would silently truncate 0.10 to 0.
	Weight DECIMAL(4,2) NOT NULL
);

INSERT INTO Classrooms
	(ClassroomId, ClassName, Weight)
VALUES
	(1, 'Public Interaction', 0.10),
	(2, 'Pranks', 0.15),
	(3, 'Running', 0.15),
	(4, 'Acting', 0.30),
	(5, 'Making Jokes', 0.30);

-- create and populate the student enrollment table
-- Junction table: one row per (student, classroom) pair with the grade earned.
-- NOTE(review): Grade is a bare DECIMAL (scale 0 on several engines); fine for
-- these whole-number grades, but fractional grades would be truncated.
CREATE TABLE StudentEnrollments
(
	StudentEnrollmentId INTEGER PRIMARY KEY,
	StudentId INTEGER NOT NULL,
	ClassroomId INTEGER NOT NULL,
	Grade DECIMAL NOT NULL,
	FOREIGN KEY(StudentId) REFERENCES Students(StudentId),
	FOREIGN KEY(ClassroomId) REFERENCES Classrooms(ClassroomId)
);

-- Full cross of 8 students x 5 classrooms = 40 rows.
INSERT INTO StudentEnrollments
	(StudentEnrollmentId, StudentId, ClassroomId, Grade)
VALUES
	(1, 1, 1, 91),
	(2, 1, 2, 68),
	(3, 1, 3, 89),
	(4, 1, 4, 60),
	(5, 1, 5, 65),
	(6, 2, 1, 79),
	(7, 2, 2, 85),
	(8, 2, 3, 68),
	(9, 2, 4, 89),
	(10, 2, 5, 80),
	(11, 3, 1, 96),
	(12, 3, 2, 62),
	(13, 3, 3, 78),
	(14, 3, 4, 100),
	(15, 3, 5, 64),
	(16, 4, 1, 81),
	(17, 4, 2, 90),
	(18, 4, 3, 85),
	(19, 4, 4, 95),
	(20, 4, 5, 64),
	(21, 5, 1, 81),
	(22, 5, 2, 73),
	(23, 5, 3, 60),
	(24, 5, 4, 99),
	(25, 5, 5, 70),
	(26, 6, 1, 75),
	(27, 6, 2, 74),
	(28, 6, 3, 69),
	(29, 6, 4, 79),
	(30, 6, 5, 88),
	(31, 7, 1, 60),
	(32, 7, 2, 75),
	(33, 7, 3, 82),
	(34, 7, 4, 66),
	(35, 7, 5, 65),
	(36, 8, 1, 69),
	(37, 8, 2, 81),
	(38, 8, 3, 100),
	(39, 8, 4, 63),
	(40, 8, 5, 62);
-- Price history rows per SKU; the surrogate key is a Postgres SERIAL.
CREATE TABLE tbl_price_history (
	pk_price_history SERIAL PRIMARY KEY,
	fk_sku character(10) REFERENCES tbl_sku (sku),  -- FK to the SKU master table
	date_of_change date,     -- day the retail price changed
	retail_price numeric     -- exact numeric, suitable for money
);
-- OVERLAPS tests whether two date ranges share any point in time.
-- First range runs 2011-01-28 .. 2011-02-02 (the + 1 extends the end by one
-- day); the second is the single instant 2011-02-01, which lies inside the
-- first range, so this evaluates to true.
SELECT (DATE '2011-01-28', DATE '2011-02-01' + 1) OVERLAPS
       (DATE '2011-02-01', DATE '2011-02-01'    )
-- Diagnostics for one vehicle, newest first.
-- NOTE(review): the WHERE on v.printed_code turns the LEFT JOIN into an
-- effective INNER JOIN -- confirm the outer join is intentional.
select
    v.printed_code,
    id_to_text(vd.id) as id_,
    id_to_time(vd.id) as created_at,
    vd.*
from vehicle_diagnostic vd 
left join vehicle v on vd.vehicle_id = v.id
where v.printed_code = '999007'
-- Named sort key instead of the fragile positional "order by 3".
order by created_at desc
----
-- Repairs for the same vehicle, newest first.
select
    v.printed_code,
    id_to_text(vr.id) as id_,
    id_to_time(vr.id) as created_at,
    vr.*
from vehicle_repair vr
left join vehicle v on vr.vehicle_id = v.id
where v.printed_code = '999007'
order by created_at desc
----
----
-- Full distribution history, newest row first.
select
    *
from vehicle_distribution v
order by id desc
Preferred: COPY {table_name}({table_columns}) FROM STDIN WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE);


Also possible, but worse (server-side COPY reads the file on the database server and requires elevated privileges): copy r from 'file.csv' WITH NULL AS 'NULL' DELIMITER ';' CSV HEADER;
-- updating a table during runtime of the script with a join

ALTER TABLE shops_per_city ADD merchant_type DOUBLE;
/*ALTER TABLE shops_per_city ADD city_latitude DOUBLE*/

-- NOTE(review): this UPDATE fills city_latitude, which is added by the
-- commented-out ALTER above rather than the merchant_type ALTER --
-- confirm which column was meant to be added.
UPDATE shops_per_city AS shop
INNER JOIN city_list AS city
ON shop.city_id = city.city_id
SET shop.city_latitude = city.city_latitude;


-- selecting the list for a where statement with a subquery

SELECT *
FROM shopstation
WHERE Dealer_ID_Long IN
	(SELECT Dealer_ID_Long
	FROM shop_list);

-- also possible to join on a subquery (notice that there is no ; in the subquery)
-- Fixed: the original "SELECT *," had a trailing comma, which is a syntax error.
SELECT *
FROM shopcounter_category_daily_v2 AS shop
INNER JOIN
	(SELECT *
	FROM weather_owm_daily_2014
	WHERE cityId = 2761369) AS weather
ON shop.RECORD_DATE = weather.DATE;

-- update a column using CASE
-- String literals use single quotes (double quotes break under ANSI_QUOTES).
UPDATE creditcard_merchtype
SET online =
	(CASE
	WHEN Merchantname LIKE '%.at%' THEN 1
	WHEN Merchantname LIKE '%.com%' THEN 1
	ELSE 0
	END);

-- alter column name
ALTER TABLE creditcard_at_v1 CHANGE cityId city_id INT;


-- creating temporary table for later use:

CREATE TEMPORARY TABLE id_list AS
SELECT COUNT(*) AS days, dealer_name, DEALER_ID, Dealer_ID_Long
FROM shopcounter
WHERE YEAR(RECORD_DATE) > 2017
GROUP BY dealer_name, DEALER_ID, Dealer_ID_Long
HAVING days > 752;

CREATE TABLE shopcounter_stable AS
SELECT *
FROM shopcounter
WHERE Dealer_ID_Long IN
	(SELECT Dealer_ID_Long
	FROM id_list)
  AND YEAR(RECORD_DATE) > 2017; -- portable AND instead of MySQL-only &&
#!/bin/bash -v
# Upload each CSV to HDFS, then run its matching Hive script.
# Generalized from three copy-pasted stanzas into one loop; adding a new
# dataset is now a one-word change.

for dataset in AllstarFull Appearances AwardsManagers; do
    hadoop fs -put "./${dataset}.csv" "/user/bigdataproject/${dataset}.csv"
    hive -f "./${dataset}.hive"
done
-- Append a single literal marker row to the target table.
INSERT INTO TABLE db_name.table_name
SELECT 'ALL', 'Done';
-- create the databases
CREATE DATABASE IF NOT EXISTS projectone;

-- create the users for each database
-- NOTE(review): the '%' host wildcard allows connections from any host;
-- narrow it if the application connects from known hosts only.
CREATE USER 'projectoneuser'@'%' IDENTIFIED BY 'somepassword';
GRANT CREATE, ALTER, INDEX, LOCK TABLES, REFERENCES, UPDATE, DELETE, DROP, SELECT, INSERT ON `projectone`.* TO 'projectoneuser'@'%';

-- FLUSH PRIVILEGES is only required after editing the grant tables directly;
-- CREATE USER / GRANT take effect immediately. Kept here as a harmless no-op.
FLUSH PRIVILEGES;
# Comment out every my.cnf line starting with "bind-address" or "log"
# (sed's & re-inserts the matched text after the '#').
RUN sed -Ei 's/^(bind-address|log)/#&/' /etc/mysql/my.cnf
-- Rows whose comma-separated some_column contains the element '8'.
-- NOTE(review): a CSV-in-a-column design forces this string_to_array scan;
-- a junction table would make the predicate indexable.
select * 
from folder f
-- NOTE(review): joining uploads.id to folder.folderId looks inverted --
-- confirm it should not be u.folderId = f.id.
  join uploads u ON u.id = f.folderId 
where '8' = ANY (string_to_array(some_column,','))
-- The third argument is the NULL marker: every 'yy' element becomes NULL
-- in the resulting array.
SELECT string_to_array('xx~^~yy~^~zz', '~^~', 'yy');


-- Adds an auto-numbered id column and makes it the primary key in one step.
ALTER TABLE test1 ADD COLUMN id SERIAL PRIMARY KEY;

This single statement is all you need to:

- Add the id column.
- Populate it with a sequence from 1 to count(*).
- Set it as the primary key (and therefore NOT NULL).
-- POSITION returns the 1-based index of the first match:
-- 'our' starts at character 6 of 'w3resource', so this returns 6.
SELECT POSITION('our' in 'w3resource');


-- Merge invoices (which have no type column) and payments into one feed,
-- newest first. UNION ALL keeps duplicates and avoids the dedup sort.
SELECT id, account_id, amount, created_at, NULL AS type
FROM subscription_invoice
UNION ALL
SELECT id, account_id, amount, created_at, `type` AS type
FROM subscription_payment
ORDER BY created_at DESC
// Read a .sql file, split it into statements, and queue each one on `batch`
// for sequential execution against `client` (node-postgres). COPY statements
// are rewritten to stream the referenced CSV from disk via COPY ... FROM STDIN.
// Relies on outer-scope `fs`, `batch`, and `client`.
function processSQLFile(fileName) {

  // Extract SQL queries from files. Assumes no ';' inside the statements.
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm, " ") // remove newlines
    .replace(/\s+/g, ' ')           // collapse excess whitespace
    .split(";")                     // split into individual statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function (el) { return el.length != 0; }); // drop empty ones

  // Queue each SQL statement for sequential execution.
  queries.forEach(function (query) {
    batch.push(function (done) {
      if (query.indexOf("COPY") === 0) { // COPY - needs special treatment
        var regexp = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/gmi;
        var matches = regexp.exec(query);
        if (!matches) {
          // Surface a malformed COPY instead of crashing on matches[1].
          return done(new Error("Unparsable COPY statement: " + query));
        }
        var table = matches[1];
        var csvName = matches[2]; // renamed: no longer shadows the fileName parameter
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('error', done); // propagate stream failures to the batch
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + csvName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else { // Other queries don't need special treatment
        // node-postgres callbacks are (err, result); the original bound the
        // error object to `result` and silently swallowed failures.
        client.query(query, function (err, result) {
          done(err);
        });
      }
    });
  });
}
-- One row per order line, with the customer and product names resolved.
SELECT orderNumber,
       orderDate,
       customerName,
       orderLineNumber,
       productName,
       quantityOrdered,
       priceEach
FROM orders
INNER JOIN orderdetails USING (orderNumber)
INNER JOIN products     USING (productCode)
INNER JOIN customers    USING (customerNumber)
ORDER BY orderNumber,
         orderLineNumber;
-- Stage the next filegroup for the partition scheme, then split the partition
-- function at the new boundary. The statements are terminated explicitly, and
-- the boundary uses the 'yyyymmdd' literal, which SQL Server parses the same
-- way under every DATEFORMAT/language setting (unlike '2019/03/29').
ALTER PARTITION SCHEME [sh_trans_date]
 NEXT USED [PRIMARY];
ALTER PARTITION FUNCTION [pf_trans_date_byhour]() SPLIT RANGE('20190329');
-- DATABASE is a reserved word in MySQL, so a schema actually named "database"
-- must be backtick-quoted or the statements fail to parse.
ALTER DATABASE `database` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE USER 'user'@'localhost' IDENTIFIED BY 'pass';
GRANT ALL PRIVILEGES ON `database`.* TO 'user'@'localhost';
-- One row per sector with its users concatenated as "a; b; c".
SELECT 
   SS.SEC_NAME,
   STUFF((SELECT '; ' + US.USR_NAME 
          FROM USRS US
          WHERE US.SEC_ID = SS.SEC_ID
          ORDER BY US.USR_NAME
          -- TYPE + .value() round-trips through real XML so &, < and >
          -- are not left entity-escaped in the result.
          FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),
         -- Strip the full 2-character '; ' prefix; the original removed only
         -- 1 character, leaving a leading space on every list.
         1, 2, '') AS [SECTORS/USERS]
FROM SALES_SECTORS SS
GROUP BY SS.SEC_ID, SS.SEC_NAME
ORDER BY SS.SEC_NAME
-- Tables and columns together with their MS_Description extended properties
-- (description columns are NULL where no property is set).
SELECT
    tbl.table_schema + '.' + tbl.table_name AS TableName,
    tableProp.value                         AS TableDescription,
    col.column_name                         AS ColumnName,
    col.data_type                           AS ColumnDataType,
    colDesc.ColumnDescription               AS ColumnDescription
FROM information_schema.tables AS tbl
INNER JOIN information_schema.columns AS col
    ON col.table_name = tbl.table_name
LEFT JOIN sys.extended_properties AS tableProp
    ON tableProp.major_id = OBJECT_ID(tbl.table_schema + '.' + tbl.table_name)
   AND tableProp.minor_id = 0
   AND tableProp.name = 'MS_Description'
LEFT JOIN (
    -- Per-column descriptions, keyed by (object_id, column name).
    SELECT sc.object_id, sc.column_id, sc.name, colProp.[value] AS ColumnDescription
    FROM sys.columns AS sc
    INNER JOIN sys.extended_properties AS colProp
        ON colProp.major_id = sc.object_id
       AND colProp.minor_id = sc.column_id
       AND colProp.name = 'MS_Description'
) AS colDesc
    ON colDesc.object_id = OBJECT_ID(tbl.table_schema + '.' + tbl.table_name)
   AND colDesc.name = col.COLUMN_NAME
WHERE tbl.table_type = 'base table'
-- Optional filter: keep only objects that have at least one description.
--AND tableProp.[value] IS NOT NULL OR colDesc.ColumnDescription IS NOT null
-- Remove ONLY_FULL_GROUP_BY from the server-wide SQL mode.
-- Fixed: the pasted "mysql > " shell prompt was part of the line and would
-- make the statement unparsable if run as-is.
SET GLOBAL sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
-- Column names and data types for one table.
-- NOTE(review): table_name alone can match same-named tables in several
-- schemas/databases; add a table_schema predicate if that is possible here.
SELECT column_name as 'Column Name', data_type as 'Data Type'
FROM information_schema.columns
WHERE table_name = 'hrStaff' 
-- From https://github.com/bertwagner/SQLServer/blob/master/Non-SARGable%20Execution%20Plans.sql
-- This script will check the execution plan cache for any queries that are non-SARGable.
-- It does this by finding table and index scans that contain a scalar operators

-- Dirty reads are acceptable here: we only inspect the plan cache DMVs.
SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED
 
-- Current database name, QUOTENAME'd to match the bracketed @Database
-- attribute inside the showplan XML.
DECLARE @dbname SYSNAME
SET @dbname = QUOTENAME(DB_NAME());
 
-- Default namespace lets the XQuery paths below omit showplan prefixes.
WITH XMLNAMESPACES (DEFAULT 'http://schemas.microsoft.com/sqlserver/2004/07/showplan')

-- One row per scalar operator found inside a table/index scan in any cached plan.
SELECT
   stmt.value('(@StatementText)[1]', 'varchar(max)') AS [Query],
   query_plan AS [QueryPlan],
   sc.value('(.//Identifier/ColumnReference/@Schema)[1]', 'varchar(128)') AS [Schema], 
   sc.value('(.//Identifier/ColumnReference/@Table)[1]', 'varchar(128)') AS [Table], 
   sc.value('(.//Identifier/ColumnReference/@Column)[1]', 'varchar(128)') AS [Column] ,
   CASE WHEN s.exist('.//TableScan') = 1 THEN 'TableScan' ELSE 'IndexScan' END AS [ScanType],
   sc.value('(@ScalarString)[1]', 'varchar(128)') AS [ScalarString]
FROM 
	sys.dm_exec_cached_plans AS cp
	CROSS APPLY sys.dm_exec_query_plan(cp.plan_handle) AS qp
	CROSS APPLY query_plan.nodes('/ShowPlanXML/BatchSequence/Batch/Statements/StmtSimple') AS batch(stmt)
	CROSS APPLY stmt.nodes('.//RelOp[TableScan or IndexScan]') AS scan(s)
	CROSS APPLY s.nodes('.//ScalarOperator') AS scalar(sc)
WHERE
    s.exist('.//ScalarOperator[@ScalarString]!=""') = 1 
    AND sc.exist('.//Identifier/ColumnReference[@Database=sql:variable("@dbname")][@Schema!="[sys]"]') = 1
	AND sc.value('(@ScalarString)[1]', 'varchar(128)') IS NOT NULL
select 
				   a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 day'::interval, 'DD-MM-YYYY')  jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
				   a.plafond_or,
				   a.penjaminan nilai_penjaminan,
				   c.is_konvensional,
				   0 nilai_asuransi_1,
					0 nilai_asuransi_2,
				  (a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
				 CASE
				        when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
				 END umur,
				  '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
							--(ijp cogar - feebase cogar) x persen komisi

						

                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_obligee f on a.id_obligee = f.id_obligee
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where 
				--( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND
				--d.id_reference is null and
				--a.flag_deletion is null and
	              -- d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				  a.flag_deletion is null and 		
					d.flag_delete is null and 
					( 
						( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
							( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
						) OR  
						
						( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
					)  
					
	          
				 union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					0 nilai_asuransi_1,
					0 nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship != 9
	              
	                union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					nilai_asuransi_1,
					nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    a.catatan note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							nilai_asuransi_2 * 5/100 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship_asuransi a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	          left join (
                    SELECT id_transaksi_suretyship, 
                            sum(nilai_asuransi_1)  nilai_asuransi_1, 
                            sum(nilai_asuransi_2)  nilai_asuransi_2 FROM (
                                SELECT
                                    id_transaksi_suretyship,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 1
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_1,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 2
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_2 
                                FROM
                                tr_transaksi_suretyship_ext	 
                            ) ss GROUP BY id_transaksi_suretyship
                ) i ON a.id_transaksi_suretyship = i.id_transaksi_suretyship
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship = 9
	               
-- Rebuild the primary key as a composite of (person, place, thing); dropping
-- and re-adding inside one ALTER keeps it a single DDL operation.
ALTER TABLE provider DROP PRIMARY KEY, ADD PRIMARY KEY(person, place, thing);
Use the SUM aggregate with a CASE expression as shown below. If you want the sums broken out per t.clock, add a GROUP BY on that column so each clock value is returned alongside its sums.

-- Conditional sums for two specific host/item pairs.
-- Implicit comma joins replaced with explicit ANSI INNER JOINs; predicates unchanged.
SELECT SUM(CASE WHEN i.hostid = '223344' AND t.itemid = '0223344' THEN t.value END) AS FirstValue,
       SUM(CASE WHEN i.hostid = '112233' AND t.itemid = '0112233' THEN t.value END) AS SecondValue
FROM hosts h
INNER JOIN items i ON i.hostid = h.hostid
INNER JOIN history_uint t ON t.itemid = i.itemid
If the sum should be broken down by t.clock, use:

   -- Same pivot, grouped per clock tick so each row carries its timestamp.
   -- Fixes: comma joins replaced with explicit ANSI INNER JOINs, and a stray
   -- copy-paste "0" token after the query (which made it invalid SQL) removed.
   SELECT
       t.clock,
       SUM(CASE WHEN i.hostid = '223344' AND t.itemid = '0223344' THEN t.value END) AS FirstValue,
       SUM(CASE WHEN i.hostid = '112233' AND t.itemid = '0112233' THEN t.value END) AS SecondValue
   FROM hosts h
   INNER JOIN items i
       ON i.hostid = h.hostid
   INNER JOIN history_uint t
       ON t.itemid = i.itemid
   GROUP BY t.clock

Alternatively, you can try the logic below:

-- List each stored value with its timestamp for the two monitored
-- (hostid, itemid) pairs. Join conditions stay in ON; row filters
-- live in WHERE (equivalent for INNER JOINs).
SELECT
    i.hostid,
    t.itemid,
    t.value AS OneValues,
    t.clock AS time
FROM hosts h
INNER JOIN items i
    ON i.hostid = h.hostid
INNER JOIN history_uint t
    ON t.itemid = i.itemid
WHERE i.hostid IN ('223344', '112233')
  AND i.itemid IN ('0223344', '0112233')
' Build a comma-separated string of the selected ListBox items.
' Fixes the original loop, which prepended each item onto the front of the
' string — producing the items in reverse selection order with a dangling
' trailing comma. This version appends in order and only inserts a comma
' between items.
Dim tempo = listbox1.SelectedItems
Dim students As String = ""
For Each selected In tempo
    If students.Length > 0 Then
        students &= ","
    End If
    students &= selected.ToString()
Next
star

Fri Sep 23 2022 23:41:42 GMT+0000 (UTC)

#sql #sqlserver
star

Fri Sep 23 2022 23:26:41 GMT+0000 (UTC)

#sql #sqlserver
star

Fri Sep 23 2022 21:40:49 GMT+0000 (UTC)

#sql #sqlserver
star

Fri Sep 23 2022 11:08:32 GMT+0000 (UTC) https://stackoverflow.com/questions/22180930/primary-foreign-key-constraints-confusion

#sql
star

Wed Sep 21 2022 20:38:16 GMT+0000 (UTC) undefined

#powershell #windows #sql #pwsh
star

Mon Sep 19 2022 00:57:38 GMT+0000 (UTC)

#sql
star

Thu Sep 15 2022 18:40:28 GMT+0000 (UTC) https://stackoverflow.com/questions/13607076/conversion-failed-when-converting-the-varchar-value-select-to-data-type-int

#sql
star

Thu Sep 15 2022 15:22:33 GMT+0000 (UTC) https://stackoverflow.com/questions/20935265/select-query-with-case-condition-and-sum

#sql
star

Tue Sep 13 2022 16:03:16 GMT+0000 (UTC)

#sql
star

Wed Sep 07 2022 08:10:48 GMT+0000 (UTC) https://gist.github.com/nextab/6a9b33919ab15e1f59828b4798682dd4

#mysql #sql
star

Thu Sep 01 2022 12:51:07 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 19:56:27 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 19:54:42 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 19:53:39 GMT+0000 (UTC)

#bash #sql
star

Mon Aug 29 2022 19:27:44 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 19:19:41 GMT+0000 (UTC)

#bash #docker #sql
star

Mon Aug 29 2022 19:10:43 GMT+0000 (UTC)

#bash #sql
star

Mon Aug 29 2022 19:09:59 GMT+0000 (UTC)

#bash #sql
star

Mon Aug 29 2022 18:52:53 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:43:04 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:42:20 GMT+0000 (UTC)

#bash #sql
star

Mon Aug 29 2022 18:37:26 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:36:31 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:33:43 GMT+0000 (UTC)

#sql #bash #docker
star

Mon Aug 29 2022 18:32:04 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:26:52 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 18:08:13 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 17:09:44 GMT+0000 (UTC)

#sql #php
star

Mon Aug 29 2022 17:05:21 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 17:04:20 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:55:57 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:53:46 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:35:00 GMT+0000 (UTC) https://gist.github.com/rgreenjr/3637525

#sql
star

Mon Aug 29 2022 16:19:59 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:19:20 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:14:44 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 16:06:01 GMT+0000 (UTC)

#sql #php
star

Mon Aug 29 2022 15:59:39 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 15:46:27 GMT+0000 (UTC)

#sql
star

Mon Aug 29 2022 14:43:52 GMT+0000 (UTC)

#sql