Snippets Collections
-- The purpose of this query is to join all 'daily' information into a single result set for easier reading and analysis.
-- In this query the primary table is the 'daily intensities' measure, joined with the daily calories and daily steps tables as foreign tables.
SELECT
  *
FROM 
  `capstone-project-track-1.intensities_data.daily_intensities` AS daily_intensities -- activity/intensity table
JOIN
  `capstone-project-track-1.calories_data.daily_calories` AS daily_calories -- calories table
ON 
  daily_intensities.Id = daily_calories.Id
AND
  daily_intensities.ActivityDay = daily_calories.ActivityDay
JOIN
  `capstone-project-track-1.steps_data.daily_steps` AS daily_steps -- steps table
ON 
  daily_intensities.Id = daily_steps.Id
AND
  daily_intensities.ActivityDay = daily_steps.ActivityDay
ORDER BY
  daily_intensities.ActivityDay
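-- A sketch of the same join that lists columns explicitly, so the duplicate Id/ActivityDay columns from the joined tables are dropped.
-- The measure column names (VeryActiveMinutes, SedentaryMinutes, Calories, StepTotal) are assumptions about the daily tables.
SELECT
  daily_intensities.Id,
  daily_intensities.ActivityDay,
  daily_intensities.VeryActiveMinutes,
  daily_intensities.SedentaryMinutes,
  daily_calories.Calories,
  daily_steps.StepTotal
FROM 
  `capstone-project-track-1.intensities_data.daily_intensities` AS daily_intensities
JOIN
  `capstone-project-track-1.calories_data.daily_calories` AS daily_calories
ON 
  daily_intensities.Id = daily_calories.Id
  AND daily_intensities.ActivityDay = daily_calories.ActivityDay
JOIN
  `capstone-project-track-1.steps_data.daily_steps` AS daily_steps
ON 
  daily_intensities.Id = daily_steps.Id
  AND daily_intensities.ActivityDay = daily_steps.ActivityDay
ORDER BY
  daily_intensities.ActivityDay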
-- What has been the avg. death rate by race?
SELECT
    STUB_NAME, STUB_LABEL, YEAR, ESTIMATE
FROM
    death_rates
WHERE
    STUB_LABEL NOT LIKE '%years%' -- exclude age bracket descriptions
AND
    STUB_NAME LIKE '%and%' -- include all categories except 'sex only'
-- What has been the avg. estimated death rate by sex since 1950?
SELECT
    STUB_LABEL, YEAR, AVG(ESTIMATE) AS AVG_ESTIMATE
FROM
    death_rates
WHERE
    STUB_LABEL IN ('Male', 'Female')
AND
    YEAR >= 1950
GROUP BY
    STUB_LABEL, YEAR
-- What is the average estimated death rate by sex & age since 1950?
SELECT
    STUB_LABEL, AVG(ESTIMATE) AS AVG_ESTIMATE, YEAR
FROM
    death_rates
WHERE
    STUB_NAME = 'Sex and age'
AND
    YEAR >= 1950
GROUP BY
    STUB_LABEL, YEAR
In the query =>
donnee.flag_rafraichissement    AS histoLectureFlashRaw,
donnee.envoi_auto               AS envoiAutoEnergyRaw,
  
import org.springframework.beans.factory.annotation.Value;    


    @Value("#{target.envoiAutoEnergyRaw==null?null:target.envoiAutoEnergyRaw== 1}")
    Boolean isEnvoiAutoEnergy();

    @Value("#{target.histoLectureFlashRaw == 1}")
    boolean getHistoLectureFlash();
SELECT *
FROM all_source
WHERE REGEXP_LIKE(text, '(^|\W)Your Text Here($|\W)', 'i')
AND owner = 'LLV3';
BEGIN
  DBMS_MVIEW.REFRESH('MV_MINIBAR_ITEM');
END;
/
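-- Companion check (sketch): confirm when the materialized view was last refreshed and how.
SELECT mview_name, last_refresh_type, last_refresh_date
FROM user_mviews
WHERE mview_name = 'MV_MINIBAR_ITEM';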
WITH geocoded AS (
    SELECT 
        *,
        {{ address_to_coordinates( 'ADDRESS_LINE_ONE' ) }} AS METADATA

    FROM {{ ref('test_address__mock_data') }}

    WHERE
        ( 
            address_line_one NOT IN {{ var('mask_data_pattern') }} 
            AND
            address_line_two NOT IN {{ var('mask_data_pattern') }} 
        )
        OR
        (
            address_line_one IS NULL 
            AND
            address_line_two IS NULL
        )
)
// 
// 24-8-65
if {RequestInfo.Report_By} = 'มัทนาวดี' then  'จพ.วิทยา'
else if {RequestInfo.Report_By} = 'สายทิพย์ ' then  'จพ.งาน'
else if {RequestInfo.Report_By} = 'ทนพ.ญ. ญาณิศา0' then  'Laboratory'
else if {RequestInfo.Report_By} = 'เข็มทอง' then  'Laboratory'
else if {RequestInfo.Report_By} = 'ทนพ.ญ.วัลดี  ' then  'Laboratory'
else if {RequestInfo.Report_By} = 'นายขวัญชัย สม0' then  'นักเทคนิคการแพทย์ชำนาญการ'
else if {RequestInfo.Report_By} = 'จริยา อิน' then  'นักเทคนิคการแพทย์'
else if {RequestInfo.Report_By} = 'administrator' then  'Administrator LIS'
insert into analytics.dbt_klee.mock_orders (order_id, status, created_at, updated_at) 
values
    (1, 'delivered', '2020-01-01', '2020-01-04'),
    (2, 'shipped', '2020-01-02', '2020-01-04'),
    (3, 'shipped', '2020-01-03', '2020-01-04'),
    (4, 'processed', '2020-01-04', '2020-01-04');
create or replace transient table analytics.dbt_klee.mock_orders(
    order_id integer,
    status varchar(100),
    created_at date,
    updated_at date
)
MINUTE( DATETIME(1970,1,1,0,
 CASE 
  WHEN CAST(Nb of days as NUMBER) <= 1 THEN Hour
  WHEN CAST(Nb of days as NUMBER) >= 180 THEN ((MONTH(Date)-MONTH(PARSE_DATE("%Y%m%d",Min date)))+((YEAR(Date)-YEAR(PARSE_DATE("%Y%m%d",Min date)))*12))  
  WHEN CAST(Nb of days as NUMBER) >= 56 THEN (((YEAR(Date)-1)*52)+IF(WEEK(Date) IN (52,53) AND MONTH(Date) = 1, 0, WEEK(Date)))-(((YEAR(PARSE_DATE("%Y%m%d",Min date))-1)*52)+IF(WEEK(PARSE_DATE("%Y%m%d",Min date)) IN (52,53), 0, WEEK(PARSE_DATE("%Y%m%d",Min date))))
  ELSE DATE_DIFF(Date,PARSE_DATE("%Y%m%d",Min date))
 END
 ,0))
<!-- TeamViewer Logo (generated at https://www.teamviewer.com) -->
<div style="position:relative; width:234px; height:60px;">
    <a href="https://www.teamviewer.com/link/?url=842558&id=1682695569731" style="text-decoration:none;">
        <img src="https://static.teamviewer.com/resources/badges/teamviewer_badge_flat4.png" alt="Download TeamViewer Remote Control" title="Download TeamViewer Remote Control" border="0" width="234" height="60" />

    </a>
</div>
SELECT table_name, column_name, comments
FROM all_col_comments
WHERE owner = 'LLV3' -- schema name
AND (
        (table_name LIKE '%recipe%' OR column_name LIKE '%recipe%' OR comments LIKE '%recipe%')
     OR (table_name LIKE '%element%' OR column_name LIKE '%element%' OR comments LIKE '%element%')
     OR (table_name LIKE '%RCP%' OR column_name LIKE '%RCP%' OR comments LIKE '%RCP%')
    )
ORDER BY table_name, column_name;


-- How to find out which schema you are in
SELECT sys_context('USERENV', 'CURRENT_SCHEMA') as schema_name
FROM dual;
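-- Companion check (sketch): list the tables owned by the current schema.
SELECT table_name FROM user_tables ORDER BY table_name;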
declare @value decimal(10,2)
set @value = (select 
CASE WHEN %CH003% = 0 THEN 0 
WHEN {AGE,YEAR} > 130.0 THEN 0 
WHEN {AGE,YEAR} < 18.0 THEN ((0.41 * {HEIGHT}) / %CH003%) 
WHEN {SEX} = "M" AND %CH003% <= 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -0.411))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "M" AND %CH003% > 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -1.209))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% <= 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -0.329))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% > 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -1.209))) * POWER(0.993, {AGE,YEAR})) ELSE 0 END)

SELECT CASE WHEN {AGE,YEAR} < 18.0 THEN "-"
WHEN @value >= 90.0 THEN "Stage 1"      
WHEN @value >= 60.0 AND @value <= 89.99 THEN "Stage 2"      
WHEN @value >= 45.0 AND @value <= 59.99 THEN "Stage 3a"      
WHEN @value >= 30.0 AND @value <= 44.99 THEN "Stage 3b"      
WHEN @value >= 15.0 AND @value <= 29.99 THEN "Stage 4"      
WHEN @value <  15.0  AND @value >= 0.0 THEN "Stage 5"      
ELSE "" END
declare @value decimal(10,2)
set @value = (select 
CASE WHEN %CH003% = 0 THEN 0 
WHEN {AGE,YEAR} > 130.0 THEN 0 
WHEN {AGE,YEAR} < 18.0 THEN ((0.41 * {HEIGHT}) / %CH003%) 
WHEN {SEX} = "M" AND %CH003% <= 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -0.411))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "M" AND %CH003% > 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -1.209))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% <= 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -0.329))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% > 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -1.209))) * POWER(0.993, {AGE,YEAR})) ELSE 0 END)

SELECT CASE WHEN @value >= 90.0 THEN "Stage 1"      
WHEN @value >= 60.0 AND @value <= 89.99 THEN "Stage 2"      
WHEN @value >= 45.0 AND @value <= 59.99 THEN "Stage 3a"      
WHEN @value >= 30.0 AND @value <= 44.99 THEN "Stage 3b"      
WHEN @value >= 15.0 AND @value <= 29.99 THEN "Stage 4"      
WHEN @value <  15.0  AND @value >= 0.0 THEN "Stage 5"      
ELSE "" END
BEGIN
	SET NOCOUNT ON;
	
	--
	declare @LN nvarchar(20);
	declare @Testcode nvarchar(10);
	declare @Data nvarchar(MAX);
	declare @Result nvarchar(MAX);


	select @LN = LN from inserted;
	select @Result = Result from inserted;
	select @Testcode = Result_Test_Code from inserted;

	--

	if (@Testcode = 'UA001')
	BEGIN
	delete tbl_lab_concate_test where LN = @LN
	INSERT INTO tbl_lab_concate_test VALUES (@LN,'','N');   
    END

	if (@Testcode = 'URT203' and @Result is not null) -- Bacteria
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+'Bacteria = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT203'
    END

	if (@Testcode = 'URT203' and @Result is null) -- Bacteria
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT203'
    END


	if (@Testcode = 'URT200' and @Result is not null) --Hyaline Cast
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Hyaline Cast = '+@Result+'/LPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT200'
    END

	if (@Testcode = 'URT200' and @Result is  null) --Hyaline Cast
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT200'
    END

	
	if (@Testcode = 'URT204' and @Result is not null) -- Calcium Oxalate
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',CaOx monohydrate = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT204'
    END
	
	if (@Testcode = 'URT204' and @Result is null) -- Calcium Oxalate
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT204'
    END 

	if (@Testcode = 'URT205' and @Result is not null) -- CaOx Dihydrate
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',CaOx Dihydrate = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT205'
    END
	
	if (@Testcode = 'URT205' and @Result is null) -- CaOx Dihydrate
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT205'
    END
	
	if (@Testcode = 'URT209' and @Result is not null) -- Triple Phosphate Crystals
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Triple Phosphate Crystals = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT209'
    END
	
	if (@Testcode = 'URT209' and @Result is null) --Triple Phosphate Crystals
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT209'
    END

	--if (@Testcode = 'URT202' and @Result is not null) -- None Sq.EPI.
	--BEGIN
	
 --   update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',None Sq.EPI. = '+@Result+'/HPF'	where LN = @LN
	--update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT202'
 --   END
	
	--if (@Testcode = 'URT202' and @Result is null) --None Sq.EPI.
	--BEGIN
	
	--update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT202'
 --   END

	if (@Testcode = 'URT201' and @Result is not null and @Result <> '0') -- PAT
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',PAT = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT201'
    END

	if (@Testcode = 'URT201' and @Result is null) -- PAT
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT201'
    END



	if (@Testcode = 'URT208' and @Result is not null) -- Mucous Thread
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Mucous = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT208'
    END

	if (@Testcode = 'URT208' and @Result is null) -- Mucous Thread
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT208'
    END

	if (@Testcode = 'URT071' and @Result is not null) -- Amorphous
	BEGIN
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Amorphous = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT071'
    END

	if (@Testcode = 'URT071' and @Result is null) -- Amorphous
	BEGIN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT071'
    END

	if (@Testcode = 'URT207' and @Result is not null) -- Yeast cell
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Yeast cell = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT207'
    END

	if (@Testcode = 'URT207' and @Result is null) -- Yeast cell
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT207'
    END

	if (@Testcode = 'URT230' and @Result is not null) -- Uric acid Crystal
	BEGIN
	
    update tbl_lab_concate_test set LN = @LN, Temp_Data = Temp_Data+',Uric acid Crystal = '+@Result where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT230'
    END

	if (@Testcode = 'URT230' and @Result is null) -- Uric acid Crystal
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT230'
    END

	if (@Testcode = 'URT231' and @Result is not null) -- Spermatozoa
	BEGIN
	
    update tbl_lab_concate_test set LN = @LN, Temp_Data = Temp_Data+',Spermatozoa = '+@Result where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT231'
    END

	if (@Testcode = 'URT231' and @Result is null) -- Spermatozoa
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT231'
    END

	if (@Testcode = 'URT500') -- Other
	BEGIN
	 if (@Result = '-')
	 update B set B.Result = A.Temp_Data
	 from tbl_lab_concate_test A, tbl_lab_information_ExportToHIS B
	 where A.LN = B.LN and B.Result_Test_Code = 'URT500'

	 if (@Result <> '-')
	 update B set B.Result = A.Temp_Data+','+@Result
	 from tbl_lab_concate_test A, tbl_lab_information_ExportToHIS B
	 where A.LN = B.LN and B.Result_Test_Code = 'URT500'

    update 	tbl_lab_concate_test set Inactive = 'Y'	where LN = @LN

    END
END
load data infile '/home/cloudera/Desktop/shared_local/flipkart_dataset.csv'
into table flipkart_tab
fields terminated by ','
enclosed by '"'
lines terminated by '\n'
(category_1,category_2,category_3,title,product_rating,selling_price,mrp,seller_name,seller_rating,description,highlights,image_links);
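-- Sketch of the same load when the CSV starts with a header row (an assumption about the file):
load data infile '/home/cloudera/Desktop/shared_local/flipkart_dataset.csv'
into table flipkart_tab
fields terminated by ','
enclosed by '"'
lines terminated by '\n'
ignore 1 lines
(category_1,category_2,category_3,title,product_rating,selling_price,mrp,seller_name,seller_rating,description,highlights,image_links);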
import sqlite3
JList = []

class MyJournal:
    
    def __init__(self,id,name):
        self.id = id
        self.name = name
        
    def description(self):
        return "Journal number: " + str(self.id) + " has the name " + self.name

conn = sqlite3.connect('academic_papers_populated.db')

cursor = conn.cursor()


for row in cursor.execute('''SELECT * FROM Journal;'''):

    JList.append(MyJournal(row[0],row[1]))

cursor.close()
import sqlite3
conn = sqlite3.connect('academic_papers_populated.db')

def apaReference(ArticleId):
    SQL = '''SELECT AuthorNumber, LastName, Initials, Year, Title, Name  
             FROM Article as p, Journal as j, Author as a, Article_Authors as b
             WHERE p.JournalID = j.JournalID
             AND p.ArticleID = b.ArticleID
             AND b.AuthorID = a.AuthorID
             AND p.Articleid = :id
             ORDER BY AuthorNumber;'''

    cursor = conn.cursor()
    record = cursor.execute(SQL,{'id':ArticleId}).fetchall()
    cursor.close()
    if len(record) ==0:
        raise Exception("Invalid Article")
    else:
        ref = ''
        count = 0
        for row in record:
            ref = ref + row[1]+', '+row[2]
            count += 1
            if count < len(record):
                if count + 1 < len(record):
                    ref = ref +', '
                else:
                    ref = ref +', & '
        ref = ref + ' (' + str(record[0][3]) + ') '+ record[0][4]+ '. ' +record[0][5]+'.'
        return ref
import sqlite3

# Create a connection to the database
conn = sqlite3.connect('Northwind2020.db')

# Create a cursor object to execute SQL queries
cursor = conn.cursor()

# Retrieve the number of unique suppliers that have discontinued products
query = '''
SELECT COUNT(DISTINCT SupplierId)
FROM Product
WHERE IsDiscontinued = 1
'''

cursor.execute(query)
num_discontinued_suppliers = cursor.fetchone()[0]

# Display an appropriate message based on the number of discontinued suppliers
if num_discontinued_suppliers >= 20:
    print("20 or more suppliers have discontinued products.")
elif num_discontinued_suppliers <= 0:
    print("All products are available.")
else:
    print("Less than 20 suppliers have discontinued products.")

# Close the database connection
conn.close()




import sqlite3
conn = sqlite3.connect('Northwind2020.db')
SQL = '''SELECT COUNT(*) AS NoMore
         FROM
         (SELECT Distinct(S.Id)
         FROM Supplier S, Product P
         WHERE S.Id = P.SupplierId
         AND P.IsDiscontinued = 1
         GROUP BY S.CompanyName) '''

cursor = conn.cursor()
cursor.execute(SQL)
answer = cursor.fetchone()

if answer[0] is not None:
    if int(answer[0]) >= 20:
        print("20 or more suppliers have discontinued products.")
    else:
        print("Less than 20 suppliers have discontinued products.")
else:
    print("All products are available.")
    
cursor.close()
from datetime import datetime

currentyear = 2021
currentmonth = 2
currentday = 10

def validate_age(DOB):
    current_date = datetime(currentyear, currentmonth, currentday)
    day, month, year = map(int, DOB.split('-'))
    dob_date = datetime(year, month, day)
    age_in_years = current_date.year - dob_date.year - ((current_date.month, current_date.day) < (dob_date.month, dob_date.day))
    return age_in_years



currentyear = 2021
currentmonth = 2
currentday = 10

def validate_age(DOB):
    #your code goes here
    b_year = int(DOB[-4:])
    b_month = int(DOB[-7:-5])
    b_day = int(DOB[:2])
    
    age = currentyear - b_year
    if b_month > currentmonth:
        age -= 1
    elif b_month == currentmonth:
        if b_day > currentday:
            age -= 1
    
    return int(age)
import sqlite3

# Connect to the database
conn = sqlite3.connect('BookCollection.db')

# Define the SQL query
query = '''
SELECT Authors.Surname, Authors.Name, Books.Title
FROM Authors
INNER JOIN Books ON Authors.AuthorID = Books.AuthorID
ORDER BY Authors.Surname, Books.Title
'''

# Execute the query and print the results
cursor = conn.cursor()
for row in cursor.execute(query):
    print(row)

# Close the database connection
conn.close()


import sqlite3
conn = sqlite3.connect('BookCollection.db')

cursor = conn.cursor()
for row in cursor.execute('''SELECT Surname, Name, Title
                             FROM Authors, Books
                             WHERE Authors.AuthorID = Books.AuthorID
                             ORDER BY Surname, Name, Title;'''):
    print(row)
cursor.close()
SQL = '''SELECT O.OrderNumber, O.OrderDate, TotalAmount, O.CustomerID
         FROM [Order] O, Customer C
         WHERE O.CustomerID = C.Id
         AND C.Id = :custID'''

for row in cursor.execute(SQL, {'custID':inputid}):
    print(row)
cursor.close()

import sqlite3
conn = sqlite3.connect('Northwind2020.db')

def CustOrderCountDetails(CustomerID):
    cursor = conn.cursor()
    cursor.execute("SELECT FirstName, LastName, COUNT(*) FROM Customer JOIN [Order] ON Customer.Id = [Order].CustomerId WHERE Customer.Id = ? GROUP BY Customer.Id", (CustomerID,))
    result = cursor.fetchone()
    conn.close()
    return f"Customer {result[0]} {result[1]} had a total of {result[2]} orders"





import sqlite3
conn = sqlite3.connect('Northwind2020.db')
    
def CustOrderCountDetails(CustomerID):
    # Your code goes here
    cursor = conn.cursor()

    SQL = '''SELECT FirstName, lastname, COUNT([Order].Id) as OrderCount
    From Customer, [Order]
    WHERE customer.id = CustomerId
    and customer.id = :Cid
    GROUP BY FirstName, LastName'''


    result = cursor.execute(SQL,{'Cid': CustomerID}).fetchone()
    cursor.close()
    return "Customer " + result[0] + " " + result[1] + " had a total of " + str(result[2]) + " orders"
SELECT
	tbl_lab_setup_Test.[Test_Code],
	tbl_lab_setup_Test.Test_Name,
	[Sex_Description],
	[Age_Text],
	[Test_Reference_Text],
	[Unit_Title],
	[Test_Reference_Cri1],
	[Test_Reference_Cri2]
FROM
	[LAB_DB].[dbo].[view_lab_setup_Test_Numeric_Reference]
JOIN tbl_lab_setup_Test ON [view_lab_setup_Test_Numeric_Reference].Test_Code = tbl_lab_setup_Test.Test_Code
WHERE
	Test_Reference_Id IS NOT NULL
AND tbl_lab_setup_Test.Test_Inactive = '0'
ORDER BY
	tbl_lab_setup_Test.Test_Code,
	Test_Reference_Id
SELECT
    planname,
    '{"c":"baz"}'::JSON,
    json_build_object(planname,1,'bar',2)
FROM bplaene
LIMIT 10
;

-- Complex nested JSON with arrays
SELECT json_build_object(
               'trans_id', t.trans_id,
               'user_data', json_build_object(
                       'user_id', t.user_id,
                       'user_username', t.user_username,
                       'user_full_name', t.user_full_name,
                       'user_street', t.user_street,
                       'user_postal_code', t.user_postal_code,
                       'user_additional_info', t.user_additional_info,
                       'user_country', t.user_country,
                       'user_vat_number', t.user_vat_number),
               'order_data', json_build_object(
                       'order_date', t.order_date,
                       'order_sum', t.order_sum,
                       'order_vat', t.order_vat,
                       'order_invoice_nr', t.order_invoice_nr
                   ),
               'locations',
               (SELECT json_agg(row_to_json(locations))
                FROM (SELECT l.address,
                             l.project_title,
                             (SELECT json_agg(row_to_json(f))
                              FROM (SELECT layername,
                                           data
                                    FROM sales.features) f) features

                      FROM sales.locations l) locations)
           ) transaction
FROM sales.transactions t
SELECT 
    req.session_id
    , req.total_elapsed_time AS duration_ms
    , req.cpu_time AS cpu_time_ms
    , req.total_elapsed_time - req.cpu_time AS wait_time
    , req.logical_reads
    , REPLACE (REPLACE (SUBSTRING (ST.text, (req.statement_start_offset/2) + 1, 
       ((CASE statement_end_offset
           WHEN -1
           THEN DATALENGTH(ST.text)  
           ELSE req.statement_end_offset
         END - req.statement_start_offset)/2) + 1) , CHAR(10), ' '), CHAR(13), ' ')  AS statement_text  
FROM sys.dm_exec_requests AS req
    CROSS APPLY sys.dm_exec_sql_text(req.sql_handle) AS ST
ORDER BY total_elapsed_time DESC;
with pg_tx as (
SELECT
  account_id,
  sum(value.amount)/100 as tx_amount
FROM `reby-cloud.analytics_reby_v1_eu.pg_transactions_json`
where type like '%top_up'
and date(created_at) >= date_sub(current_date, INTERVAL 6 MONTH)
group by 1
),

account as (
  select
    acc.id,
    acc.balance__amount/100 as balance_amount,
    acc.updated_at,
    tx.account_id,
    tx.tx_amount
  from `reby-cloud.analytics_reby_v1_eu.pg_account` acc
  left join pg_tx tx on acc.id = tx.account_id

),
join_user as (
select
  acc.*,
  cu.user_id,
  cu.company_id
from account acc
  left join `reby-cloud.reby_marketing_eu.pg_company_user` cu
  on acc.id = cu.account_id
where acc.account_id is not null
and acc.balance_amount > 0
and cu.company_id = 'c_3qteetfhxjshx4j54111'
),

join_sa as (
select
  u.*,
  fmr.service_area
from join_user u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` fmr
  on u.user_id = fmr.user_id
)

select service_area, sum(balance_amount) as amount from join_sa group by 1
;
SELECT SalesPerson, [Oranges] AS Oranges, [Pickles] AS Pickles
FROM 
   ( SELECT SalesPerson, Product, SalesAmount
     FROM ProductSales 
   ) ps
PIVOT
   ( SUM (SalesAmount)
     FOR Product IN ( [Oranges], [Pickles])
   ) AS pvt
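-- Minimal mock data to try the PIVOT above; the ProductSales schema is an assumption.
CREATE TABLE ProductSales (SalesPerson VARCHAR(50), Product VARCHAR(50), SalesAmount DECIMAL(10,2));
INSERT INTO ProductSales VALUES ('Anna', 'Oranges', 100), ('Anna', 'Pickles', 50), ('Ben', 'Oranges', 75);
-- Expected shape: one row per SalesPerson, with the Oranges and Pickles totals as columns.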
UPDATE [LAB_DB_DES].[dbo].[tbl_lab_setup_Test]
SET [Specimen_Type_Code] ='98'

--SELECT* FROM tbl_lab_setup_Test
WHERE
	([Test_His_Code] IN ('i39','i40','i44','i46','i50','i51','i55','i58','i61','i63','i65','i71','i72','i73','i93','i102','i622','i623','i626','i637','i638','i652','i656','i657','i658','i659','i668','i669','i670','i671','i163','i676','i683','i686','i437','i182','i184','i440','i441','i703','i455','i502','i505','i507','i509'))
/*use "sa" user*/

/*backup*/
BACKUP DATABASE [servicedesk] TO DISK = N'/var/opt/mssql/data/SDDB20230221.BAK';

/*restore*/
ALTER DATABASE [servicedesk] SET OFFLINE WITH ROLLBACK IMMEDIATE;
RESTORE DATABASE [servicedesk] FROM DISK = N'/var/opt/mssql/data/SDDB20230221.BAK' WITH REPLACE;

/*go online after restore*/
ALTER DATABASE [servicedesk] SET ONLINE;
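/*optional sketch: verify the backup file is readable before restoring*/
RESTORE VERIFYONLY FROM DISK = N'/var/opt/mssql/data/SDDB20230221.BAK';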
CREATE TABLE Employee
(
	ID				INT,
	FirstName		VARCHAR(50),
	LastName		VARCHAR(50),
	DepartmentID	INT,
	ReportToID		INT
)
CREATE Table Departments
(
	ID				INT,
	DepartmentName	VARCHAR(50)
)
INSERT INTO Employee VALUES (1, 'F1', 'L1', 1, NULL);
INSERT INTO Employee VALUES (2, 'F2', 'L2', 2, 1);
INSERT INTO Employee VALUES (3, 'F3', 'L3', 1, 2);
INSERT INTO Employee VALUES (4, 'F4', 'L4', 2, 3);
INSERT INTO Employee VALUES (5, 'F5', 'L5', 1, NULL);
INSERT INTO Employee VALUES (6, 'F6', 'L6', 2, 1);
INSERT INTO Employee VALUES (7, 'F7', 'L7', 1, 2);
INSERT INTO Departments VALUES ( 1 , 'D1');
INSERT INTO Departments VALUES ( 2 , 'D2');
INSERT INTO Departments VALUES ( 3 , 'D3');
SELECT *
FROM Employee
SELECT *
FROM Departments
;WITH cte AS
(
SELECT ID,firstname,lastname,departmentid,reporttoid
FROM Employee WHERE FirstName='F2' -- Change this
UNION ALL
SELECT a.ID,a.firstname,a.lastname,a.departmentid,a.reporttoid
FROM Employee a
INNER JOIN cte b ON a.ReportToID=b.Id
),cte1 AS 
(
SELECT CTE.iD AS EmployeeId
	,e.Id AS ManagerId
	,cte.FirstName AS EmployeeFirstName
	,cte.LastName AS EmployeeLastName
	,e.FirstName AS ManagerFirstName
	,e.LastName AS ManagerLastName
	,cte.DepartmentId  AS EmpDepId--Emp
	,e.DepartmentId AS MgrDepId-- Mgr
FROM cte
	CROSS APPLY Employee e
WHERE cte.Reporttoid=e.ID
),cte2 AS 
(
SELECT cte.*
	,d1.DepartmentName AS EmployeeDepartmentName
FROM cte1 AS cte
	LEFT JOIN Departments AS d1
		ON cte.EmpDepId=d1.ID
)
SELECT cte2.EmployeeId
	,cte2.ManagerId
	,cte2.EmployeeFirstName
	,cte2.EmployeeLastName
	,cte2.ManagerFirstName
	,cte2.ManagerLastName
	,cte2.EmployeeDepartmentName
	,d2.DepartmentName AS ManagerDepartmentName
FROM cte2		
	LEFT JOIN Departments AS d2
		ON cte2.MgrDepId=d2.Id
<div style ="overflow-x: scroll;overflow-y: scroll;height:500px;">
CASE WHEN "%HIV1%" = "Non-Reactive" 
       and "%HIV2%" <> "Reactive" 
       and "%HIV3%" <> "Reactive" then "Negative" 
     WHEN "%HIV1%" = "Reactive" 
       and "%HIV2%" <> "Non-Reactive" 
       and "%HIV3%" <> "Non-Reactive" then "Positive" 
	else CASE WHEN "%HIV2%" = "Reactive" and "%HIV3%" = "Reactive" then "Positive" 
		else "Inconclusive" 
		end 
end
/*
https://codebeautify.org/sqlformatter/y23e67982
*/
//var ig$ = apex.region('uiCount').widget();
var gridView = apex.region('uiCount').call('getViews').grid;

if ($v('P111_ITYPE') === 'DE') {  //Hide columns when DE type is selected

    gridView.view$.grid('hideColumn', 'EXPIRYDT');
    gridView.view$.grid('hideColumn', 'PUOM');
    gridView.view$.grid('hideColumn', 'PUOMDES');
    gridView.view$.grid('hideColumn', 'COUNT_QTY_PUOM');
    gridView.view$.grid('hideColumn', 'SUOM');
    gridView.view$.grid('hideColumn', 'SUOMDES');
    gridView.view$.grid('showColumn', 'MENUCODE');
    gridView.view$.grid('showColumn', 'MNU_DESCRIPTION');
    gridView.view$.grid('showColumn', 'DESCRIPTION');

}else{

    gridView.view$.grid('showColumn', 'EXPIRYDT');
    gridView.view$.grid('showColumn', 'PUOM');
    gridView.view$.grid('showColumn', 'PUOMDES');
    gridView.view$.grid('showColumn', 'COUNT_QTY_PUOM');
    gridView.view$.grid('showColumn', 'SUOM');
    gridView.view$.grid('showColumn', 'SUOMDES');
    gridView.view$.grid('hideColumn', 'MENUCODE');
    gridView.view$.grid('hideColumn', 'MNU_DESCRIPTION');
    gridView.view$.grid('hideColumn', 'DESCRIPTION');

}
using System;
using System.Runtime.InteropServices;

public class NetworkSharedDrive
    {
        #region Consts
        const int RESOURCE_CONNECTED = 0x00000001;
        const int RESOURCE_GLOBALNET = 0x00000002;
        const int RESOURCE_REMEMBERED = 0x00000003;

        const int RESOURCETYPE_ANY = 0x00000000;
        const int RESOURCETYPE_DISK = 0x00000001;
        const int RESOURCETYPE_PRINT = 0x00000002;

        const int RESOURCEDISPLAYTYPE_GENERIC = 0x00000000;
        const int RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001;
        const int RESOURCEDISPLAYTYPE_SERVER = 0x00000002;
        const int RESOURCEDISPLAYTYPE_SHARE = 0x00000003;
        const int RESOURCEDISPLAYTYPE_FILE = 0x00000004;
        const int RESOURCEDISPLAYTYPE_GROUP = 0x00000005;

        const int RESOURCEUSAGE_CONNECTABLE = 0x00000001;
        const int RESOURCEUSAGE_CONTAINER = 0x00000002;


        const int CONNECT_INTERACTIVE = 0x00000008;
        const int CONNECT_PROMPT = 0x00000010;
        const int CONNECT_REDIRECT = 0x00000080;
        const int CONNECT_UPDATE_PROFILE = 0x00000001;
        const int CONNECT_COMMANDLINE = 0x00000800;
        const int CONNECT_CMD_SAVECRED = 0x00001000;

        const int CONNECT_LOCALDRIVE = 0x00000100;
        #endregion

        #region Errors
        const int NO_ERROR = 0;

        const int ERROR_ACCESS_DENIED = 5;
        const int ERROR_ALREADY_ASSIGNED = 85;
        const int ERROR_BAD_DEVICE = 1200;
        const int ERROR_BAD_NET_NAME = 67;
        const int ERROR_BAD_PROVIDER = 1204;
        const int ERROR_CANCELLED = 1223;
        const int ERROR_EXTENDED_ERROR = 1208;
        const int ERROR_INVALID_ADDRESS = 487;
        const int ERROR_INVALID_PARAMETER = 87;
        const int ERROR_INVALID_PASSWORD = 1216;
        const int ERROR_MORE_DATA = 234;
        const int ERROR_NO_MORE_ITEMS = 259;
        const int ERROR_NO_NET_OR_BAD_PATH = 1203;
        const int ERROR_NO_NETWORK = 1222;

        const int ERROR_BAD_PROFILE = 1206;
        const int ERROR_CANNOT_OPEN_PROFILE = 1205;
        const int ERROR_DEVICE_IN_USE = 2404;
        const int ERROR_NOT_CONNECTED = 2250;
        const int ERROR_OPEN_FILES = 2401;

        private struct ErrorClass
        {
            public int num;
            public string message;
            public ErrorClass(int num, string message)
            {
                this.num = num;
                this.message = message;
            }
        }

        private static ErrorClass[] ERROR_LIST = new ErrorClass[] {
        new ErrorClass(ERROR_ACCESS_DENIED, "Error: Access Denied"), 
        new ErrorClass(ERROR_ALREADY_ASSIGNED, "Error: Already Assigned"), 
        new ErrorClass(ERROR_BAD_DEVICE, "Error: Bad Device"), 
        new ErrorClass(ERROR_BAD_NET_NAME, "Error: Bad Net Name"), 
        new ErrorClass(ERROR_BAD_PROVIDER, "Error: Bad Provider"), 
        new ErrorClass(ERROR_CANCELLED, "Error: Cancelled"), 
        new ErrorClass(ERROR_EXTENDED_ERROR, "Error: Extended Error"), 
        new ErrorClass(ERROR_INVALID_ADDRESS, "Error: Invalid Address"), 
        new ErrorClass(ERROR_INVALID_PARAMETER, "Error: Invalid Parameter"), 
        new ErrorClass(ERROR_INVALID_PASSWORD, "Error: Invalid Password"), 
        new ErrorClass(ERROR_MORE_DATA, "Error: More Data"), 
        new ErrorClass(ERROR_NO_MORE_ITEMS, "Error: No More Items"), 
        new ErrorClass(ERROR_NO_NET_OR_BAD_PATH, "Error: No Net Or Bad Path"), 
        new ErrorClass(ERROR_NO_NETWORK, "Error: No Network"), 
        new ErrorClass(ERROR_BAD_PROFILE, "Error: Bad Profile"), 
        new ErrorClass(ERROR_CANNOT_OPEN_PROFILE, "Error: Cannot Open Profile"), 
        new ErrorClass(ERROR_DEVICE_IN_USE, "Error: Device In Use"), 
        new ErrorClass(ERROR_EXTENDED_ERROR, "Error: Extended Error"), 
        new ErrorClass(ERROR_NOT_CONNECTED, "Error: Not Connected"), 
        new ErrorClass(ERROR_OPEN_FILES, "Error: Open Files"), 
    };

        private static string getErrorForNumber(int errNum)
        {
            foreach (ErrorClass er in ERROR_LIST)
            {
                if (er.num == errNum) return er.message;
            }
            return "Error: Unknown, " + errNum;
        }
        #endregion

        [DllImport("Mpr.dll")]
        private static extern int WNetUseConnection(
            IntPtr hwndOwner,
            NETRESOURCE lpNetResource,
            string lpPassword,
            string lpUserID,
            int dwFlags,
            string lpAccessName,
            string lpBufferSize,
            string lpResult
            );

        [DllImport("Mpr.dll")]
        private static extern int WNetCancelConnection2(
            string lpName,
            int dwFlags,
            bool fForce
            );

        [StructLayout(LayoutKind.Sequential)]
        private class NETRESOURCE
        {
            public int dwScope = 0;
            public int dwType = 0;
            public int dwDisplayType = 0;
            public int dwUsage = 0;
            public string lpLocalName = "";
            public string lpRemoteName = "";
            public string lpComment = "";
            public string lpProvider = "";
        }


        public static string connectToRemote(string remoteUNC, string username, string password)
        {
            return connectToRemote(remoteUNC, username, password, false);
        }

        public static string connectToRemote(string remoteUNC, string username, string password, bool promptUser)
        {
            NETRESOURCE nr = new NETRESOURCE();
            nr.dwType = RESOURCETYPE_DISK;
            nr.lpRemoteName = remoteUNC;

            int ret;
            if (promptUser)
                ret = WNetUseConnection(IntPtr.Zero, nr, "", "", CONNECT_INTERACTIVE | CONNECT_PROMPT, null, null, null);
            else
                ret = WNetUseConnection(IntPtr.Zero, nr, password, username, 0, null, null, null);

            if (ret == NO_ERROR) return null;
            return getErrorForNumber(ret);
        }

        public static string disconnectRemote(string remoteUNC)
        {
            int ret = WNetCancelConnection2(remoteUNC, CONNECT_UPDATE_PROFILE, false);
            if (ret == NO_ERROR) return null;
            return getErrorForNumber(ret);
        }
    }
--all times - all museums

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Roman_Baths'
	--or project_name = 'Mauricewood_School'
	--or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Roman_Baths'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
alter table ams_eq_inc_fund_stats enable row movement;
flashback table ams_eq_inc_fund_stats to timestamp to_timestamp('2023-01-31 07:30:00', 'YYYY-MM-DD HH:MI:SS');
function(options) {
    options.defaultGridOptions = {
        columns: [{
            ITYPE: {
                heading: "IType",
                width: 50,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            NAMED: {
                heading: "ItemName",
                width: 200,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            MNU_DESCRIPTION: {
                heading: "Section",
                width: 120,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            PUOM_COST: {
                heading: "Cost",
                width: 50,
                alignment: "end",
                headingAlignment: "end",
                canSort: true
            },
            PUOMD: {
                heading: "PurchaseUnit",
                width: 70,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            SUOMD: {
                heading: "SalesUnit",
                width: 70,
                alignment: "start",
                headingAlignment: "start",
                canSort: true           
            },
            CONRATIO: {
                heading: "Conratio",
                width: 70,
                alignment: "end",
                headingAlignment: "end",
                canSort: true
            }
        }]

    };

    return options;

}
 EXTRACT (MONTH FROM OPENDT) = 11
 AND EXTRACT (YEAR FROM OPENDT) = 2022
-- remove references to the deprecated value
UPDATE job SET job_status = 'running' WHERE job_status = 'waiting';

-- rename the existing type
ALTER TYPE status_enum RENAME TO status_enum_old;

-- create the new type
CREATE TYPE status_enum AS ENUM('queued', 'running', 'done');

-- update the columns to use the new type
ALTER TABLE job ALTER COLUMN job_status TYPE status_enum USING job_status::text::status_enum;
-- if you get an error, see bottom of post (or the sketch below)

-- remove the old type
DROP TYPE status_enum_old;
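-- If the ALTER COLUMN step fails, a common cause is a default still bound to the old type.
-- Sketch of the usual workaround (the 'queued' default value is an assumption):
ALTER TABLE job ALTER COLUMN job_status DROP DEFAULT;
ALTER TABLE job ALTER COLUMN job_status TYPE status_enum USING job_status::text::status_enum;
ALTER TABLE job ALTER COLUMN job_status SET DEFAULT 'queued';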
select 
  tbl_lab_setup_Test.Test_Code
  ,tbl_lab_setup_Test.Test_Name
  ,count(tbl_lab_setup_Test.Test_Name) as Count
  from [tbl_lab_log_Staff_Action]
  join view_lab_information_Request on [tbl_lab_log_Staff_Action].LSA_LN = view_lab_information_Request.IREQ_LN
  join tbl_lab_setup_Test on [tbl_lab_log_Staff_Action].[LSA_TEST_CODE] = tbl_lab_setup_Test.Test_Code
  where [LSA_TYPE] = 'ResRerun'
  group by tbl_lab_setup_Test.Test_Code, tbl_lab_setup_Test.Test_Name
select tmptable.project_name, tmptable.geometry_name, tmptable.geometry_id, tmptable.diff --, tmptable.lost_for_seconds

from 
(SELECT 	
 	project_name,
	geometry_name, 
 	geometry_id,
 	max(time) as time, 
 	(now()-max(time)) as diff,
 	(extract(epoch from (now()-max(time)))) as lost_for_seconds
 	FROM public.metrics_app_timeandgeometric as a
	join public.metrics_app_geometry as b on a.geometry_id = b.id
	where project_name in ('Lasswade', 
						   'Mauricewood_School',
						   'Leconfield_House',
						   'Roman_Baths'
						   )-- PROJECT_NAME
-- 	and (geometry_name like '%_iaq'
-- 		 or geometry_name like '%_count'
-- 		 or geometry_name like '%_sound')
 	and metric_name not in ('Occupancy', 'Utilisation', 'PlannedOccupancy', 'PlannedUtilisation', 
						   	'peopleCount', 'inCountTotal', 'outCountTotal'
						   )
 	and sensor_id IS NOT NULL
 	and geometry_name not like 'Window%' 
 	and geometry_name not like 'Desk%'
 	--and value != 0
	group by project_name, geometry_name, geometry_id
	--order by time desc
) as tmptable

where tmptable.lost_for_seconds > 1*60*60 --HOUR*MINUTES*SECONDS
	--and tmptable.geometry_name not like 'Window%'
order by tmptable.project_name asc, tmptable.lost_for_seconds desc
SELECT COUNT (1)
  FROM pti
 WHERE EXTRACT (MONTH FROM oarslogdt) = 11 
 AND EXTRACT (YEAR FROM oarslogdt) = 2022
Select OrderID = 
    Case 
        When OrderID = 1 Then 'Customer1'
        When OrderID In (2, 3) Then 'Customer2'
        Else 'Unknown Customer'
    End 
From OrdersPlaced
SELECT 
    pg_terminate_backend(pid) 
FROM 
    pg_stat_activity 
WHERE 
    -- don't kill my own connection!
    pid <> pg_backend_pid()
    -- don't kill the connections to other databases
    AND datname = 'database_name'
    ;
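-- Optional preview (sketch): list the sessions that the statement above would terminate.
SELECT
    pid, usename, application_name, state
FROM
    pg_stat_activity
WHERE
    pid <> pg_backend_pid()
    AND datname = 'database_name';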
use LAB_DB_GEE
UPDATE tbl_lab_setup_Test_Control
SET Test_Text_Keyin_Flag = '1';
UPDATE tbl_lab_setup_Test
SET Test_Inactive = '1',
 Test_His_Code = 'XXX',
 Test_Checkup_Code = '' 

DELETE FROM tbl_lab_configuration_Interface
WHERE CFGI_NAME=N''

DELETE FROM  tbl_lab_setup_Source_Group 
WHERE Source_Group_Code=''

DELETE FROM tbl_lab_setup_Source_Definition 
WHERE Source_Definition_Code <>'000000'

DELETE FROM tbl_lab_management_Users 
WHERE ([User_ID]<>N'admin')


UPDATE  tbl_lab_setup_Test_Group
SET Test_Group_Report_Form_Path=N'',
Test_Group_Inactive='1'


UPDATE tbl_lab_setup_Test_Control
SET Test_Default_Value_Flag='0'

UPDATE tbl_lab_setup_Test_Control 
set Test_Text_Keyin_Flag = '1'
 select comments from user_col_comments where table_name = 'PTI' and column_name = 'DISCOUNT'
CREATE FUNCTION dbo.ToDateTime2 ( @Ticks bigint )
  RETURNS datetime2
AS
BEGIN
    DECLARE @DateTime datetime2 = '00010101';
    SET @DateTime = DATEADD( DAY, @Ticks / 864000000000, @DateTime );
    SET @DateTime = DATEADD( SECOND, ( @Ticks % 864000000000) / 10000000, @DateTime );
    RETURN DATEADD( NANOSECOND, ( @Ticks % 10000000 ) * 100, @DateTime );
END
CREATE FUNCTION dbo.ToTicks ( @DateTime datetime2 )
  RETURNS bigint
AS
BEGIN
    DECLARE @Days bigint = DATEDIFF( DAY, '00010101', cast( @DateTime as date ) );
    DECLARE @Seconds bigint = DATEDIFF( SECOND, '00:00', cast( @DateTime as time( 7 ) ) );
    DECLARE @Nanoseconds bigint = DATEPART( NANOSECOND, @DateTime );
    RETURN  @Days * 864000000000 + @Seconds * 10000000 + @Nanoseconds / 100;
END
CREATE FUNCTION dbo.ToTicks ( @DateTime datetime2 )
  RETURNS bigint
AS
BEGIN
 
    RETURN DATEDIFF_BIG( microsecond, '00010101', @DateTime ) * 10 +
           ( DATEPART( NANOSECOND, @DateTime ) % 1000 ) / 100;
END
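-- Quick round-trip check for the two helpers above (the sample timestamp is illustrative):
SELECT dbo.ToTicks('2023-02-21 12:34:56.7890123') AS Ticks,
       dbo.ToDateTime2(dbo.ToTicks('2023-02-21 12:34:56.7890123')) AS RoundTripped;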
--working hours - all offices

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Leconfield_House'
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
--tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
tablewithcounts."Day" not like '%Saturday%'
and tablewithcounts."Day" not like '%Sunday%'
and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

and tablewithcounts.time not between '2022-01-01' and '2022-01-01'
and tablewithcounts.time not between '2022-01-02' and '2022-01-02'
and tablewithcounts.time not between '2022-01-03' and '2022-01-03'
and tablewithcounts.time not between '2022-01-04' and '2022-01-04'
and tablewithcounts.time not between '2022-03-17' and '2022-03-17'
and tablewithcounts.time not between '2022-04-15' and '2022-04-15'
and tablewithcounts.time not between '2022-04-18' and '2022-04-18'
and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
and tablewithcounts.time not between '2022-06-02' and '2022-06-02'
and tablewithcounts.time not between '2022-06-03' and '2022-06-03'
and tablewithcounts.time not between '2022-07-12' and '2022-07-12'
and tablewithcounts.time not between '2022-08-01' and '2022-08-01'
and tablewithcounts.time not between '2022-08-29' and '2022-08-29'
and tablewithcounts.time not between '2022-11-30' and '2022-11-30'
and tablewithcounts.time not between '2022-12-25' and '2022-12-25'
and tablewithcounts.time not between '2022-12-26' and '2022-12-26'
and tablewithcounts.time not between '2022-12-27' and '2022-12-27'
and tablewithcounts.time not between '2023-01-01' and '2023-01-01'
and tablewithcounts.time not between '2023-01-02' and '2023-01-02'
and tablewithcounts.time not between '2023-01-03' and '2023-01-03'
and tablewithcounts.time not between '2023-03-17' and '2023-03-17'
and tablewithcounts.time not between '2023-04-07' and '2023-04-07'
and tablewithcounts.time not between '2023-04-10' and '2023-04-10'
and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
and tablewithcounts.time not between '2023-05-29' and '2023-05-29'
and tablewithcounts.time not between '2023-07-12' and '2023-07-12'
and tablewithcounts.time not between '2023-08-07' and '2023-08-07'
and tablewithcounts.time not between '2023-08-28' and '2023-08-28'
and tablewithcounts.time not between '2023-11-30' and '2023-11-30'
and tablewithcounts.time not between '2023-12-25' and '2023-12-25'
and tablewithcounts.time not between '2023-12-26' and '2023-12-26'

group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
--all times - all offices

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
SELECT id, project_name, metric_name, "time", value, display_value, geometry_id
	FROM public.metrics_app_timeandgeometric
	where project_name = 'Lasswade'
	and metric_name in ('peopleCount', 'Occupancy', 'Utilisation')
	and geometry_id in (1210, 5127)
	and display_value is NULL
	and time > '2022-12-08'
	order by time, metric_name
 select comments from user_col_comments where table_name = 'PTI' and column_name = 'DISCOUNT'


COMMENT ON COLUMN item.shelf_life
   IS 'the number of days for which an item remains usable, fit for consumption, or saleable.'
SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)) AS color;


UPDATE `frm_staff` SET `profile_colour` = (SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)))
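-- Variant (sketch) that only fills rows missing a colour, on the assumption that existing colours should be kept:
UPDATE `frm_staff`
SET `profile_colour` = CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0))
WHERE `profile_colour` IS NULL OR `profile_colour` = '';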
-- INCOUNT - OUTCOUNT

-- 1 - inCount to inCount_1
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCount_1'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCount'




-- 2 - outCount to inCount
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCount'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'outCount'




-- 3 - inCount_1 to outCount
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'outCount'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCount_1'



-- INCOUNTTOTAL - OUTCOUNTTOTAL

-- 4 - inCountTotal to inCountTotal_1
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCountTotal_1'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCountTotal'




-- 5 - outCountTotal to inCountTotal
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCountTotal'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'outCountTotal'




-- 6 - inCountTotal_1 to outCountTotal
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'outCountTotal'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCountTotal_1'





-- PEOPLECOUNT

-- 7 - peopleCount to peopleCount * -1
UPDATE public.metrics_app_timeandgeometric_historic
	
	--SET metric_name= 'peopleCount'
	SET value = value * -1
	
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'peopleCount'
	



-- 8 - peopleCount to abs(peopleCount)
UPDATE public.metrics_app_timeandgeometric_historic
	
	--SET metric_name= 'peopleCount'
	SET value = abs(value * 1)
	
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'peopleCount'
	and value = 0
	

	
	
	
	
-- non-working hours - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 0) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')

-- WEEKENDS
(
(tablewithcounts."Day" in ('Saturday ', 'Sunday   ')
and 
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 24)
)
-- WEEKDAYS (MON-THU)
or
(
(tablewithcounts."Day" not in ('Saturday ', 'Sunday   '))
and 
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 8
or tablewithcounts."Hour" >= 16 and tablewithcounts."Hour" < 24)
)
-- FRIDAYS
or
(
(tablewithcounts."Day" in ('Friday   '))
and
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 8
or tablewithcounts."Hour" >= 12 and tablewithcounts."Hour" < 24)
)
)

-- HOLIDAYS
or
(
(
tablewithcounts.time between '2021-08-01' and '2021-08-18' -- '2021-08-17'
or tablewithcounts.time between '2021-09-20' and '2021-09-21' -- '2021-09-20'
or tablewithcounts.time between '2021-10-18' and '2021-10-26' -- '2021-10-25'
or tablewithcounts.time between '2021-12-23' and '2022-01-11' -- '2022-01-10'
or tablewithcounts.time between '2022-02-14' and '2022-02-19' -- '2022-02-18'
or tablewithcounts.time between '2022-04-11' and '2022-04-23' -- '2022-04-22'
or tablewithcounts.time between '2022-05-02' and '2022-05-03' -- '2022-05-02'
or tablewithcounts.time between '2022-05-23' and '2022-05-24' -- '2022-05-23'
or tablewithcounts.time between '2022-07-01' and '2022-08-01' -- '2022-07-31'
or tablewithcounts.time between '2022-08-01' and '2022-08-17' -- '2022-08-16'
or tablewithcounts.time between '2022-09-16' and '2022-09-20' -- '2022-09-19'
or tablewithcounts.time between '2022-10-17' and '2022-10-25' -- '2022-10-24'
or tablewithcounts.time between '2022-12-21' and '2023-01-05' -- '2023-01-04'
or tablewithcounts.time between '2023-02-13' and '2023-02-18' -- '2023-02-17'
or tablewithcounts.time between '2023-04-03' and '2023-04-15' -- '2023-04-14'
or tablewithcounts.time between '2023-05-01' and '2023-05-02' -- '2023-05-01'
or tablewithcounts.time between '2023-05-22' and '2023-05-23' -- '2023-05-22'
or tablewithcounts.time between '2023-06-29' and '2023-08-01' -- '2023-07-31'
)
and
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 24)
)


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--order by tablewithcounts.time
--limit 10000
--working hours - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 0) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
--tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
tablewithcounts."Day" not like '%Saturday%'
and tablewithcounts."Day" not like '%Sunday%'
and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
SELECT DISTINCT Category, CreationDate
FROM MonitoringJob
ORDER BY CreationDate DESC
CREATE TABLE identity_types (
    transaction_id UUID NOT NULL REFERENCES transaction_doctor(id) ON DELETE CASCADE,
    appointment_id UUID NOT NULL REFERENCES appointment(id) ON DELETE CASCADE,
    PRIMARY KEY (transaction_id, appointment_id),
    additional_attribute_1 TEXT NULL,
    additional_attribute_2 TEXT NULL,
    additional_attribute_3 TEXT NULL,
    additional_attribute_4 TEXT NULL,
    additional_attribute_5 TEXT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE OR REPLACE FUNCTION trigger_set_timestamp()
    RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER set_timestamp
    BEFORE UPDATE ON identity_types
    FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
mysql> SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)) AS color;
+---------+
| color   |
+---------+
| #0E74A9 |
+---------+
1 row in set (0.00 sec)
DROP TABLE EMP
DROP TABLE DEPT
DROP TABLE BONUS
DROP TABLE SALGRADE
DROP TABLE DUMMY

CREATE TABLE EMP
(EMPNO NUMERIC(4) NOT NULL,
ENAME VARCHAR(10),
JOB VARCHAR(9),
MGR NUMERIC(4),
HIREDATE DATETIME,
SAL NUMERIC(7, 2),
COMM NUMERIC(7, 2),
DEPTNO NUMERIC(2))

INSERT INTO EMP VALUES
(7369, 'SMITH', 'CLERK', 7902, '17-DEC-1980', 800, NULL, 20)
INSERT INTO EMP VALUES
(7499, 'ALLEN', 'SALESMAN', 7698, '20-FEB-1981', 1600, 300, 30)
INSERT INTO EMP VALUES
(7521, 'WARD', 'SALESMAN', 7698, '22-FEB-1981', 1250, 500, 30)
INSERT INTO EMP VALUES
(7566, 'JONES', 'MANAGER', 7839, '2-APR-1981', 2975, NULL, 20)
INSERT INTO EMP VALUES
(7654, 'MARTIN', 'SALESMAN', 7698, '28-SEP-1981', 1250, 1400, 30)
INSERT INTO EMP VALUES
(7698, 'BLAKE', 'MANAGER', 7839, '1-MAY-1981', 2850, NULL, 30)
INSERT INTO EMP VALUES
(7782, 'CLARK', 'MANAGER', 7839, '9-JUN-1981', 2450, NULL, 10)
INSERT INTO EMP VALUES
(7788, 'SCOTT', 'ANALYST', 7566, '09-DEC-1982', 3000, NULL, 20)
INSERT INTO EMP VALUES
(7839, 'KING', 'PRESIDENT', NULL, '17-NOV-1981', 5000, NULL, 10)
INSERT INTO EMP VALUES
(7844, 'TURNER', 'SALESMAN', 7698, '8-SEP-1981', 1500, 0, 30)
INSERT INTO EMP VALUES
(7876, 'ADAMS', 'CLERK', 7788, '12-JAN-1983', 1100, NULL, 20)
INSERT INTO EMP VALUES
(7900, 'JAMES', 'CLERK', 7698, '3-DEC-1981', 950, NULL, 30)
INSERT INTO EMP VALUES
(7902, 'FORD', 'ANALYST', 7566, '3-DEC-1981', 3000, NULL, 20)
INSERT INTO EMP VALUES
(7934, 'MILLER', 'CLERK', 7782, '23-JAN-1982', 1300, NULL, 10)

CREATE TABLE DEPT
(DEPTNO NUMERIC(2),
DNAME VARCHAR(14),
LOC VARCHAR(13) )

INSERT INTO DEPT VALUES (10, 'ACCOUNTING', 'NEW YORK')
INSERT INTO DEPT VALUES (20, 'RESEARCH', 'DALLAS')
INSERT INTO DEPT VALUES (30, 'SALES', 'CHICAGO')
INSERT INTO DEPT VALUES (40, 'OPERATIONS', 'BOSTON')

CREATE TABLE BONUS
(ENAME VARCHAR(10),
JOB VARCHAR(9),
SAL NUMERIC,
COMM NUMERIC)

CREATE TABLE SALGRADE
(GRADE NUMERIC,
LOSAL NUMERIC,
HISAL NUMERIC)

INSERT INTO SALGRADE VALUES (1, 700, 1200)
INSERT INTO SALGRADE VALUES (2, 1201, 1400)
INSERT INTO SALGRADE VALUES (3, 1401, 2000)
INSERT INTO SALGRADE VALUES (4, 2001, 3000)
INSERT INTO SALGRADE VALUES (5, 3001, 9999)

CREATE TABLE DUMMY
(DUMMY NUMERIC)

INSERT INTO DUMMY VALUES (0)
---------------------Example 1
SELECT
    hits.page.searchKeyword as searchKeyword,
    COUNT(hits.page.searchKeyword) AS HIT_COUNT,
    COUNT(DISTINCT sessions.fullVisitorId) AS USER_COUNT,
    COUNT(DISTINCT CONCAT(sessions.fullVisitorId, CAST(sessions.visitStartTime AS STRING))) AS SESSION_COUNT
FROM
    `digicce.142980449.ga_sessions_*` sessions,
UNNEST(sessions.hits) as hits
WHERE
    _TABLE_SUFFIX BETWEEN '20200701' AND '20211001'
    -- _TABLE_SUFFIX = '20210401'
    AND hits.page.hostname = "www.pearson.com"
    AND REGEXP_CONTAINS(hits.page.pagePath, r"\/pearsonplus")
    -- AND REGEXP_CONTAINS(hits.page.pagePath, r"^\/(en-us|(store\/(en-us|p)))")
    AND hits.page.searchKeyword is not null
GROUP BY
    hits.page.searchKeyword
ORDER BY
    HIT_COUNT DESC
            
            
---------------------Example 2            
SELECT
    date,
    -- clientId,
    sessions.fullVisitorId AS fullVisitorId,
    CONCAT(sessions.fullVisitorId, CAST(sessions.visitStartTime AS STRING)) as visitIdentifier,
    hits.eventInfo.eventCategory as hitEventCategory,
    hits.eventInfo.eventAction as hitEventAction,
    hits.eventInfo.eventLabel as hitEventLabel,
    REGEXP_EXTRACT(hits.page.pagePath, r"^([^\?]+)") as pagePath,
    hits.page.searchKeyword as searchKeyword,
    hits.type as hitType,
    hits.hitNumber as hitNumber
    -- hits.product AS product,
    -- hits.transaction AS transaction,
    -- (SELECT MAX(cd.value) FROM UNNEST(hits.customDimensions) cd WHERE cd.index=13) as pageCategory
    -- (SELECT MAX(cd.value) FROM UNNEST(hits.customDimensions) cd WHERE cd.index=26) as countryLocale
    -- sessions.channelGrouping AS channelGrouping
FROM
    `digicce.142980449.ga_sessions_*` sessions,
    UNNEST(sessions.hits) as hits
WHERE
    sessions.totals.visits > 0
    -- AND _TABLE_SUFFIX BETWEEN '20210101' AND '20210419'
    AND _TABLE_SUFFIX = '20210401'
    AND hits.page.hostname = "www.pearson.com"
    AND REGEXP_CONTAINS(hits.page.pagePath, r"^\/(en-us|(store\/(en-us|p)))")
    AND (
        (
            hits.type = "PAGE"
        ) OR (
            hits.type = "EVENT" 
            AND hits.eventInfo.eventCategory IN ('ecommerce')
            AND hits.eventInfo.eventAction IN ('purchase')
        )
    )
ORDER BY
    fullVisitorId,
    visitIdentifier,
    hitNumber
--all times - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
CASE WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "-" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Reactive" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "-" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "Reactive" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "-" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Reactive" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "Reactive" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "-" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Non-reactive" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "-" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "Non-reactive" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "-" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Non-reactive" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "Non-reactive" and "%IM095%" = "Non-reactive" THEN "Negative" 
ELSE "Inconclusive" END
SELECT
lab_items_sub_group_list.lab_items_code,
lab_items.lab_items_code,
lab_items_sub_group_list.lab_items_sub_group_code,
lab_items.lab_items_name,
lab_items.ecode,
lab_items.display_order
FROM
lab_items_sub_group_list
INNER JOIN lab_items ON lab_items.lab_items_code = lab_items_sub_group_list.lab_items_code
WHERE
lab_items_sub_group_list.lab_items_sub_group_code = ---80
ORDER BY
lab_items.display_order ASC
CASE
WHEN "%IM0951%" = "Reactive" THEN
    "Positive"
WHEN "%IM0952%" = "Reactive" THEN
    "Positive"
WHEN "%IM095%" = "Reactive" THEN
    "Positive"
WHEN "%IM0951%" = "Non-reactive" THEN
    "Negative"
WHEN "%IM0952%" = "Non-reactive" THEN
    "Negative"
WHEN "%IM095%" = "Non-reactive" THEN
    "Negative"
ELSE
    "Inconclusive"
END
CASE
  WHEN %HM014% >= 0 AND %HM014% < 50.0 THEN "Markedly decreased"
  WHEN %HM014% >= 50.0 AND %HM014% <= 100.0 THEN "Decreased"
  WHEN %HM014% >= 100.001 AND %HM014% <= 150.0 THEN "Slightly decreased"
  WHEN %HM014% >= 150.001 AND %HM014% <= 450.0 THEN "Adequate"
  WHEN %HM014% >= 450.001 AND %HM014% <= 500.0 THEN "Slightly increased"
  WHEN %HM014% > 500.001 THEN "Markedly increased"
  ELSE "N/A"
END
$post = Post::find(1);
$newPost = $post->replicate();
$newPost->created_at = Carbon::now();
$newPost->save();
SELECT ct.ID, 
       ISNULL(NULLIF(ct.LaunchDate, ''), null) [LaunchDate]
FROM   [dbo].[CustomerTable] ct
select @var1 = avg(someColumn), @var2 = avg(otherColumn) 
from theTable
UPDATE 
    t1
SET 
    t1.c1 = t2.c2,
    t1.c2 = expression,
    ...   
FROM 
    t1
    [INNER | LEFT] JOIN t2 ON join_predicate
WHERE 
    where_predicate;
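
-- A minimal concrete sketch of the UPDATE ... JOIN template above, assuming hypothetical tables
-- orders(customer_id, customer_name) and customers(id, name):
UPDATE
    o
SET
    o.customer_name = c.name
FROM
    orders o
    INNER JOIN customers c ON c.id = o.customer_id
WHERE
    o.customer_name IS NULL;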
create or replace trigger "AL_ITEM_UI"
AFTER
INSERT OR UPDATE on AL
FOR EACH ROW

BEGIN
 IF INSERTING THEN
      INSERT INTO ITEM   ( itemid,
                            itype,
                            named,
                            price,
                            puom,
                            suom,
                            puomdes,
                            suomdes,
                            sources,
                            component,
                            supplier,
                            parentid,
                            section,
                            inactive,
                            notes,
                            rcp_recalc,
                            conratio,
                            rtsb,
                            currency,
                            suom_convert,
                            cost_method
							)
							
   VALUES 				 (  :NEW.ID
						   , :NEW.itype
						   , :NEW.NAME
						   , :NEW.PRICE
						   , :NEW.PUOM
						   , :NEW.SUOM
						   , :NEW.PUOMDES
						   , :NEW.SUOMDES
						   , :NEW.SOURCER
						   , :NEW.COMPONENT
						   , :NEW.SUPPLIER
						   , :NEW.FROMID
						   , :NEW.SECTIONC
						   , :NEW.INACTIVE
						   , :NEW.NOTES
						   , :NEW.RCP_RECALC
						   , :NEW.CONRATIO
						   , :NEW.RTSB
						   , :NEW.CURRENCY
                           , :NEW.SUOMCONV
                           , :NEW.COST_METHOD
					       );
 ELSIF UPDATING THEN
      UPDATE ITEM
         SET               
                            itype=		:NEW.itype,
                            named=		:NEW.NAME,
                            price=		:NEW.PRICE,
                            puom=		:NEW.PUOM,
                            suom=		:NEW.SUOM,
                            puomdes=	:NEW.PUOMDES,
                            suomdes=	:NEW.SUOMDES,
                            sources=	:NEW.SOURCER,
                            component=	:NEW.COMPONENT,
                            supplier=	:NEW.SUPPLIER,
                            parentid=	:NEW.FROMID,
                            section=	:NEW.SECTIONC,
                            inactive=	:NEW.INACTIVE,
                            notes=		:NEW.NOTES,
                            rcp_recalc=	:NEW.RCP_RECALC,
                            conratio =	:NEW.CONRATIO,
                            rtsb =		:NEW.RTSB,
                            currency=	:NEW.CURRENCY,
                            cost_method=:NEW.COST_METHOD
					       ;
       END IF;

END;
SELECT CAST(dtDateTime as Date) As TheDate, count(*) as TotalRecs, 
COUNT(CASE WHEN nDBWTS_EffluentFlow<0 THEN 1 END) AS CountNegatives, 
Min(nDBWTS_EffluentFlow) as MinFlow, Max(nDBWTS_EffluentFlow) as MaxFlow, 
SUM(nDBWTS_EffluentFlow) as TotAllFlow, 
AVG(nDBWTS_EffluentFlow) as AvgAllFlow, 
AVG(nDBWTS_EffluentFlow)*0.001 * 60 * 1440 as AllEnvirSum, 
SUM(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END) as TotPosFlow,
AVG(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END) AS AvgPosFlow,
AVG(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END)*0.001 * 60 * 1440  AS PosEnvirSum
FROM [EnvolvData].[dbo].[Environmental]
WHERE CAST(dtDateTime AS Date) BETWEEN '2022-09-01' AND '2022-09-20'
GROUP BY CAST(dtDateTime as Date)
ORDER BY CAST(dtDateTime as Date) ASC
SELECT duration_seconds,
       SUM(duration_seconds) OVER (ORDER BY start_time) AS running_total
  FROM tutorial.dc_bikeshare_q1_2012
sudo apt update
sudo apt install mysql-server
sudo service mysql start
sudo mysql_secure_installation
sudo mysql -u root 

In mysql console:
DROP USER 'root'@'localhost';
CREATE USER 'root'@'%' IDENTIFIED BY 'YOURPASSWORD';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;
FLUSH PRIVILEGES;
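-- Optional sanity check after recreating the account (reads the mysql system schema):
SELECT user, host FROM mysql.user WHERE user = 'root';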
CREATE TABLE  ITEM_TEST 
   (	ITEMID NUMBER(6,0) NOT NULL ENABLE, 
	ITYPE VARCHAR2(2 CHAR) NOT NULL ENABLE, 
	NAMED VARCHAR2(50), 
	PRICE NUMBER(8,2), 
	PUOM_COST NUMBER(8,2), 
	SUOM_CONVERT NUMBER(12,6), 
	PUOM NUMBER(7,2), 
	SUOM NUMBER(7,2), 
	PUOMDES CHAR(1), 
	SUOMDES CHAR(1), 
	SOURCE CHAR(1), 
	MINIBAR CHAR(1), 
	COMPONENT CHAR(1), 
	SUPPLIER NUMBER(6,0), 
	LOT_SIZE NUMBER(6,0), 
	PARENTID NUMBER(5,0), 
	SECTION CHAR(1), 
	INACTIVE CHAR(1), 
	NOTES VARCHAR2(600), 
	REORDER NUMBER(4,0), 
	OARSLOGDT TIMESTAMP (6), 
	UPDATEDT TIMESTAMP (0) WITH LOCAL TIME ZONE, 
	UPDATEBY NUMBER(6,0), 
	VINTAGE NUMBER(4,0), 
	RCP_RECALC NUMBER(8,2), 
	BRAND VARCHAR2(10), 
	MENUCODE VARCHAR2(5), 
	VARIETY CHAR(1), 
	LDES VARCHAR2(200), 
	APPROVEDBY NUMBER(5,0), 
	 CONSTRAINT ITEM_TEST_PK PRIMARY KEY (ITEMID)
   )
SELECT agent_code,COUNT(agent_code),MAX(ord_amount) 
FROM orders 
GROUP BY agent_code 
HAVING MAX(ord_amount) IN(500,800,2000);
Select * from Opportunity
Select * from Contract
INSERT INTO country (iso, name, nicename, iso3, numcode, phonecode) VALUES
('AF', 'AFGHANISTAN', 'Afghanistan', 'AFG', 4, 93),
('AL', 'ALBANIA', 'Albania', 'ALB', 8, 355),
('DZ', 'ALGERIA', 'Algeria', 'DZA', 12, 213),
('AS', 'AMERICAN SAMOA', 'American Samoa', 'ASM', 16, 1684),
('AD', 'ANDORRA', 'Andorra', 'AND', 20, 376),
('AO', 'ANGOLA', 'Angola', 'AGO', 24, 244),
('AI', 'ANGUILLA', 'Anguilla', 'AIA', 660, 1264),
('AQ', 'ANTARCTICA', 'Antarctica', 'ATA', 10, 0),
('AG', 'ANTIGUA AND BARBUDA', 'Antigua and Barbuda', 'ATG', 28, 1268),
('AR', 'ARGENTINA', 'Argentina', 'ARG', 32, 54),
('AM', 'ARMENIA', 'Armenia', 'ARM', 51, 374),
('AW', 'ARUBA', 'Aruba', 'ABW', 533, 297),
('AU', 'AUSTRALIA', 'Australia', 'AUS', 36, 61),
('AT', 'AUSTRIA', 'Austria', 'AUT', 40, 43),
('AZ', 'AZERBAIJAN', 'Azerbaijan', 'AZE', 31, 994),
('BS', 'BAHAMAS', 'Bahamas', 'BHS', 44, 1242),
('BH', 'BAHRAIN', 'Bahrain', 'BHR', 48, 973),
('BD', 'BANGLADESH', 'Bangladesh', 'BGD', 50, 880),
('BB', 'BARBADOS', 'Barbados', 'BRB', 52, 1246),
('BY', 'BELARUS', 'Belarus', 'BLR', 112, 375),
('BE', 'BELGIUM', 'Belgium', 'BEL', 56, 32),
('BZ', 'BELIZE', 'Belize', 'BLZ', 84, 501),
('BJ', 'BENIN', 'Benin', 'BEN', 204, 229),
('BM', 'BERMUDA', 'Bermuda', 'BMU', 60, 1441),
('BT', 'BHUTAN', 'Bhutan', 'BTN', 64, 975),
('BO', 'BOLIVIA', 'Bolivia', 'BOL', 68, 591),
('BA', 'BOSNIA AND HERZEGOVINA', 'Bosnia and Herzegovina', 'BIH', 70, 387),
('BW', 'BOTSWANA', 'Botswana', 'BWA', 72, 267),
('BV', 'BOUVET ISLAND', 'Bouvet Island', 'BVT', 74, 0),
('BR', 'BRAZIL', 'Brazil', 'BRA', 76, 55),
('IO', 'BRITISH INDIAN OCEAN TERRITORY', 'British Indian Ocean Territory', 'IOT', 86, 246),
('BN', 'BRUNEI DARUSSALAM', 'Brunei Darussalam', 'BRN', 96, 673),
('BG', 'BULGARIA', 'Bulgaria', 'BGR', 100, 359),
('BF', 'BURKINA FASO', 'Burkina Faso', 'BFA', 854, 226),
('BI', 'BURUNDI', 'Burundi', 'BDI', 108, 257),
('KH', 'CAMBODIA', 'Cambodia', 'KHM', 116, 855),
('CM', 'CAMEROON', 'Cameroon', 'CMR', 120, 237),
('CA', 'CANADA', 'Canada', 'CAN', 124, 1),
('CV', 'CAPE VERDE', 'Cape Verde', 'CPV', 132, 238),
('KY', 'CAYMAN ISLANDS', 'Cayman Islands', 'CYM', 136, 1345),
('CF', 'CENTRAL AFRICAN REPUBLIC', 'Central African Republic', 'CAF', 140, 236),
('TD', 'CHAD', 'Chad', 'TCD', 148, 235),
('CL', 'CHILE', 'Chile', 'CHL', 152, 56),
('CN', 'CHINA', 'China', 'CHN', 156, 86),
('CX', 'CHRISTMAS ISLAND', 'Christmas Island', 'CXR', 162, 61),
('CC', 'COCOS (KEELING) ISLANDS', 'Cocos (Keeling) Islands', NULL, NULL, 672),
('CO', 'COLOMBIA', 'Colombia', 'COL', 170, 57),
('KM', 'COMOROS', 'Comoros', 'COM', 174, 269),
('CG', 'CONGO', 'Congo', 'COG', 178, 242),
('CD', 'CONGO, THE DEMOCRATIC REPUBLIC OF THE', 'Congo, the Democratic Republic of the', 'COD', 180, 242),
('CK', 'COOK ISLANDS', 'Cook Islands', 'COK', 184, 682),
('CR', 'COSTA RICA', 'Costa Rica', 'CRI', 188, 506),
('CI', 'COTE D''IVOIRE', 'Cote D''Ivoire', 'CIV', 384, 225),
('HR', 'CROATIA', 'Croatia', 'HRV', 191, 385),
('CU', 'CUBA', 'Cuba', 'CUB', 192, 53),
('CY', 'CYPRUS', 'Cyprus', 'CYP', 196, 357),
('CZ', 'CZECHIA', 'Czech Republic', 'CZE', 203, 420),
('DK', 'DENMARK', 'Denmark', 'DNK', 208, 45),
('DJ', 'DJIBOUTI', 'Djibouti', 'DJI', 262, 253),
('DM', 'DOMINICA', 'Dominica', 'DMA', 212, 1767),
('DO', 'DOMINICAN REPUBLIC', 'Dominican Republic', 'DOM', 214, 1),
('EC', 'ECUADOR', 'Ecuador', 'ECU', 218, 593),
('EG', 'EGYPT', 'Egypt', 'EGY', 818, 20),
('SV', 'EL SALVADOR', 'El Salvador', 'SLV', 222, 503),
('GQ', 'EQUATORIAL GUINEA', 'Equatorial Guinea', 'GNQ', 226, 240),
('ER', 'ERITREA', 'Eritrea', 'ERI', 232, 291),
('EE', 'ESTONIA', 'Estonia', 'EST', 233, 372),
('ET', 'ETHIOPIA', 'Ethiopia', 'ETH', 231, 251),
('FK', 'FALKLAND ISLANDS (MALVINAS)', 'Falkland Islands (Malvinas)', 'FLK', 238, 500),
('FO', 'FAROE ISLANDS', 'Faroe Islands', 'FRO', 234, 298),
('FJ', 'FIJI', 'Fiji', 'FJI', 242, 679),
('FI', 'FINLAND', 'Finland', 'FIN', 246, 358),
('FR', 'FRANCE', 'France', 'FRA', 250, 33),
('GF', 'FRENCH GUIANA', 'French Guiana', 'GUF', 254, 594),
('PF', 'FRENCH POLYNESIA', 'French Polynesia', 'PYF', 258, 689),
('TF', 'FRENCH SOUTHERN TERRITORIES', 'French Southern Territories', 'ATF', 260, 0),
('GA', 'GABON', 'Gabon', 'GAB', 266, 241),
('GM', 'GAMBIA', 'Gambia', 'GMB', 270, 220),
('GE', 'GEORGIA', 'Georgia', 'GEO', 268, 995),
('DE', 'GERMANY', 'Germany', 'DEU', 276, 49),
('GH', 'GHANA', 'Ghana', 'GHA', 288, 233),
('GI', 'GIBRALTAR', 'Gibraltar', 'GIB', 292, 350),
('GR', 'GREECE', 'Greece', 'GRC', 300, 30),
('GL', 'GREENLAND', 'Greenland', 'GRL', 304, 299),
('GD', 'GRENADA', 'Grenada', 'GRD', 308, 1473),
('GP', 'GUADELOUPE', 'Guadeloupe', 'GLP', 312, 590),
('GU', 'GUAM', 'Guam', 'GUM', 316, 1671),
('GT', 'GUATEMALA', 'Guatemala', 'GTM', 320, 502),
('GN', 'GUINEA', 'Guinea', 'GIN', 324, 224),
('GW', 'GUINEA-BISSAU', 'Guinea-Bissau', 'GNB', 624, 245),
('GY', 'GUYANA', 'Guyana', 'GUY', 328, 592),
('HT', 'HAITI', 'Haiti', 'HTI', 332, 509),
('HM', 'HEARD ISLAND AND MCDONALD ISLANDS', 'Heard Island and Mcdonald Islands', 'HMD', 334, 0),
('VA', 'HOLY SEE (VATICAN CITY STATE)', 'Holy See (Vatican City State)', 'VAT', 336, 39),
('HN', 'HONDURAS', 'Honduras', 'HND', 340, 504),
('HK', 'HONG KONG', 'Hong Kong', 'HKG', 344, 852),
('HU', 'HUNGARY', 'Hungary', 'HUN', 348, 36),
('IS', 'ICELAND', 'Iceland', 'ISL', 352, 354),
('IN', 'INDIA', 'India', 'IND', 356, 91),
('ID', 'INDONESIA', 'Indonesia', 'IDN', 360, 62),
('IR', 'IRAN, ISLAMIC REPUBLIC OF', 'Iran, Islamic Republic of', 'IRN', 364, 98),
('IQ', 'IRAQ', 'Iraq', 'IRQ', 368, 964),
('IE', 'IRELAND', 'Ireland', 'IRL', 372, 353),
('IL', 'ISRAEL', 'Israel', 'ISR', 376, 972),
('IT', 'ITALY', 'Italy', 'ITA', 380, 39),
('JM', 'JAMAICA', 'Jamaica', 'JAM', 388, 1876),
('JP', 'JAPAN', 'Japan', 'JPN', 392, 81),
('JO', 'JORDAN', 'Jordan', 'JOR', 400, 962),
('KZ', 'KAZAKHSTAN', 'Kazakhstan', 'KAZ', 398, 7),
('KE', 'KENYA', 'Kenya', 'KEN', 404, 254),
('KI', 'KIRIBATI', 'Kiribati', 'KIR', 296, 686),
('KP', 'KOREA, DEMOCRATIC PEOPLE''S REPUBLIC OF', 'Korea, Democratic People''s Republic of', 'PRK', 408, 850),
('KR', 'KOREA, REPUBLIC OF', 'Korea, Republic of', 'KOR', 410, 82),
('KW', 'KUWAIT', 'Kuwait', 'KWT', 414, 965),
('KG', 'KYRGYZSTAN', 'Kyrgyzstan', 'KGZ', 417, 996),
('LA', 'LAO PEOPLE''S DEMOCRATIC REPUBLIC', 'Lao People''s Democratic Republic', 'LAO', 418, 856),
('LV', 'LATVIA', 'Latvia', 'LVA', 428, 371),
('LB', 'LEBANON', 'Lebanon', 'LBN', 422, 961),
('LS', 'LESOTHO', 'Lesotho', 'LSO', 426, 266),
('LR', 'LIBERIA', 'Liberia', 'LBR', 430, 231),
('LY', 'LIBYAN ARAB JAMAHIRIYA', 'Libyan Arab Jamahiriya', 'LBY', 434, 218),
('LI', 'LIECHTENSTEIN', 'Liechtenstein', 'LIE', 438, 423),
('LT', 'LITHUANIA', 'Lithuania', 'LTU', 440, 370),
('LU', 'LUXEMBOURG', 'Luxembourg', 'LUX', 442, 352),
('MO', 'MACAO', 'Macao', 'MAC', 446, 853),
('MK', 'NORTH MACEDONIA', 'North Macedonia', 'MKD', 807, 389),
('MG', 'MADAGASCAR', 'Madagascar', 'MDG', 450, 261),
('MW', 'MALAWI', 'Malawi', 'MWI', 454, 265),
('MY', 'MALAYSIA', 'Malaysia', 'MYS', 458, 60),
('MV', 'MALDIVES', 'Maldives', 'MDV', 462, 960),
('ML', 'MALI', 'Mali', 'MLI', 466, 223),
('MT', 'MALTA', 'Malta', 'MLT', 470, 356),
('MH', 'MARSHALL ISLANDS', 'Marshall Islands', 'MHL', 584, 692),
('MQ', 'MARTINIQUE', 'Martinique', 'MTQ', 474, 596),
('MR', 'MAURITANIA', 'Mauritania', 'MRT', 478, 222),
('MU', 'MAURITIUS', 'Mauritius', 'MUS', 480, 230),
('YT', 'MAYOTTE', 'Mayotte', 'MYT', 175, 269),
('MX', 'MEXICO', 'Mexico', 'MEX', 484, 52),
('FM', 'MICRONESIA, FEDERATED STATES OF', 'Micronesia, Federated States of', 'FSM', 583, 691),
('MD', 'MOLDOVA, REPUBLIC OF', 'Moldova, Republic of', 'MDA', 498, 373),
('MC', 'MONACO', 'Monaco', 'MCO', 492, 377),
('MN', 'MONGOLIA', 'Mongolia', 'MNG', 496, 976),
('MS', 'MONTSERRAT', 'Montserrat', 'MSR', 500, 1664),
('MA', 'MOROCCO', 'Morocco', 'MAR', 504, 212),
('MZ', 'MOZAMBIQUE', 'Mozambique', 'MOZ', 508, 258),
('MM', 'MYANMAR', 'Myanmar', 'MMR', 104, 95),
('NA', 'NAMIBIA', 'Namibia', 'NAM', 516, 264),
('NR', 'NAURU', 'Nauru', 'NRU', 520, 674),
('NP', 'NEPAL', 'Nepal', 'NPL', 524, 977),
('NL', 'NETHERLANDS', 'Netherlands', 'NLD', 528, 31),
('AN', 'NETHERLANDS ANTILLES', 'Netherlands Antilles', 'ANT', 530, 599),
('NC', 'NEW CALEDONIA', 'New Caledonia', 'NCL', 540, 687),
('NZ', 'NEW ZEALAND', 'New Zealand', 'NZL', 554, 64),
('NI', 'NICARAGUA', 'Nicaragua', 'NIC', 558, 505),
('NE', 'NIGER', 'Niger', 'NER', 562, 227),
('NG', 'NIGERIA', 'Nigeria', 'NGA', 566, 234),
('NU', 'NIUE', 'Niue', 'NIU', 570, 683),
('NF', 'NORFOLK ISLAND', 'Norfolk Island', 'NFK', 574, 672),
('MP', 'NORTHERN MARIANA ISLANDS', 'Northern Mariana Islands', 'MNP', 580, 1670),
('NO', 'NORWAY', 'Norway', 'NOR', 578, 47),
('OM', 'OMAN', 'Oman', 'OMN', 512, 968),
('PK', 'PAKISTAN', 'Pakistan', 'PAK', 586, 92),
('PW', 'PALAU', 'Palau', 'PLW', 585, 680),
('PS', 'PALESTINIAN TERRITORY, OCCUPIED', 'Palestinian Territory, Occupied', NULL, NULL, 970),
('PA', 'PANAMA', 'Panama', 'PAN', 591, 507),
('PG', 'PAPUA NEW GUINEA', 'Papua New Guinea', 'PNG', 598, 675),
('PY', 'PARAGUAY', 'Paraguay', 'PRY', 600, 595),
('PE', 'PERU', 'Peru', 'PER', 604, 51),
('PH', 'PHILIPPINES', 'Philippines', 'PHL', 608, 63),
('PN', 'PITCAIRN', 'Pitcairn', 'PCN', 612, 0),
('PL', 'POLAND', 'Poland', 'POL', 616, 48),
('PT', 'PORTUGAL', 'Portugal', 'PRT', 620, 351),
('PR', 'PUERTO RICO', 'Puerto Rico', 'PRI', 630, 1787),
('QA', 'QATAR', 'Qatar', 'QAT', 634, 974),
('RE', 'REUNION', 'Reunion', 'REU', 638, 262),
('RO', 'ROMANIA', 'Romania', 'ROU', 642, 40),
('RU', 'RUSSIAN FEDERATION', 'Russian Federation', 'RUS', 643, 7),
('RW', 'RWANDA', 'Rwanda', 'RWA', 646, 250),
('SH', 'SAINT HELENA', 'Saint Helena', 'SHN', 654, 290),
('KN', 'SAINT KITTS AND NEVIS', 'Saint Kitts and Nevis', 'KNA', 659, 1869),
('LC', 'SAINT LUCIA', 'Saint Lucia', 'LCA', 662, 1758),
('PM', 'SAINT PIERRE AND MIQUELON', 'Saint Pierre and Miquelon', 'SPM', 666, 508),
('VC', 'SAINT VINCENT AND THE GRENADINES', 'Saint Vincent and the Grenadines', 'VCT', 670, 1784),
('WS', 'SAMOA', 'Samoa', 'WSM', 882, 684),
('SM', 'SAN MARINO', 'San Marino', 'SMR', 674, 378),
('ST', 'SAO TOME AND PRINCIPE', 'Sao Tome and Principe', 'STP', 678, 239),
('SA', 'SAUDI ARABIA', 'Saudi Arabia', 'SAU', 682, 966),
('SN', 'SENEGAL', 'Senegal', 'SEN', 686, 221),
('RS', 'SERBIA', 'Serbia', 'SRB', 688, 381),
('SC', 'SEYCHELLES', 'Seychelles', 'SYC', 690, 248),
('SL', 'SIERRA LEONE', 'Sierra Leone', 'SLE', 694, 232),
('SG', 'SINGAPORE', 'Singapore', 'SGP', 702, 65),
('SK', 'SLOVAKIA', 'Slovakia', 'SVK', 703, 421),
('SI', 'SLOVENIA', 'Slovenia', 'SVN', 705, 386),
('SB', 'SOLOMON ISLANDS', 'Solomon Islands', 'SLB', 90, 677),
('SO', 'SOMALIA', 'Somalia', 'SOM', 706, 252),
('ZA', 'SOUTH AFRICA', 'South Africa', 'ZAF', 710, 27),
('GS', 'SOUTH GEORGIA AND THE SOUTH SANDWICH ISLANDS', 'South Georgia and the South Sandwich Islands', 'SGS', 239, 0),
('ES', 'SPAIN', 'Spain', 'ESP', 724, 34),
('LK', 'SRI LANKA', 'Sri Lanka', 'LKA', 144, 94),
('SD', 'SUDAN', 'Sudan', 'SDN', 736, 249),
('SR', 'SURINAME', 'Suriname', 'SUR', 740, 597),
('SJ', 'SVALBARD AND JAN MAYEN', 'Svalbard and Jan Mayen', 'SJM', 744, 47),
('SZ', 'SWAZILAND', 'Swaziland', 'SWZ', 748, 268),
('SE', 'SWEDEN', 'Sweden', 'SWE', 752, 46),
('CH', 'SWITZERLAND', 'Switzerland', 'CHE', 756, 41),
('SY', 'SYRIAN ARAB REPUBLIC', 'Syrian Arab Republic', 'SYR', 760, 963),
('TW', 'TAIWAN, PROVINCE OF CHINA', 'Taiwan, Province of China', 'TWN', 158, 886),
('TJ', 'TAJIKISTAN', 'Tajikistan', 'TJK', 762, 992),
('TZ', 'TANZANIA, UNITED REPUBLIC OF', 'Tanzania, United Republic of', 'TZA', 834, 255),
('TH', 'THAILAND', 'Thailand', 'THA', 764, 66),
('TL', 'TIMOR-LESTE', 'Timor-Leste', 'TLS', 626, 670),
('TG', 'TOGO', 'Togo', 'TGO', 768, 228),
('TK', 'TOKELAU', 'Tokelau', 'TKL', 772, 690),
('TO', 'TONGA', 'Tonga', 'TON', 776, 676),
('TT', 'TRINIDAD AND TOBAGO', 'Trinidad and Tobago', 'TTO', 780, 1868),
('TN', 'TUNISIA', 'Tunisia', 'TUN', 788, 216),
('TR', 'TURKEY', 'Turkey', 'TUR', 792, 90),
('TM', 'TURKMENISTAN', 'Turkmenistan', 'TKM', 795, 993),
('TC', 'TURKS AND CAICOS ISLANDS', 'Turks and Caicos Islands', 'TCA', 796, 1649),
('TV', 'TUVALU', 'Tuvalu', 'TUV', 798, 688),
('UG', 'UGANDA', 'Uganda', 'UGA', 800, 256),
('UA', 'UKRAINE', 'Ukraine', 'UKR', 804, 380),
('AE', 'UNITED ARAB EMIRATES', 'United Arab Emirates', 'ARE', 784, 971),
('GB', 'UNITED KINGDOM', 'United Kingdom', 'GBR', 826, 44),
('US', 'UNITED STATES', 'United States', 'USA', 840, 1),
('UM', 'UNITED STATES MINOR OUTLYING ISLANDS', 'United States Minor Outlying Islands', 'UMI', 581, 1),
('UY', 'URUGUAY', 'Uruguay', 'URY', 858, 598),
('UZ', 'UZBEKISTAN', 'Uzbekistan', 'UZB', 860, 998),
('VU', 'VANUATU', 'Vanuatu', 'VUT', 548, 678),
('VE', 'VENEZUELA', 'Venezuela', 'VEN', 862, 58),
('VN', 'VIET NAM', 'Viet Nam', 'VNM', 704, 84),
('VG', 'VIRGIN ISLANDS, BRITISH', 'Virgin Islands, British', 'VGB', 92, 1284),
('VI', 'VIRGIN ISLANDS, U.S.', 'Virgin Islands, U.s.', 'VIR', 850, 1340),
('WF', 'WALLIS AND FUTUNA', 'Wallis and Futuna', 'WLF', 876, 681),
('EH', 'WESTERN SAHARA', 'Western Sahara', 'ESH', 732, 212),
('YE', 'YEMEN', 'Yemen', 'YEM', 887, 967),
('ZM', 'ZAMBIA', 'Zambia', 'ZMB', 894, 260),
('ZW', 'ZIMBABWE', 'Zimbabwe', 'ZWE', 716, 263),
('ME', 'MONTENEGRO', 'Montenegro', 'MNE', 499, 382),
('XK', 'KOSOVO', 'Kosovo', 'XKX', 0, 383),
('AX', 'ALAND ISLANDS', 'Aland Islands', 'ALA', '248', '358'),
('BQ', 'BONAIRE, SINT EUSTATIUS AND SABA', 'Bonaire, Sint Eustatius and Saba', 'BES', '535', '599'),
('CW', 'CURACAO', 'Curacao', 'CUW', '531', '599'),
('GG', 'GUERNSEY', 'Guernsey', 'GGY', '831', '44'),
('IM', 'ISLE OF MAN', 'Isle of Man', 'IMN', '833', '44'),
('JE', 'JERSEY', 'Jersey', 'JEY', '832', '44'),
('BL', 'SAINT BARTHELEMY', 'Saint Barthelemy', 'BLM', '652', '590'),
('MF', 'SAINT MARTIN', 'Saint Martin', 'MAF', '663', '590'),
('SX', 'SINT MAARTEN', 'Sint Maarten', 'SXM', '534', '1'),
('SS', 'SOUTH SUDAN', 'South Sudan', 'SSD', '728', '211');
SELECT colonne1, colonne2, REPLACE(colonne3, 'exemple insulte', 'CENSURE')
FROM table
# SQL query with the REPLACE function
# syntax
UPDATE tableName
SET  column_name = REPLACE(column_name, 'fromStringValue', 'toStringValue');

#Example 
Update  tbl_employee
Set designation = REPLACE(designation, 'SEO', 'Developer');
from os import environ, remove
from pathlib import Path

from dotenv import find_dotenv, load_dotenv
from sqlalchemy import MetaData
from sqlalchemy_schemadisplay import create_schema_graph

load_dotenv(find_dotenv())
DB_NAME = environ.get("DB_NAME")
DB_PASS = environ.get("DB_PASS")
DB_USER = environ.get("DB_USER")
DB_HOST = environ.get("DB_HOST")
SQLALCHEMY_DATABASE_URL = (
    f"postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:5432/{DB_NAME}"
)


if __name__ == "__main__":
    FILE_NAME = 'ERD.png'
    try:
        remove(FILE_NAME)  # 'remove' is imported from os above; 'os' itself is never imported
    except FileNotFoundError:
        pass
    graph = create_schema_graph(metadata=MetaData(
        SQLALCHEMY_DATABASE_URL))
    graph.write_png(FILE_NAME)
return $model->where('created_at', '>=', date('Y-m-d').' 00:00:00');
drop table ManImp_002_working

select [ID], [DT], 
  CASE WHEN WS='NA' THEN NULL ELSE CAST(WS AS FLOAT) END AS [WS], 
  CASE WHEN WD='NA' THEN NULL ELSE CAST(WD AS FLOAT) END AS [WD], 
  CASE WHEN AT='NA' THEN NULL ELSE CAST([AT] AS FLOAT) END AS [AT],  
  CASE WHEN RH='NA' THEN NULL ELSE CAST(RH AS FLOAT) END AS [RH], 
  [SampleType], [RecCount], [DT_End] into ManImp_002_working 
from ManImp_002

ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [PK_MI2_ID] PRIMARY KEY CLUSTERED 
(
	[ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]

CREATE NONCLUSTERED INDEX [IX_MI2_DT] ON [dbo].[ManImp_002_working]
(
	[DT] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, SORT_IN_TEMPDB = OFF, DROP_EXISTING = OFF, ONLINE = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
GO


ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [CNSTRT_MI2_SampleType]  DEFAULT ('Seconds') FOR [SampleType]
GO

ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [CNSTRT_MI2_RecCount]  DEFAULT ((1)) FOR [RecCount]
GO

SELECT DT as TheDate, Count(*) as TheCount
From ManImp
Group By DT 
Having count(*)>1
Order by TheCount DESC
delete
FROM ManImp
WHERE ID NOT IN
(
    SELECT MAX(ID)
	FROM ManImp
	GROUP BY DT
)
BULK INSERT ManualImport
FROM 'C:\temp\Second_Data-2022-09-15.csv'
WITH
(
    FIRSTROW = 2, -- as 1st one is header
    FIELDTERMINATOR = ',',  --CSV field delimiter
    ROWTERMINATOR = '\n',   --Use to shift the control to next row
    TABLOCK
)

SELECT MIN(DT) as MinDate, MAX(DT) as MaxDate, Count(DT) as [RowCount]
FROM ManualImport
ALTER TABLE DopingTest
ADD(
  CONSTRAINT fk_RaceIDDT
    FOREIGN KEY (RaceID, HorseID)
    REFERENCES Results(RaceID, HorseID)
);
/*
Show all connections to the database
*/

SELECT
	datname, pid, usename, client_addr, client_port,
    query_start, state_change, state, query
FROM
	pg_stat_activity
WHERE
	datname = '<database name HERE>'
ORDER BY
	state_change DESC;
SELECT
  table_schema,
  table_name
FROM
  information_schema.tables
WHERE
  table_schema NOT IN ('pg_catalog', 'information_schema')
  AND table_type = 'BASE TABLE';
DECLARE @selectsql nvarchar(4000)
DECLARE @cnt int

-- build the dynamic statement and capture the count through an OUTPUT parameter
SET @selectsql = N'SELECT @cnt = COUNT(*) FROM Vwbckup'
EXEC sp_executesql @selectsql, N'@cnt int OUTPUT', @cnt = @cnt OUTPUT
Select SUM(CASE When CPayment='Cash' Then CAmount Else 0 End ) as CashPaymentAmount,
       SUM(CASE When CPayment='Check' Then CAmount Else 0 End ) as CheckPaymentAmount
from TableOrderPayment
Where ( CPayment='Cash' Or CPayment='Check' ) AND CDate<=SYSDATETIME() and CStatus='Active';
select 
  case greatest(col1,col2,col3,col4) 
    when col1 then 'col1:' || col1
    when col2 then 'col2:' || col2
    when col3 then 'col3:' || col3
    when col4 then 'col4:' || col4
    else null
  end as greatestcolumnname
from mytable;
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
  FROM v_codevalue_web_codes_full cv
 WHERE cv.CODEVALUENAME LIKE 'Alipay%'
   AND cv.DOMAINNAME LIKE 'ClgSys';
# connection name == any name we can give
# ex: connection name = Mysql_connection
# connection method = standard (TCP/IP)
# host name = localhost, port = 3306
# username = root (works only for a local connection)
# password = root (for a local server, root is the password used here)
# test connection
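# Once connected, a quick check that the session and credentials work (standard MySQL built-ins):
SELECT CURRENT_USER(), VERSION();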
select 44652, '4/1/2022', dateadd(d,44652,'1899-12-30'), DateDiff(dd, '1899-12-30', '4/1/2022')
CREATE OR ALTER FUNCTION DATE_TO_CHAR(TDATE TIMESTAMP, FORMAT VARCHAR(100))
RETURNS VARCHAR(100)
AS
DECLARE SDATE VARCHAR(100);
DECLARE DD VARCHAR(2);
DECLARE MM VARCHAR(2);
DECLARE YY VARCHAR(4);
DECLARE HH VARCHAR(2);
DECLARE MI VARCHAR(2);
DECLARE SS VARCHAR(2);
DECLARE XFORMAT VARCHAR(100);
DECLARE AM VARCHAR(2);
BEGIN
	XFORMAT = UPPER(FORMAT);
	SDATE = CAST(TDATE AS VARCHAR(100));
	YY = SUBSTRING(SDATE FROM 1 FOR 4);
	MM = SUBSTRING(SDATE FROM 6 FOR 2);
	DD = SUBSTRING(SDATE FROM 9 FOR 2);
	HH = SUBSTRING(SDATE FROM 12 FOR 2);
	MI = SUBSTRING(SDATE FROM 15 FOR 2);
	SS = SUBSTRING(SDATE FROM 18 FOR 2);
	XFORMAT = REPLACE(XFORMAT, 'YYYY', YY);
	XFORMAT = REPLACE(XFORMAT, 'MM', MM);
	XFORMAT = REPLACE(XFORMAT, 'DD', DD);
	XFORMAT = REPLACE(XFORMAT, 'YY', SUBSTRING(YY FROM 3 FOR 2));
	XFORMAT = REPLACE(XFORMAT, 'HH24', HH);
	AM = 'AM';

	IF (HH='12') THEN
	BEGIN
		AM = 'M';
		IF (MI > '00') THEN 
		BEGIN
			AM='PM';
		END
	END
	
	IF (HH='00') THEN 
	BEGIN
		HH='12';
		AM='AM';
	END
	
	IF (HH>'12') THEN
	BEGIN
		HH = TRIM(CAST(CAST(HH AS INTEGER)-12 AS VARCHAR(2)));
		IF (CHAR_LENGTH(HH)<2) THEN 
		BEGIN
			HH='0'||HH;
		END
		AM='PM';
	END
	XFORMAT = REPLACE(XFORMAT, 'HH12', HH);
	XFORMAT = REPLACE(XFORMAT, 'HH', HH);
	XFORMAT = REPLACE(XFORMAT, 'MI', MI);
	XFORMAT = REPLACE(XFORMAT, 'SS', SS);
	
	RETURN XFORMAT;
END;

/* Examples:

    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH24:MI:SS') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH12:MI AM') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YY HH24:MI') FROM RDB$DATABASE;
    -- still incomplete...
*/
CREATE OR ALTER PROCEDURE date_range(
    startdate date, 
    enddate date, 
    interv integer DEFAULT 1, 
	unit varchar(6) DEFAULT 'DAY', 
	fromtime time DEFAULT null, 
	totime time DEFAULT null
) RETURNS (dateval timestamp)
AS
BEGIN
    dateval = startdate;
    while (dateval < enddate) do
    BEGIN
	    IF ((:fromtime IS NULL OR cast(:dateval AS time) >= :fromtime)
		  AND (:totime IS NULL OR cast(:dateval AS time) < :totime)) THEN 
		BEGIN 
			suspend;
		END 
		
  		SELECT 
	  		CASE  
		  		WHEN :unit = 'MINUTE' THEN dateadd(MINUTE, :interv, :dateval)
		  		WHEN :unit = 'HOUR' THEN dateadd(HOUR, :interv, :dateval)
		  		WHEN :unit = 'YEAR' THEN dateadd(YEAR, :interv, :dateval)
		  		WHEN :unit = 'MONTH' THEN dateadd(MONTH, :interv, :dateval)
		  		else dateadd(DAY, :interv, :dateval)
			END 
		FROM RDB$DATABASE 
		INTO :dateval;
    END
END;

/*
Example: ranges from today to next 10 days, from 8:00am to 6:00pm, in fraction of 30 minutes:

SELECT 
    cast(dateval AS date) TheDate, 
    CAST(dateval AS time) FromTime, 
    CAST(dateadd(MINUTE, 30, dateval) AS time) ToTime 
FROM date_range(
    current_date, ---- initial date
    current_date + 10, ---- next 10 days
    30, 'MINUTE', ---- fraction
    time '08:00', time '18:00') --- from time, to time
*/
select 
  DATE_PART('year', AGE('2012-03-05', '2010-04-01')) AS years,
  DATE_PART('month', AGE('2012-03-05', '2010-04-01')) AS months,
  DATE_PART('day', AGE('2012-03-05', '2010-04-01')) AS days;
CREATE OR ALTER FUNCTION AGE(DFROM TIMESTAMP, DTO TIMESTAMP, CODED BOOLEAN = FALSE)
RETURNS VARCHAR(30)
AS
DECLARE y varchar(3);
DECLARE m varchar(2);
DECLARE d varchar(2);
BEGIN
	y = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				THEN datediff(year, :DFROM, :DTO)-1
			ELSE datediff(year, :DFROM, :DTO)
		END;
	m = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)-1
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)>=0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)
			WHEN datediff(year, :DFROM, :DTO) = datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)-1
			ELSE datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)
		END;
	d = CASE 
			WHEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO)-1, :DFROM), :DTO)
			ELSE datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)
		END;
	RETURN 
		CASE 
			WHEN :CODED THEN lpad(Y,3,'0')||'-'||lpad(m,2,'0')||'-'||lpad(d,2,'0')
			ELSE Y||'y '||m||'m '||d||'d' 
		END;
END;
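
/* Usage sketch for the AGE function above (assumes it has been created in the current Firebird database;
   the second call passes the optional CODED flag to get the zero-padded 'yyy-mm-dd' form):

    SELECT AGE(CAST('2010-04-01' AS TIMESTAMP), CURRENT_TIMESTAMP) FROM RDB$DATABASE;
    SELECT AGE(CAST('2010-04-01' AS TIMESTAMP), CURRENT_TIMESTAMP, TRUE) FROM RDB$DATABASE;
*/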
#Backup

gbak -b -v -user SYSDBA -password "masterkey" D:\database.FDB E:\database.fbk

#Restore

gbak -c -user SYSDBA -password masterkey E:\database.fbk E:\database_restore.fdb
docker run -v /home/marco:/backup --rm svarcoe/mssql-scripter mssql-scripter -S 172.18.0.3 -d CMUCE -U sa -P CMuce1970@ --schema-and-data -f /backup/mssql-scripter-CMUCE.sql

# BACKUP: 
BACKUP DATABASE [YourDB] TO  DISK = N'C:\xxxxx or /var/opt/mssql/backup/YourDB.bak'
WITH NOFORMAT, NOINIT, NAME = N'YourDB-Full Database Backup',
SKIP, NOREWIND, NOUNLOAD, STATS = 10
GO

# RESTORE:
sqlcmd -S localhost -U SA

RESTORE DATABASE YourDB
FROM DISK = '/var/opt/mssql/backup/YourDB.bak'
WITH MOVE 'YourDB' TO '/var/opt/mssql/data/YourDB.mdf',
MOVE 'YourDB_Log' TO '/var/opt/mssql/data/YourDB_Log.ldf'
GO
select * from 
( 
    select 
        sql_id, 
        elapsed_time, 
        executions, 
        sql_text 
    from v$sql  
    ORDER BY elapsed_time desc
) 
where rownum < 11;
docker run -d -e ACCEPT_EULA=Y -e "SA_PASSWORD=P@ssW0rd" -p 1433:1433 \
  --restart unless-stopped \
  -v /var/opt/mssql/data:/var/opt/mssql/data \
  -v /tmp/:/backups/ \
  --name sqlserver \
  mcr.microsoft.com/mssql/server

#backup:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP DATABASE [dbname] TO DISK = N'/tmp/dbname-full.bak' WITH NOFORMAT, NOINIT, NAME = 'dbname-bak-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP LOG [dbname] TO DISK = N'/tmp/dbname-log.bak' WITH NOFORMAT, NOINIT, NAME = N'dbname-bak-log', NOSKIP, NOREWIND, NOUNLOAD, STATS = 5"

#restore:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE DATABASE [dbname] FROM DISK = N'/tmp/dbname-full.bak' WITH FILE = 1, NOUNLOAD, REPLACE, NORECOVERY, STATS = 5"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE LOG [dbname] FROM DISK = N'/var/opt/mssql/data/dbname-log.bak'"


#create login myuser with password ='strongPass';
#create user myuser for login myuser;
#ALTER LOGIN [myuser] enable;
#Increment timeout and max_children:

/etc/php/7.0/fpm/php.ini  =>   default_socket_timeout = 60000
/etc/php/7.0/fpm/pool.d/www.conf  =>   pm.max_children = 20
/etc/php/7.0/fpm/pool.d/www.conf  =>   request_terminate_timeout = 60000

#Increment timeout on /etc/nginx/nginx.conf:
keepalive_timeout 65000;

#After Restart php-fpm and nginx:

sudo service php7.0-fpm restart
sudo service nginx restart
export ORACLE_SID=$1
export NLS_LANG=AMERICAN_AMERICA.WE8ISO8859P9
export USUARIO=system/org24h
export PATHBACKUP=/respaldo/o24/export
export FILENAME=CMLGDB`date +%d%m%Y%H%M`.DMP
export FILENAMELOG=CMLGDB`date +%d%m%Y%H%M`.log
echo  $PATHBACKUP

rm $PATHBACKUP/*.* -rf

if [ -a $PATHBACKUP ] ; then
	expdp $USUARIO FULL=yes DUMPFILE=dpump_dir1:$FILENAME LOGFILE=dpump_dir1:$FILENAMELOG
	#exp $USUARIO file=$PATHBACKUP/$FILENAME full=yes compress=yes indexes=no consistent=yes log=$PATHBACKUP/$FILENAMELOG
else
	echo "ERROR: Export no encontro el directorio de Respaldo"
	exit 1
fi
alter session set "_ORACLE_SCRIPT"=true;

CREATE USER SISTEMAS IDENTIFIED BY las36horas;

GRANT CREATE TABLE TO SISTEMAS;
GRANT CONNECT TO SISTEMAS;
GRANT CTXAPP TO SISTEMAS;
GRANT RESOURCE TO SISTEMAS;
GRANT CREATE ANY CONTEXT TO SISTEMAS;
GRANT CREATE ANY SYNONYM TO SISTEMAS;
GRANT CREATE VIEW TO SISTEMAS;
GRANT DROP ANY CONTEXT TO SISTEMAS;
GRANT QUERY REWRITE TO SISTEMAS;
GRANT EXECUTE ON SYS.DBMS_AQ_BQVIEW TO SISTEMAS;
GRANT EXECUTE ON SYS.DBMS_LOB TO SISTEMAS;
GRANT EXECUTE ON SYS.UTL_RAW TO SISTEMAS;
DROP TABLESPACE MYSPACENAME INCLUDING contents;

CREATE TABLESPACE MYSPACENAME 
   DATAFILE 'ts1_filename.dbf' 
   SIZE 2000m 
   autoextend on NEXT 1000m maxsize unlimited;
   
 alter tablespace MYSPACENAME coalesce;
#use oracle user from system:

sqlplus "/ as sysdba"

SQL> ALTER USER SYS IDENTIFIED BY [password]; 
SQL> ALTER USER SYSTEM IDENTIFIED BY [password];
CREATE OR ALTER PROCEDURE SPLIT(ASTRLIST VARCHAR(32000))
returns (
  STR VARCHAR(255)
)
as
  declare variable StrList varchar(32000);
  declare variable CommaPos integer;
  declare variable StrVal varchar(10);
begin
  StrList = AStrList || '';
  CommaPos = Position(',', StrList);

  while (CommaPos > 0) do
  begin
    StrVal = Trim(SubString(StrList from 1 for CommaPos - 1));

    if (Char_Length(StrVal) > 0) then
    begin
      STR = StrVal;
      suspend;
    end

    if (Char_Length(StrList) > CommaPos) then
      StrList = SubString(StrList from CommaPos + 1);
    else
      StrList = '';

    CommaPos = Position(',', StrList);
  end

  StrList = Trim(StrList);

  if (Char_Length(StrList) > 0) then
  begin
    begin
      STR = StrList;
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE COD_CITY IN (SELECT STR FROM Split('ABC, DEF, GH, IJK, LM, NOP'))

*/
CREATE OR ALTER PROCEDURE GETINTEGERLIST(AINTEGERLIST VARCHAR(32000))
returns (
  ID integer
)
as
  declare variable IntegerList varchar(32000);
  declare variable CommaPos integer;
  declare variable IntegerVal varchar(10);
begin
  IntegerList = AIntegerList || ' ';
  CommaPos = Position(',', IntegerList);

  while (CommaPos > 0) do
  begin
    IntegerVal = Trim(SubString(IntegerList from 1 for CommaPos - 1));

    if (Char_Length(IntegerVal) > 0) then
    begin
      if (IntegerVal similar to '[0-9]*') then
      begin
        ID = Cast(IntegerVal as integer);
        suspend;
      end
    end

    if (Char_Length(IntegerList) > CommaPos) then
      IntegerList = SubString(IntegerList from CommaPos + 1);
    else
      IntegerList = '';

    CommaPos = Position(',', IntegerList);
  end

  IntegerList = Trim(IntegerList);

  if (Char_Length(IntegerList) > 0) then
  begin
    if (IntegerList similar to '[0-9]*') then
    begin
      ID = Cast(IntegerList as integer);
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE ID_CITY IN (SELECT ID FROM GetIntegerList('1, 2, 12, 45, 75, 45'))

*/
sudo mkdir -p /your/custom/path/oracle-19c/oradata/
sudo chmod -R 777 /your/custom/path/

docker run -d --name oracle19db \
  -p 1521:1521 \
  -e ORACLE_SID=ORCL \
  -e ORACLE_PDB=ORCLDB \
  -e ORACLE_PWD=Oracle123 \
  -e ORACLE_CHARSET=AL32UTF8 \
  -v /your/custom/path/oracle-19c/oradata:/opt/oracle/oradata \
  banglamon/oracle193db:19.3.0-ee

# Charset Value: WE8MSWIN1252, AL16UTF8

# ALTER SESSION SET NLS_DATE_FORMAT = 'RRRR-MM-DD';
# ALTER SESSION SET NLS_TIME_FORMAT = 'HH24:MI:SS';
# ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'RRRR-MM-DD HH24:MI:SS';
# ALTER SESSION SET NLS_TIME_TZ_FORMAT = 'HH24:MI:SS TZR';
# ALTER SESSION SET NLS_TIMESTAMP_TZ_FORMAT = 'RRRR-MM-DD HH24:MI:SS TZR';

# docker exec -it oracle19db bash -c "source /home/oracle/.bashrc; sqlplus /nolog"
# connect sys as sysdba;

# alter session set "_ORACLE_SCRIPT"=true;
# create user sistemas identified by las36horas;
# GRANT CONNECT, RESOURCE, DBA TO sistemas;
# GRANT UNLIMITED TABLESPACE TO sistemas;
CREATE TABLE GAP(
    COUNTER INTEGER PRIMARY KEY
);

insert into gap (counter) values (1);
insert into gap (counter) values (2);
--::::: 3
--::::: 4
insert into gap (counter) values (5);
insert into gap (counter) values (6);
--::::: 7
insert into gap (counter) values (8);
insert into gap (counter) values (9);
insert into gap (counter) values (10);



--===== here the trick :

SELECT 
	list(CASE 
		WHEN N2-N1-2 > 0 THEN (N1+1) || '-'|| (N2-1)
		ELSE CAST(N1+1 AS VARCHAR(10))
	END) Missing
FROM (
SELECT a.counter N1, 
  (SELECT counter FROM gap WHERE counter > a.counter rows 1) N2 
FROM gap a
) B
WHERE N2-N1 > 1;



-- results:

Missing
=======
3-4,7
alter role <username> set search_path = <schema1>, ..., <scheman>, public;
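-- Quick check, from a new session opened as that role, that the default search_path took effect:
SHOW search_path;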
WITH RECURSIVE PADRE AS (
	SELECT
		COD_CTA,
		DES_CTA
	FROM
		CATALOGO
	WHERE
		COD_CTA=:COD_CTA
	UNION ALL
		SELECT
			e.COD_CTA,
			e.DES_CTA
		FROM
			CATALOGO e
		INNER JOIN padre p ON POSITION(e.COD_CTA, p.COD_CTA) = 1
		  AND p.COD_CTA > COD_CTA
		ROWS 1
) SELECT
	DISTINCT cod_cta
FROM
	PADRE
WHERE COD_CTA < :COD_CTA
<?php

//===========notifier.sql:
    
/*
CREATE OR REPLACE FUNCTION public.notify_channel()
RETURNS trigger
AS $function$
  BEGIN
	  PERFORM pg_notify('channel_name', row_to_json(NEW)::text);
	  RETURN NULL;
  END;
$function$
LANGUAGE plpgsql;

CREATE TRIGGER trigger_on_insert AFTER INSERT ON mytable
FOR EACH ROW EXECUTE PROCEDURE notify_channel();
*/

set_time_limit(0);

//-- using PDO:

$db = new PDO(
    'pgsql:dbname=dbname host=host port=5432;options=--application_name=APPLICATION_NAME',
    'user',
    'password',
    [
        PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
        PDO::ATTR_DEFAULT_FETCH_MODE => PDO::FETCH_ASSOC,
    ]
);

$db->exec('LISTEN channel_name');

while (true) {
    while ($result = $db->pgsqlGetNotify(PDO::FETCH_ASSOC, 30000)) {
        echo json_encode($result).PHP_EOL;
    }
}

//-- using pg_connect:
//<?php

include '../conn.php';
set_time_limit(0);
ob_end_clean();
pg_query($conn, 'LISTEN table_changed;');

while(true){
    
    $notify = pg_get_notify($conn);
    
	if (!$notify) {
        echo json_encode(array('result'=>false, 'data'=>'No messages')).PHP_EOL;
        ob_flush();
        flush();
        sleep(1);
	} else {
        echo json_encode(array('result'=>true, 'pid' => pg_get_pid($conn), 'data' => $notify)).PHP_EOL;
	}
}

---$> psql: user my_user and database my_original_db must exist:

SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity 
WHERE pg_stat_activity.datname = 'my_original_db' AND pid <> pg_backend_pid();

CREATE DATABASE my_new_db WITH TEMPLATE my_original_db OWNER my_user;
-- Example table:

create table patient (
    id serial primary key,
    firstname varchar(100),
    lastname varchar(100)
);

insert into patient(firstname, lastname) values ('MARCO ANTONIO', 'PEREZ SANDERS');

-- Create a tsvector field from the text columns (preferably kept up to date with an insert/update trigger; see the sketch further below):
alter table patient add tsv_name tsvector;

-- initially fill field.
update patient set tsv_name = to_tsvector(firstname || ' ' || lastname);

-- create index for fast search:
create index tsv_name_idx on patient using gin(tsv_name);

-- then you can search as:
select * from patient where tsv_name @@ to_tsquery('MARCO & PEREZ');
select * from patient where tsv_name @@ to_tsquery('MARCO & SANDERS');
select * from patient where tsv_name @@ to_tsquery('ANTONIO & PEREZ');

select * from m_paciente 
where tsv_nombre @@ to_tsquery(array_to_string(string_to_array('LEIRY SEVERINO',' '),':* & ')||':*');
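
-- Sketch of the maintenance trigger mentioned above (names follow the patient example; adjust to your own table/columns):
create or replace function patient_tsv_refresh() returns trigger as $$
begin
    new.tsv_name := to_tsvector(coalesce(new.firstname, '') || ' ' || coalesce(new.lastname, ''));
    return new;
end;
$$ language plpgsql;

create trigger patient_tsv_trg
    before insert or update on patient
    for each row execute procedure patient_tsv_refresh();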

-- other:

select * from patient where fullname ~ all(array['(?=^LEI.*|\s+LEI\w*)','(?=^ME.*|\s+ME\w*)']);
WITH cte AS (
   SELECT *
   FROM   m_paciente mp 
   WHERE  apellido like 'P%'
   )
SELECT *
FROM  (
   TABLE  cte
   ORDER  BY apellido, nombre
   LIMIT  10
   OFFSET 190
   ) sub
RIGHT  JOIN (SELECT count(*) FROM cte) c(full_count) ON true;
select id, id2, debito, credito, 
  sum(debito-credito) over (partition BY id ORDER BY id, id2) saldo
from 
(
	select m.id, 0 id2, importe debito, 0 credito
		from m_cxc m 
		where m.id=:id
	union all 
	select c.id_m_cxc id, c.id id2, debito, credito
		from d_cxc c 
  		where c.id_m_cxc=:id
) a
SELECT procpid, age(clock_timestamp(), query_start), usename, current_query 
FROM pg_stat_activity 
WHERE current_query != '<IDLE>' AND current_query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- show running queries (9.2)
SELECT pid, age(clock_timestamp(), query_start), usename, query 
FROM pg_stat_activity 
WHERE query != '<IDLE>' AND query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- kill running query
SELECT pg_cancel_backend(procpid);

-- kill idle query
SELECT pg_terminate_backend(procpid);

-- vacuum command
VACUUM (VERBOSE, ANALYZE);

-- all current activity (non-idle queries)
select * from pg_stat_activity where current_query not like '<%';

-- all database users
select * from pg_user;

-- all databases and their sizes
select datname, pg_size_pretty(pg_database_size(datname))
from pg_database
order by pg_database_size(datname) desc;

-- cache hit rates (should not be less than 0.99)
SELECT sum(heap_blks_read) as heap_read, sum(heap_blks_hit)  as heap_hit, (sum(heap_blks_hit) - sum(heap_blks_read)) / sum(heap_blks_hit) as ratio
FROM pg_statio_user_tables;

-- table index usage rates (should not be less than 0.99)
SELECT relname, 100 * idx_scan / (seq_scan + idx_scan) percent_of_times_index_used, n_live_tup rows_in_table
FROM pg_stat_user_tables 
ORDER BY n_live_tup DESC;

-- how many indexes are in cache
SELECT sum(idx_blks_read) as idx_read, sum(idx_blks_hit)  as idx_hit, (sum(idx_blks_hit) - sum(idx_blks_read)) / sum(idx_blks_hit) as ratio
FROM pg_statio_user_indexes;

-- Dump database on remote host to file
$ pg_dump -U username -h hostname databasename > dump.sql

-- Import dump into existing database
$ psql -d newdb -f dump.sql

--(On 9.2+): Queries running more than 2 minutes
SELECT now() - query_start as "runtime", usename, datname, waiting, state, query
  FROM  pg_stat_activity
  WHERE now() - query_start > '2 minutes'::interval
 ORDER BY runtime DESC;

-- 
select relname,last_vacuum, last_autovacuum, last_analyze, last_autoanalyze from pg_stat_user_tables;

--
select relname, n_dead_tup, last_vacuum, last_autovacuum from 
pg_catalog.pg_stat_all_tables
where n_dead_tup > 0 and relname = 'table1' order by n_dead_tup desc;

--Tables and views used by a given view:
with recursive view_tree(parent_schema, parent_obj, child_schema, child_obj, ind, ord) as 
(
  select vtu_parent.view_schema, vtu_parent.view_name, 
    vtu_parent.table_schema, vtu_parent.table_name, 
    '', array[row_number() over (order by view_schema, view_name)]
  from information_schema.view_table_usage vtu_parent
  where vtu_parent.view_schema = '<SCHEMA NAME>' and vtu_parent.view_name = '<VIEW NAME>'
  union all
  select vtu_child.view_schema, vtu_child.view_name, 
    vtu_child.table_schema, vtu_child.table_name, 
    vtu_parent.ind || '  ', 
    vtu_parent.ord || (row_number() over (order by view_schema, view_name))
  from view_tree vtu_parent, information_schema.view_table_usage vtu_child
  where vtu_child.view_schema = vtu_parent.child_schema 
  and vtu_child.view_name = vtu_parent.child_obj
) 
select tree.ind || tree.parent_schema || '.' || tree.parent_obj 
  || ' depends on ' || tree.child_schema || '.' || tree.child_obj txt, tree.ord
from view_tree tree
order by ord;


--Check the size (as in disk space) of all databases:
SELECT d.datname AS Name, pg_catalog.pg_get_userbyid(d.datdba) AS Owner,
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT')
    THEN pg_catalog.pg_size_pretty(pg_catalog.pg_database_size(d.datname)) 
    ELSE 'No Access' 
  END AS SIZE 
FROM pg_catalog.pg_database d 
ORDER BY 
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT') 
    THEN pg_catalog.pg_database_size(d.datname)
    ELSE NULL 
  END;

--Check the size (as in disk space) of each table:
SELECT nspname || '.' || relname AS "relation",
   pg_size_pretty(pg_total_relation_size(C.oid)) AS "total_size"
 FROM pg_class C
 LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace)
 WHERE nspname NOT IN ('pg_catalog', 'information_schema')
   AND C.relkind <> 'i'
   AND nspname !~ '^pg_toast'
 ORDER BY pg_total_relation_size(C.oid) DESC;

--Script to kill all running connections of a current database
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE datname = current_database()  
  AND pid <> pg_backend_pid();
<?php

/*==========  install on Ubuntu: ==============

sudo apt install php-sybase


//==================================

append this to /etc/freetds/freetds.conf
(192.168.0.250:2638 is an example):

//==================================

sudo nano /etc/freetds/freetds.conf

[connName]
    host = 192.168.0.250  
    port = 2638
    tds version = 7.4
    database = MYDB
    
//======  php: */

$connName = "connName";
$user = "myuser";
$pass = "mypass";

$st="dblib:host=$connName;charset=iso8859-1";
$conn = new PDO($st,$user,$pass);

/* $conn->prepare($sql_query);
...etc
*/
select regexp_matches('FV{YY}{MM}{DD}-{UU}-{###}', '\{([A-Za-z#]+)\}', 'g');
create table <tablename>_log(
    id serial primary key,
    idx integer,
    antes json,
    despues json,
    id_m_usuario integer,
    fecha_registro timestamp default now(),
    tipo char(1) default 'U',
    unico varchar(36)
);

create function f_<tablename>_log()
returns trigger
LANGUAGE plpgsql as
$body$
begin
    case tg_op
        when 'INSERT' then
            insert into <tablename>_log (idx, despues, tipo)
            select new.id, row_to_json(new), 'I';
            return new;
        when 'UPDATE' then
            if ( ... <conditions, e.g. new.field1 <> old.field1> ... ) then
                insert into <tablename>_log (idx, antes, despues, tipo)
                select new.id, row_to_json(old), row_to_json(new), 'U';
            end if;
            return new;
        when 'DELETE' then
            insert into <tablename>_log (idx, antes, tipo)
            select old.id, row_to_json(old), 'D'; -- NEW is null on DELETE, so use OLD
            return old;
    end case;
end;
$body$;

create trigger t_<tablename>_log
before insert or update or delete
on <tablename>
for each row execute procedure f_<tablename>_log();
select id, name from table 
where name ilike all(string_to_array(replace('%'||TRIM('JOHN DOE')||'%',' ','%,%'), ','))

-- 1) it wraps the search string in % and turns each space into '%,%'
-- 2) it splits the resulting comma-separated string into an array of patterns
-- 3) it applies ILIKE to every pattern, so each word must match somewhere in the name

-- It finds (for example) name = 'DOE, JOHN' in the table
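-- A quick check of the intermediate steps (a sketch):
select replace('%'||TRIM('JOHN DOE')||'%',' ','%,%');                        -- '%JOHN%,%DOE%'
select string_to_array(replace('%'||TRIM('JOHN DOE')||'%',' ','%,%'), ','); -- {%JOHN%,%DOE%}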
<?php 

$sql = 'select fielda, fieldb, fieldc from table1
order by field, fieldb, fieldc
union all
select field1, field2, field3 from table2
order by field1, field2 desc, field3 asc';


print_r($sql2 = invert_order($sql));


function invert_order($str){
    $re = '/(?<=order\sby\s)(.*)(?=)/mi';

    preg_match_all($re, $str, $matches, PREG_SET_ORDER, 0);
    
    $mat = $matches[sizeof($matches)-1]; //-- get only last order field list
    $mat=$mat[sizeof($mat)-1];
    $mat=explode(",",$mat);
    
    
    for($i=0; $i<sizeof($mat); $i++){   //-- reverse each pair of order field/direction 
        $duet = preg_split("/\s+/", trim($mat[$i]));
        if (sizeof($duet)<2) $duet[1]="";
        switch(strtolower($duet[1])) {
            case "desc":
                $duet[1] = "asc";
                break;
            default:
                $duet[1] = "desc";
                break;
        }
        $mat[$i] = implode(" ",$duet);
    }
    
    $re2 = '/(order\s+by\s+.*)*$/i';    //-- replace last order by with new inverted order by:
    $subst = "order by ".implode(", ",$mat);
    $result = preg_replace($re2, "", $str);
    return $result . " $subst";
}


?>
SELECT *, count(*) OVER() AS full_count
FROM   tbl
WHERE  /* whatever */
ORDER  BY col1
LIMIT  ?
OFFSET ?
create function f_thetable_biud()
returns trigger as
$body$
-- declare vars here if needed
begin
    -- NEW and OLD hold the row being written/removed
    -- TG_OP is 'INSERT', 'UPDATE' or 'DELETE'
    return new; -- use "return old;" in a DELETE trigger
end;
$body$ language plpgsql;

create trigger t_thetable_biud
before insert or update or delete -- or "after"
on thetable
for each row execute procedure f_thetable_biud();
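-- A concrete instance of the template above (a sketch; assumes a hypothetical
-- table "thetable" with an "updated_at" timestamp column):
create function f_thetable_set_updated_at()
returns trigger as
$body$
begin
    new.updated_at := now();
    return new;
end;
$body$ language plpgsql;

create trigger t_thetable_set_updated_at
before insert or update
on thetable
for each row execute procedure f_thetable_set_updated_at();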
<?php

// sudo apt install php7.0-sybase


header("content-type: text/plain; charset=iso-8859-1");
ini_set("display_errors","1");
error_reporting(E_ALL);

$host="localhost";
$db="test";
$uid="sa";
$pwd="mypassword";

$query = "select top 10 * from testtable";

$conn = new PDO( "dblib:host=$host:1433;dbname=$db;", $uid, $pwd);
$stmt = $conn->prepare( $query );
$stmt->execute();

while ($r=$stmt->fetch(PDO::FETCH_ASSOC)){
	print_r($r);
}

?>
SELECT n.nspname AS schema_name
      ,p.proname AS function_name
      ,pg_get_functiondef(p.oid) AS func_def
      ,pg_get_function_arguments(p.oid) AS args
      ,pg_get_function_result(p.oid) AS result
FROM   pg_proc p
JOIN   pg_namespace n ON n.oid = p.pronamespace
WHERE  p.proname ILIKE 'func_name%';
select * from json_to_recordset( (
  select array_to_json(array_agg(row_to_json(t)))
    from (
      select field1, field2, field3,... from mytable
    ) t
)) as x(
  "field1" integer,
  "field2" integer,
  "field3" text,
  :
)

--- example: JSON text to recordset:
select * from json_to_recordset((select detalles from d_factura_caja where id=4216)::json) as x(
  "seq" integer,
  "padre" integer,
  "tipo" text,
  "idx" integer,
  "nivel" integer,
  "descripcion" text,
  "cantidad" numeric(6,2),
  "presentacion" text,
  "precio_unitario" numeric(15,2),
  "precio" numeric(15,2),
  "porc_impuesto" numeric(6,2),
  "impuesto" numeric,
  "neto"numeric
)
SELECT TRUNC (SYSDATE - ROWNUM) dt
  FROM DUAL CONNECT BY ROWNUM < :ndates;
  
/* from now on */
SELECT TRUNC (SYSDATE + ROWNUM - 2) dt
  FROM DUAL CONNECT BY ROWNUM < 100
#just install samba with

$ sudo apt install samba

#and go to this file:

$ sudo nano /etc/samba/smb.conf

#and just at the bottom add these lines:

    [share]
    comment = Ubuntu File Server Share
    path = /path/to/the/folder  #for example /home/user_name/public
    browsable = yes
    guest ok = yes
    read only = no
    create mask = 0755

#restart the samba service

$ sudo service smbd restart
$ sudo service nmbd restart
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requires:

1) Install Linux dependences (search for specific linux distro instructions for this):
python-dev, python-pip, freetds-dev, freetds-bin, libaio1

2) Install instantclient-basic-lite
follow these instructions:

http://www.oracle.com/technetwork/database/features/linuxx86-64soft-092277.html?printOnly=1
(go to "Installation of ZIP files" section at the bottom of document)

3) Install the required Python packages:

sudo -H pip install cx_Oracle
sudo -H pip install pymssql

"""

import cx_Oracle
import pymssql

""" ====== let's connect to Oracle DB Server ====="""

orcl_host = "host1"
orcl_port = 1521
orcl_user = "user1"
orcl_pwd  = "password1"
orcl_dbn  = "service_name"

connstr = orcl_user+"/"+orcl_pwd+"@"+orcl_host+":"+str(orcl_port)+"/"+orcl_dbn
orcl = cx_Oracle.connect(connstr)

#If all is correct we will see an object printed:

print(orcl)

"""===== let's connect to sqlsvr: ====="""

sql_host = "host2"
sql_user = "user2"
sql_pwd  = "password2"
sql_dbn  = "database_name"

conexion_sql = pymssql.connect(sql_host, sql_user, sql_pwd, sql_dbn)

#If all is correct we will see an object printed:

print(conexion_sql)

---Sql:
    
CREATE EXTENSION dblink;

---Then you can use it like this:
    
select * from dblink('dbname=otherdbname','select field1, field2 from otherdb_table') as foreign_table(field1 varchar, field2 integer)
# let's create a backup from remote postgresql database using pg_dump:
#
#   pg_dump -h [host address] -Fc -o -U [database user] <database name> > [dump file]
#
# later it could be restored at the same remote server using:
#
#   sudo -u postgres pg_restore -C mydb_backup.dump
#
#Ex:

pg_dump -h 67.8.78.10 -p 5432 -Fc -o -U myuser mydb > mydb_backup.dump

pg_restore -C mydb_backup.dump



#complete (all databases and objects)

pg_dumpall -U myuser -h 67.8.78.10 -p 5432 --clean --file=mydb_backup.dump


#restore from pg_dumpall --clean:

psql -f mydb_backup.dump postgres #it doesn't matter which db you select here
select mnu_code from lu_mnu_code where mnu_code = :mnu_code

DECLARE
   l_exist        CHAR (1);
   l_code_change   CHAR (1);
BEGIN
   SELECT CASE
             WHEN EXISTS (SELECT *
                            FROM lu_mnu_code
                           WHERE NVL (mnu_code, 'null') = :mnu_code
                         ) THEN 'Y'
             ELSE 'N'
          END
     INTO l_exist
     FROM DUAL;

   IF l_exist = 'Y' THEN
      RETURN FALSE;
   ELSE
      RETURN TRUE;
   END IF;
END;
/* SQL Command within postgres: */

SELECT now() - pg_postmaster_start_time() uptime

/* using psql command line utility deployed with Postgres: */
 
$> psql -c "SELECT now() - pg_postmaster_start_time() uptime"
SELECT tablo.columnName FROM 
(SELECT ROW_NUMBER()OVER(Order By columnName)indexer, 
 * FROM tabloName)tablo WHERE tablo.indexer = @sayac
select to_char(to_date('1/10/2011','mm/dd/yyyy'),'mm-dd-yyyy') from dual
SELECT COUNT(*) 
FROM <Table_Name>
WHERE SITETYPE = 'condition'
SELECT DISTINCT <Column_name>
FROM <table_name>
SELECT Id_card, COUNT(Id_card)
FROM user_application_tbl
GROUP BY Id_card
HAVING COUNT(Id_card) > 1
create or replace table `reby-cloud.analytics_reby_v1_eu.pg_topups_json`
as

select
  id,
  created_at,
  gateway,
  gateway_transaction_id,
  stripe_charge_id,
  array(select as struct value_amount as amount, value_currency as currency) as value,
  array(select as struct original_value_amount as amount, original_value_currency as currency) as original_value,
  user_id,
  company_id,
  braintree_transaction_id
from `reby-cloud.staging_eu.pg_topups_stg`
ALTER TABLE DAYSUM
  MODIFY V_TOT_REVN NUMBER GENERATED ALWAYS AS (ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)) VIRTUAL
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)
apex.item( "P368_DATE" ).hide();
-- ==================================================================
-- sys.indexes.type 0 => heap - i.e. no Primary Key 
--					1 => clustered index
--					2 => non-clustered index
-- ==================================================================

SELECT	o.object_id
        , schemaname = OBJECT_SCHEMA_NAME(o.object_id)
		, tablename = o.NAME
		, PrimaryKey = CASE WHEN pk.object_id IS NULL THEN 'No Primary Key' ELSE '' END 
		, ClusteredIndexes = CASE WHEN ci.object_id IS NULL THEN 'No Clustered Index' ELSE '' END
		, NonClusteredIndexes = CASE WHEN nci.object_id IS NULL THEN 'No Non-Clustered Index' ELSE ''  END
		, [RowCount] = (SELECT SUM(p.Rows) from sys.partitions AS p where p.object_id=o.object_id) / (1+COUNT(ci.object_id)+COUNT(nci.object_id))
		, [IndexCount] = COUNT(ci.object_id)+COUNT(nci.object_id)
FROM sys.objects o
LEFT OUTER JOIN sys.indexes nci ON nci.object_id = o.object_id and nci.type=2
LEFT OUTER JOIN sys.indexes ci ON ci.object_id = o.object_id and ci.type=1
LEFT OUTER JOIN sys.indexes pk ON o.object_id = pk.object_id AND pk.is_primary_key = 1
WHERE o.Type='U' AND ((pk.object_id IS NULL) OR (ci.object_id IS NULL) OR (nci.object_id IS NULL))

GROUP BY o.object_id,
         OBJECT_SCHEMA_NAME(o.object_id), 
         o.NAME, 
		 CASE WHEN pk.object_id IS NULL THEN 'No Primary Key' ELSE '' END, 
		 CASE WHEN nci.object_id IS NULL THEN 'No Non-Clustered Index' ELSE '' END,
		 CASE WHEN ci.object_id IS NULL THEN 'No Clustered Index' ELSE '' END

ORDER BY TableName ASC
SELECT 
     schemaname = OBJECT_SCHEMA_NAME(o.object_id)
    ,tablename = o.NAME
FROM sys.objects o
INNER JOIN sys.indexes i ON i.OBJECT_ID = o.OBJECT_ID
-- tables that are heaps without any nonclustered indexes
WHERE (
        o.type = 'U'
        AND o.OBJECT_ID NOT IN (
            SELECT OBJECT_ID
            FROM sys.indexes
            WHERE index_id > 0
            )
        )
        --    OR
        -- table that have a clustered index without any nonclustered indexes
        --(o.type='U' 
        --        AND o.OBJECT_ID NOT IN (
        --    SELECT OBJECT_ID 
        --        FROM sys.indexes 
        --        WHERE index_id>1))  
set global sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';
create or replace table `reby-cloud.reby_marketing_eu.user_360` as 
select
  created_at,
  meta_user_id,
  type,
  concat(
    'amount:',if(amount is null,'na',cast(amount as string)),'; ',
    'minutes:',if(minutes is null,'na',cast(minutes as string)),'; ',
    'service_area:',if(service_area is null, 'na',service_area),'; ',
    'vehicle_type:',if(vehicle_type is null, 'na',vehicle_type),'; ',
    'app_used:',if(app_promoting_co is null, 'na',app_promoting_co),'; ',
    'vehicle_co:',if(owner_co is null,'na',owner_co),'; ',
    'topup_through:',if(topup_trough_co is null, 'na',topup_trough_co)
  ) as details
from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` pyrsc
where type not in ('vehicle_reservation')

union all

SELECT
  timestamp(created_at) as created_at,
  user_id as meta_user_id,
  'cx-conversation' as type,
  concat (
    'conversation_id:',conversation_id,'; ',
    'contact_reason1:',if(cantact_reason_level1 is null, 'na',cantact_reason_level1),'; ',
    'contact_reason2:',if(cantact_reason_level2 is null, 'na',cantact_reason_level2)
  ) as details
FROM `reby-cloud.kustomer_eu.conversation_messages_combination` 

union all

select
  created_at,
  user_id as meta_user_id,
  'user-created' as type,
  concat ('company:',co.name) as details
from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on cu.company_id = co.id
select w2.id,w2.value, w.name from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%002619%' limit 1)));
select registrationnum,idcpicregistration from professionaldata where id =
(select  professionaldata_id from account where id =
(select senderaccount_id from workflowexecution where ordernumber like '%010598%' limit 1));
select w2.id,w2.value from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%MATRICULA%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%010598%' limit 1)));
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `analytics_reby_v1_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_demand_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `ridestatus_post_gdpr.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_fin_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_marketing_eu.__TABLES__` --where table_id = 'table_id'
SELECT col1, col2
FROM table
ORDER BY col1, col2;
insert into WORKQUEUE (ID, facilitycode, workaction, description)
  values ((select max(ID)+1 from WORKQUEUE), 'J', 'II', 'TESTVALUES')
# from google.cloud import bigquery
# client = bigquery.Client()
# project = client.project
# dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
# filepath = 'path/to/your_file.csv'

# Retrieves the destination table and checks the length of the schema
table_id = "my_table"
table_ref = dataset_ref.table(table_id)
table = client.get_table(table_ref)
print("Table {} contains {} columns.".format(table_id, len(table.schema)))

# Configures the load job to append the data to the destination table,
# allowing field addition
job_config = bigquery.LoadJobConfig()
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
job_config.schema_update_options = [
    bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION
]
# In this example, the existing table contains only the 'full_name' column.
# 'REQUIRED' fields cannot be added to an existing schema, so the
# additional column must be 'NULLABLE'.
job_config.schema = [
    bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
    bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"),
]
job_config.source_format = bigquery.SourceFormat.CSV
job_config.skip_leading_rows = 1

with open(filepath, "rb") as source_file:
    job = client.load_table_from_file(
        source_file,
        table_ref,
        location="US",  # Must match the destination dataset location.
        job_config=job_config,
    )  # API request

job.result()  # Waits for table load to complete.
print(
    "Loaded {} rows into {}:{}.".format(
        job.output_rows, dataset_ref.dataset_id, table_ref.table_id
    )
)

# Checks the updated length of the schema
table = client.get_table(table)
print("Table {} now contains {} columns.".format(table_id, len(table.schema)))
CREATE TABLE #Procesos (
    SPID        INT,
    Status      VARCHAR(255),
    Login       VARCHAR(255),
    HostName    VARCHAR(255),
    BlkBy       VARCHAR(255),
    DBName      VARCHAR(255),
    Command     VARCHAR(255),
    CPUTime     INT,
    DiskIO      INT,
    LastBatch   VARCHAR(255),
    ProgramName VARCHAR(255),
    SPID2       INT,
    REQUESTID   INT
)

INSERT INTO #Procesos (SPID, Status, Login, HostName, BlkBy, DBName, Command, CPUTime, DiskIO, LastBatch, ProgramName, SPID2, REQUESTID)
EXEC SP_WHO2

SELECT  *
    FROM #Procesos

DROP TABLE #Procesos
SELECT 
    t.NAME AS TableName,
    s.Name AS SchemaName,
    p.rows,
    SUM(a.total_pages) * 8 AS TotalSpaceKB, 
    CAST(ROUND(((SUM(a.total_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS TotalSpaceMB,
    SUM(a.used_pages) * 8 AS UsedSpaceKB, 
    CAST(ROUND(((SUM(a.used_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS UsedSpaceMB, 
    (SUM(a.total_pages) - SUM(a.used_pages)) * 8 AS UnusedSpaceKB,
    CAST(ROUND(((SUM(a.total_pages) - SUM(a.used_pages)) * 8) / 1024.00, 2) AS NUMERIC(36, 2)) AS UnusedSpaceMB
FROM 
    sys.tables t
INNER JOIN      
    sys.indexes i ON t.OBJECT_ID = i.object_id
INNER JOIN 
    sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id
INNER JOIN 
    sys.allocation_units a ON p.partition_id = a.container_id
LEFT OUTER JOIN 
    sys.schemas s ON t.schema_id = s.schema_id
WHERE 
    t.NAME NOT LIKE 'dt%' 
    AND t.is_ms_shipped = 0
    AND i.OBJECT_ID > 255 
GROUP BY 
    t.Name, s.Name, p.Rows
ORDER BY 
    TotalSpaceMB DESC, t.Name
SELECT      COLUMN_NAME AS 'ColumnName'
            ,TABLE_NAME AS  'TableName'
FROM        INFORMATION_SCHEMA.COLUMNS
WHERE       COLUMN_NAME LIKE '%MyName%'
ORDER BY    TableName
            ,ColumnName;
library(DBI)
connection <- dbConnect(RMySQL::MySQL(),
                 dbname = "name",
                 host = "adress.amazonaws.com",
                 port = number,
                 user = "id",
                 password = "pw")
info <- dbGetQuery(connection, "SELECT column1 FROM database WHERE argument = something")

info
WITH
T AS (   SELECT CAST(CONCAT ('$', IIF(TRY_CAST([Key] AS int) IS NOT NULL, CONCAT ('[', [Key], ']'), '.' + [Key])) AS nvarchar(MAX)) AS Path
              , [Key]
              , Value
              , Type
              , 1                                                                                                                   Lvl
         FROM OPENJSON (@json_doc)
         UNION ALL
         SELECT CAST(CONCAT (T.Path, IIF(TRY_CAST(O.[Key] AS int) IS NOT NULL, CONCAT ('[', O.[Key], ']'), '.' + O.[Key])) AS nvarchar(MAX))
              , O.[Key]
              , O.Value
              , O.Type
              , T.Lvl + 1
         FROM T
             CROSS APPLY OPENJSON (T.Value) O
         WHERE T.Type IN ( 4, 5 ))
SELECT Path, T.[Key], T.Value, T.Type, T.Lvl FROM T;
select
  *
from
  db1.SomeTable a
    inner join 
  db2.SomeTable b on b.SomeColumn = a.SomeColumn;
SELECT *
FROM tbl_name
WHERE 
(id_field IN ('value1', 'value2', 'value3') OR id_field IS NULL)
INSERT `reby-cloud.bq_logs.bigquery_usage_datepart` 
SELECT
    timestamp AS Date,
    resource.labels.project_id AS ProjectId,
    protopayload_auditlog.serviceName AS ServiceName,
    protopayload_auditlog.methodName AS MethodName,
    protopayload_auditlog.status.code AS ErrorCode,
    protopayload_auditlog.status.message AS ErrorMessage,
    protopayload_auditlog.authenticationInfo.principalEmail AS UserId,
    logName AS JobId,
    JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query") AS Query,
     JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") AS DestinationTableId,
    CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) AS BillableBytes,
    (CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) / 1099511627776) * 6 AS TotalCost,
    1 AS QueryCount,
     CASE
    WHEN JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") LIKE '%anon%' THEN 'Query'
    ELSE 'ETL'
  END AS JobType,
  ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"),r"(?i)\s+(?:FROM|JOIN)\s+([^\s\(]+\.[^\s]+)") ) ) AS x
    ORDER BY
      x),', ') AS QueryTables,
      ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL( REGEXP_REPLACE( JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"), r"(?i)\s+(z_+pivot_[a-z0-9_.]+)", ""),r"(?i)\s+(?:WHERE|AND|OR|ON)\s+(?:\s|\(|CAST|`)*([a-z0-9_.]+)(?:AND)?") ) ) AS x
    ORDER BY
      x),', ') AS QueryWhereColumns
  FROM
    `reby-cloud.bq_logs.cloudaudit_googleapis_com_data_access`
  WHERE
    protopayload_auditlog.serviceName = 'bigquery.googleapis.com' and DATE(timestamp) >= '2021-11-28' # Change date to start of gap
  and DATE(timestamp) <= DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)
UPDATE photos
   SET caption = REPLACE(caption,'"','\'')
SELECT note as note_original, 

    REPLACE(
        REPLACE(
            REPLACE(
                REPLACE(
                    REPLACE(
                        REPLACE(
                            REPLACE(
                                REPLACE(
                                    REPLACE(
                                        REPLACE(
                                            REPLACE(
                                                REPLACE(
                                                    REPLACE(
                                                        REPLACE(
                                                            REPLACE(
                                                                REPLACE(
                                                                    REPLACE(
                                                                        REPLACE(
                                                                            REPLACE(
                                                                                REPLACE(
                                                                                    REPLACE(
                                                                                        REPLACE(
                                                                                            REPLACE(
                                                                                                REPLACE(
                                                                                                    REPLACE(
                                                                                                        REPLACE(
                                                                    REPLACE(
                                                                        REPLACE(
                                                                            REPLACE(
                                                                                REPLACE(
                                                                                    REPLACE(
                                                                                        REPLACE(
                                                                                            REPLACE(note, '\"', ''),
                                                                                        '.', ''),
                                                                                    '?', ''),
                                                                                '`', ''),
                                                                            '<', ''),
                                                                        '=', ''),
                                                                    '{', ''),
                                                                                                        '}', ''),
                                                                                                    '[', ''),
                                                                                                ']', ''),
                                                                                            '|', ''),
                                                                                        '\'', ''),
                                                                                    ':', ''),
                                                                                ';', ''),
                                                                            '~', ''),
                                                                        '!', ''),
                                                                    '@', ''),
                                                                '#', ''),
                                                            '$', ''),
                                                        '%', ''),
                                                    '^', ''),
                                                '&', ''),
                                            '*', ''),
                                        '_', ''),
                                    '+', ''),
                                ',', ''),
                            '/', ''),
                        '(', ''),
                    ')', ''),
                '-', ''),
            '>', ''),
        ' ', '-'),
    '--', '-') as note_changed FROM invheader
ALTER DATABASE [servername/databasename] MODIFY NAME = [servername/newdatabasename]
/* HERE I WILL CREATE THE TABLES */
CREATE DATABASE fejesus;

CREATE TABLE leccion (
    id_leccion INT NOT NULL AUTO_INCREMENT, 
    titulo VARCHAR(30),
    instructor VARCHAR(50),
    no_leccion INT,

    PRIMARY KEY(id_leccion),
    INDEX(no_leccion)
    
) ENGINE=INNODB;

DESCRIBE leccion;

CREATE TABLE preguntas (
    id_pregunta INT NOT NULL AUTO_INCREMENT,
    pregunta VARCHAR(200),
    verso VARCHAR(40),
    id_leccion INT NOT NULL,
    id_seccion INT NOT NULL,

    PRIMARY KEY (id_pregunta),
    INDEX(id_leccion),
    INDEX(id_seccion),

    FOREIGN KEY (id_leccion)
        REFERENCES leccion(id_leccion)
        ON UPDATE CASCADE ON DELETE RESTRICT
    
) ENGINE=INNODB;

DESCRIBE preguntas;

CREATE TABLE seccion ( 
    id_seccion INT NOT NULL AUTO_INCREMENT, 
    leccion_id INT NOT NULL, 
    titulo VARCHAR(100), 

    PRIMARY KEY(id_seccion), 
    INDEX (leccion_id), 
        
    FOREIGN KEY (leccion_id) 
        REFERENCES leccion(id_leccion) ON UPDATE CASCADE ON DELETE RESTRICT

) ENGINE=INNODB;

DESCRIBE seccion;
ORDER BY array_position(ARRAY['f', 'p', 'i', 'a']::varchar[], x_field)
SELECT add_continuous_aggregate_policy('conditions_summary_daily',
     start_offset => INTERVAL '1 month',
     end_offset => INTERVAL '1 day',
     schedule_interval => INTERVAL '1 hour');
CREATE MATERIALIZED VIEW conditions_summary_daily
WITH (timescaledb.continuous) AS
SELECT device,
   time_bucket(INTERVAL '1 day', time) AS bucket,
   AVG(temperature),
   MAX(temperature),
   MIN(temperature)
FROM conditions
GROUP BY device, bucket;
CREATE SEQUENCE public.hibernate_sequence INCREMENT 1 START 1 MINVALUE 1;
select id from workflowexecution where subject like '%FOZG-069338%';

update workflowexecution set state = 4, cancellationDate=CURRENT_TIMESTAMP where id = 34470;

update workflowdefinitionexecution set state = 4 , cancellationDate=CURRENT_TIMESTAMP where workflowexecution_id = (select id from workflowexecution where id = 34470);

update workflowstepexecution set state = 4 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = (select id from workflowexecution where id = 34470));
select id from workflowexecution where subject like '%OZLO-0907%';

update workflowexecution set state = 1, enddate = CURRENT_TIMESTAMP where id = 34168;

update workflowdefinitionexecution set state = 1, enddate = CURRENT_TIMESTAMP where workflowexecution_id = (select id from workflowexecution where id = 34168);

update workflowstepexecution set state = 1 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = (select id from workflowexecution where id = 34168)); 
SELECT *
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = N'Customers'
 protected override void OnModelCreating(ModelBuilder builder)
        {
            base.OnModelCreating(builder);

            builder.Entity<InternetUsersLog>()
           .HasOne<AspNetBranchs>(sc => sc.Branch)
           .WithMany(s => s.Users)
           .HasForeignKey(sc => sc.Branch_Id);


        }
Scaffold-DbContext "Data Source=.;Initial Catalog=Name;Persist Security Info=True;User ID=Username;Password=@!n$p!r3;Integrated Security=True" Microsoft.EntityFrameWorkCore.SqlServer -outputdir Repository/Models -context AccDbContext -contextdir Repository -DataAnnotations -Force
SELECT Query, UserId, SUM(TotalCost) FROM `reby-cloud.bq_logs.bigquery_usage_datepart` 
WHERE DATE(Date) >= "2022-03-01" and DATE(Date) <= "2022-03-30" 
GROUP BY 1,2
ORDER BY 3 DESC
SELECT Table_Name, 
    Column_Name
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_CATALOG = 'EDM-PRD'
   AND COLUMN_NAME LIKE '%cost%';
SELECT (date '2022-01-09', date '2022-02-10') OVERLAPS
       (date '2022-02-10', date '2022-03-10');
DECLARE fecha_migracion TIMESTAMP;
SET fecha_migracion = '2021-06-16 00:52:53.131944 UTC';

drop table if exists misc_eu.user_consent;
create or replace table `reby-cloud.misc_eu.user_consent` as 
with 
first_user_session as (
    SELECT
    us.*,
    cu.id as cu_id,
    co.name as app_company_name,
    co.id as company_id,
    row_number() over(partition by cu.id order by us.created_at asc) as rank_
FROM `reby-cloud.analytics_reby_v1_eu.pg_user_session` us
LEFT JOIN `reby-cloud.reby_marketing_eu.pg_company_user` cu
    on us.company_id = cu.company_id and us.user_id = cu.user_id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
    on cu.company_id = co.id
QUALIFY rank_ = 1
),
--t1 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t1 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
--t2 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t2 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
all_users as (select id from `reby-cloud.analytics_reby_v1_eu.pg_users_json`),

company_users as (
  select
    cu.id,
    cu.user_id,
    co.name as from_company,
    cu.created_at,
    cu.company_id,
    cu.is_deleted,
    consent.consent_from_reby
  from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_consent` consent
      on cu.id = consent.company_user_id
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
      on cu.company_id = co.id
),
cross_joins as (
SELECT t1.franquicias as from_co_id, t2.franquicias as to_co_id, all_users.id as user_id
FROM t1 CROSS JOIN t2 CROSS JOIN all_users group by 1,2,3),

cross_cus as (
select
  from_co_id,
  to_co_id,
  cu1.consent_from_reby,
  cj.user_id,
  cu1.id as from_co_cu_id,
  cu1.created_at as from_co_cu_id_created,
  cu2.id as to_co_cu_id,
  cu2.created_at as to_co_cu_id_created,
from cross_joins cj
left join company_users cu1 on cj.from_co_id = cu1.company_id and cj.user_id = cu1.user_id
left join company_users cu2 on cj.to_co_id = cu2.company_id and cj.user_id = cu2.user_id
),

filtered_users as (
select *
from cross_cus
where to_co_id = 'c_3qteetfhxjshx4j54111'
  and from_co_cu_id is not null
  and from_co_id != 'c_3qteetfhxjshx4j54111'
  and to_co_cu_id is not null
),

join_first_ride_reserve_session as (
select
  fu.*,
  frr.created_at as ts_frr,
  fs1.created_at as first_session_from_co,
--  fs2.created_at as first_session_to_co,
  if(from_co_cu_id_created <= fecha_migracion,'pre-migracion','post-migracion') as pre_post
from filtered_users fu
left join temp_eu.delete_first_ride_reserve frr 
  on frr.vehicle_company_id = fu.from_co_id and frr.app_co_id = fu.to_co_id and fu.user_id = frr.user_id
left join first_user_session fs1 on fs1.cu_id = fu.from_co_cu_id 
--left join first_user_session fs2 on fs2.cu_id = fu.to_co_cu_id
),

consent_ts as (
select
  jfrrs.*,
  case
    when pre_post = 'pre-migracion' then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is null  and consent_from_reby is true then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is not null and ts_frr is not null then ts_frr
    when pre_post = 'post-migracion' and first_session_from_co is not null and consent_from_reby is true and ts_frr is null then timestamp_add(greatest(to_co_cu_id_created,from_co_cu_id_created),INTERVAL 3 HOUR)
    else null
  end as consent_timestamp,
  row_number() over (partition by from_co_id,to_co_id,consent_from_reby,user_id) as rn_
from join_first_ride_reserve_session jfrrs
QUALIFY rn_ = 1
)

select
  *,
  case
    when consent_timestamp is not null and pre_post = 'pre-migracion' and abs(timestamp_diff(from_co_cu_id_created,first_session_from_co,HOUR))<3 then 'consent-from-co-app'
    when consent_timestamp is not null and pre_post = 'pre-migracion' then 'consent-from-reby-app'
    when consent_timestamp is not null and pre_post = 'post-migracion' then 'consent-from-reby-app'
  else 'other'
  end as consent_from
from consent_ts

--GET THE FIRST VEHICLE RESERVATION PER USER/APP/VEHICLE
create or replace table temp_eu.delete_first_ride_reserve as 

with first_reservations as (
    select
        vrh.created_at,
        vrh.user_id,
        vrh.company_vehicle_id as vehicle_company_id,
        vrh.company_id as app_co_id,
        co1.name as vehicle_company_name,
        co2.name as app_company_name,
        row_number() over (partition by vrh.user_id,vrh.company_vehicle_id,vrh.company_id order by vrh.created_at asc) as r_n
    from `reby-cloud.analytics_reby_v1_eu.pg_vehicle_reservation_history` vrh
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co1
    on vrh.company_vehicle_id = co1.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2
    on vrh.company_id = co2.id
    QUALIFY r_n = 1
),
--FINISH

--GET THE FIRST RIDE PER USER/APP/VEHICLE
rides as (
    select 
        id,
        created_at,
        user_id,
        if(vehicle_company_id is null,'c_3qteetfhxjshx4j54111',vehicle_company_id) as vehicle_company_id,
        if(company_id is null,'c_3qteetfhxjshx4j54111',company_id) as company_id
    from analytics_reby_v1_eu.pg_rides_json
),

first_ride_pre as (
    select
        r.id,
        r.user_id,
        r.created_at as ride_date,
        r.vehicle_company_id,
        r.company_id as app_co_id,
        co.name as vehicle_company_name,
        co2.name as app_company_name,
        cu.id as cu_id_vehicle,
        cu2.id as cu_id_app,
        --row_number() over (partition by r.user_id,r.vehicle_company_id order by r.created_at asc) as rank_
    from rides r
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.user_id = cu.user_id and r.vehicle_company_id = cu.company_id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu2 on r.user_id = cu2.user_id and r.company_id = cu2.company_id
),

first_ride_per_app_vehicle as (
select *, row_number() over (partition by user_id, app_company_name, vehicle_company_name order by ride_date asc) as rank_ from first_ride_pre qualify rank_ = 1
),
--FINISH

--GET FIRST DATE BETWEEN FIRST RIDE AND/OR FIRST RESERVATION
first_date_ride_reserv as (
    select
        user_id,
        vehicle_company_name,
        app_company_name,
        created_at,
        vehicle_company_id,
        app_co_id,
        row_number() over (partition by user_id, app_company_name, vehicle_company_name order by created_at asc) as rank_
        from(
            --UNION BETWEEN RIDES AND RESERVATIONS
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                created_at,
                vehicle_company_id,
                app_co_id
            from first_reservations 
            UNION ALL
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                ride_date as created_at,
                vehicle_company_id,
                app_co_id
            from first_ride_per_app_vehicle
        ) QUALIFY rank_=1
)

select * from first_date_ride_reserv
--FINISH
;

SELECT DISTINCT column_list
FROM table_list
  JOIN table ON join_condition
WHERE row_filter
GROUP BY column
HAVING group_filter
ORDER BY column
LIMIT count OFFSET offset;
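-- A concrete query following the skeleton above (table and column names are illustrative):
SELECT c.country, COUNT(*) AS order_count
FROM customers c
  JOIN orders o ON o.customer_id = c.id
WHERE o.status = 'shipped'
GROUP BY c.country
HAVING COUNT(*) > 10
ORDER BY order_count DESC
LIMIT 20 OFFSET 0;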
IF object_id('tempdb..#table') IS NOT NULL
BEGIN
   DROP TABLE #table
END

Create Table #table(
	[State] varchar(500),
	[City] varchar(500)
);


Insert into #table ([State], [City]) values ('OH', 'Toledo,Columbus');
Insert into #table ([State], [City]) values ('TN', 'Nashville,Memphis');



SELECT A.[State]
	,Split.a.value('.', 'VARCHAR(100)') AS String
FROM (
	SELECT [State]
		,CAST('<M>' + REPLACE([City], ',', '</M><M>') + '</M>' AS XML) AS String
	FROM #table
	) AS A
CROSS APPLY String.nodes('/M') AS Split(a);
where o.createdate >= dateadd(year, -1, current_date)
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*) DESC;
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*);
SELECT COUNT(*) FROM campers WHERE counselor="Ashley";
SELECT COUNT(column_name) FROM table_name;
SELECT 
    cu.id as user_id,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,longitude) longitude,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,latitude) latitude,
    last_location_at
FROM `reby-cloud.alvaro_misc.pg_user_last_location` ull
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on cu.company_id = ull.company_id and cu.user_id = ull.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
;
select
    uc.from_co_cu_id as user_id,
    uc.consent_timestamp,
    uc.consent_from
from `reby-cloud.misc_eu.user_consent` uc
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = uc.from_co_cu_id
select * from (
select
    r.id,
    cu.id as user_id,
    r.created_at as tiempo_utc,
    r.minutes,
    r.distance/1000 as distancia_km,
    r.cost[offset(0)].amount as coste,
    r.path[safe_offset(0)].latitude as latitude_initial,
    r.path[safe_offset(0)].longitude as longitude_initial,
    r.path[safe_offset(array_length(path)-1)].latitude as latitude_final,
    r.path[safe_offset(array_length(path)-1)].longitude as longitude_final,
    r.starting_battery_level,
    r.ending_battery_level,
    co.name as vehicle_company,
    co2.name as app_company
from `reby-cloud.analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.company_id = cu.company_id and r.user_id = cu.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
--order by r.created_at asc
) where vehicle_company = 'Avant'
Select Count(*)
From   mySchema.myTable
Where  Cast(Col1 As NVarChar(128)) +
       Cast(Col2 As NVarChar(128)) +
       Cast(Coln As NVarChar(128)) Like '%?%'
INSERT INTO TableName (
  Field1,
  Field2,
  Field3
) VALUES (
  'Value1',
  22,
  GETDATE()
)
SELECT Suref, FORMAT(DateFrom, 'd', 'en-gb') AS 'Great Britain English format'
FROM [dbo].[BOCClientIndex - AdultServiceDetails]
WHERE [DateFrom] >= '01/01/2020'
Order by DateFrom
INNER JOIN:
is used when retrieving data from multiple
tables and will return only matching data.

LEFT OUTER JOIN:
is used when retrieving data from
multiple tables and will return all
left table records and any matching right table records.

RIGHT OUTER JOIN:
is used when retrieving data from
multiple tables and will return all right
table records and any matching left table records.

FULL OUTER JOIN:
is used when retrieving data from
multiple tables and will return records from both
tables, matching and non-matching.



INNER JOIN :
SELECT select_list From TableA A
Inner Join TableB B
On A.Key = B.Key


LEFT OUTER JOIN :
SELECT select_list From TableA A
Left Join TableB B
On A.Key = B.Key

(add WHERE B.Key IS NULL to return only the rows in A with no match in B, e.g. to find or delete unmatched rows)



RIGHT OUTER JOIN :
SELECT select_list From TableA A
Right Join TableB B
On A.Key = B.Key


FULL JOIN :
SELECT select_list From TableA A
FULL OUTER Join TableB B
On A.Key = B.Key

(a small worked example of all four joins follows below)

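A small worked example of the four joins (a sketch; TableA/TableB and their rows are illustrative):

CREATE TABLE TableA (KeyCol INT, A_Val VARCHAR(10));
CREATE TABLE TableB (KeyCol INT, B_Val VARCHAR(10));
INSERT INTO TableA VALUES (1, 'a1'), (2, 'a2');
INSERT INTO TableB VALUES (2, 'b2'), (3, 'b3');

-- INNER: 1 row (key 2); LEFT: keys 1,2 (B_Val NULL for 1); RIGHT: keys 2,3 (A_Val NULL for 3); FULL: keys 1,2,3
SELECT A.KeyCol, A.A_Val, B.B_Val FROM TableA A INNER JOIN TableB B ON A.KeyCol = B.KeyCol;
SELECT A.KeyCol, A.A_Val, B.B_Val FROM TableA A LEFT JOIN TableB B ON A.KeyCol = B.KeyCol;
SELECT B.KeyCol, A.A_Val, B.B_Val FROM TableA A RIGHT JOIN TableB B ON A.KeyCol = B.KeyCol;
SELECT COALESCE(A.KeyCol, B.KeyCol) AS KeyCol, A.A_Val, B.B_Val FROM TableA A FULL OUTER JOIN TableB B ON A.KeyCol = B.KeyCol;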
-- which log points fall within a 1/10 mile range of each school?
SELECT * 
FROM
 (SELECT
        vehicle,
        date1,
        code,
        metric,
        safe_cast(lon as FLOAT64) as lon,
        safe_cast(lat as FLOAT64) as lat,
        ST_GeogPoint(safe_cast(lon as FLOAT64), safe_cast(lat as FLOAT64)) AS point,
    FROM 
        `nytint-prd.gfx_nyc_fleet_logs.test_log_subset_3mo_geo`
    WHERE ABS(safe_cast(lat as FLOAT64)) < 90
    AND ABS(safe_cast(lon as FLOAT64)) < 180) as logs,
    `nytint-prd.gfx_nyc_fleet_logs.school_locations` as schools
WHERE ST_DWithin(
        ST_GeogPoint(safe_cast(schools.LONGITUDE as FLOAT64), safe_cast(schools.LATITUDE as FLOAT64)),
        logs.point,
        1609.34/10)
limit 100
-- how many stations within 1 mile range of each zip code?
SELECT
    zip_code AS zip,
    ANY_VALUE(zip_code_geom) AS polygon,
    COUNT(*) AS bike_stations
FROM
    `bigquery-public-data.new_york.citibike_stations` AS bike_stations,
    `bigquery-public-data.geo_us_boundaries.zip_codes` AS zip_codes
WHERE ST_DWithin(
         zip_codes.zip_code_geom,
         ST_GeogPoint(bike_stations.longitude, bike_stations.latitude),
         1609.34)
GROUP BY zip
ORDER BY bike_stations DESC
SELECT
  *,
  ST_GeogPoint(pLongitude, pLatitude) AS p
FROM
  mytable
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bese - BuildExectionSummaryEnd</Title>
      <Description>execution plan end</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
endofscript:

PRINT '--------------------------------------------------';
PRINT ' ';
PRINT 'elapsed time '+CONVERT(VARCHAR, GETDATE() - @started, 108);
PRINT ' ';
PRINT 'completed '+CONVERT(VARCHAR, GETDATE(), 108);
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>best - BuildExectionSummaryTitle</Title>
      <Description>add title to execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '     '
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummaryStep</Title>
      <Description>executionplan step</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT ' ';
SET @stepdescription = '';
SET @executionsummary = @executionsummary + CHAR(13) + ' - ' + @stepdescription;
PRINT CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription;
PRINT ' ';
IF @step = @step
BEGIN
	--------------------------------------------------------
	--START

	SELECT 1;

	--END
	--------------------------------------------------------
END
PRINT ' ';
PRINT 'END ' + CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription + ' : ' + CONVERT(VARCHAR, GETDATE() - @started, 108);
SET @step += 1;
PRINT ' ';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummary</Title>
      <Description>execution plan start</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '--------------------------------------------';
PRINT '--Execution Summary-------------------------';
PRINT '--------------------------------------------';
PRINT '
-  
';
PRINT '--------------------------------------------';
PRINT '--END Execution Summary---------------------';
PRINT '--------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
DECLARE
       @started          DATETIME
     , @step             INT
     , @stepdescription  VARCHAR(500)
	 , @executionsummary VARCHAR(max);

SET @started = GETDATE();
SET @step = 1;
SET @stepdescription='';
SET @executionsummary='';
PRINT 'started '+CONVERT(VARCHAR, @started, 108);

PRINT ' ';
PRINT '--------------------------------------------------';


]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>besg - BuildExecutionSummaryGenerate</Title>
      <Description>prints out the generated execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '-------------------------------------------------------';
PRINT '--Generated Execution Summary--------------------------';
PRINT '-------------------------------------------------------';
PRINT @executionsummary;
PRINT '----------------------------------------------------------';
PRINT '--END Generated Execution Summary-------------------------';
PRINT '----------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
IF EXISTS (
    SELECT * FROM sysobjects WHERE id = object_id(N'Split_Int') 
    AND xtype IN (N'FN', N'IF', N'TF')
)
    DROP FUNCTION [dbo].[Split_Int]
GO

SET ANSI_NULLS ON
GO

SET QUOTED_IDENTIFIER ON
GO

CREATE FUNCTION [dbo].[Split_Int]
       (
         @InputList VARCHAR(8000) = '',
         @Delimiter NVARCHAR(1) = ','
       )
RETURNS @List TABLE
       (
         [Index] INT NOT NULL
                        IDENTITY(0, 1)
                        PRIMARY KEY,
         [Value] INT NULL
       )
AS BEGIN




	DECLARE @Item varchar(8000)
    WHILE CHARINDEX(@Delimiter, @InputList, 0) <> 0
          BEGIN
                SELECT  @Item = RTRIM(LTRIM(SUBSTRING(@InputList, 1,
                                                       CHARINDEX(@Delimiter, @InputList, 0)
                                                       - 1))) ,
                        @InputList = RTRIM(LTRIM(SUBSTRING(@InputList,
                                                            CHARINDEX(@Delimiter, @InputList, 0)
                                                            + LEN(@Delimiter),
                                                            LEN(@InputList))));

							
 
                IF LEN(@Item) > 0 
                   INSERT   INTO @List
                            SELECT  @Item
          END

    IF LEN(@InputList) > 0 
       INSERT   INTO @List
                SELECT  @InputList -- Put the last item in
   

RETURN
END



GO



select * from dbo.Split_Int('1,2,3,4',',')


select * from dbo.Split_Int('	','	')
SELECT      c.name  AS 'ColumnName'
            ,t.name AS 'TableName'
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%MyName%'
ORDER BY    TableName
            ,ColumnName;
DECLARE @ClarionDate INT = 47563
DECLARE @SqlDateTime DATETIME 

-- Convert the Clarion DATE into an SQL DateTime
SET @SqlDateTime = DateAdd(day, @ClarionDate  - 4, '1801-01-01') 

SELECT @SqlDateTime AS 'SQL Date Time'

-- Now convert it back from an SQL DateTime to a Clarion Date
SET @ClarionDate = DateDiff(day, DateAdd(day, -4, '1801-01-01'), @SqlDateTime)
SELECT @ClarionDate AS 'Clarion Date'
DECLARE @Object AS INT;
DECLARE @ResponseText AS VARCHAR(8000);
DECLARE @Body AS VARCHAR(8000) = 
'{
    "what": 1,
    "ever": "you",
    "need": "to send as the body"
}'  

EXEC sp_OACreate 'MSXML2.XMLHTTP', @Object OUT;
EXEC sp_OAMethod @Object, 'open', NULL, 'post','http://requestb.in/1h83e3n1', 'false'

EXEC sp_OAMethod @Object, 'setRequestHeader', null, 'Content-Type', 'application/json'
EXEC sp_OAMethod @Object, 'send', null, @body

EXEC sp_OAMethod @Object, 'responseText', @ResponseText OUTPUT
SELECT @ResponseText

EXEC sp_OADestroy @Object
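A note on the OLE Automation snippet above: sp_OACreate is blocked on a default SQL Server install, so the calls fail until the feature is switched on (requires sysadmin). A minimal sketch of the prerequisite configuration:

-- Enable OLE Automation procedures before using sp_OACreate / sp_OAMethod (server-wide setting)
EXEC sp_configure 'show advanced options', 1;
RECONFIGURE;
EXEC sp_configure 'Ole Automation Procedures', 1;
RECONFIGURE;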
BEGIN TRY
    DROP TABLE [Dim].[Date]
END TRY

BEGIN CATCH
    /*No Action*/
END CATCH

CREATE TABLE [Dim].[Date]
(
    [DateKey] INT primary key, 
    [Date] DATETIME,
    [FullDate] CHAR(10),-- Date in MM-dd-yyyy format
    [DayOfMonth] VARCHAR(2), -- Field will hold day number of Month
    [DaySuffix] VARCHAR(4), -- Apply suffix as 1st, 2nd ,3rd etc
    [DayName] VARCHAR(9), -- Contains name of the day, Sunday, Monday 
    [DayOfWeek] CHAR(1),-- First Day Sunday=1 and Saturday=7
    [DayOfWeekInMonth] VARCHAR(2), --1st Monday or 2nd Monday in Month
    [DayOfWeekInYear] VARCHAR(2),
    [DayOfQuarter] VARCHAR(3), 
    [DayOfYear] VARCHAR(3),
    [WeekOfMonth] VARCHAR(1),-- Week Number of Month 
    [WeekOfQuarter] VARCHAR(2), --Week Number of the Quarter
    [WeekOfYear] VARCHAR(2),--Week Number of the Year
    [Month] VARCHAR(2), --Number of the Month 1 to 12
    [MonthName] VARCHAR(9),--January, February etc
    [MonthOfQuarter] VARCHAR(2),-- Month Number belongs to Quarter
    [Quarter] CHAR(1),
    [QuarterName] VARCHAR(9),--First,Second..
    [Year] CHAR(4),-- Year value of Date stored in Row
    [YearName] CHAR(7), --CY 2012,CY 2013
    [MonthYear] CHAR(10), --Jan-2013,Feb-2013
    [MMYYYY] CHAR(6),
    [FirstDayOfMonth] DATE,
    [LastDayOfMonth] DATE,
    [FirstDayOfQuarter] DATE,
    [LastDayOfQuarter] DATE,
    [FirstDayOfYear] DATE,
    [LastDayOfYear] DATE,
    [IsHoliday] BIT,-- Flag 1=National Holiday, 0-No National Holiday
    [IsWeekday] BIT,-- 0=Week End ,1=Week Day
    [HolidayName] VARCHAR(50),--Name of Holiday in US
)
GO

--=========================================================================================
--Specify Start Date and End date here
--Value of Start Date Must be Less than Your End Date 
--=========================================================================================

DECLARE @StartDate DATETIME = '12/29/2014' --Starting value of Date Range
DECLARE @EndDate DATETIME = '01/01/2100' --End Value of Date Range

--Temporary Variables To Hold the Values During Processing of Each Date of Year
DECLARE
    @DayOfWeekInMonth INT,
    @DayOfWeekInYear INT,
    @DayOfQuarter INT,
    @WeekOfMonth INT,
    @CurrentYear INT,
    @CurrentMonth INT,
    @CurrentQuarter INT

/*Table Data type to store the day of week count for the month and year*/
DECLARE @DayOfWeek TABLE
(
    DOW INT,
    MonthCount INT,
    QuarterCount INT,
    YearCount INT
)

INSERT INTO @DayOfWeek VALUES (1, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (2, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (3, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (4, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (5, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (6, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (7, 0, 0, 0)

--Extract and assign various parts of Values from Current Date to Variable

DECLARE @CurrentDate AS DATETIME = @StartDate
SET @CurrentMonth = DATEPART(MM, @CurrentDate)
SET @CurrentYear = DATEPART(YY, @CurrentDate)
SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)

/********************************************************************************************/
--Proceed only if Start Date(Current date) is less than End date you specified above

WHILE @CurrentDate < @EndDate
/*Begin day of week logic*/
BEGIN
    /*Check for Change in Month of the Current date if Month changed then 
    Change variable value*/
    IF @CurrentMonth != DATEPART(MM, @CurrentDate) 
    BEGIN
        UPDATE @DayOfWeek
        SET [MonthCount] = 0
        SET @CurrentMonth = DATEPART(MM, @CurrentDate)
    END

    /* Check for Change in Quarter of the Current date if Quarter changed then change 
        Variable value*/
    IF @CurrentQuarter != DATEPART(QQ, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET [QuarterCount] = 0
        SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)
    END

    /* Check for Change in Year of the Current date if Year changed then change 
        Variable value*/
    IF @CurrentYear != DATEPART(YY, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET YearCount = 0
        SET @CurrentYear = DATEPART(YY, @CurrentDate)
    END

    -- Set values in table data type created above from variables
    UPDATE @DayOfWeek
    SET 
        MonthCount = MonthCount + 1,
        QuarterCount = QuarterCount + 1,
        YearCount = YearCount + 1
    WHERE DOW = DATEPART(DW, @CurrentDate)

    SELECT
        @DayOfWeekInMonth = MonthCount,
        @DayOfQuarter = QuarterCount,
        @DayOfWeekInYear = YearCount
    FROM @DayOfWeek
    WHERE DOW = DATEPART(DW, @CurrentDate)
    
/*End day of week logic*/


/* Populate Your Dimension Table with values*/
    
    INSERT INTO [Dim].[Date]
    SELECT
        
        CONVERT (char(8),@CurrentDate,112) as 'DateKey',
        @CurrentDate AS 'Date',
        CONVERT (char(10),@CurrentDate,101) as 'FullDate',
        DATEPART(DD, @CurrentDate) AS 'DayOfMonth',
        --Apply Suffix values like 1st, 2nd 3rd etc..
        CASE 
            WHEN DATEPART(DD,@CurrentDate) IN (11,12,13) THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 1 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'st'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 2 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'nd'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 3 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'rd'
            ELSE CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th' 
        END AS 'DaySuffix',
        
        DATENAME(DW, @CurrentDate) AS 'DayName',
        DATEPART(DW, @CurrentDate) AS 'DayOfWeek',
        @DayOfWeekInMonth AS 'DayOfWeekInMonth',
        @DayOfWeekInYear AS 'DayOfWeekInYear',
        @DayOfQuarter AS 'DayOfQuarter',
        DATEPART(DY, @CurrentDate) AS 'DayOfYear',
        DATEPART(WW, @CurrentDate) + 1 - DATEPART(WW, CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)) + '/1/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'WeekOfMonth',
        (DATEDIFF(DD, DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0), @CurrentDate) / 7) + 1 AS 'WeekOfQuarter',
        DATEPART(WW, @CurrentDate) AS 'WeekOfYear',
        DATEPART(MM, @CurrentDate) AS 'Month',
        DATENAME(MM, @CurrentDate) AS 'MonthName',
        CASE
            WHEN DATEPART(MM, @CurrentDate) IN (1, 4, 7, 10) THEN 1
            WHEN DATEPART(MM, @CurrentDate) IN (2, 5, 8, 11) THEN 2
            WHEN DATEPART(MM, @CurrentDate) IN (3, 6, 9, 12) THEN 3
        END AS 'MonthOfQuarter',
        DATEPART(QQ, @CurrentDate) AS 'Quarter',
        CASE DATEPART(QQ, @CurrentDate)
            WHEN 1 THEN 'First'
            WHEN 2 THEN 'Second'
            WHEN 3 THEN 'Third'
            WHEN 4 THEN 'Fourth'
        END AS 'QuarterName',
        DATEPART(YEAR, @CurrentDate) AS 'Year',
        'CY ' + CONVERT(VARCHAR, DATEPART(YEAR, @CurrentDate)) AS 'YearName',
        LEFT(DATENAME(MM, @CurrentDate), 3) + '-' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MonthYear',
        RIGHT('0' + CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)),2) + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MMYYYY',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, @CurrentDate) - 1), @CurrentDate))) AS 'FirstDayOfMonth',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, (DATEADD(MM, 1, @CurrentDate)))), DATEADD(MM, 1, @CurrentDate)))) AS 'LastDayOfMonth',
        DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0) AS 'FirstDayOfQuarter',
        DATEADD(QQ, DATEDIFF(QQ, -1, @CurrentDate), -1) AS 'LastDayOfQuarter',
        CONVERT(DATETIME, '01/01/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'FirstDayOfYear',
        CONVERT(DATETIME, '12/31/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'LastDayOfYear',
        NULL AS 'IsHoliday',
        CASE DATEPART(DW, @CurrentDate)
            WHEN 1 THEN 0
            WHEN 2 THEN 1
            WHEN 3 THEN 1
            WHEN 4 THEN 1
            WHEN 5 THEN 1
            WHEN 6 THEN 1
            WHEN 7 THEN 0
        END AS 'IsWeekday',
        NULL AS 'HolidayName'

    SET @CurrentDate = DATEADD(DD, 1, @CurrentDate)
END
        
--============================================================================
-- Step 3.
-- Update Values of Holiday as per USA Govt. Declaration for National Holiday.
--============================================================================

/*Update HOLIDAY Field of USA In dimension*/
    /* New Years Day - January 1 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'New Year''s Day'
    WHERE [Month] = 1 AND [DayOfMonth] = 1

    /* Martin Luther King, Jr. Day - Third Monday in January starting in 1983 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Martin Luther King, Jr. Day'
    WHERE
        [Month] = 1 AND
        [DayName] = 'Monday' AND
        [Year] >= 1983 AND
        DayOfWeekInMonth = 3

    /* Valentine's Day - February 14 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Valentine''s Day'
    WHERE
        [Month] = 2 AND
        [DayOfMonth] = 14

    /* President's Day - Third Monday in February */
    UPDATE [Dim].[Date]
        SET HolidayName = 'President''s Day'
    WHERE
        [Month] = 2 AND
        [DayName] = 'Monday' AND
        [DayOfWeekInMonth] = 3

    /* Saint Patrick's Day */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Saint Patrick''s Day'
    WHERE
        [Month] = 3 AND
        [DayOfMonth] = 17

    /* Memorial Day - Last Monday in May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Memorial Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MAX(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'May' AND
            [DayName] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Mother's Day - Second Sunday of May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Mother''s Day'
    WHERE
        [Month] = 5 AND
        [DayName] = 'Sunday' AND
        [DayOfWeekInMonth] = 2

    /* Father's Day - Third Sunday of June */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Father''s Day'
    WHERE
        [Month] = 6 AND
        [DayName] = 'Sunday' AND
        [DayOfWeekInMonth] = 3

    /* Independence Day */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Independence Day'
    WHERE [Month] = 7 AND [DayOfMonth] = 4

    /* Labor Day - First Monday in September */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Labor Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MIN(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'September' AND
            [DayName] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Columbus Day - Second MONDAY in October */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Columbus Day'
    WHERE
        [Month] = 10 AND
        [DayName] = 'Monday' AND
        [DayOfWeekInMonth] = 2

    /* Halloween - 10/31 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Halloween'
    WHERE
        [Month] = 10 AND
        [DayOfMonth] = 31

    /* Veterans Day - November 11 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Veterans Day'
    WHERE
        [Month] = 11 AND
        [DayOfMonth] = 11
    
    /* Thanksgiving - Fourth THURSDAY in November */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Thanksgiving Day'
    WHERE
        [Month] = 11 AND
        [DayName] = 'Thursday' AND
        [DayOfWeekInMonth] = 4

    /* Christmas */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Christmas Day'
    WHERE [Month] = 12 AND
          [DayOfMonth]  = 25
    
    /* Election Day - The first Tuesday after the first Monday in November */
    BEGIN
    DECLARE @Holidays TABLE
    (
        [ID] INT IDENTITY(1,1),
        [DateID] INT,
        [Week] TINYINT,
        [Year] CHAR(4),
        [Day] CHAR(2)
    )

        INSERT INTO @Holidays([DateID], [Year], [Day])
            SELECT
                [DateKey],
                [Year],
                [DayOfMonth] 
            FROM [Dim].[Date]
            WHERE
                [Month] = 11 AND 
                [DayName] = 'Monday'
            ORDER BY
                [Year],
                [DayOfMonth]

        DECLARE @CNTR INT,
                @POS INT,
                @STARTYEAR INT,
                @ENDYEAR INT,
                @CURRENTYEAR INT,
                @MINDAY INT

        SELECT @CURRENTYEAR = MIN([Year])
             , @STARTYEAR = MIN([Year])
             , @ENDYEAR = MAX([Year])
        FROM @Holidays

        WHILE @CURRENTYEAR <= @ENDYEAR
        BEGIN
            SELECT @CNTR = COUNT([Year])
            FROM @Holidays
            WHERE [Year] = @CURRENTYEAR

            SET @POS = 1

            WHILE @POS <= @CNTR
            BEGIN
                SELECT @MINDAY = MIN(DAY)
                FROM @Holidays
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Week] IS NULL

                UPDATE @Holidays
                    SET [Week] = @POS
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Day] = @MINDAY

                SELECT @POS = @POS + 1
            END

            SELECT @CURRENTYEAR = @CURRENTYEAR + 1
        END

        UPDATE [Dim].[Date]
            SET HolidayName  = 'Election Day'
        FROM [Dim].[Date] DT
            JOIN @Holidays HL ON (HL.DateID + 1) = DT.DateKey
        WHERE
            [Week] = 1
    END
    --set flag for USA holidays in Dimension
    UPDATE [Dim].[Date]
        SET IsHoliday = CASE WHEN HolidayName IS NULL THEN 0
                                WHEN HolidayName IS NOT NULL THEN 1 END

/*****************************************************************************************/

/* Add Fiscal Calendar columns into table DimDate */

ALTER TABLE [Dim].[Date] ADD
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] CHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] CHAR(4),
    [FiscalYearName] CHAR(7),
    [FiscalMonthYear] CHAR(10),
    [FiscalMMYYYY] CHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE

GO

/***************************************************************************
The following section needs to be populated for defining the fiscal calendar
***************************************************************************/

DECLARE
    @dtFiscalYearStart SMALLDATETIME = 'December 29, 2014',
    @FiscalYear INT = 2015,
    @LastYear INT = 2100,
    @FirstLeapYearInPeriod INT = 2012

/*****************************************************************************************/

DECLARE
    @iTemp INT,
    @LeapWeek INT,
    @CurrentDate DATETIME,
    @FiscalDayOfYear INT,
    @FiscalWeekOfYear INT,
    @FiscalMonth INT,
    @FiscalQuarter INT,
    @FiscalQuarterName VARCHAR(10),
    @FiscalYearName VARCHAR(7),
    @LeapYear INT,
    @FiscalFirstDayOfYear DATE,
    @FiscalFirstDayOfQuarter DATE,
    @FiscalFirstDayOfMonth DATE,
    @FiscalLastDayOfYear DATE,
    @FiscalLastDayOfQuarter DATE,
    @FiscalLastDayOfMonth DATE

/*Holds the years that have 455 in last quarter*/

DECLARE @LeapTable TABLE (leapyear INT)

/*TABLE to contain the fiscal year calendar*/

DECLARE @tb TABLE
(
    [PeriodDate] DATETIME,
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] VARCHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] VARCHAR(4),
    [FiscalYearName] VARCHAR(7),
    [FiscalMonthYear] VARCHAR(10),
    [FiscalMMYYYY] VARCHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE
)

/*Populate the table with all leap years*/

SET @LeapYear = @FirstLeapYearInPeriod
WHILE (@LeapYear < @LastYear)
    BEGIN
        INSERT INTO @leapTable VALUES (@LeapYear)
        SET @LeapYear = @LeapYear + 6
    END

/*Initiate parameters before loop*/

SET @CurrentDate = @dtFiscalYearStart
SET @FiscalDayOfYear = 1
SET @FiscalWeekOfYear = 1
SET @FiscalMonth = 1
SET @FiscalQuarter = 1
SET @FiscalWeekOfYear = 1

IF (EXISTS (SELECT * FROM @LeapTable WHERE @FiscalYear = leapyear))
    BEGIN
        SET @LeapWeek = 1
    END
    ELSE
    BEGIN
        SET @LeapWeek = 0
    END

/*******************************************************************************************/

/* Loop on days in interval*/

WHILE (DATEPART(yy,@CurrentDate) <= @LastYear)
BEGIN
    
/*SET fiscal Month*/
    SELECT @FiscalMonth = CASE
        /* 
        /*Use this section for a 4-5-4 calendar.  
        Every leap year the result will be a 4-5-5*/
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 9 THEN 2 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 10 AND 13 THEN 3 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 22 THEN 5 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 23 AND 26 THEN 6 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 35 THEN 8 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 36 AND 39 THEN 9 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (48+@LeapWeek) THEN 11 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN (49+@LeapWeek) AND (52+@LeapWeek) THEN 12 /*4 weeks (5 weeks on leap year)*/
        */

        /*Use this section for a 4-4-5 calendar.  
        Every leap year the result will be a 4-5-5*/
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 8 THEN 2 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 9 AND 13 THEN 3 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 21 THEN 5 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 22 AND 26 THEN 6 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 34 THEN 8 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 35 AND 39 THEN 9 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (47+@leapWeek) THEN 11 /*4 weeks (5 weeks on leap year)*/
        WHEN @FiscalWeekOfYear BETWEEN (48 + @leapWeek) AND (52 + @leapWeek) THEN 12 /*5 weeks*/
        
    END

    /*SET Fiscal Quarter*/
    SELECT @FiscalQuarter = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 1
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 2
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 3
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 4
    END
    
    SELECT @FiscalQuarterName = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 'First'
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 'Second'
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 'Third'
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 'Fourth'
    END
    
    /*Set Fiscal Year Name*/
    SELECT @FiscalYearName = 'FY ' + CONVERT(VARCHAR, @FiscalYear)

    INSERT INTO @tb
    (
        PeriodDate,
        FiscalDayOfYear,
        FiscalWeekOfYear,
        FiscalMonth,
        FiscalQuarter,
        FiscalQuarterName,
        FiscalYear,
        FiscalYearName
    ) VALUES (
        @CurrentDate,
        @FiscalDayOfYear,
        @FiscalWeekOfYear,
        @FiscalMonth,
        @FiscalQuarter,
        @FiscalQuarterName,
        @FiscalYear,
        @FiscalYearName
    )

    /*SET next day*/
    SET @CurrentDate = DATEADD(dd, 1, @CurrentDate)
    SET @FiscalDayOfYear = @FiscalDayOfYear + 1
    SET @FiscalWeekOfYear = ((@FiscalDayOfYear-1) / 7) + 1


    IF (@FiscalWeekOfYear > (52+@LeapWeek))
    BEGIN
        /*Reset a new year*/
        SET @FiscalDayOfYear = 1
        SET @FiscalWeekOfYear = 1
        SET @FiscalYear = @FiscalYear + 1
        IF (EXISTS (SELECT * FROM @leapTable WHERE @FiscalYear = leapyear))
        BEGIN
            SET @LeapWeek = 1
        END
        ELSE
        BEGIN
            SET @LeapWeek = 0
        END
    END
END

/********************************************************************************************/

/*Set first and last days of the fiscal months*/
UPDATE @tb
SET
    FiscalFirstDayOfMonth = minmax.StartDate,
    FiscalLastDayOfMonth = minmax.EndDate
FROM
    @tb t,
    (
        SELECT
            FiscalMonth,
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) AS StartDate, 
            MAX(PeriodDate) AS EndDate
        FROM @tb
        GROUP BY
            FiscalMonth,
            FiscalQuarter,
            FiscalYear
    ) minmax

WHERE
    t.FiscalMonth = minmax.FiscalMonth AND
    t.FiscalQuarter = minmax.FiscalQuarter AND
    t.FiscalYear = minmax.FiscalYear 

/*Set first and last days of the fiscal quarters*/

UPDATE @tb
SET FiscalFirstDayOfQuarter = minmax.StartDate,
    FiscalLastDayOfQuarter = minmax.EndDate
FROM
    @tb t,
    (
        SELECT
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) as StartDate,
            MAX(PeriodDate) as EndDate
        FROM
            @tb
        GROUP BY
            FiscalQuarter,
            FiscalYear
    ) minmax
WHERE
    t.FiscalQuarter = minmax.FiscalQuarter AND
    t.FiscalYear = minmax.FiscalYear 

/*Set first and last days of the fiscal years*/

UPDATE @tb
SET
    FiscalFirstDayOfYear = minmax.StartDate,
    FiscalLastDayOfYear = minmax.EndDate
FROM
@tb t,
(
    SELECT FiscalYear, min(PeriodDate) as StartDate, max(PeriodDate) as EndDate
    FROM @tb
    GROUP BY FiscalYear
) minmax
WHERE
    t.FiscalYear = minmax.FiscalYear 

/*Set FiscalYearMonth*/
UPDATE @tb
SET
    FiscalMonthYear = 
        CASE FiscalMonth
        WHEN 1 THEN 'Jan'
        WHEN 2 THEN 'Feb'
        WHEN 3 THEN 'Mar'
        WHEN 4 THEN 'Apr'
        WHEN 5 THEN 'May'
        WHEN 6 THEN 'Jun'
        WHEN 7 THEN 'Jul'
        WHEN 8 THEN 'Aug'
        WHEN 9 THEN 'Sep'
        WHEN 10 THEN 'Oct'
        WHEN 11 THEN 'Nov'
        WHEN 12 THEN 'Dec'
        END + '-' + CONVERT(VARCHAR, FiscalYear)

/*Set FiscalMMYYYY*/
UPDATE @tb
SET
    FiscalMMYYYY = RIGHT('0' + CONVERT(VARCHAR, FiscalMonth),2) + CONVERT(VARCHAR, FiscalYear)

/********************************************************************************************/

UPDATE [Dim].[Date]
    SET FiscalDayOfYear = a.FiscalDayOfYear
      , FiscalWeekOfYear = a.FiscalWeekOfYear
      , FiscalMonth = a.FiscalMonth
      , FiscalQuarter = a.FiscalQuarter
      , FiscalQuarterName = a.FiscalQuarterName
      , FiscalYear = a.FiscalYear
      , FiscalYearName = a.FiscalYearName
      , FiscalMonthYear = a.FiscalMonthYear
      , FiscalMMYYYY = a.FiscalMMYYYY
      , FiscalFirstDayOfMonth = a.FiscalFirstDayOfMonth
      , FiscalLastDayOfMonth = a.FiscalLastDayOfMonth
      , FiscalFirstDayOfQuarter = a.FiscalFirstDayOfQuarter
      , FiscalLastDayOfQuarter = a.FiscalLastDayOfQuarter
      , FiscalFirstDayOfYear = a.FiscalFirstDayOfYear
      , FiscalLastDayOfYear = a.FiscalLastDayOfYear
FROM @tb a
    INNER JOIN [Dim].[Date] b ON a.PeriodDate = b.[Date]

/********************************************************************************************/

SELECT * FROM [Dim].[Date]
-- rebuild all indexes online
ALTER INDEX ALL ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- rebuild single index online
ALTER INDEX IX_IndexName ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- all fragmented indexes on current db, % fragmentation > 30
SELECT a.index_id, OBJECT_NAME(a.object_id), name, avg_fragmentation_in_percent  
FROM sys.dm_db_index_physical_stats (DB_ID(DB_NAME()), 
      NULL, NULL, NULL, NULL) AS a  
     JOIN sys.indexes AS b 
     ON a.object_id = b.object_id AND a.index_id = b.index_id
where avg_fragmentation_in_percent > 30
order by avg_fragmentation_in_percent desc
GO 
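The two snippets above are often combined into a generated maintenance script: read fragmentation from sys.dm_db_index_physical_stats and emit one ALTER INDEX per index, reorganizing at moderate fragmentation and rebuilding above 30%. A sketch using the usual rule-of-thumb thresholds (tune them for your workload, and review the output before running it):

-- Generate one maintenance command per fragmented index on the current database
SELECT 'ALTER INDEX ' + QUOTENAME(i.name)
     + ' ON ' + QUOTENAME(SCHEMA_NAME(o.schema_id)) + '.' + QUOTENAME(o.name)
     + CASE WHEN ps.avg_fragmentation_in_percent > 30
            THEN ' REBUILD WITH (ONLINE = ON);'  -- heavy fragmentation: rebuild
            ELSE ' REORGANIZE;'                  -- moderate fragmentation: reorganize
       END AS maintenance_command
FROM sys.dm_db_index_physical_stats(DB_ID(), NULL, NULL, NULL, 'LIMITED') AS ps
JOIN sys.indexes AS i ON ps.object_id = i.object_id AND ps.index_id = i.index_id
JOIN sys.objects AS o ON i.object_id = o.object_id
WHERE ps.avg_fragmentation_in_percent > 5
  AND i.name IS NOT NULL; -- skip heaps
GO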
-- to understand who is doing what, alternative view/representation
SELECT
	CAST((SELECT qt.text FROM sys.dm_exec_sql_text(qs.sql_handle) AS qt FOR XML PATH('')) as xml) as query_text,
	qs.blocking_session_id,
	qs.start_time, 
	datediff(ss, qs.start_time, getdate()) as ExecutionTime_Seconds,
	getdate() as  CurrentDate,
	datediff(MINUTE, qs.start_time, getdate()) as ExecutionTime_Minutes,
	qs.session_id,
	qs.command,
	qs.status,
	qs.cpu_time, 
	qs.reads, 
	qs.writes, 
	qs.plan_handle,
	qp.query_plan,
	s.host_name, s.login_name, s.program_name,
	qs.wait_type, qs.open_transaction_count, qs.open_resultset_count, qs.row_count, qs.granted_query_memory, qs.transaction_isolation_level
	--,qs.*
FROM sys.dm_exec_requests AS qs
left join sys.dm_exec_sessions s on s.session_id = qs.session_id ---OUTER APPLY sys.dm_exec_sql_text(qs.sql_handle) AS qt
OUTER APPLY sys.dm_exec_query_plan(qs.plan_handle) AS qp
WHERE 	qs.session_id <> @@SPID
	and qs.command not in ('RESOURCE MONITOR', 'XE TIMER', 'XE DISPATCHER', 'LOG WRITER', 'LOCK MONITOR', 'TASK MANAGER', 'TASK MANAGER', 'CHECKPOINT', 'BRKR TASK', 'LAZY WRITER', 'SIGNAL HANDLER', 'TRACE QUEUE TASK', 'BRKR EVENT HNDLR', 'GHOST CLEANUP', 'RECOVERY WRITER', 'SYSTEM_HEALTH_MONITOR', 'RECEIVE', 'UNKNOWN TOKEN', 'FT FULL PASS', 'FT CRAWL MON')
	and isnull(s.program_name, '') <> 'SQL diagnostic manager Collection Service'
ORDER BY ExecutionTime_Minutes DESC;
/* CHECK SIZE OF DB OBJECTS */
SELECT TOP(10)
      o.[object_id]
    , obj = SCHEMA_NAME(o.[schema_id]) + '.' + o.name
    , o.[type]
    , i.total_rows
    , i.total_size
FROM sys.objects o
JOIN (
    SELECT
          i.[object_id]
        , total_size = CAST(SUM(a.total_pages) * 8. / 1024 AS DECIMAL(18,2))
        , total_rows = SUM(CASE WHEN i.index_id IN (0, 1) AND a.[type] = 1 THEN p.[rows] END)
    FROM sys.indexes i
    JOIN sys.partitions p ON i.[object_id] = p.[object_id] AND i.index_id = p.index_id
    JOIN sys.allocation_units a ON p.[partition_id] = a.container_id
    WHERE i.is_disabled = 0
        AND i.is_hypothetical = 0
    GROUP BY i.[object_id]
) i ON o.[object_id] = i.[object_id]
WHERE o.[type] IN ('V', 'U', 'S')
ORDER BY i.total_size DESC;
/* Monitor query plans */

SELECT
    highest_cpu_queries.plan_handle,  
    highest_cpu_queries.total_worker_time, 
    q.dbid, 
    q.objectid, 
    q.number, 
    q.encrypted, 
    q.[text] 
FROM 
    (SELECT TOP 50  
        qs.plan_handle,  
        qs.total_worker_time 
     FROM 
        sys.dm_exec_query_stats qs 
     ORDER BY qs.total_worker_time desc) AS highest_cpu_queries 
     CROSS APPLY sys.dm_exec_sql_text(plan_handle) AS q 
ORDER BY highest_cpu_queries.total_worker_time desc;
/* Find top 10 queries */

SELECT TOP 10 query_stats.query_hash AS "Query Hash", 
    SUM(query_stats.total_worker_time) / SUM(query_stats.execution_count) AS "Avg CPU Time",
    MIN(query_stats.statement_text) AS "Statement Text"
FROM 
    (SELECT QS.*, 
    SUBSTRING(ST.text, (QS.statement_start_offset/2) + 1,
    ((CASE statement_end_offset 
        WHEN -1 THEN DATALENGTH(st.text)
        ELSE QS.statement_end_offset END 
            - QS.statement_start_offset)/2) + 1) AS statement_text
     FROM sys.dm_exec_query_stats AS QS
     CROSS APPLY sys.dm_exec_sql_text(QS.sql_handle) as ST) as query_stats
GROUP BY query_stats.query_hash
ORDER BY 2 DESC;
GO
/* DMV to find useful indexes: */

PRINT 'Missing Indexes: '
PRINT 'The "improvement_measure" column is an indicator of the (estimated) improvement that might '
PRINT 'be seen if the index was created. This is a unitless number, and has meaning only relative '
PRINT 'to the same number for other indexes. The measure is a combination of the avg_total_user_cost, '
PRINT 'avg_user_impact, user_seeks, and user_scans columns in sys.dm_db_missing_index_group_stats.'
PRINT ''
PRINT '-- Missing Indexes --'
SELECT CONVERT (varchar, getdate(), 126) AS runtime, 
  mig.index_group_handle, mid.index_handle, 
  CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) AS improvement_measure, 
  'CREATE INDEX missing_index_' + CONVERT (varchar, mig.index_group_handle) + '_' + CONVERT (varchar, mid.index_handle) 
  + ' ON ' + mid.statement 
  + ' (' + ISNULL (mid.equality_columns,'') 
    + CASE WHEN mid.equality_columns IS NOT NULL AND mid.inequality_columns IS NOT NULL THEN ',' ELSE '' END + ISNULL (mid.inequality_columns, '')
  + ')' 
  + ISNULL (' INCLUDE (' + mid.included_columns + ')', '') AS create_index_statement, 
  migs.*, mid.database_id, mid.[object_id]
FROM sys.dm_db_missing_index_groups mig
INNER JOIN sys.dm_db_missing_index_group_stats migs ON migs.group_handle = mig.index_group_handle
INNER JOIN sys.dm_db_missing_index_details mid ON mig.index_handle = mid.index_handle
WHERE CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) > 10
ORDER BY migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans) DESC
PRINT ''
GO
/* FIND UNUSED INDEXES - MIGHT AFFECT LOG WRITES */

SELECT o.name Object_Name,
i.name Index_name, 
i.Type_Desc
FROM sys.objects AS o
JOIN sys.indexes AS i
ON o.object_id = i.object_id 
LEFT OUTER JOIN 
sys.dm_db_index_usage_stats AS s 
ON i.object_id = s.object_id 
AND i.index_id = s.index_id
WHERE o.type = 'u'
-- Clustered and Non-Clustered indexes
AND i.type IN (1, 2) 
-- Indexes without stats
AND ( (s.index_id IS NULL) OR
-- Indexes that have been updated but not used
(s.user_seeks = 0 AND s.user_scans = 0 AND s.user_lookups = 0) );
/* STORED PROCEDURES AND FUNCTIONS EXECUTION TIME, COUNT AND AVERAGE */

SELECT DB_NAME(st.dbid) DBName
      ,OBJECT_SCHEMA_NAME(st.objectid,dbid) SchemaName
      ,OBJECT_NAME(st.objectid,dbid) StoredProcedure
      ,max(cp.usecounts) Execution_count
      ,sum(qs.total_worker_time) total_cpu_time
      ,sum(qs.total_worker_time) / (max(cp.usecounts) * 1.0)  avg_cpu_time
 
FROM sys.dm_exec_cached_plans cp join sys.dm_exec_query_stats qs on cp.plan_handle = qs.plan_handle
     CROSS APPLY sys.dm_exec_sql_text(cp.plan_handle) st
where DB_NAME(st.dbid) is not null and cp.objtype = 'proc'
group by DB_NAME(st.dbid),OBJECT_SCHEMA_NAME(objectid,st.dbid), OBJECT_NAME(objectid,st.dbid) 
order by sum(qs.total_worker_time) desc;
SELECT  creation_time 
        ,last_execution_time
        ,total_physical_reads
        ,total_logical_reads 
        ,total_logical_writes
        , execution_count
        , total_worker_time
        , total_elapsed_time
        , (total_elapsed_time / execution_count) avg_elapsed_time
        ,SUBSTRING(st.text, (qs.statement_start_offset/2) + 1,
         ((CASE statement_end_offset
          WHEN -1 THEN DATALENGTH(st.text)
          ELSE qs.statement_end_offset END
            - qs.statement_start_offset)/2) + 1) AS statement_text
FROM sys.dm_exec_query_stats AS qs
CROSS APPLY sys.dm_exec_sql_text(qs.sql_handle) st
WHERE execution_count > 10 -- filter out rare requests
ORDER BY total_elapsed_time / execution_count DESC;
SELECT DB_NAME(dbid) as "Database", COUNT(dbid) as "Number Of Open Connections",
loginame as LoginName
FROM sys.sysprocesses
WHERE dbid > 0
GROUP BY dbid, loginame;
-- Rows with ID existing in both a, b and c
-- JOIN is equivalent to INNER JOIN
SELECT a.ID, a.NAME, b.VALUE1, c.VALUE1 FROM table1 a 
  JOIN table2 b ON a.ID = b.ID
  JOIN table3 c ON a.ID = c.ID
WHERE a.ID >= 1000;
-- ⇓ Test it ⇓ (Fiddle source link)
Joining 2 tables in SQL

SELECT X.Column_Name , Y.Column_Name2
FROM TABLES1_NAME X 
INNER JOIN TABLES2_NAME Y ON X.Primary_key = Y.Foreign_key;


--FOR EXAMPLE
--GET THE FIRST_NAME AND JOB_TITLE
--USE EMPLOYEES AND JOBS TABLE
--THE RELATIONSHIP IS JOB_ID

SELECT E.FIRST_NAME , J.JOB_TITLE
FROM EMPLOYEES E
INNER JOIN JOBS J ON J.JOB_ID = E.JOB_ID;

/*
For this challenge you need to create a simple SELECT statement that will return all columns from the products table, and join to the companies table so that you can return the company name.

products table schema:
id
name
isbn
company_id
price

companies table schema:
id
name

You should return all product fields as well as the company name as "company_name".
*/

SELECT products.*, companies.name AS company_name
FROM products
JOIN companies ON company_id = companies.id
SELECT * FROM INFORMATION_SCHEMA.Tables WHERE Table_Name LIKE '%sequence%'
SELECT      c.name  AS 'ColumnName'
            ,t.name AS 'TableName'
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%formulatypeid%'
ORDER BY    TableName
            ,ColumnName;
create materialized view MV1    
build immediate    
refresh fast on commit    
as     
     
create materialized view log on <table>    
with sequence, rowId (...)    
including new values    
     
create table <table_name> (    
        DateMonth Date not null,    
        DateYear Date not null,    
        Name varchar(256) not null,    
        Total integer not null);    
     
insert into <table_name> () (select ...);    
     
create trigger trigger    
after insert or update on <table>    
for each row     
declare     
     
begin       
end    


create materialized view GROUPBYMonthYearMuseum
build immediate
refresh FAST ON COMMIT
--enable query rewrite
as
SELECT Month, Year, ticket_type , SUM(num_tickets) as NumTickets,
 SUM(revenue) as TotRevenue
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, ticket_type;

CREATE MATERIALIZED VIEW LOG ON museums_tickets
WITH SEQUENCE, ROWID (id_time, ticket_type, num_tickets, Revenue)
INCLUDING NEW VALUES;
CREATE MATERIALIZED VIEW LOG ON TIMEDIM
WITH SEQUENCE, ROWID (id_time, Month, Year)
INCLUDING NEW VALUES;
---- Trigger Manually
CREATE TABLE VM1 (
DateMonth DATE NOT NULL,
DateYear INTEGER NOT NULL,
Ticket_Type VARCHAR(20) NOT NULL,
TOT_NumberOfTickets INTEGER,
TOT_Revenue INTEGER);

INSERT INTO VM1 (DateMonth, DateYear, Ticket_Type, TOT_NumberOfTickets,
TOT_Revenue)
(SELECT Month, Year, Ticket_Type,
SUM(NumberOfTickets), SUM(Revenue)
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, Ticket_Type);
create TRIGGER TriggerForViewVM1
AFTER INSERT ON museums_tickets
FOR EACH ROW
DECLARE
N NUMBER;
VAR_DateMonth DATE;
VAR_DateYear NUMBER;
BEGIN
UPDATE MS 
    SET    MS.NAME = OC.NAME
    FROM   OPENQUERY(WZ12,
      'select EMP, NAME from Test') OC 
   INNER JOIN [dbo].[TEST1] MS 
     ON OC.EMP = MS.EMP
MERGE `my-dataset.dimension_table` AS MAIN USING
`my-dataset.temporary_table` AS TEMP
ON MAIN.PRD_SK = TEMP.PRD_SK
WHEN MATCHED THEN
UPDATE SET
MAIN.PRD_CATEGORY = TEMP.PRD_CATEGORY
WHEN NOT MATCHED THEN
INSERT VALUES(TEMP.PRD_SK, TEMP.PRD_ID, TEMP.PRD_SK, TEMP.PRD_CATEGORY)
with account_session_ste as (
SELECT 
*
FROM `table`
where exists(select 1 from unnest(hits) h 
            where regexp_contains(h.page.hostname, r'signin.account.gov.uk'))
and _table_suffix = FORMAT_DATE('%Y%m%d', DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY))
)
-- group by 1,2)
select 
fullVisitorId AS user_id,
hits.page.hostname as hostname,   
hits.page.pagePath as pagePath,
hits.hitNumber as hit_number 
-- *
from account_session_ste, unnest(hits) as hits
DECLARE Counter INT64;

BEGIN TRANSACTION;
    SET Counter = 0;
    CREATE TEMP TABLE tmp
    (
    dte date,
    days INT64,
    users INT64
    );
    
    WHILE Counter < 28 DO
        INSERT INTO tmp

        with dte as (
            SELECT dte
            FROM unnest(generate_date_array(date('2021-02-01'), date('2021-12-30'))) dte
        ),
        ids as (
            SELECT clientId,
            PARSE_DATE("%Y%m%d", date) as dte
            FROM `govuk-bigquery-analytics.87773428.ga_sessions_2021*`
        WHERE NOT device.operatingSystem = "iOS"
        )

        SELECT dte.dte as dte, max(Counter) as days, count( distinct ids.clientId) AS users
        FROM dte, ids
        WHERE ids.dte BETWEEN DATE_SUB(dte.dte, INTERVAL Counter DAY) and dte.dte
        group by 1
        order by 1 asc;

        SET Counter = Counter + 1;

    END WHILE;
COMMIT TRANSACTION;
--select * from tmp


SELECT * FROM
  (SELECT * FROM tmp)
  PIVOT(SUM(users) FOR days IN (0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27))
  order by 1 asc
select "field1", "field2", count(*)
from "tableName"
group by "field1", "field2"
HAVING count(*) > 1
### MAKE DIR
mkdir split && cd split

### SPLIT FILE
split -l 1000 /<path>/output-google.sql /<path>/split/split-

### CHANGE EXTENSION
ls | xargs -I % mv % %.sql
CREATE DATABASE mydatabase CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE TABLE dbo.my_queue
(
    id UNIQUEIDENTIFIER NOT NULL PRIMARY KEY,
    created_at SMALLDATETIME NOT NULL,
    popped_at SMALLDATETIME NOT NULL,
    errors VARCHAR(MAX) NULL,
    payload NVARCHAR(MAX) NOT NULL
);
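One way a table like dbo.my_queue is typically consumed (a sketch, not part of the original snippet): claim the oldest unprocessed row atomically so concurrent workers skip rows that are already locked. This assumes popped_at is made NULLable and NULL means "not yet popped".

-- Hypothetical dequeue pattern for dbo.my_queue (assumes popped_at NULL = not yet popped)
;WITH next_msg AS (
    SELECT TOP (1) *
    FROM dbo.my_queue WITH (ROWLOCK, READPAST, UPDLOCK)
    WHERE popped_at IS NULL
    ORDER BY created_at
)
UPDATE next_msg
SET popped_at = GETDATE()
OUTPUT inserted.id, inserted.payload;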
-- Intended as a quick "middle row" (median) lookup. Note: MySQL's LIMIT only accepts
-- constant values, so "limit count(*)/2" is pseudocode here; use a prepared statement
-- with a precomputed count, or the window-function variant sketched below.
select * from (select * from table order by data_column asc limit count(*)/2) 
order by data_column desc limit 1;
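A portable alternative that avoids the LIMIT restriction (works on MySQL 8+ and PostgreSQL; the names t and data_column are placeholders for your table and numeric column):

-- Median via window functions: average the middle one or two rows
SELECT AVG(data_column) AS median
FROM (
    SELECT data_column,
           ROW_NUMBER() OVER (ORDER BY data_column) AS rn,
           COUNT(*)     OVER ()                     AS cnt
    FROM t
) ranked
WHERE rn IN (FLOOR((cnt + 1) / 2.0), CEIL((cnt + 1) / 2.0));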
Private Sub Select_Sector()

Dim rs As DAO.Recordset

Dim RegEx As Object
Set RegEx = CreateObject("vbscript.regexp")

Dim qdef As QueryDef
Set qdef = getCurrentDb.QueryDefs("qry_Select_Sector")
qdef.Connect = CurrentDb.TableDefs("BOCClientIndex").Connect


RegEx.Pattern = "IIf\(\[ServiceStatus\]=3,30,20\)\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "IIf([ServiceStatus]=3,30,20))=" & [Forms]![MainMenu_Services]![SelectedStatusIndicator])

RegEx.Pattern = "\(View_qryServiceProviderOrganisationalStructure\.SectorCode\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "(View_qryServiceProviderOrganisationalStructure.SectorCode)=" & [Forms]![MainMenu_Services]![SectorCode])


'For Testing purposes only - Do not use in production code
Set rs = qdef.OpenRecordset

Dim i As Long
For i = 0 To rs.Fields.Count - 1
    Debug.Print rs.Fields(i).Name,
Next
rs.MoveFirst
Do Until rs.EOF
    Debug.Print
    For i = 0 To rs.Fields.Count - 1
        Debug.Print rs.Fields(i).value,
    Next
    rs.MoveNext
Loop
End Sub
  
create or replace function decode_url(url text)
  returns text as
$BODY$
DECLARE result text;
BEGIN
    if url isnull then
        return null;
    end if;

    BEGIN
        with str AS (
            select
                   case when url ~ '^%[0-9a-fA-F][0-9a-fA-F]'
                   then array['']
                   end
            || regexp_split_to_array(url, '(%[0-9a-fA-F][0-9a-fA-F])+', 'i') plain,

            array(select (regexp_matches(url, '((?:%[0-9a-fA-F][0-9a-fA-F])+)', 'gi'))[1]) encoded
            )

        select string_agg(plain[i] || coalesce(convert_from(decode(replace(encoded[i], '%',''), 'hex'), 'utf8'),''),'')
        from str, (select generate_series(1, array_upper(encoded, 1) + 2) i FROM str) serie
        into result;

    EXCEPTION WHEN OTHERS THEN
        raise notice 'failed: %', url;
        return url;
    END;

    return coalesce(result, url);

END;

$BODY$
  LANGUAGE plpgsql IMMUTABLE STRICT;
CREATE OR REPLACE
FUNCTION table_has_pk(tablename TEXT,
OUT res boolean)
 RETURNS boolean

AS $func$
-- DECLARE res boolean DEFAULT FALSE;

BEGIN
SELECT
    (count(constraint_name)>0)::boolean AS res
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_pk(tablename TEXT,
OUT res boolean) RETURNS boolean AS $func$ 
-- res is the OUT parameter; no separate DECLARE is needed

BEGIN
SELECT
    (count(constraint_name)>0)::boolean AS res
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_column(tablename TEXT,
columnname TEXT,
OUT res boolean)
 RETURNS boolean
 
AS $func$ 

BEGIN
SELECT
    (count(column_name) > 0)::boolean AS res
INTO
    res
FROM
    information_schema.columns
WHERE
    table_name = tablename
    AND column_name = columnname;
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_columns(tablename TEXT,
VARIADIC columnnames TEXT[]) RETURNS boolean AS $func$

DECLARE
  x TEXT;
  res boolean DEFAULT TRUE;

BEGIN
  FOREACH x IN ARRAY columnnames LOOP
    EXIT WHEN res = FALSE;

    SELECT table_has_column(tablename, x)
    INTO res;
  END LOOP;

  RETURN res;
END;

$func$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION
random_text(n_length INTEGER default 50)
RETURNS TEXT
LANGUAGE SQL
AS $$

select
string_agg(substr(characters, (random() * length(characters) + 0.5)::integer, 1), '') as random_word
from (values('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-')) as symbols(characters)
-- length of word
join generate_series(1, n_length) on 1 = 1;

$$;
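Example call, assuming the PostgreSQL function above has been created:

select random_text(16) as token; -- returns a random 16-character string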
user = User.first

result = user.user_books.in_order_of(:status, %w[to_read currently_reading read])

=> #<ActiveRecord::Relation [#<UserBook id: 3, user_id: 1, status: "to_read">, #<UserBook id: 4, user_id: 1, status: "to_read">, #<UserBook id: 5, user_id: 1, status: "currently_reading">, #<UserBook id: 6, user_id: 1, status: "read">]>
ASCII -- Returns the equivalent ASCII value for a specific character.

CHAR_LENGTH -- Returns the character length of a string.

CHARACTER_LENGTH -- Same as CHAR_LENGTH.

CONCAT -- Adds expressions together, with a minimum of 2.

CONCAT_WS -- Adds expressions together, but with a separator between each value.

FIELD -- Returns an index value relative to the position of a value within a list of values.

FIND_IN_SET -- Returns the position of a string in a list of strings.

FORMAT -- When passed a number, returns that number formatted to include commas (eg 3,400,000).

INSERT -- Allows you to insert one string into another at a certain point, for a certain number of characters.

INSTR -- Returns the position of the first time one string appears within another.

LCASE -- Converts a string to lowercase.

LEFT -- Starting from the left, extracts the given number of characters from a string and returns them as another.

LENGTH -- Returns the length of a string, but in bytes.

LOCATE -- Returns the position of the first occurrence of one string within another.

LOWER -- Same as LCASE.

LPAD -- Left pads one string with another, to a specific length.

LTRIM -- Removes any leading spaces from the given string.

MID -- Extracts one string from another, starting from any position.

POSITION -- Returns the position of the first time one substring appears within another.

REPEAT -- Repeats a string the given number of times.

REPLACE -- Allows you to replace any instances of a substring within a string, with a new substring.

REVERSE	-- Reverses the string.

RIGHT -- Starting from the right, extracts the given number of characters from a string and returns them as another.

RPAD -- Right pads one string with another, to a specific length.

RTRIM -- Removes any trailing spaces from the given string.

SPACE -- Returns a string full of spaces equal to the amount you pass it.

STRCMP -- Compares 2 strings for differences.

SUBSTR -- Extracts one substring from another, starting from any position.

SUBSTRING -- Same as SUBSTR.

SUBSTRING_INDEX	-- Returns a substring from a string before the passed substring is found the number of times equals to the passed number.

TRIM --	Removes trailing and leading spaces from the given string. Same as if you were to run LTRIM and RTRIM together.

UCASE -- Converts a string to uppercase.

UPPER -- Same as UCASE.
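A few of the string functions above in a single MySQL query (the literal values are just illustrative):

SELECT CONCAT_WS('-', '2024', '01', '31')          AS joined,       -- '2024-01-31'
       SUBSTRING_INDEX('user@example.com', '@', 1) AS local_part,   -- 'user'
       LPAD('7', 3, '0')                           AS padded,       -- '007'
       TRIM('  hello  ')                           AS trimmed;      -- 'hello'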
BIT(size) -- A bit-value type with a default of 1. The allowed number of bits in a value is set via the size parameter, which can hold values from 1 to 64.

TINYINT(size) -- A very small integer with a signed range of -128 to 127, and an unsigned range of 0 to 255. Here, the size parameter specifies the maximum allowed display width, which is 255.

BOOL -- Essentially a quick way of setting the column to TINYINT with a size of 1. 0 is considered false, whilst 1 is considered true.

BOOLEAN	-- Same as BOOL.

SMALLINT(size) -- A small integer with a signed range of -32768 to 32767, and an unsigned range from 0 to 65535. Here, the size parameter specifies the maximum allowed display width, which is 255.

MEDIUMINT(size) -- A medium integer with a signed range of -8388608 to 8388607, and an unsigned range from 0 to 16777215. Here, the size parameter specifies the maximum allowed display width, which is 255.

INT(size) -- A standard integer with a signed range of -2147483648 to 2147483647, and an unsigned range from 0 to 4294967295. Here, the size parameter specifies the maximum allowed display width, which is 255.

INTEGER(size) -- Same as INT.

BIGINT(size) -- A large integer with a signed range of -9223372036854775808 to 9223372036854775807, and an unsigned range from 0 to 18446744073709551615. Here, the size parameter specifies the maximum allowed display width, which is 255.

FLOAT(p) -- A floating point number value. If the precision (p) parameter is between 0 to 24, then the data type is set to FLOAT(), whilst if it's from 25 to 53, the data type is set to DOUBLE(). This behaviour is to make the storage of values more efficient.

DOUBLE(size, d) -- A floating point number value where the total digits are set by the size parameter, and the number of digits after the decimal point is set by the d parameter.

DECIMAL(size, d) -- An exact fixed point number where the total number of digits is set by the size parameters, and the total number of digits after the decimal point is set by the d parameter.

DEC(size, d) -- Same as DECIMAL.
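An illustrative table definition using several of the types above (the table and column names are made up for the example):

CREATE TABLE product_stock (
    id        BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
    in_stock  BOOL            NOT NULL DEFAULT 1,   -- stored as TINYINT(1)
    quantity  SMALLINT        NOT NULL,
    weight_kg FLOAT,
    price     DECIMAL(10, 2)  NOT NULL              -- exact fixed-point, 2 decimal places
);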
% -- Equates to zero or more characters.
-- Example: Find all customers with surnames ending in ‘ory’.
SELECT * FROM customers
WHERE surname LIKE '%ory';

_ -- Equates to any single character.
-- Example: Find all customers living in cities beginning with any 3 characters, followed by ‘vale’.
SELECT * FROM customers
WHERE city LIKE '___vale';

[charlist] -- Equates to any single character in the list.
-- Example: Find all customers with first names beginning with J, K or T.
SELECT * FROM customers
WHERE first_name LIKE '[jkt]%';
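Worth noting: the [charlist] form is SQL Server / MS Access syntax; MySQL's LIKE does not support character classes, so the closest MySQL equivalent uses REGEXP:

-- MySQL version of the [charlist] example above
SELECT * FROM customers
WHERE first_name REGEXP '^[JKT]';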
BIN -- Returns the given number in binary.

BINARY -- Returns the given value as a binary string.

CAST -- Converts one type into another.

COALESCE -- From a list of values, returns the first non-null value.

CONNECTION_ID -- For the current connection, returns the unique connection ID.

CONV -- Converts the given number from one numeric base system into another.

CONVERT -- Converts the given value into the given datatype or character set.

CURRENT_USER -- Returns the user and hostname which was used to authenticate with the server.

DATABASE -- Gets the name of the current database.

GROUP BY -- Used alongside aggregate functions (COUNT, MAX, MIN, SUM, AVG) to group the results.

HAVING -- Used in the place of WHERE with aggregate functions.

IF -- If the condition is true it returns a value, otherwise it returns another value.

IFNULL -- If the given expression equates to null, it returns the given value.

ISNULL -- If the expression is null, it returns 1, otherwise returns 0.

LAST_INSERT_ID -- For the last row which was added or updated in a table, returns the auto increment ID.

NULLIF -- Compares the 2 given expressions. If they are equal, NULL is returned, otherwise the first expression is returned.

SESSION_USER -- Returns the current user and hostname.

SYSTEM_USER -- Same as SESSION_USER.

USER -- Same as SESSION_USER.

VERSION -- Returns the current version of the MySQL powering the database.
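COALESCE, IFNULL and NULLIF side by side (MySQL; the values are illustrative):

SELECT COALESCE(NULL, NULL, 'third') AS first_non_null,  -- 'third'
       IFNULL(NULL, 'fallback')      AS if_null,         -- 'fallback'
       NULLIF('same', 'same')        AS null_if_equal;   -- NULL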
ADDDATE -- Adds a date interval (eg: 10 DAY) to a date (eg: 20/01/20) and returns the result (eg: 20/01/30).

ADDTIME -- Adds a time interval (eg: 02:00) to a time or datetime (05:00) and returns the result (07:00).

CURDATE -- Gets the current date.

CURRENT_DATE -- Same as CURDATE.

CURRENT_TIME -- Gets the current time.

CURRENT_TIMESTAMP -- Gets the current date and time.

CURTIME -- Same as CURRENT_TIME.

DATE -- Extracts the date from a datetime expression.

DATEDIFF -- Returns the number of days between the 2 given dates.

DATE_ADD -- Same as ADDDATE.

DATE_FORMAT -- Formats the date to the given pattern.

DATE_SUB -- Subtracts a date interval (eg: 10 DAY) from a date (eg: 20/01/20) and returns the result (eg: 20/01/10).

DAY -- Returns the day for the given date.

DAYNAME -- Returns the weekday name for the given date.

DAYOFWEEK -- Returns the index for the weekday for the given date.

DAYOFYEAR -- Returns the day of the year for the given date.

EXTRACT -- Extracts from the date the given part (eg MONTH for 20/01/20 = 01).

FROM_DAYS -- Returns the date from the given numeric date value.

HOUR -- Returns the hour from the given date.

LAST_DAY -- Gets the last day of the month for the given date.

LOCALTIME -- Gets the current local date and time.

LOCALTIMESTAMP -- Same as LOCALTIME.

MAKEDATE -- Creates a date and returns it, based on the given year and number of days values.

MAKETIME -- Creates a time and returns it, based on the given hour, minute and second values.

MICROSECOND -- Returns the microsecond of a given time or datetime.

MINUTE -- Returns the minute of the given time or datetime.

MONTH -- Returns the month of the given date.

MONTHNAME -- Returns the name of the month of the given date.

NOW -- Same as LOCALTIME.

PERIOD_ADD -- Adds the given number of months to the given period.

PERIOD_DIFF -- Returns the difference between 2 given periods.

QUARTER -- Returns the year quarter for the given date.

SECOND -- Returns the second of a given time or datetime.

SEC_TO_TIME -- Returns a time based on the given seconds.

STR_TO_DATE -- Creates a date and returns it based on the given string and format.

SUBDATE -- Same as DATE_SUB.

SUBTIME -- Subtracts a time interval (eg: 02:00) from a time or datetime (05:00) and returns the result (03:00).

SYSDATE -- Same as LOCALTIME.

TIME -- Returns the time from a given time or datetime.

TIME_FORMAT -- Returns the given time in the given format.

TIME_TO_SEC -- Converts and returns a time into seconds.

TIMEDIFF -- Returns the difference between 2 given time/datetime expressions.

TIMESTAMP -- Returns the datetime value of the given date or datetime.

TO_DAYS -- Returns the total number of days that have passed from ‘00-00-0000’ to the given date.

WEEK -- Returns the week number for the given date.

WEEKDAY -- Returns the weekday number for the given date.

WEEKOFYEAR -- Returns the week number for the given date.

YEAR -- Returns the year from the given date.

YEARWEEK -- Returns the year and week number for the given date.
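A quick MySQL example exercising a few of the date functions above (the dates are arbitrary):

SELECT DATE_ADD('2020-01-20', INTERVAL 10 DAY) AS plus_ten_days, -- '2020-01-30'
       DATEDIFF('2020-01-30', '2020-01-20')    AS day_gap,       -- 10
       DATE_FORMAT('2020-01-20', '%d/%m/%Y')   AS formatted,     -- '20/01/2020'
       DAYNAME('2020-01-20')                   AS weekday_name;  -- 'Monday'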
ABS -- Returns the absolute value of the given number.

ACOS -- Returns the arc cosine of the given number.

ASIN -- Returns the arc sine of the given number.

ATAN -- Returns the arc tangent of one or 2 given numbers.

ATAN2 -- Returns the arc tangent of 2 given numbers.

AVG -- Returns the average value of the given expression.

CEIL -- Returns the closest whole number (integer) upwards from a given decimal point number.

CEILING -- Same as CEIL.

COS -- Returns the cosine of a given number.

COT -- Returns the cotangent of a given number.

COUNT -- Returns the amount of records that are returned by a SELECT query.

DEGREES -- Converts a radians value to degrees.

DIV -- Allows you to divide integers.

EXP -- Returns e to the power of the given number.

FLOOR -- Returns the closest whole number (integer) downwards from a given decimal point number.

GREATEST -- Returns the highest value in a list of arguments.

LEAST -- Returns the smallest value in a list of arguments.

LN -- Returns the natural logarithm of the given number.

LOG -- Returns the natural logarithm of the given number, or the logarithm of the given number to the given base.

LOG10 -- Does the same as LOG, but to base 10.

LOG2 -- Does the same as LOG, but to base 2.

MAX -- Returns the highest value from a set of values.

MIN -- Returns the lowest value from a set of values.

MOD -- Returns the remainder of the given number divided by the other given number.

PI -- Returns PI.

POW -- Returns the value of the given number raised to the power of the other given number.

POWER -- Same as POW.

RADIANS -- Converts a degrees value to radians.

RAND -- Returns a random number.

ROUND -- Rounds the given number to the given amount of decimal places.

SIGN -- Returns the sign of the given number.

SIN -- Returns the sine of the given number.

SQRT -- Returns the square root of the given number.

SUM -- Returns the value of the given set of values combined.

TAN -- Returns the tangent of the given number.

TRUNCATE -- Returns a number truncated to the given number of decimal places.
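And a small MySQL example for the numeric functions (values are illustrative):

SELECT ROUND(3.14159, 2)    AS rounded,    -- 3.14
       TRUNCATE(3.14159, 3) AS truncated,  -- 3.141
       MOD(10, 3)           AS remainder,  -- 1
       GREATEST(2, 7, 5)    AS biggest;    -- 7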
with agg_ as (
SELECT
    ec.ride_id, 
    ev.ride_id as segment_received_ride,
    ev.user_id as segment_received_user_id,
    ev.full_cost,
    ev.service_area_name,
    ev.minutes,
    ev.company_id,
    ec.user_id,
    array_agg(struct(ec.time_publish,ec.phase) ORDER BY time_publish asc) as status
FROM `reby-cloud.temp_eu.email_comms` ec
LEFT JOIN `reby-safir.data_python.end_of_ride_email_v4_view` ev
    on ec.ride_id = ev.ride_id
group by 1,2,3,4,5,6,7,8
)

select *
    , (
        select as struct 
            time_publish,
            phase
        from unnest(status) 
        where phase = 'sent-to-segment'
        order by time_publish asc
        limit 1
    ).*
from agg_
order by time_publish desc
left_df.merge(right_df, on='user_id', how='left', indicator=True)
create or replace table `temp_eu.delete_stripe_refunds` as (
select * from (
with pre_process as (
    SELECT
		*
		FROM
		`reby-cloud.reby_stripe.py_stripe_preprocessed`
		--where id in (select id from`reby-cloud.temp_eu.delete_stripe` where amount_refunded > 0 )
		--where customer = 'cus_GorMSGP2VREHQs'
		where rn = 1
		order by customer,id,rn asc
)

select
    case when refunds.charge is not null and charges.type ='refund' then refunds.balance_transaction else charges.balance_transaction
        end as balance_transaction,
    case when refunds.charge is not null and charges.type ='refund' then refunds.created_at else charges.created_at
        end as created_at,
    charges.charge_id,
    charges.type,
    charges.paid,
    charges.amount as charge_amount,
    case when refunds.charge is not null and charges.type ='refund' then refunds.amount else null
        end as refund_amount,
    charges.customer,
    charges.user_id
from (
    select
        spp.balance_transaction,
        s.*,
        u.id as user_id,
        sa.service_area
    from `reby-cloud.reby_stripe.py_stripe_processed` s
    left join `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
        on s.customer = u.payment_processor[safe_offset(0)].token
    left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` sa
        on u.id = sa.user_id
    left join pre_process spp
            on spp.id = s.charge_id
    --order by 1,2,3,4,5,6,7,8,9,10,11,12,13
    where date(s.created_at) between '2021-02-01' and '2021-09-03'
    --order by balance_transaction desc
    ) as charges
    left join `reby-cloud.reby_stripe.stripe_refunds_api` refunds on charges.charge_id = refunds.charge
    --where refunds.status = 'succeeded'
)
)
;
-- Enrich downloaded Stripe transactions with the refund details built above
select
  st.*,
  str.created_at,
  str.balance_transaction,
  str.user_id,
  str.charge_amount 
 from `reby-cloud.temp_eu.delete_stripe_downloaded_transactions` st
 left join `reby-cloud.temp_eu.delete_stripe_refunds` str on st.balance_transaction_id = str.balance_transaction 
-- Last execution time of each stored procedure (set the database name in the WHERE clause)
SELECT o.name, 
       ps.last_execution_time 
FROM   sys.dm_exec_procedure_stats ps 
INNER JOIN 
       sys.objects o 
       ON ps.object_id = o.object_id 
WHERE  DB_NAME(ps.database_id) = '' 
ORDER  BY 
       ps.last_execution_time DESC  
SELECT * FROM companies 
WHERE company_name SIMILAR TO '%(apple|google|microsoft)%';
-- Oracle (SQL*Plus): sort ascending with NULLs last
with t1(col) as (
  select 1    from dual union all
  select 2    from dual union all
  select null from dual union all
  select 3    from dual
)
select *
  from t1
 order by col asc nulls last;
-- Orders that match each agent's minimum order amount
select ord_num, agent_code, ord_date, ord_amount
from orders
where(agent_code, ord_amount) IN
(SELECT agent_code, MIN(ord_amount)
FROM orders 
GROUP BY agent_code);  
ALTER TABLE stars
ALTER COLUMN name TYPE varchar(50);
DECLARE @AnyDate DATETIME
SET @AnyDate = GETDATE()

SELECT @AnyDate AS 'Input Date',
  DATEADD(q, DATEDIFF(q, 0, @AnyDate), 0) 
                        AS 'Quarter Start Date',       
  DATEADD(d, -1, DATEADD(q, DATEDIFF(q, 0, @AnyDate) + 1, 0)) 
                        AS 'Quarter End Date'
DECLARE @Year DATE = '2013-01-01'
DECLARE @Quarter INT = 4;

SELECT  DATEADD(QUARTER, @Quarter - 1, @Year) AS QuarterStartDate,
        DATEADD(DAY, -1, DATEADD(QUARTER,  @Quarter, @Year)) AS QuarterEndDate
SELECT title, EXTRACT(YEAR FROM current_date) - "year" AS age
  FROM films
  ORDER BY age ASC;
postgres=# (psql meta-commands)
- list all databases : \l
- list all relations (tables, views, sequences) : \d
- list only the tables : \dt
- list all fields in a table : \d person (name of the table)
- import from external file : \i c:/postgrescourse/person.sql

- connect to a database with psql and import a .sql file
> psql -h localhost -U postgres -d forgeprod -f C:\forgeprod.sql
https://kb.objectrocket.com/postgresql/how-to-run-an-sql-file-in-postgres-846

- connect to a database to run queries 
> psql -U postgres -d forgeprod

- datatypes info:
bigserial : auto-incrementing integer backed by a sequence
date : year, month, day



- create database : create database test;
- drop database : drop database test;
- drop table : drop table person;
- connect to a database ( two forms ) :  
\c test (or) psql -h localhost -p 5432 -U postgres test
- create table syntax : create table table_name ( column_name data_type constraints_if_any ) 

- create table : 
create table person ( 
	id int,
	first_name varchar(50),
  	last_name varchar(50),
    gender varchar(6),
    date_of_birth date
    );
- create table with constraints ( rules that must be satisfied ): 
create table person (
 	id bigserial not null primary key,
 	first_name varchar(50) not null,
 	last_name varchar(50) not null,
    gender varchar(6) not null, 
    date_of_birth date not null
	);

- insert a row : 
insert into person ( first_name, last_name, gender, date_of_birth) values ('Anne','Smith','Female',date '1988-01-09');

- Using OFFSET and LIMIT : select * from person OFFSET 5 LIMIT 5;
- Using OFFSET and FETCH : select * from person OFFSET 5 FETCH FIRST 5 ROWS ONLY;
- Using BETWEEN : select * from person where date_of_birth BETWEEN DATE '2000-01-01' and '2015-01-01';
- Differences between LIKE and ILIKE : ILIKE is case-insensitive (see the sketch after these notes). 

- Using Group By : select country_of_birth, count(*) from person group by country_of_birth; ( will count how many people are from that country )
- Using Having with Group By : select country_of_birth, count(*) from person group by country_of_birth having count(*) > 5; ( only countries with more than 5 people are shown )
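
LIKE vs ILIKE sketch from the note above, assuming the same person table; the pattern is chosen only for illustration.

-- LIKE is case-sensitive: 'a%' does not match 'Anne'
select * from person where first_name LIKE 'a%';
-- ILIKE ignores case: matches 'Anne', 'anne', 'ANNE', ...
select * from person where first_name ILIKE 'a%';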





 USE Your_Database;
 GO
 EXECUTE AS USER = N'the_user_name';
 GO
 SELECT 
    s.name,
    o.name,
    p.[permission_name]
 FROM sys.objects AS o 
 INNER JOIN sys.schemas AS s
 ON o.[schema_id] = s.[schema_id]
 CROSS APPLY sys.fn_my_permissions(QUOTENAME(s.name) 
   + N'.' + QUOTENAME(o.name), N'OBJECT') AS p
   WHERE o.[type] IN (N'U', N'V') -- tables and views
   AND p.subentity_name = N''; -- ignore column permissions 
 GO
 REVERT;
-- Reset a serial sequence to the table's current max(id); TABLE and COLUMN are placeholders
SELECT setval(
        pg_get_serial_sequence('TABLE','COLUMN'), 
        max(id)) 
FROM TABLE;
<?php
$results = $wpdb->get_results( "SELECT * FROM $table_name"); // Fetch all rows from the table into $results
if(!empty($results))                        // Only render the table if the query returned rows
{    
    echo "<table width='100%' border='0'>"; // Open the <table> and <tbody> tags outside the foreach loop so they are emitted only once
    echo "<tbody>";      
    foreach($results as $row){   
    $userip = $row->user_ip;               // Keep the user_ip value in a variable for use in a later update query
    echo "<tr>";                           // One block of rows per record, built inside the foreach loop
    echo "<th>ID</th>" . "<td>" . $row->id . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>User IP</th>" . "<td>" . $row->user_ip . "</td>";   //fetching data from user_ip field
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Post ID</th>" . "<td>" . $row->post_id . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Time</th>" . "<td>" . $row->time . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    }
    echo "</tbody>";
    echo "</table>"; 

}
?>
SELECT column-names
  FROM table-name1
 WHERE value IN (SELECT column-name
                   FROM table-name2
                  WHERE condition)
SELECT Fname, Lname
FROM Employee
ORDER BY Salary
OFFSET 2 ROWS;
-- List tables that have no primary key defined
select schema_name(tab.schema_id) as [schema_name], 
    tab.[name] as table_name
from sys.tables tab
    left outer join sys.indexes pk
        on tab.object_id = pk.object_id 
        and pk.is_primary_key = 1
where pk.object_id is null
order by schema_name(tab.schema_id),
    tab.[name]
-- Purge log rows older than 30 days
DELETE FROM [dbo].[logs] WHERE date < DATEADD(DAY, -30, GETDATE())